hexsha stringlengths 40 40 | size int64 1 1.03M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 3 239 | max_stars_repo_name stringlengths 5 130 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 239 | max_issues_repo_name stringlengths 5 130 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 239 | max_forks_repo_name stringlengths 5 130 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 1 1.03M | avg_line_length float64 1 958k | max_line_length int64 1 1.03M | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
79559b221c2a62c6c74adef9b0d9012371d9055d | 770 | py | Python | scripts/get_docs_version.py | Kobzol/hyperqueue | 24059cc6b8499af54ad4e9db3f764275a1767c28 | [
"MIT"
] | null | null | null | scripts/get_docs_version.py | Kobzol/hyperqueue | 24059cc6b8499af54ad4e9db3f764275a1767c28 | [
"MIT"
] | 21 | 2021-03-31T16:00:39.000Z | 2021-05-06T08:41:26.000Z | scripts/get_docs_version.py | Kobzol/hyperqueue | 24059cc6b8499af54ad4e9db3f764275a1767c28 | [
"MIT"
] | 1 | 2021-05-04T07:40:58.000Z | 2021-05-04T07:40:58.000Z | import json
import subprocess
def latest_version():
    """Describe the docs version for a commit that carries no release tag."""
    return dict(type="latest")
def get_version(output: str):
    """Decide which documentation version the current commit corresponds to.

    `output` is the stdout of `git tag --points-at HEAD`. If it contains at
    least one tag starting with "v", the commit is a stable release and the
    highest such version tag is returned; otherwise the commit is "latest".

    Returns a dict of the form {"type": "latest"} or
    {"type": "stable", "version": "<tag>"}.
    """
    if not output:
        return latest_version()

    tags = [t.strip() for t in output.splitlines(keepends=False) if t.startswith("v")]
    if not tags:
        return latest_version()

    import re

    def version_key(tag):
        # Compare version components numerically so that e.g. v1.10.0 ranks
        # above v1.9.0. The previous lexicographic sort
        # (sorted(tags, reverse=True)[0]) ordered such tags incorrectly.
        return [int(part) for part in re.findall(r"\d+", tag)]

    return {
        "type": "stable",
        "version": max(tags, key=version_key)
    }
if __name__ == "__main__":
    """
    Calculates whether the current commit is a stable version (=there is some tag pointing to it) or an unstable one.
    """
    # List all git tags pointing at HEAD; empty output means the commit has
    # no release tag and is treated as "latest".
    output = subprocess.check_output(["git", "tag", "--points-at", "HEAD"]).decode().strip()
    version = get_version(output)
    # Emit the result as JSON on stdout so callers (e.g. CI scripts) can parse it.
    print(json.dumps(version))
| 24.83871 | 117 | 0.597403 |
79559c010dfbe88f5f0206186df1b696e15626d6 | 9,778 | py | Python | assists/bmcmake_metadata_xlnx.py | kedareswararao/lopper | efc4fa776e7dd6ad0b234c81dd34af5fab3e0283 | [
"BSD-3-Clause"
] | null | null | null | assists/bmcmake_metadata_xlnx.py | kedareswararao/lopper | efc4fa776e7dd6ad0b234c81dd34af5fab3e0283 | [
"BSD-3-Clause"
] | null | null | null | assists/bmcmake_metadata_xlnx.py | kedareswararao/lopper | efc4fa776e7dd6ad0b234c81dd34af5fab3e0283 | [
"BSD-3-Clause"
] | null | null | null | #/*
# * Copyright (c) 2020 Xilinx Inc. All rights reserved.
# *
# * Author:
# * Appana Durga Kedareswara rao <appana.durga.rao@xilinx.com>
# *
# * SPDX-License-Identifier: BSD-3-Clause
# */
import struct
import sys
import types
import os
import getopt
import re
from pathlib import Path
from pathlib import PurePath
from lopper import Lopper
from lopper import LopperFmt
import lopper
from lopper_tree import *
from re import *
import yaml
import glob
from collections import OrderedDict
sys.path.append(os.path.dirname(__file__))
from baremetalconfig_xlnx import *
def generate_drvcmake_metadata(sdt, node_list, src_dir, options):
    """Append per-driver instance metadata to <Driver>Example.cmake.

    sdt       -- lopper system device-tree object (project type)
    node_list -- device-tree nodes (status "okay") to match against the
                 driver's compatible strings
    src_dir   -- driver source directory; the driver name is taken from the
                 third-from-last path component and the yaml is looked up in
                 the sibling ../data/ directory, so this assumes a
                 <driver>/src/ layout ending in '/' -- TODO confirm
    options   -- assist options dict, forwarded to get_mapped_nodes

    Returns False (after printing a message) when no driver yaml is found;
    otherwise returns None after appending to the cmake file.
    """
    driver_compatlist = []
    drvname = src_dir.split('/')[-3]
    yaml_file = Path( src_dir + "../data/" + drvname + ".yaml")
    try:
        # NOTE(review): Path.resolve() does not raise FileNotFoundError by
        # default on Python >= 3.6, so this fallback may never trigger --
        # confirm against the supported interpreter versions.
        yaml_file_abs = yaml_file.resolve()
    except FileNotFoundError:
        yaml_file_abs = ""
    if yaml_file_abs:
        yamlfile = str(yaml_file_abs)
    else:
        print("Driver doesn't have yaml file")
        return False
    # Load the driver yaml: its compatible strings and (optionally) the
    # per-example property requirements under the 'examples' key.
    with open(yamlfile, 'r') as stream:
        schema = yaml.safe_load(stream)
        driver_compatlist = compat_list(schema)
        try:
            example_schema = schema['examples']
        except KeyError:
            example_schema = {}
    # Collect every node whose compatible list matches one of the driver's
    # compatibles (substring match), then keep only mapped nodes and drop
    # duplicates while preserving order.
    driver_nodes = []
    for compat in driver_compatlist:
        for node in node_list:
            compatlist = node['compatible'].value
            for compat_string in compatlist:
                if compat in compat_string:
                    driver_nodes.append(node)
    driver_nodes = get_mapped_nodes(sdt, driver_nodes, options)
    driver_nodes = list(dict.fromkeys(driver_nodes))
    nodename_list = []  # node name per driver instance
    reg_list = []  # base address (hex string) per instance
    example_dict = {}  # node name -> examples valid for that instance
    depreg_dict = {}  # node name -> dependent-device base addresses (hex strings)
    for node in driver_nodes:
        depreg_list = []
        reg, size = scan_reg_size(node, node['reg'].value, 0)
        nodename_list.append(node.name)
        reg_list.append(hex(reg))
        validex_list = []
        # An example is considered valid for this node when every property
        # condition in its schema entry matches the node, or when the entry
        # is a plain property name that the node carries.
        for example,prop in example_schema.items():
            valid_ex = 0
            match_list = []
            for p in prop:
                if isinstance(p, dict):
                    # Dict entry: property name -> required value, or the
                    # literal string "phandle" for phandle-valued properties.
                    for e,prop_val in p.items():
                        valid_phandle = 0
                        try:
                            val = node[e].value
                            if '' in val:
                                # Property present but empty: treat as boolean true.
                                val = 1
                            if e == "axistream-connected":
                                # Compare only the low nibble of the connected
                                # device's base address -- TODO confirm intent.
                                reg = get_phandle_regprop(sdt, e, val)
                                val = reg & 0xF
                            if prop_val == "phandle":
                                depreg_list.append(hex(get_phandle_regprop(sdt, e, val)))
                                valid_phandle = 1
                        except KeyError:
                            val = 0
                        if prop_val == val:
                            match_list.append(True)
                        elif prop_val == "phandle" and valid_phandle == 1:
                            match_list.append(True)
                        elif isinstance(val, list):
                            if prop_val == val[0]:
                                match_list.append(True)
                            else:
                                match_list.append(False)
                        else:
                            match_list.append(False)
                else:
                    # Plain entry: the example is valid if the node simply
                    # has the named property.
                    try:
                        valid_ex = node[prop[0]].value
                    except KeyError:
                        valid_ex = 0
            if valid_ex or not False in match_list:
                validex_list.append(example)
        example_dict.update({node.name:validex_list})
        depreg_dict.update({node.name:depreg_list})
    # Derive the cmake file name from the yaml file name (e.g. uartps.yaml ->
    # UartpsExample.cmake) and append the per-instance variables to it.
    drvname = yamlfile.rsplit('/', 1)[-1]
    drvname = drvname.replace('.yaml', '')
    cmake_file = drvname.capitalize() + str("Example.cmake")
    with open(cmake_file, 'a') as fd:
        fd.write("set(NUM_DRIVER_INSTANCES %s)\n" % to_cmakelist(nodename_list))
        fd.write("set(REG_LIST %s)\n" % to_cmakelist(reg_list))
        for index,name in enumerate(nodename_list):
            fd.write("set(EXAMPLE_LIST%s %s)\n" % (index, to_cmakelist(example_dict[name])))
            fd.write("set(DEPDRV_REG_LIST%s %s)\n" % (index, to_cmakelist(depreg_dict[name])))
            fd.write("list(APPEND TOTAL_EXAMPLE_LIST EXAMPLE_LIST%s)\n" % index)
            fd.write("list(APPEND TOTAL_DEPDRV_REG_LIST DEPDRV_REG_LIST%s)\n" % index)
def getmatch_nodes(sdt, node_list, yamlfile, options):
    """Return the unique, mapped nodes from *node_list* whose first
    'compatible' string matches one of the compatibles declared in the
    driver yaml at *yamlfile*."""
    # Pull the driver's compatible strings out of its yaml schema.
    with open(yamlfile, 'r') as stream:
        schema = yaml.safe_load(stream)
        driver_compatlist = compat_list(schema)
    # Substring match against the node's first compatible entry only.
    matched = [
        node
        for compat in driver_compatlist
        for node in node_list
        if compat in node['compatible'].value[0]
    ]
    matched = get_mapped_nodes(sdt, matched, options)
    # Deduplicate while preserving encounter order.
    return list(dict.fromkeys(matched))
def getxlnx_phytype(sdt, value):
    """Resolve the phandle in value[0] to its node and return that node's
    'xlnx,phy-type' property as a hex string.

    Raises IndexError when no subnode carries the phandle (same as before).
    """
    matches = [n for n in sdt.tree['/'].subnodes() if n.phandle == value[0]]
    return hex(matches[0]['xlnx,phy-type'].value[0])
def lwip_topolgy(config):
    """Generate xtopology_g.c for the lwIP library.

    *config* is a flat list laid out as alternating pairs
    [addr0, type0, addr1, type1, ...]; each pair becomes one initializer in
    the emitted `struct xtopology_t xtopology[]` array, followed by a NULL
    terminator entry.
    """
    # Use a context manager so the generated file is always flushed and
    # closed; the original opened the file and never closed it (leak, and
    # output could be lost on an early error).
    with open('xtopology_g.c', 'w') as topology_fd:
        tmp_str = "netif/xtopology.h"
        tmp_str = '"{}"'.format(tmp_str)
        topology_fd.write("\n#include %s\n" % tmp_str)
        tmp_str = "xil_types.h"
        tmp_str = '"{}"'.format(tmp_str)
        topology_fd.write("#include %s\n\n" % tmp_str)
        topology_fd.write("struct xtopology_t xtopology[] = {\n")
        for index, data in enumerate(config):
            if (index % 2) == 0:
                # Even index starts a new initializer entry.
                topology_fd.write("\t{\n")
            topology_fd.write("\t\t%s,\n" % data)
            if (index % 2) != 0:
                # Odd index closes the entry.
                topology_fd.write("\n\t},\n")
        # Sentinel entry terminating the array.
        topology_fd.write("\t{\n")
        topology_fd.write("\t\tNULL\n")
        topology_fd.write("\t}\n")
        topology_fd.write("};")
def generate_hwtocmake_medata(sdt, node_list, src_path, repo_path, options):
    """Append library-level hardware metadata to <Lib>Example.cmake.

    sdt       -- lopper system device-tree object (project type)
    node_list -- device-tree nodes (status "okay") to match against each
                 required driver's compatibles
    src_path  -- library source directory; like the driver case, the library
                 name comes from the third-from-last path component and the
                 yaml from the sibling ../data/ directory -- layout assumed,
                 TODO confirm
    repo_path -- repository root searched (recursively) for each required
                 driver's yaml file
    options   -- assist options dict, forwarded to getmatch_nodes

    Returns False (after printing a message) when no library yaml is found.
    As a side effect for lwip211, also emits xtopology_g.c via lwip_topolgy.
    """
    meta_dict = {}
    name = src_path.split('/')[-3]
    yaml_file = Path( src_path + "../data/" + name + ".yaml")
    try:
        # NOTE(review): Path.resolve() does not raise FileNotFoundError by
        # default on Python >= 3.6; this fallback may be dead code -- confirm.
        yaml_file_abs = yaml_file.resolve()
    except FileNotFoundError:
        yaml_file_abs = ""
    if yaml_file_abs:
        yamlfile = str(yaml_file_abs)
    else:
        print("Driver doesn't have yaml file")
        return False
    # 'required' maps each dependent driver name to the list of properties
    # whose values must be emitted for that driver's instances.
    with open(yamlfile, 'r') as stream:
        schema = yaml.safe_load(stream)
        meta_dict = schema['required']
    # lwip211 additionally needs a generated topology file (see below).
    lwip = re.search("lwip211", name)
    cmake_file = name.capitalize() + str("Example.cmake")
    topology_data = []
    with open(cmake_file, "a") as fd:
        lwiptype_index = 0
        for drv, prop_list in sorted(meta_dict.items(), key=lambda kv:(kv[0], kv[1])):
            # Locate the dependent driver's yaml anywhere under repo_path.
            name = drv + str(".yaml")
            drv_yamlpath = [y for x in os.walk(repo_path) for y in glob.glob(os.path.join(x[0], name))]
            nodes = getmatch_nodes(sdt, node_list, drv_yamlpath[0], options)
            name_list = [node.name for node in nodes]
            fd.write("set(%s_NUM_DRIVER_INSTANCES %s)\n" % (drv.upper(), to_cmakelist(name_list)))
            for index,node in enumerate(nodes):
                val_list = []
                # Resolve each required property to a printable value; most
                # become hex strings of their first cell.
                for prop in prop_list:
                    if prop == "reg":
                        reg,size = scan_reg_size(node, node[prop].value, 0)
                        val = hex(reg)
                        if lwip:
                            # Record (base address, emac type index) pairs for
                            # the lwIP topology file.
                            topology_data.append(val)
                            topology_data.append(lwiptype_index)
                    elif prop == "interrupts":
                        val = get_interrupt_prop(sdt, node, node[prop].value)
                        val = val[0]
                    elif prop == "axistream-connected":
                        val = hex(get_phandle_regprop(sdt, prop, node[prop].value))
                    elif prop == "phy-handle":
                        try:
                            val = getxlnx_phytype(sdt, node[prop].value)
                        except KeyError:
                            val = hex(0)
                    else:
                        val = hex(node[prop].value[0])
                    val_list.append(val)
                fd.write("set(%s%s_PROP_LIST %s)\n" % (drv.upper(), index, to_cmakelist(val_list)))
                fd.write("list(APPEND TOTAL_%s_PROP_LIST %s%s_PROP_LIST)\n" % (drv.upper(), drv.upper(), index))
            lwiptype_index += 1
    if topology_data:
        lwip_topolgy(topology_data)
def is_compat( node, compat_string_to_test ):
    """Lopper assist hook: return this module's entry point when
    *compat_string_to_test* names the bmcmake metadata assist, else ""."""
    if re.search( "module,bmcmake_metadata_xlnx", compat_string_to_test) is None:
        return ""
    return xlnx_generate_cmake_metadata
def to_cmakelist(pylist):
    """Render a Python list of strings as a double-quoted,
    semicolon-separated CMake list literal."""
    joined = ';'.join(pylist)
    return '"' + joined + '"'
def xlnx_generate_cmake_metadata(tgt_node, sdt, options):
    """Assist entry point: dispatch to driver- or hardware-level cmake
    metadata generation based on the command in options['args'][2].

    options['args'][1] is the source path, options['args'][3] (optional)
    the repository path for hwcmake_metadata. Always returns True.
    """
    target = sdt.tree[tgt_node]
    # Collect every subnode whose "status" property reports okay; nodes
    # without a status property are skipped via the broad except (kept
    # as-is to preserve behavior).
    enabled_nodes = []
    for candidate in target.subnodes():
        try:
            if "okay" in candidate["status"].value:
                enabled_nodes.append(candidate)
        except:
            pass
    src_path = options['args'][1]
    command = options['args'][2]
    try:
        repo_path = options['args'][3]
    except IndexError:
        repo_path = ""
    if command == "drvcmake_metadata":
        generate_drvcmake_metadata(sdt, enabled_nodes, src_path, options)
    elif command == "hwcmake_metadata":
        generate_hwtocmake_medata(sdt, enabled_nodes, src_path, repo_path, options)
    return True
| 36.485075 | 112 | 0.560953 |
79559c2cd620cef66985923964844402015a3db8 | 1,579 | py | Python | configs/solo/solo_r50_fpn_1x_coco.py | ruiningTang/mmdetection | 100b0b5e0edddc45af0812b9f1474493c61671ef | [
"Apache-2.0"
] | null | null | null | configs/solo/solo_r50_fpn_1x_coco.py | ruiningTang/mmdetection | 100b0b5e0edddc45af0812b9f1474493c61671ef | [
"Apache-2.0"
] | null | null | null | configs/solo/solo_r50_fpn_1x_coco.py | ruiningTang/mmdetection | 100b0b5e0edddc45af0812b9f1474493c61671ef | [
"Apache-2.0"
] | null | null | null | _base_ = [
'../_base_/datasets/coco_instance.py',
'../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py'
]
# model settings
# SOLO instance-segmentation model (mmdetection config dict):
# ResNet-50 backbone -> FPN neck -> SOLOHead mask head.
model = dict(
    type='SOLO',
    backbone=dict(
        type='ResNet',
        depth=50,
        num_stages=4,
        # Feed all four ResNet stages to the FPN.
        out_indices=(0, 1, 2, 3),
        # Freeze the stem and stage 1 during training.
        frozen_stages=1,
        init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50'),
        style='pytorch'),
    neck=dict(
        type='FPN',
        # Channel counts of the four ResNet stage outputs.
        in_channels=[256, 512, 1024, 2048],
        out_channels=256,
        start_level=0,
        num_outs=5),
    mask_head=dict(
        type='SOLOHead',
        num_classes=80,
        in_channels=256,
        stacked_convs=7,
        feat_channels=256,
        # One entry per FPN level: stride, assigned object-scale range,
        # and grid resolution for that level.
        strides=[8, 8, 16, 32, 32],
        scale_ranges=((1, 96), (48, 192), (96, 384), (192, 768), (384, 2048)),
        pos_scale=0.2,
        num_grids=[40, 36, 24, 16, 12],
        cls_down_index=0,
        loss_mask=dict(type='SOLODiceLoss', use_sigmoid=True, loss_weight=3.0),
        loss_cls=dict(
            type='FocalLoss',
            use_sigmoid=True,
            gamma=2.0,
            alpha=0.25,
            loss_weight=1.0),
        norm_cfg=dict(type='GN', num_groups=32, requires_grad=True)),
    # model training and testing settings
    test_cfg=dict(
        nms_pre=500,
        score_thr=0.1,
        mask_thr=0.5,
        filter_thr=0.05,
        kernel='gaussian',  # gaussian/linear
        sigma=2.0,
        max_per_img=100))
# optimizer
optimizer = dict(type='SGD', lr=0.005) | 29.792453 | 80 | 0.538949 |
79559c2fb6c744ffde65c11f515c2e8c551ff121 | 386,275 | py | Python | pysnmp/Nortel-MsCarrier-MscPassport-BridgeMIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 11 | 2021-02-02T16:27:16.000Z | 2021-08-31T06:22:49.000Z | pysnmp/Nortel-MsCarrier-MscPassport-BridgeMIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 75 | 2021-02-24T17:30:31.000Z | 2021-12-08T00:01:18.000Z | pysnmp/Nortel-MsCarrier-MscPassport-BridgeMIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 10 | 2019-04-30T05:51:36.000Z | 2022-02-16T03:33:41.000Z | #
# PySNMP MIB module Nortel-MsCarrier-MscPassport-BridgeMIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/Nortel-MsCarrier-MscPassport-BridgeMIB
# Produced by pysmi-0.3.4 at Mon Apr 29 20:20:02 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, OctetString, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "Integer", "OctetString", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueSizeConstraint, SingleValueConstraint, ConstraintsIntersection, ConstraintsUnion, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueSizeConstraint", "SingleValueConstraint", "ConstraintsIntersection", "ConstraintsUnion", "ValueRangeConstraint")
StorageType, Unsigned32, Counter32, DisplayString, Gauge32, BridgeId, InterfaceIndex, RowStatus, MacAddress, Integer32 = mibBuilder.importSymbols("Nortel-MsCarrier-MscPassport-StandardTextualConventionsMIB", "StorageType", "Unsigned32", "Counter32", "DisplayString", "Gauge32", "BridgeId", "InterfaceIndex", "RowStatus", "MacAddress", "Integer32")
AsciiStringIndex, HexString, AsciiString, Hex, Link, DashedHexString, NonReplicated = mibBuilder.importSymbols("Nortel-MsCarrier-MscPassport-TextualConventionsMIB", "AsciiStringIndex", "HexString", "AsciiString", "Hex", "Link", "DashedHexString", "NonReplicated")
mscComponents, mscPassportMIBs = mibBuilder.importSymbols("Nortel-MsCarrier-MscPassport-UsefulDefinitionsMIB", "mscComponents", "mscPassportMIBs")
mscVrIndex, mscVrPpIndex, mscVrPp, mscVr = mibBuilder.importSymbols("Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex", "mscVrPpIndex", "mscVrPp", "mscVr")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
Bits, Counter64, ObjectIdentity, IpAddress, MibScalar, MibTable, MibTableRow, MibTableColumn, Unsigned32, MibIdentifier, Counter32, Gauge32, iso, ModuleIdentity, TimeTicks, Integer32, NotificationType = mibBuilder.importSymbols("SNMPv2-SMI", "Bits", "Counter64", "ObjectIdentity", "IpAddress", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Unsigned32", "MibIdentifier", "Counter32", "Gauge32", "iso", "ModuleIdentity", "TimeTicks", "Integer32", "NotificationType")
DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention")
bridgeMIB = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 25))
mscVrPpTbcl = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2))
mscVrPpTbclRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 1), )
if mibBuilder.loadTexts: mscVrPpTbclRowStatusTable.setStatus('mandatory')
mscVrPpTbclRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpTbclIndex"))
if mibBuilder.loadTexts: mscVrPpTbclRowStatusEntry.setStatus('mandatory')
mscVrPpTbclRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpTbclRowStatus.setStatus('mandatory')
mscVrPpTbclComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbclComponentName.setStatus('mandatory')
mscVrPpTbclStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbclStorageType.setStatus('mandatory')
mscVrPpTbclIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscVrPpTbclIndex.setStatus('mandatory')
mscVrPpTbclProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 10), )
if mibBuilder.loadTexts: mscVrPpTbclProvTable.setStatus('mandatory')
mscVrPpTbclProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 10, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpTbclIndex"))
if mibBuilder.loadTexts: mscVrPpTbclProvEntry.setStatus('mandatory')
mscVrPpTbclTranslateIpx = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 10, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("off", 0), ("ieee8023", 1), ("ethernet", 2), ("snap", 3), ("sap", 4))).clone('off')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpTbclTranslateIpx.setStatus('mandatory')
mscVrPpTbclFragmentIp = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 10, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("off", 0), ("on", 1))).clone('off')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpTbclFragmentIp.setStatus('mandatory')
mscVrPpTbclServiceClass = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 10, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("ordered", 0), ("notOrdered", 1))).clone('notOrdered')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpTbclServiceClass.setStatus('mandatory')
mscVrPpTbclConvertArpMacAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 10, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("off", 0), ("on", 1))).clone('off')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpTbclConvertArpMacAddress.setStatus('mandatory')
mscVrPpTbclPortNum = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 10, 1, 5), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbclPortNum.setStatus('mandatory')
mscVrPpTbclTbProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 11), )
if mibBuilder.loadTexts: mscVrPpTbclTbProvTable.setStatus('mandatory')
mscVrPpTbclTbProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 11, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpTbclIndex"))
if mibBuilder.loadTexts: mscVrPpTbclTbProvEntry.setStatus('mandatory')
mscVrPpTbclSecureOption = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 11, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1))).clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpTbclSecureOption.setStatus('mandatory')
mscVrPpTbclStpProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 12), )
if mibBuilder.loadTexts: mscVrPpTbclStpProvTable.setStatus('mandatory')
mscVrPpTbclStpProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 12, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpTbclIndex"))
if mibBuilder.loadTexts: mscVrPpTbclStpProvEntry.setStatus('mandatory')
mscVrPpTbclAdminStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 12, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("up", 1), ("down", 2))).clone('up')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpTbclAdminStatus.setStatus('mandatory')
mscVrPpTbclPortStateStpControl = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 12, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("off", 0), ("on", 1))).clone('on')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpTbclPortStateStpControl.setStatus('mandatory')
mscVrPpTbclStpTypeProv = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 12, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2, 3))).clone(namedValues=NamedValues(("dec", 2), ("ieee8021", 3))).clone('ieee8021')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpTbclStpTypeProv.setStatus('mandatory')
mscVrPpTbclPortPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 12, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535)).clone(128)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpTbclPortPriority.setStatus('mandatory')
mscVrPpTbclPathCost = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 12, 1, 5), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535)).clone(10)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpTbclPathCost.setStatus('mandatory')
mscVrPpTbclPathCostMethod = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 12, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("off", 1), ("dec", 2), ("ieee8021", 3))).clone('ieee8021')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpTbclPathCostMethod.setStatus('mandatory')
mscVrPpTbclDIProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 13), )
if mibBuilder.loadTexts: mscVrPpTbclDIProvTable.setStatus('mandatory')
mscVrPpTbclDIProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 13, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpTbclIndex"))
if mibBuilder.loadTexts: mscVrPpTbclDIProvEntry.setStatus('mandatory')
mscVrPpTbclDomainNum = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 13, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967293)).clone(1)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpTbclDomainNum.setStatus('mandatory')
mscVrPpTbclPreserveDomain = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 13, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("off", 0), ("on", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpTbclPreserveDomain.setStatus('mandatory')
mscVrPpTbclStateTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 14), )
if mibBuilder.loadTexts: mscVrPpTbclStateTable.setStatus('mandatory')
mscVrPpTbclStateEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 14, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpTbclIndex"))
if mibBuilder.loadTexts: mscVrPpTbclStateEntry.setStatus('mandatory')
mscVrPpTbclAdminState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 14, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("locked", 0), ("unlocked", 1), ("shuttingDown", 2))).clone('unlocked')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbclAdminState.setStatus('mandatory')
mscVrPpTbclOperationalState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 14, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1))).clone('disabled')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbclOperationalState.setStatus('mandatory')
mscVrPpTbclUsageState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 14, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("idle", 0), ("active", 1), ("busy", 2))).clone('idle')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbclUsageState.setStatus('mandatory')
mscVrPpTbclOperStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 15), )
if mibBuilder.loadTexts: mscVrPpTbclOperStatusTable.setStatus('mandatory')
mscVrPpTbclOperStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 15, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpTbclIndex"))
if mibBuilder.loadTexts: mscVrPpTbclOperStatusEntry.setStatus('mandatory')
mscVrPpTbclSnmpOperStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 15, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("testing", 3))).clone('up')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbclSnmpOperStatus.setStatus('mandatory')
mscVrPpTbclOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 16), )
if mibBuilder.loadTexts: mscVrPpTbclOperTable.setStatus('mandatory')
mscVrPpTbclOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 16, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpTbclIndex"))
if mibBuilder.loadTexts: mscVrPpTbclOperEntry.setStatus('mandatory')
mscVrPpTbclPortName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 16, 1, 1), AsciiString().subtype(subtypeSpec=ValueSizeConstraint(4, 29))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbclPortName.setStatus('mandatory')
mscVrPpTbclUpTime = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 16, 1, 3), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbclUpTime.setStatus('mandatory')
mscVrPpTbclDownTime = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 16, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbclDownTime.setStatus('mandatory')
mscVrPpTbclBridgingMode = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 16, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("unknown", 1), ("tb", 2), ("sr", 3), ("srt", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbclBridgingMode.setStatus('mandatory')
mscVrPpTbclBridgePortConfig = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 16, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("access", 1), ("backbone", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbclBridgePortConfig.setStatus('mandatory')
mscVrPpTbclBridgePortType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 16, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22))).clone(namedValues=NamedValues(("ethernetBridgePort", 0), ("tokenRingBridgePort", 1), ("fddiBridgePort", 2), ("tokenBusBridgePort", 3), ("fddiEncapsulatingBridgePort", 4), ("vnsBridgePort", 5), ("frameRelayBridgePort", 6), ("pppBridgePort", 7), ("smdsBridgePort", 8), ("vcpBridgePort", 9), ("x25BridgePort", 10), ("srEncapSrBridgePort", 11), ("srEncapTbBridgePort", 12), ("sourceGateSrBridgePort", 13), ("sourceGateTbBridgePort", 14), ("srtbBridgePort", 15), ("tbsrBridgePort", 16), ("clusterSrBridgePort", 17), ("clusterTbBridgePort", 18), ("unknown", 19), ("atmMpeBridgePort", 20), ("snaDlrBridgePort", 21), ("lanEmulationClientEthernet", 22)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbclBridgePortType.setStatus('mandatory')
mscVrPpTbclIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 16, 1, 8), InterfaceIndex().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbclIfIndex.setStatus('mandatory')
mscVrPpTbclDelayExceededDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 16, 1, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbclDelayExceededDiscards.setStatus('mandatory')
mscVrPpTbclMtuExceededDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 16, 1, 11), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbclMtuExceededDiscards.setStatus('mandatory')
mscVrPpTbclTbOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 17), )
if mibBuilder.loadTexts: mscVrPpTbclTbOperTable.setStatus('mandatory')
# --- mscVrPpTbcl (transparent bridge / LAN port) operational objects ---
# Auto-generated pysnmp declarations; OID prefix 1.3.6.1.4.1.562.36.2.1.100.3.2.
# Row for the TbOper table (...3.2.17): indexed by VR, PP and Tbcl instance.
mscVrPpTbclTbOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 17, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpTbclIndex"))
if mibBuilder.loadTexts: mscVrPpTbclTbOperEntry.setStatus('mandatory')
# Read-only transparent-bridging counters/gauges for the Tbcl port.
mscVrPpTbclMaxInfo = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 17, 1, 2), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbclMaxInfo.setStatus('mandatory')
mscVrPpTbclBadVerifyDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 17, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbclBadVerifyDiscards.setStatus('mandatory')
mscVrPpTbclUnicastNoMatches = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 17, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbclUnicastNoMatches.setStatus('mandatory')
mscVrPpTbclStaticEntryDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 17, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbclStaticEntryDiscards.setStatus('mandatory')
mscVrPpTbclDynamicEntryDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 17, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbclDynamicEntryDiscards.setStatus('mandatory')
mscVrPpTbclLearningDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 17, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbclLearningDiscards.setStatus('mandatory')
mscVrPpTbclInDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 17, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbclInDiscards.setStatus('mandatory')
mscVrPpTbclInFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 17, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbclInFrames.setStatus('mandatory')
mscVrPpTbclOutFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 17, 1, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbclOutFrames.setStatus('mandatory')
# Spanning-tree operational table (...3.2.18): per-port STP state, path
# costs, and designated bridge/root identifiers. All columns read-only.
mscVrPpTbclStpOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 18), )
if mibBuilder.loadTexts: mscVrPpTbclStpOperTable.setStatus('mandatory')
mscVrPpTbclStpOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 18, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpTbclIndex"))
if mibBuilder.loadTexts: mscVrPpTbclStpOperEntry.setStatus('mandatory')
mscVrPpTbclStpPortState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 18, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("disabled", 1), ("blocking", 2), ("listening", 3), ("learning", 4), ("forwarding", 5), ("broken", 6))).clone('disabled')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbclStpPortState.setStatus('mandatory')
mscVrPpTbclStpTypeOper = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 18, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 2, 3))).clone(namedValues=NamedValues(("none", 0), ("dec", 2), ("ieee8021", 3))).clone('ieee8021')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbclStpTypeOper.setStatus('mandatory')
mscVrPpTbclDesignatedCost = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 18, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbclDesignatedCost.setStatus('mandatory')
mscVrPpTbclPathCostOper = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 18, 1, 5), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbclPathCostOper.setStatus('mandatory')
mscVrPpTbclDesignatedBridge = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 18, 1, 6), BridgeId().subtype(subtypeSpec=ValueSizeConstraint(8, 8)).setFixedLength(8)).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbclDesignatedBridge.setStatus('mandatory')
mscVrPpTbclDesignatedPort = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 18, 1, 7), Hex().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbclDesignatedPort.setStatus('mandatory')
mscVrPpTbclForwardTransitions = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 18, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbclForwardTransitions.setStatus('mandatory')
mscVrPpTbclBlockingDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 18, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbclBlockingDiscards.setStatus('mandatory')
mscVrPpTbclDesignatedRoot = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 18, 1, 10), BridgeId().subtype(subtypeSpec=ValueSizeConstraint(8, 8)).setFixedLength(8)).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbclDesignatedRoot.setStatus('mandatory')
# Statistics table (...3.2.19): discard and tinygram frame counters.
mscVrPpTbclStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 19), )
if mibBuilder.loadTexts: mscVrPpTbclStatsTable.setStatus('mandatory')
mscVrPpTbclStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 19, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpTbclIndex"))
if mibBuilder.loadTexts: mscVrPpTbclStatsEntry.setStatus('mandatory')
mscVrPpTbclBadAbstractDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 19, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbclBadAbstractDiscards.setStatus('mandatory')
mscVrPpTbclTinygramFramesIn = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 19, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbclTinygramFramesIn.setStatus('mandatory')
mscVrPpTbclTinygramFramesOut = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 19, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbclTinygramFramesOut.setStatus('mandatory')
mscVrPpTbclInFilterDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 19, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbclInFilterDiscards.setStatus('mandatory')
mscVrPpTbclOutFilterDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 19, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbclOutFilterDiscards.setStatus('mandatory')
# --- mscVrPpTbclNs subcomponent (...3.2.2) ---
# NameServer-style filter attachment under the Tbcl port: a RowStatus table
# for row creation/deletion plus a provisioning table with the filter names.
mscVrPpTbclNs = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 2))
mscVrPpTbclNsRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 2, 1), )
if mibBuilder.loadTexts: mscVrPpTbclNsRowStatusTable.setStatus('mandatory')
# Indexed by VR, PP, Tbcl instance and the Ns instance (non-replicated).
mscVrPpTbclNsRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 2, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpTbclIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpTbclNsIndex"))
if mibBuilder.loadTexts: mscVrPpTbclNsRowStatusEntry.setStatus('mandatory')
mscVrPpTbclNsRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 2, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpTbclNsRowStatus.setStatus('mandatory')
mscVrPpTbclNsComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 2, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbclNsComponentName.setStatus('mandatory')
mscVrPpTbclNsStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 2, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbclNsStorageType.setStatus('mandatory')
mscVrPpTbclNsIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 2, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscVrPpTbclNsIndex.setStatus('mandatory')
# Provisioning table (...3.2.2.10): writable incoming/outgoing filter names
# (ASCII strings up to 32 characters).
mscVrPpTbclNsProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 2, 10), )
if mibBuilder.loadTexts: mscVrPpTbclNsProvTable.setStatus('mandatory')
mscVrPpTbclNsProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 2, 10, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpTbclIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpTbclNsIndex"))
if mibBuilder.loadTexts: mscVrPpTbclNsProvEntry.setStatus('mandatory')
mscVrPpTbclNsIncomingFilter = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 2, 10, 1, 2), AsciiString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpTbclNsIncomingFilter.setStatus('mandatory')
mscVrPpTbclNsOutgoingFilter = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 2, 2, 10, 1, 3), AsciiString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpTbclNsOutgoingFilter.setStatus('mandatory')
# --- mscVrPpFddiETB component (...100.3.3) ---
# FDDI Encapsulating Transparent Bridge port. Mirrors the Tbcl structure:
# RowStatus table, provisioning tables, state/status tables, then
# operational and statistics tables (in the following section).
mscVrPpFddiETB = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3))
mscVrPpFddiETBRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 1), )
if mibBuilder.loadTexts: mscVrPpFddiETBRowStatusTable.setStatus('mandatory')
# Indexed by VR, PP and the FddiETB instance.
mscVrPpFddiETBRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpFddiETBIndex"))
if mibBuilder.loadTexts: mscVrPpFddiETBRowStatusEntry.setStatus('mandatory')
mscVrPpFddiETBRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpFddiETBRowStatus.setStatus('mandatory')
mscVrPpFddiETBComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpFddiETBComponentName.setStatus('mandatory')
mscVrPpFddiETBStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpFddiETBStorageType.setStatus('mandatory')
mscVrPpFddiETBIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscVrPpFddiETBIndex.setStatus('mandatory')
# Provisioning table (...3.3.10): translation/fragmentation options and
# service class for the encapsulating bridge port.
mscVrPpFddiETBProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 10), )
if mibBuilder.loadTexts: mscVrPpFddiETBProvTable.setStatus('mandatory')
mscVrPpFddiETBProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 10, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpFddiETBIndex"))
if mibBuilder.loadTexts: mscVrPpFddiETBProvEntry.setStatus('mandatory')
mscVrPpFddiETBTranslateIpx = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 10, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("off", 0), ("ieee8023", 1), ("ethernet", 2), ("snap", 3), ("sap", 4))).clone('off')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpFddiETBTranslateIpx.setStatus('mandatory')
mscVrPpFddiETBFragmentIp = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 10, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("off", 0), ("on", 1))).clone('off')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpFddiETBFragmentIp.setStatus('mandatory')
mscVrPpFddiETBServiceClass = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 10, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("ordered", 0), ("notOrdered", 1))).clone('notOrdered')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpFddiETBServiceClass.setStatus('mandatory')
mscVrPpFddiETBConvertArpMacAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 10, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("off", 0), ("on", 1))).clone('off')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpFddiETBConvertArpMacAddress.setStatus('mandatory')
mscVrPpFddiETBPortNum = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 10, 1, 5), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpFddiETBPortNum.setStatus('mandatory')
# Transparent-bridge provisioning table (...3.3.11): secure option only.
mscVrPpFddiETBTbProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 11), )
if mibBuilder.loadTexts: mscVrPpFddiETBTbProvTable.setStatus('mandatory')
mscVrPpFddiETBTbProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 11, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpFddiETBIndex"))
if mibBuilder.loadTexts: mscVrPpFddiETBTbProvEntry.setStatus('mandatory')
mscVrPpFddiETBSecureOption = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 11, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1))).clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpFddiETBSecureOption.setStatus('mandatory')
# Spanning-tree provisioning table (...3.3.12): admin status, STP control,
# protocol type, port priority and path cost (all read-write).
mscVrPpFddiETBStpProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 12), )
if mibBuilder.loadTexts: mscVrPpFddiETBStpProvTable.setStatus('mandatory')
mscVrPpFddiETBStpProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 12, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpFddiETBIndex"))
if mibBuilder.loadTexts: mscVrPpFddiETBStpProvEntry.setStatus('mandatory')
mscVrPpFddiETBAdminStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 12, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("up", 1), ("down", 2))).clone('up')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpFddiETBAdminStatus.setStatus('mandatory')
mscVrPpFddiETBPortStateStpControl = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 12, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("off", 0), ("on", 1))).clone('on')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpFddiETBPortStateStpControl.setStatus('mandatory')
mscVrPpFddiETBStpTypeProv = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 12, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2, 3))).clone(namedValues=NamedValues(("dec", 2), ("ieee8021", 3))).clone('ieee8021')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpFddiETBStpTypeProv.setStatus('mandatory')
mscVrPpFddiETBPortPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 12, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535)).clone(128)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpFddiETBPortPriority.setStatus('mandatory')
mscVrPpFddiETBPathCost = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 12, 1, 5), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535)).clone(10)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpFddiETBPathCost.setStatus('mandatory')
mscVrPpFddiETBPathCostMethod = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 12, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("off", 1), ("dec", 2), ("ieee8021", 3))).clone('ieee8021')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpFddiETBPathCostMethod.setStatus('mandatory')
# Domain-information provisioning table (...3.3.13).
mscVrPpFddiETBDIProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 13), )
if mibBuilder.loadTexts: mscVrPpFddiETBDIProvTable.setStatus('mandatory')
mscVrPpFddiETBDIProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 13, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpFddiETBIndex"))
if mibBuilder.loadTexts: mscVrPpFddiETBDIProvEntry.setStatus('mandatory')
mscVrPpFddiETBDomainNum = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 13, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967293)).clone(1)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpFddiETBDomainNum.setStatus('mandatory')
mscVrPpFddiETBPreserveDomain = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 13, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("off", 0), ("on", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpFddiETBPreserveDomain.setStatus('mandatory')
# OSI-style component state table (...3.3.14): admin/operational/usage.
mscVrPpFddiETBStateTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 14), )
if mibBuilder.loadTexts: mscVrPpFddiETBStateTable.setStatus('mandatory')
mscVrPpFddiETBStateEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 14, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpFddiETBIndex"))
if mibBuilder.loadTexts: mscVrPpFddiETBStateEntry.setStatus('mandatory')
mscVrPpFddiETBAdminState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 14, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("locked", 0), ("unlocked", 1), ("shuttingDown", 2))).clone('unlocked')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpFddiETBAdminState.setStatus('mandatory')
mscVrPpFddiETBOperationalState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 14, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1))).clone('disabled')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpFddiETBOperationalState.setStatus('mandatory')
mscVrPpFddiETBUsageState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 14, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("idle", 0), ("active", 1), ("busy", 2))).clone('idle')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpFddiETBUsageState.setStatus('mandatory')
# SNMP operational status table (...3.3.15).
mscVrPpFddiETBOperStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 15), )
if mibBuilder.loadTexts: mscVrPpFddiETBOperStatusTable.setStatus('mandatory')
mscVrPpFddiETBOperStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 15, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpFddiETBIndex"))
if mibBuilder.loadTexts: mscVrPpFddiETBOperStatusEntry.setStatus('mandatory')
mscVrPpFddiETBSnmpOperStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 15, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("testing", 3))).clone('up')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpFddiETBSnmpOperStatus.setStatus('mandatory')
# General operational table (...3.3.16): port identification, up/down
# times, bridging mode/config/type and discard counters.
mscVrPpFddiETBOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 16), )
if mibBuilder.loadTexts: mscVrPpFddiETBOperTable.setStatus('mandatory')
mscVrPpFddiETBOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 16, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpFddiETBIndex"))
if mibBuilder.loadTexts: mscVrPpFddiETBOperEntry.setStatus('mandatory')
mscVrPpFddiETBPortName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 16, 1, 1), AsciiString().subtype(subtypeSpec=ValueSizeConstraint(4, 29))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpFddiETBPortName.setStatus('mandatory')
mscVrPpFddiETBUpTime = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 16, 1, 3), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpFddiETBUpTime.setStatus('mandatory')
mscVrPpFddiETBDownTime = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 16, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpFddiETBDownTime.setStatus('mandatory')
mscVrPpFddiETBBridgingMode = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 16, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("unknown", 1), ("tb", 2), ("sr", 3), ("srt", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpFddiETBBridgingMode.setStatus('mandatory')
mscVrPpFddiETBBridgePortConfig = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 16, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("access", 1), ("backbone", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpFddiETBBridgePortConfig.setStatus('mandatory')
mscVrPpFddiETBBridgePortType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 16, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22))).clone(namedValues=NamedValues(("ethernetBridgePort", 0), ("tokenRingBridgePort", 1), ("fddiBridgePort", 2), ("tokenBusBridgePort", 3), ("fddiEncapsulatingBridgePort", 4), ("vnsBridgePort", 5), ("frameRelayBridgePort", 6), ("pppBridgePort", 7), ("smdsBridgePort", 8), ("vcpBridgePort", 9), ("x25BridgePort", 10), ("srEncapSrBridgePort", 11), ("srEncapTbBridgePort", 12), ("sourceGateSrBridgePort", 13), ("sourceGateTbBridgePort", 14), ("srtbBridgePort", 15), ("tbsrBridgePort", 16), ("clusterSrBridgePort", 17), ("clusterTbBridgePort", 18), ("unknown", 19), ("atmMpeBridgePort", 20), ("snaDlrBridgePort", 21), ("lanEmulationClientEthernet", 22)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpFddiETBBridgePortType.setStatus('mandatory')
mscVrPpFddiETBIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 16, 1, 8), InterfaceIndex().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpFddiETBIfIndex.setStatus('mandatory')
mscVrPpFddiETBDelayExceededDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 16, 1, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpFddiETBDelayExceededDiscards.setStatus('mandatory')
mscVrPpFddiETBMtuExceededDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 16, 1, 11), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpFddiETBMtuExceededDiscards.setStatus('mandatory')
# --- mscVrPpFddiETB operational & statistics tables ---
# Transparent-bridging operational table (...3.3.17): read-only counters,
# structurally parallel to the mscVrPpTbcl TbOper table.
mscVrPpFddiETBTbOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 17), )
if mibBuilder.loadTexts: mscVrPpFddiETBTbOperTable.setStatus('mandatory')
mscVrPpFddiETBTbOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 17, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpFddiETBIndex"))
if mibBuilder.loadTexts: mscVrPpFddiETBTbOperEntry.setStatus('mandatory')
mscVrPpFddiETBMaxInfo = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 17, 1, 2), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpFddiETBMaxInfo.setStatus('mandatory')
mscVrPpFddiETBBadVerifyDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 17, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpFddiETBBadVerifyDiscards.setStatus('mandatory')
mscVrPpFddiETBUnicastNoMatches = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 17, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpFddiETBUnicastNoMatches.setStatus('mandatory')
mscVrPpFddiETBStaticEntryDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 17, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpFddiETBStaticEntryDiscards.setStatus('mandatory')
mscVrPpFddiETBDynamicEntryDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 17, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpFddiETBDynamicEntryDiscards.setStatus('mandatory')
mscVrPpFddiETBLearningDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 17, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpFddiETBLearningDiscards.setStatus('mandatory')
mscVrPpFddiETBInDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 17, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpFddiETBInDiscards.setStatus('mandatory')
mscVrPpFddiETBInFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 17, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpFddiETBInFrames.setStatus('mandatory')
mscVrPpFddiETBOutFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 17, 1, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpFddiETBOutFrames.setStatus('mandatory')
# Spanning-tree operational table (...3.3.18).
mscVrPpFddiETBStpOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 18), )
if mibBuilder.loadTexts: mscVrPpFddiETBStpOperTable.setStatus('mandatory')
mscVrPpFddiETBStpOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 18, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpFddiETBIndex"))
if mibBuilder.loadTexts: mscVrPpFddiETBStpOperEntry.setStatus('mandatory')
mscVrPpFddiETBStpPortState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 18, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("disabled", 1), ("blocking", 2), ("listening", 3), ("learning", 4), ("forwarding", 5), ("broken", 6))).clone('disabled')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpFddiETBStpPortState.setStatus('mandatory')
mscVrPpFddiETBStpTypeOper = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 18, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 2, 3))).clone(namedValues=NamedValues(("none", 0), ("dec", 2), ("ieee8021", 3))).clone('ieee8021')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpFddiETBStpTypeOper.setStatus('mandatory')
mscVrPpFddiETBDesignatedCost = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 18, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpFddiETBDesignatedCost.setStatus('mandatory')
mscVrPpFddiETBPathCostOper = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 18, 1, 5), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpFddiETBPathCostOper.setStatus('mandatory')
mscVrPpFddiETBDesignatedBridge = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 18, 1, 6), BridgeId().subtype(subtypeSpec=ValueSizeConstraint(8, 8)).setFixedLength(8)).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpFddiETBDesignatedBridge.setStatus('mandatory')
mscVrPpFddiETBDesignatedPort = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 18, 1, 7), Hex().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpFddiETBDesignatedPort.setStatus('mandatory')
mscVrPpFddiETBForwardTransitions = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 18, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpFddiETBForwardTransitions.setStatus('mandatory')
mscVrPpFddiETBBlockingDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 18, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpFddiETBBlockingDiscards.setStatus('mandatory')
mscVrPpFddiETBDesignatedRoot = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 18, 1, 10), BridgeId().subtype(subtypeSpec=ValueSizeConstraint(8, 8)).setFixedLength(8)).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpFddiETBDesignatedRoot.setStatus('mandatory')
# Statistics table (...3.3.19): discard and tinygram frame counters.
mscVrPpFddiETBStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 19), )
if mibBuilder.loadTexts: mscVrPpFddiETBStatsTable.setStatus('mandatory')
mscVrPpFddiETBStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 19, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpFddiETBIndex"))
if mibBuilder.loadTexts: mscVrPpFddiETBStatsEntry.setStatus('mandatory')
mscVrPpFddiETBBadAbstractDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 19, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpFddiETBBadAbstractDiscards.setStatus('mandatory')
mscVrPpFddiETBTinygramFramesIn = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 19, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpFddiETBTinygramFramesIn.setStatus('mandatory')
mscVrPpFddiETBTinygramFramesOut = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 19, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpFddiETBTinygramFramesOut.setStatus('mandatory')
mscVrPpFddiETBInFilterDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 19, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpFddiETBInFilterDiscards.setStatus('mandatory')
mscVrPpFddiETBOutFilterDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 19, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpFddiETBOutFilterDiscards.setStatus('mandatory')
# Ns (filter) subcomponent under FddiETB (...3.3.2) — same shape as
# mscVrPpTbclNs: RowStatus table plus incoming/outgoing filter provisioning.
mscVrPpFddiETBNs = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 2))
mscVrPpFddiETBNsRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 2, 1), )
if mibBuilder.loadTexts: mscVrPpFddiETBNsRowStatusTable.setStatus('mandatory')
mscVrPpFddiETBNsRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 2, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpFddiETBIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpFddiETBNsIndex"))
if mibBuilder.loadTexts: mscVrPpFddiETBNsRowStatusEntry.setStatus('mandatory')
mscVrPpFddiETBNsRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 2, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpFddiETBNsRowStatus.setStatus('mandatory')
mscVrPpFddiETBNsComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 2, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpFddiETBNsComponentName.setStatus('mandatory')
mscVrPpFddiETBNsStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 2, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpFddiETBNsStorageType.setStatus('mandatory')
mscVrPpFddiETBNsIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 2, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscVrPpFddiETBNsIndex.setStatus('mandatory')
mscVrPpFddiETBNsProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 2, 10), )
if mibBuilder.loadTexts: mscVrPpFddiETBNsProvTable.setStatus('mandatory')
mscVrPpFddiETBNsProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 2, 10, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpFddiETBIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpFddiETBNsIndex"))
if mibBuilder.loadTexts: mscVrPpFddiETBNsProvEntry.setStatus('mandatory')
mscVrPpFddiETBNsIncomingFilter = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 2, 10, 1, 2), AsciiString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpFddiETBNsIncomingFilter.setStatus('mandatory')
mscVrPpFddiETBNsOutgoingFilter = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 3, 2, 10, 1, 3), AsciiString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpFddiETBNsOutgoingFilter.setStatus('mandatory')
# --- mscVrPpTbp (transparent bridge port) subtree root ---
# Auto-generated PySNMP objects; do not hand-edit OID tuples.
mscVrPpTbp = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4))
# Row-status table: component lifecycle for the Tbp, indexed by Vr/Pp/Tbp.
mscVrPpTbpRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 1), )
if mibBuilder.loadTexts: mscVrPpTbpRowStatusTable.setStatus('mandatory')
mscVrPpTbpRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpTbpIndex"))
if mibBuilder.loadTexts: mscVrPpTbpRowStatusEntry.setStatus('mandatory')
mscVrPpTbpRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpTbpRowStatus.setStatus('mandatory')
mscVrPpTbpComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbpComponentName.setStatus('mandatory')
mscVrPpTbpStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbpStorageType.setStatus('mandatory')
mscVrPpTbpIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscVrPpTbpIndex.setStatus('mandatory')
# Provisioning table: IPX translation mode, IP fragmentation, service class,
# ARP MAC conversion, and the (read-only) assigned port number.
mscVrPpTbpProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 10), )
if mibBuilder.loadTexts: mscVrPpTbpProvTable.setStatus('mandatory')
mscVrPpTbpProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 10, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpTbpIndex"))
if mibBuilder.loadTexts: mscVrPpTbpProvEntry.setStatus('mandatory')
mscVrPpTbpTranslateIpx = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 10, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("off", 0), ("ieee8023", 1), ("ethernet", 2), ("snap", 3), ("sap", 4))).clone('off')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpTbpTranslateIpx.setStatus('mandatory')
mscVrPpTbpFragmentIp = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 10, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("off", 0), ("on", 1))).clone('off')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpTbpFragmentIp.setStatus('mandatory')
mscVrPpTbpServiceClass = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 10, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("ordered", 0), ("notOrdered", 1))).clone('notOrdered')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpTbpServiceClass.setStatus('mandatory')
mscVrPpTbpConvertArpMacAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 10, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("off", 0), ("on", 1))).clone('off')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpTbpConvertArpMacAddress.setStatus('mandatory')
mscVrPpTbpPortNum = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 10, 1, 5), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbpPortNum.setStatus('mandatory')
# --- mscVrPpTbp secondary provisioning tables ---
# TbProv: transparent-bridge-specific provisioning (secure option).
mscVrPpTbpTbProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 11), )
if mibBuilder.loadTexts: mscVrPpTbpTbProvTable.setStatus('mandatory')
mscVrPpTbpTbProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 11, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpTbpIndex"))
if mibBuilder.loadTexts: mscVrPpTbpTbProvEntry.setStatus('mandatory')
mscVrPpTbpSecureOption = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 11, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1))).clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpTbpSecureOption.setStatus('mandatory')
# StpProv: spanning-tree provisioning (admin status, STP control/type,
# port priority, path cost and its calculation method).
mscVrPpTbpStpProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 12), )
if mibBuilder.loadTexts: mscVrPpTbpStpProvTable.setStatus('mandatory')
mscVrPpTbpStpProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 12, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpTbpIndex"))
if mibBuilder.loadTexts: mscVrPpTbpStpProvEntry.setStatus('mandatory')
mscVrPpTbpAdminStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 12, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("up", 1), ("down", 2))).clone('up')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpTbpAdminStatus.setStatus('mandatory')
mscVrPpTbpPortStateStpControl = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 12, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("off", 0), ("on", 1))).clone('on')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpTbpPortStateStpControl.setStatus('mandatory')
mscVrPpTbpStpTypeProv = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 12, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2, 3))).clone(namedValues=NamedValues(("dec", 2), ("ieee8021", 3))).clone('ieee8021')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpTbpStpTypeProv.setStatus('mandatory')
mscVrPpTbpPortPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 12, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535)).clone(128)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpTbpPortPriority.setStatus('mandatory')
mscVrPpTbpPathCost = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 12, 1, 5), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535)).clone(10)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpTbpPathCost.setStatus('mandatory')
mscVrPpTbpPathCostMethod = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 12, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("off", 1), ("dec", 2), ("ieee8021", 3))).clone('ieee8021')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpTbpPathCostMethod.setStatus('mandatory')
# DIProv: domain-identification provisioning (domain number, preservation flag).
mscVrPpTbpDIProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 13), )
if mibBuilder.loadTexts: mscVrPpTbpDIProvTable.setStatus('mandatory')
mscVrPpTbpDIProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 13, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpTbpIndex"))
if mibBuilder.loadTexts: mscVrPpTbpDIProvEntry.setStatus('mandatory')
mscVrPpTbpDomainNum = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 13, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967293)).clone(1)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpTbpDomainNum.setStatus('mandatory')
mscVrPpTbpPreserveDomain = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 13, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("off", 0), ("on", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpTbpPreserveDomain.setStatus('mandatory')
# --- mscVrPpTbp operational/state tables (all columns read-only) ---
# State: OSI-style admin/operational/usage state of the bridge port.
mscVrPpTbpStateTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 14), )
if mibBuilder.loadTexts: mscVrPpTbpStateTable.setStatus('mandatory')
mscVrPpTbpStateEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 14, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpTbpIndex"))
if mibBuilder.loadTexts: mscVrPpTbpStateEntry.setStatus('mandatory')
mscVrPpTbpAdminState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 14, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("locked", 0), ("unlocked", 1), ("shuttingDown", 2))).clone('unlocked')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbpAdminState.setStatus('mandatory')
mscVrPpTbpOperationalState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 14, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1))).clone('disabled')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbpOperationalState.setStatus('mandatory')
mscVrPpTbpUsageState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 14, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("idle", 0), ("active", 1), ("busy", 2))).clone('idle')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbpUsageState.setStatus('mandatory')
# OperStatus: SNMP-style up/down/testing status.
mscVrPpTbpOperStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 15), )
if mibBuilder.loadTexts: mscVrPpTbpOperStatusTable.setStatus('mandatory')
mscVrPpTbpOperStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 15, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpTbpIndex"))
if mibBuilder.loadTexts: mscVrPpTbpOperStatusEntry.setStatus('mandatory')
mscVrPpTbpSnmpOperStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 15, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("testing", 3))).clone('up')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbpSnmpOperStatus.setStatus('mandatory')
# Oper: general operational attributes (port name, up/down times, bridging
# mode, port configuration/type, ifIndex, discard counters).
mscVrPpTbpOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 16), )
if mibBuilder.loadTexts: mscVrPpTbpOperTable.setStatus('mandatory')
mscVrPpTbpOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 16, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpTbpIndex"))
if mibBuilder.loadTexts: mscVrPpTbpOperEntry.setStatus('mandatory')
mscVrPpTbpPortName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 16, 1, 1), AsciiString().subtype(subtypeSpec=ValueSizeConstraint(4, 29))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbpPortName.setStatus('mandatory')
mscVrPpTbpUpTime = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 16, 1, 3), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbpUpTime.setStatus('mandatory')
mscVrPpTbpDownTime = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 16, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbpDownTime.setStatus('mandatory')
mscVrPpTbpBridgingMode = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 16, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("unknown", 1), ("tb", 2), ("sr", 3), ("srt", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbpBridgingMode.setStatus('mandatory')
mscVrPpTbpBridgePortConfig = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 16, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("access", 1), ("backbone", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbpBridgePortConfig.setStatus('mandatory')
mscVrPpTbpBridgePortType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 16, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22))).clone(namedValues=NamedValues(("ethernetBridgePort", 0), ("tokenRingBridgePort", 1), ("fddiBridgePort", 2), ("tokenBusBridgePort", 3), ("fddiEncapsulatingBridgePort", 4), ("vnsBridgePort", 5), ("frameRelayBridgePort", 6), ("pppBridgePort", 7), ("smdsBridgePort", 8), ("vcpBridgePort", 9), ("x25BridgePort", 10), ("srEncapSrBridgePort", 11), ("srEncapTbBridgePort", 12), ("sourceGateSrBridgePort", 13), ("sourceGateTbBridgePort", 14), ("srtbBridgePort", 15), ("tbsrBridgePort", 16), ("clusterSrBridgePort", 17), ("clusterTbBridgePort", 18), ("unknown", 19), ("atmMpeBridgePort", 20), ("snaDlrBridgePort", 21), ("lanEmulationClientEthernet", 22)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbpBridgePortType.setStatus('mandatory')
mscVrPpTbpIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 16, 1, 8), InterfaceIndex().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbpIfIndex.setStatus('mandatory')
mscVrPpTbpDelayExceededDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 16, 1, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbpDelayExceededDiscards.setStatus('mandatory')
mscVrPpTbpMtuExceededDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 16, 1, 11), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbpMtuExceededDiscards.setStatus('mandatory')
# --- mscVrPpTbp counter/statistics tables (all columns read-only) ---
# TbOper: transparent-bridging operational values and forwarding counters.
mscVrPpTbpTbOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 17), )
if mibBuilder.loadTexts: mscVrPpTbpTbOperTable.setStatus('mandatory')
mscVrPpTbpTbOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 17, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpTbpIndex"))
if mibBuilder.loadTexts: mscVrPpTbpTbOperEntry.setStatus('mandatory')
mscVrPpTbpMaxInfo = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 17, 1, 2), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbpMaxInfo.setStatus('mandatory')
mscVrPpTbpBadVerifyDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 17, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbpBadVerifyDiscards.setStatus('mandatory')
mscVrPpTbpUnicastNoMatches = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 17, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbpUnicastNoMatches.setStatus('mandatory')
mscVrPpTbpStaticEntryDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 17, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbpStaticEntryDiscards.setStatus('mandatory')
mscVrPpTbpDynamicEntryDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 17, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbpDynamicEntryDiscards.setStatus('mandatory')
mscVrPpTbpLearningDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 17, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbpLearningDiscards.setStatus('mandatory')
mscVrPpTbpInDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 17, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbpInDiscards.setStatus('mandatory')
mscVrPpTbpInFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 17, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbpInFrames.setStatus('mandatory')
mscVrPpTbpOutFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 17, 1, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbpOutFrames.setStatus('mandatory')
# StpOper: spanning-tree operational values (port state, designated
# bridge/port/root, path cost, transition/discard counters).
mscVrPpTbpStpOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 18), )
if mibBuilder.loadTexts: mscVrPpTbpStpOperTable.setStatus('mandatory')
mscVrPpTbpStpOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 18, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpTbpIndex"))
if mibBuilder.loadTexts: mscVrPpTbpStpOperEntry.setStatus('mandatory')
mscVrPpTbpStpPortState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 18, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("disabled", 1), ("blocking", 2), ("listening", 3), ("learning", 4), ("forwarding", 5), ("broken", 6))).clone('disabled')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbpStpPortState.setStatus('mandatory')
mscVrPpTbpStpTypeOper = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 18, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 2, 3))).clone(namedValues=NamedValues(("none", 0), ("dec", 2), ("ieee8021", 3))).clone('ieee8021')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbpStpTypeOper.setStatus('mandatory')
mscVrPpTbpDesignatedCost = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 18, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbpDesignatedCost.setStatus('mandatory')
mscVrPpTbpPathCostOper = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 18, 1, 5), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbpPathCostOper.setStatus('mandatory')
mscVrPpTbpDesignatedBridge = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 18, 1, 6), BridgeId().subtype(subtypeSpec=ValueSizeConstraint(8, 8)).setFixedLength(8)).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbpDesignatedBridge.setStatus('mandatory')
mscVrPpTbpDesignatedPort = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 18, 1, 7), Hex().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbpDesignatedPort.setStatus('mandatory')
mscVrPpTbpForwardTransitions = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 18, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbpForwardTransitions.setStatus('mandatory')
mscVrPpTbpBlockingDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 18, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbpBlockingDiscards.setStatus('mandatory')
mscVrPpTbpDesignatedRoot = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 18, 1, 10), BridgeId().subtype(subtypeSpec=ValueSizeConstraint(8, 8)).setFixedLength(8)).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbpDesignatedRoot.setStatus('mandatory')
# Stats: miscellaneous discard/tinygram/filter counters.
mscVrPpTbpStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 19), )
if mibBuilder.loadTexts: mscVrPpTbpStatsTable.setStatus('mandatory')
mscVrPpTbpStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 19, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpTbpIndex"))
if mibBuilder.loadTexts: mscVrPpTbpStatsEntry.setStatus('mandatory')
mscVrPpTbpBadAbstractDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 19, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbpBadAbstractDiscards.setStatus('mandatory')
mscVrPpTbpTinygramFramesIn = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 19, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbpTinygramFramesIn.setStatus('mandatory')
mscVrPpTbpTinygramFramesOut = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 19, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbpTinygramFramesOut.setStatus('mandatory')
mscVrPpTbpInFilterDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 19, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbpInFilterDiscards.setStatus('mandatory')
mscVrPpTbpOutFilterDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 19, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbpOutFilterDiscards.setStatus('mandatory')
# --- mscVrPpTbpNs (netSequencer under the transparent bridge port) ---
# Same row-status + prov (incoming/outgoing filter) shape as mscVrPpFddiETBNs,
# indexed by Vr/Pp/Tbp/TbpNs instance indices.
mscVrPpTbpNs = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 2))
mscVrPpTbpNsRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 2, 1), )
if mibBuilder.loadTexts: mscVrPpTbpNsRowStatusTable.setStatus('mandatory')
mscVrPpTbpNsRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 2, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpTbpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpTbpNsIndex"))
if mibBuilder.loadTexts: mscVrPpTbpNsRowStatusEntry.setStatus('mandatory')
mscVrPpTbpNsRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 2, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpTbpNsRowStatus.setStatus('mandatory')
mscVrPpTbpNsComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 2, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbpNsComponentName.setStatus('mandatory')
mscVrPpTbpNsStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 2, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbpNsStorageType.setStatus('mandatory')
mscVrPpTbpNsIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 2, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscVrPpTbpNsIndex.setStatus('mandatory')
mscVrPpTbpNsProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 2, 10), )
if mibBuilder.loadTexts: mscVrPpTbpNsProvTable.setStatus('mandatory')
mscVrPpTbpNsProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 2, 10, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpTbpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpTbpNsIndex"))
if mibBuilder.loadTexts: mscVrPpTbpNsProvEntry.setStatus('mandatory')
mscVrPpTbpNsIncomingFilter = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 2, 10, 1, 2), AsciiString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpTbpNsIncomingFilter.setStatus('mandatory')
mscVrPpTbpNsOutgoingFilter = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 4, 2, 10, 1, 3), AsciiString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpTbpNsOutgoingFilter.setStatus('mandatory')
mscVrPpSrBp = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8))
mscVrPpSrBpRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 1), )
if mibBuilder.loadTexts: mscVrPpSrBpRowStatusTable.setStatus('mandatory')
mscVrPpSrBpRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrBpIndex"))
if mibBuilder.loadTexts: mscVrPpSrBpRowStatusEntry.setStatus('mandatory')
mscVrPpSrBpRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrBpRowStatus.setStatus('mandatory')
mscVrPpSrBpComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrBpComponentName.setStatus('mandatory')
mscVrPpSrBpStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrBpStorageType.setStatus('mandatory')
mscVrPpSrBpIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscVrPpSrBpIndex.setStatus('mandatory')
mscVrPpSrBpProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 10), )
if mibBuilder.loadTexts: mscVrPpSrBpProvTable.setStatus('mandatory')
mscVrPpSrBpProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 10, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrBpIndex"))
if mibBuilder.loadTexts: mscVrPpSrBpProvEntry.setStatus('mandatory')
mscVrPpSrBpTranslateIpx = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 10, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("off", 0), ("ieee8023", 1), ("ethernet", 2), ("snap", 3), ("sap", 4))).clone('off')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrBpTranslateIpx.setStatus('mandatory')
mscVrPpSrBpFragmentIp = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 10, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("off", 0), ("on", 1))).clone('off')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrBpFragmentIp.setStatus('mandatory')
mscVrPpSrBpServiceClass = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 10, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("ordered", 0), ("notOrdered", 1))).clone('notOrdered')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrBpServiceClass.setStatus('mandatory')
mscVrPpSrBpConvertArpMacAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 10, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("off", 0), ("on", 1))).clone('off')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrBpConvertArpMacAddress.setStatus('mandatory')
mscVrPpSrBpPortNum = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 10, 1, 5), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrBpPortNum.setStatus('mandatory')
mscVrPpSrBpStpProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 12), )
if mibBuilder.loadTexts: mscVrPpSrBpStpProvTable.setStatus('mandatory')
mscVrPpSrBpStpProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 12, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrBpIndex"))
if mibBuilder.loadTexts: mscVrPpSrBpStpProvEntry.setStatus('mandatory')
mscVrPpSrBpAdminStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 12, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("up", 1), ("down", 2))).clone('up')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrBpAdminStatus.setStatus('mandatory')
mscVrPpSrBpPortStateStpControl = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 12, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("off", 0), ("on", 1))).clone('on')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrBpPortStateStpControl.setStatus('mandatory')
mscVrPpSrBpStpTypeProv = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 12, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2, 3))).clone(namedValues=NamedValues(("dec", 2), ("ieee8021", 3))).clone('ieee8021')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrBpStpTypeProv.setStatus('mandatory')
# ---- mscVrPpSrBp: source-route bridge port subtree (OID ...562.36.2.1.100.3.8) ----
# Auto-generated pysnmp MIB objects.  The `if mibBuilder.loadTexts:` guards only
# attach the DESCRIPTION/STATUS metadata when the builder was asked to load texts.
#
# Tail of the per-port spanning-tree provisioning table (columns 4-6 of
# ...3.8.12; the table/row objects are defined above this chunk).
mscVrPpSrBpPortPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 12, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535)).clone(128)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrBpPortPriority.setStatus('mandatory')
mscVrPpSrBpPathCost = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 12, 1, 5), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535)).clone(10)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrBpPathCost.setStatus('mandatory')
mscVrPpSrBpPathCostMethod = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 12, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("off", 1), ("dec", 2), ("ieee8021", 3))).clone('ieee8021')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrBpPathCostMethod.setStatus('mandatory')
# Domain-identifier provisioning table (...3.8.13): domain number and
# preserve-domain flag per bridge port.
mscVrPpSrBpDIProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 13), )
if mibBuilder.loadTexts: mscVrPpSrBpDIProvTable.setStatus('mandatory')
mscVrPpSrBpDIProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 13, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrBpIndex"))
if mibBuilder.loadTexts: mscVrPpSrBpDIProvEntry.setStatus('mandatory')
mscVrPpSrBpDomainNum = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 13, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967293)).clone(1)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrBpDomainNum.setStatus('mandatory')
mscVrPpSrBpPreserveDomain = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 13, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("off", 0), ("on", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrBpPreserveDomain.setStatus('mandatory')
# Component state table (...3.8.14): OSI-style admin/operational/usage
# state triplet, all read-only.
mscVrPpSrBpStateTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 14), )
if mibBuilder.loadTexts: mscVrPpSrBpStateTable.setStatus('mandatory')
mscVrPpSrBpStateEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 14, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrBpIndex"))
if mibBuilder.loadTexts: mscVrPpSrBpStateEntry.setStatus('mandatory')
mscVrPpSrBpAdminState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 14, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("locked", 0), ("unlocked", 1), ("shuttingDown", 2))).clone('unlocked')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrBpAdminState.setStatus('mandatory')
mscVrPpSrBpOperationalState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 14, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1))).clone('disabled')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrBpOperationalState.setStatus('mandatory')
mscVrPpSrBpUsageState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 14, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("idle", 0), ("active", 1), ("busy", 2))).clone('idle')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrBpUsageState.setStatus('mandatory')
# SNMP operational status table (...3.8.15): single up/down/testing column.
mscVrPpSrBpOperStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 15), )
if mibBuilder.loadTexts: mscVrPpSrBpOperStatusTable.setStatus('mandatory')
mscVrPpSrBpOperStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 15, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrBpIndex"))
if mibBuilder.loadTexts: mscVrPpSrBpOperStatusEntry.setStatus('mandatory')
mscVrPpSrBpSnmpOperStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 15, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("testing", 3))).clone('up')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrBpSnmpOperStatus.setStatus('mandatory')
# General operational attributes table (...3.8.16): port name, up/down
# times, bridging mode/config/type, ifIndex and discard counters.
# NOTE: column sub-ids are sparse (2 and 9 are absent), mirroring the MIB.
mscVrPpSrBpOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 16), )
if mibBuilder.loadTexts: mscVrPpSrBpOperTable.setStatus('mandatory')
mscVrPpSrBpOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 16, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrBpIndex"))
if mibBuilder.loadTexts: mscVrPpSrBpOperEntry.setStatus('mandatory')
mscVrPpSrBpPortName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 16, 1, 1), AsciiString().subtype(subtypeSpec=ValueSizeConstraint(4, 29))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrBpPortName.setStatus('mandatory')
mscVrPpSrBpUpTime = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 16, 1, 3), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrBpUpTime.setStatus('mandatory')
mscVrPpSrBpDownTime = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 16, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrBpDownTime.setStatus('mandatory')
mscVrPpSrBpBridgingMode = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 16, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("unknown", 1), ("tb", 2), ("sr", 3), ("srt", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrBpBridgingMode.setStatus('mandatory')
mscVrPpSrBpBridgePortConfig = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 16, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("access", 1), ("backbone", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrBpBridgePortConfig.setStatus('mandatory')
mscVrPpSrBpBridgePortType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 16, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22))).clone(namedValues=NamedValues(("ethernetBridgePort", 0), ("tokenRingBridgePort", 1), ("fddiBridgePort", 2), ("tokenBusBridgePort", 3), ("fddiEncapsulatingBridgePort", 4), ("vnsBridgePort", 5), ("frameRelayBridgePort", 6), ("pppBridgePort", 7), ("smdsBridgePort", 8), ("vcpBridgePort", 9), ("x25BridgePort", 10), ("srEncapSrBridgePort", 11), ("srEncapTbBridgePort", 12), ("sourceGateSrBridgePort", 13), ("sourceGateTbBridgePort", 14), ("srtbBridgePort", 15), ("tbsrBridgePort", 16), ("clusterSrBridgePort", 17), ("clusterTbBridgePort", 18), ("unknown", 19), ("atmMpeBridgePort", 20), ("snaDlrBridgePort", 21), ("lanEmulationClientEthernet", 22)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrBpBridgePortType.setStatus('mandatory')
mscVrPpSrBpIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 16, 1, 8), InterfaceIndex().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrBpIfIndex.setStatus('mandatory')
mscVrPpSrBpDelayExceededDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 16, 1, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrBpDelayExceededDiscards.setStatus('mandatory')
mscVrPpSrBpMtuExceededDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 16, 1, 11), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrBpMtuExceededDiscards.setStatus('mandatory')
# Spanning-tree operational table (...3.8.18): read-only STP runtime
# state (port state, designated bridge/port/root, transition counters).
mscVrPpSrBpStpOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 18), )
if mibBuilder.loadTexts: mscVrPpSrBpStpOperTable.setStatus('mandatory')
mscVrPpSrBpStpOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 18, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrBpIndex"))
if mibBuilder.loadTexts: mscVrPpSrBpStpOperEntry.setStatus('mandatory')
mscVrPpSrBpStpPortState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 18, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("disabled", 1), ("blocking", 2), ("listening", 3), ("learning", 4), ("forwarding", 5), ("broken", 6))).clone('disabled')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrBpStpPortState.setStatus('mandatory')
mscVrPpSrBpStpTypeOper = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 18, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 2, 3))).clone(namedValues=NamedValues(("none", 0), ("dec", 2), ("ieee8021", 3))).clone('ieee8021')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrBpStpTypeOper.setStatus('mandatory')
mscVrPpSrBpDesignatedCost = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 18, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrBpDesignatedCost.setStatus('mandatory')
mscVrPpSrBpPathCostOper = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 18, 1, 5), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrBpPathCostOper.setStatus('mandatory')
mscVrPpSrBpDesignatedBridge = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 18, 1, 6), BridgeId().subtype(subtypeSpec=ValueSizeConstraint(8, 8)).setFixedLength(8)).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrBpDesignatedBridge.setStatus('mandatory')
mscVrPpSrBpDesignatedPort = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 18, 1, 7), Hex().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrBpDesignatedPort.setStatus('mandatory')
mscVrPpSrBpForwardTransitions = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 18, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrBpForwardTransitions.setStatus('mandatory')
mscVrPpSrBpBlockingDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 18, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrBpBlockingDiscards.setStatus('mandatory')
mscVrPpSrBpDesignatedRoot = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 18, 1, 10), BridgeId().subtype(subtypeSpec=ValueSizeConstraint(8, 8)).setFixedLength(8)).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrBpDesignatedRoot.setStatus('mandatory')
# Generic bridge-port statistics table (...3.8.19): Counter32 discard and
# tinygram counters, all read-only.
mscVrPpSrBpStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 19), )
if mibBuilder.loadTexts: mscVrPpSrBpStatsTable.setStatus('mandatory')
mscVrPpSrBpStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 19, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrBpIndex"))
if mibBuilder.loadTexts: mscVrPpSrBpStatsEntry.setStatus('mandatory')
mscVrPpSrBpBadAbstractDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 19, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrBpBadAbstractDiscards.setStatus('mandatory')
mscVrPpSrBpTinygramFramesIn = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 19, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrBpTinygramFramesIn.setStatus('mandatory')
mscVrPpSrBpTinygramFramesOut = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 19, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrBpTinygramFramesOut.setStatus('mandatory')
mscVrPpSrBpInFilterDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 19, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrBpInFilterDiscards.setStatus('mandatory')
mscVrPpSrBpOutFilterDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 19, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrBpOutFilterDiscards.setStatus('mandatory')
# Source-routing provisioning table (...3.8.20): writable SR parameters
# (hop count, LAN ids, bridge number, largest-frame, STE span mode,
# route-descriptor limits).
mscVrPpSrBpSrProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 20), )
if mibBuilder.loadTexts: mscVrPpSrBpSrProvTable.setStatus('mandatory')
mscVrPpSrBpSrProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 20, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrBpIndex"))
if mibBuilder.loadTexts: mscVrPpSrBpSrProvEntry.setStatus('mandatory')
mscVrPpSrBpHopCount = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 20, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 7)).clone(7)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrBpHopCount.setStatus('mandatory')
mscVrPpSrBpExploreFrameTreatment = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 20, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("encap", 0), ("xlate", 1))).clone('encap')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrBpExploreFrameTreatment.setStatus('mandatory')
mscVrPpSrBpLanId = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 20, 1, 3), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4095))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrBpLanId.setStatus('mandatory')
mscVrPpSrBpInternalLanId = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 20, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4095))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrBpInternalLanId.setStatus('mandatory')
mscVrPpSrBpBridgeNum = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 20, 1, 5), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 15)).clone(7)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrBpBridgeNum.setStatus('mandatory')
# largestFrame is restricted to a discrete set of sizes, expressed as a
# union of degenerate (single-value) ranges by the MIB compiler.
mscVrPpSrBpLargestFrame = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 20, 1, 6), Unsigned32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(516, 516), ValueRangeConstraint(1470, 1470), ValueRangeConstraint(2052, 2052), ValueRangeConstraint(4399, 4399), ValueRangeConstraint(8130, 8130), ValueRangeConstraint(11407, 11407), ValueRangeConstraint(17749, 17749), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrBpLargestFrame.setStatus('mandatory')
mscVrPpSrBpSteSpanMode = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 20, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("autoSpan", 1), ("disabled", 2), ("forced", 3))).clone('autoSpan')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrBpSteSpanMode.setStatus('mandatory')
mscVrPpSrBpAreRdLimit = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 20, 1, 8), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 8)).clone(8)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrBpAreRdLimit.setStatus('mandatory')
mscVrPpSrBpSteRdLimit = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 20, 1, 9), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 8)).clone(8)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrBpSteRdLimit.setStatus('mandatory')
# Source-routing statistics table (...3.8.21): SR frame and discard
# counters, all read-only.
mscVrPpSrBpSrStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 21), )
if mibBuilder.loadTexts: mscVrPpSrBpSrStatsTable.setStatus('mandatory')
mscVrPpSrBpSrStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 21, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrBpIndex"))
if mibBuilder.loadTexts: mscVrPpSrBpSrStatsEntry.setStatus('mandatory')
mscVrPpSrBpSpecInFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 21, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrBpSpecInFrames.setStatus('mandatory')
mscVrPpSrBpSpecOutFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 21, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrBpSpecOutFrames.setStatus('mandatory')
mscVrPpSrBpApeInFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 21, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrBpApeInFrames.setStatus('mandatory')
mscVrPpSrBpApeOutFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 21, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrBpApeOutFrames.setStatus('mandatory')
mscVrPpSrBpSteInFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 21, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrBpSteInFrames.setStatus('mandatory')
mscVrPpSrBpSteOutFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 21, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrBpSteOutFrames.setStatus('mandatory')
mscVrPpSrBpSegmentMismatchDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 21, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrBpSegmentMismatchDiscards.setStatus('mandatory')
mscVrPpSrBpDupSegmentDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 21, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrBpDupSegmentDiscards.setStatus('mandatory')
mscVrPpSrBpHopCountExceededDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 21, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrBpHopCountExceededDiscards.setStatus('mandatory')
mscVrPpSrBpDupLanIdOrTreeErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 21, 1, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrBpDupLanIdOrTreeErrors.setStatus('mandatory')
mscVrPpSrBpLanIdMismatches = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 21, 1, 11), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrBpLanIdMismatches.setStatus('mandatory')
mscVrPpSrBpStaticDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 21, 1, 12), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrBpStaticDiscards.setStatus('mandatory')
mscVrPpSrBpDynamicDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 21, 1, 13), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrBpDynamicDiscards.setStatus('mandatory')
# Ns sub-component (...3.8.2): per-bridge-port name space holding the
# incoming/outgoing filter names.  Indexed one level deeper than the parent
# (adds mscVrPpSrBpNsIndex, a NonReplicated singleton index).
mscVrPpSrBpNs = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 2))
mscVrPpSrBpNsRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 2, 1), )
if mibBuilder.loadTexts: mscVrPpSrBpNsRowStatusTable.setStatus('mandatory')
mscVrPpSrBpNsRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 2, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrBpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrBpNsIndex"))
if mibBuilder.loadTexts: mscVrPpSrBpNsRowStatusEntry.setStatus('mandatory')
mscVrPpSrBpNsRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 2, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrBpNsRowStatus.setStatus('mandatory')
mscVrPpSrBpNsComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 2, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrBpNsComponentName.setStatus('mandatory')
mscVrPpSrBpNsStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 2, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrBpNsStorageType.setStatus('mandatory')
mscVrPpSrBpNsIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 2, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscVrPpSrBpNsIndex.setStatus('mandatory')
# Ns provisioning table (...3.8.2.10): writable filter names (0-32 chars).
mscVrPpSrBpNsProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 2, 10), )
if mibBuilder.loadTexts: mscVrPpSrBpNsProvTable.setStatus('mandatory')
mscVrPpSrBpNsProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 2, 10, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrBpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrBpNsIndex"))
if mibBuilder.loadTexts: mscVrPpSrBpNsProvEntry.setStatus('mandatory')
mscVrPpSrBpNsIncomingFilter = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 2, 10, 1, 2), AsciiString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrBpNsIncomingFilter.setStatus('mandatory')
mscVrPpSrBpNsOutgoingFilter = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 8, 2, 10, 1, 3), AsciiString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrBpNsOutgoingFilter.setStatus('mandatory')
# ---- mscVrPpSrtBp: source-route transparent bridge port subtree
# (OID ...562.36.2.1.100.3.9).  Structure parallels the SrBp subtree above.
mscVrPpSrtBp = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9))
# Row-status table (...3.9.1): RowStatus / component name / storage type,
# plus the NonReplicated singleton index used by every SrtBp table.
mscVrPpSrtBpRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 1), )
if mibBuilder.loadTexts: mscVrPpSrtBpRowStatusTable.setStatus('mandatory')
mscVrPpSrtBpRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrtBpIndex"))
if mibBuilder.loadTexts: mscVrPpSrtBpRowStatusEntry.setStatus('mandatory')
mscVrPpSrtBpRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrtBpRowStatus.setStatus('mandatory')
mscVrPpSrtBpComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrtBpComponentName.setStatus('mandatory')
mscVrPpSrtBpStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrtBpStorageType.setStatus('mandatory')
mscVrPpSrtBpIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscVrPpSrtBpIndex.setStatus('mandatory')
# Provisioning table (...3.9.10): IPX translation, IP fragmentation,
# service class, ARP MAC conversion; portNum is read-only.
mscVrPpSrtBpProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 10), )
if mibBuilder.loadTexts: mscVrPpSrtBpProvTable.setStatus('mandatory')
mscVrPpSrtBpProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 10, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrtBpIndex"))
if mibBuilder.loadTexts: mscVrPpSrtBpProvEntry.setStatus('mandatory')
mscVrPpSrtBpTranslateIpx = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 10, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("off", 0), ("ieee8023", 1), ("ethernet", 2), ("snap", 3), ("sap", 4))).clone('off')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrtBpTranslateIpx.setStatus('mandatory')
mscVrPpSrtBpFragmentIp = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 10, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("off", 0), ("on", 1))).clone('off')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrtBpFragmentIp.setStatus('mandatory')
mscVrPpSrtBpServiceClass = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 10, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("ordered", 0), ("notOrdered", 1))).clone('notOrdered')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrtBpServiceClass.setStatus('mandatory')
mscVrPpSrtBpConvertArpMacAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 10, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("off", 0), ("on", 1))).clone('off')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrtBpConvertArpMacAddress.setStatus('mandatory')
mscVrPpSrtBpPortNum = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 10, 1, 5), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrtBpPortNum.setStatus('mandatory')
# Transparent-bridging provisioning table (...3.9.11): secure option only.
mscVrPpSrtBpTbProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 11), )
if mibBuilder.loadTexts: mscVrPpSrtBpTbProvTable.setStatus('mandatory')
mscVrPpSrtBpTbProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 11, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrtBpIndex"))
if mibBuilder.loadTexts: mscVrPpSrtBpTbProvEntry.setStatus('mandatory')
mscVrPpSrtBpSecureOption = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 11, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1))).clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrtBpSecureOption.setStatus('mandatory')
# Spanning-tree provisioning table (...3.9.12): admin status, STP control,
# STP type, port priority, path cost and path-cost method.
mscVrPpSrtBpStpProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 12), )
if mibBuilder.loadTexts: mscVrPpSrtBpStpProvTable.setStatus('mandatory')
mscVrPpSrtBpStpProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 12, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrtBpIndex"))
if mibBuilder.loadTexts: mscVrPpSrtBpStpProvEntry.setStatus('mandatory')
mscVrPpSrtBpAdminStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 12, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("up", 1), ("down", 2))).clone('up')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrtBpAdminStatus.setStatus('mandatory')
mscVrPpSrtBpPortStateStpControl = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 12, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("off", 0), ("on", 1))).clone('on')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrtBpPortStateStpControl.setStatus('mandatory')
mscVrPpSrtBpStpTypeProv = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 12, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2, 3))).clone(namedValues=NamedValues(("dec", 2), ("ieee8021", 3))).clone('ieee8021')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrtBpStpTypeProv.setStatus('mandatory')
mscVrPpSrtBpPortPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 12, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535)).clone(128)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrtBpPortPriority.setStatus('mandatory')
mscVrPpSrtBpPathCost = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 12, 1, 5), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535)).clone(10)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrtBpPathCost.setStatus('mandatory')
mscVrPpSrtBpPathCostMethod = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 12, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("off", 1), ("dec", 2), ("ieee8021", 3))).clone('ieee8021')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrtBpPathCostMethod.setStatus('mandatory')
# Domain-identifier provisioning table (...3.9.13): mirrors SrBp's ...3.8.13.
mscVrPpSrtBpDIProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 13), )
if mibBuilder.loadTexts: mscVrPpSrtBpDIProvTable.setStatus('mandatory')
mscVrPpSrtBpDIProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 13, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrtBpIndex"))
if mibBuilder.loadTexts: mscVrPpSrtBpDIProvEntry.setStatus('mandatory')
mscVrPpSrtBpDomainNum = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 13, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967293)).clone(1)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrtBpDomainNum.setStatus('mandatory')
mscVrPpSrtBpPreserveDomain = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 13, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("off", 0), ("on", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrtBpPreserveDomain.setStatus('mandatory')
# Component state table (...3.9.14): OSI-style admin/operational/usage state.
mscVrPpSrtBpStateTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 14), )
if mibBuilder.loadTexts: mscVrPpSrtBpStateTable.setStatus('mandatory')
mscVrPpSrtBpStateEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 14, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrtBpIndex"))
if mibBuilder.loadTexts: mscVrPpSrtBpStateEntry.setStatus('mandatory')
mscVrPpSrtBpAdminState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 14, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("locked", 0), ("unlocked", 1), ("shuttingDown", 2))).clone('unlocked')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrtBpAdminState.setStatus('mandatory')
mscVrPpSrtBpOperationalState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 14, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1))).clone('disabled')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrtBpOperationalState.setStatus('mandatory')
mscVrPpSrtBpUsageState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 14, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("idle", 0), ("active", 1), ("busy", 2))).clone('idle')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrtBpUsageState.setStatus('mandatory')
# SNMP operational status table (...3.9.15): single up/down/testing column.
mscVrPpSrtBpOperStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 15), )
if mibBuilder.loadTexts: mscVrPpSrtBpOperStatusTable.setStatus('mandatory')
mscVrPpSrtBpOperStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 15, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrtBpIndex"))
if mibBuilder.loadTexts: mscVrPpSrtBpOperStatusEntry.setStatus('mandatory')
mscVrPpSrtBpSnmpOperStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 15, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("testing", 3))).clone('up')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrtBpSnmpOperStatus.setStatus('mandatory')
# General operational attributes table (...3.9.16).  Further columns of
# this table continue past this point in the file.
mscVrPpSrtBpOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 16), )
if mibBuilder.loadTexts: mscVrPpSrtBpOperTable.setStatus('mandatory')
mscVrPpSrtBpOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 16, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrtBpIndex"))
if mibBuilder.loadTexts: mscVrPpSrtBpOperEntry.setStatus('mandatory')
mscVrPpSrtBpPortName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 16, 1, 1), AsciiString().subtype(subtypeSpec=ValueSizeConstraint(4, 29))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrtBpPortName.setStatus('mandatory')
mscVrPpSrtBpUpTime = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 16, 1, 3), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrtBpUpTime.setStatus('mandatory')
mscVrPpSrtBpDownTime = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 16, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrtBpDownTime.setStatus('mandatory')
mscVrPpSrtBpBridgingMode = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 16, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("unknown", 1), ("tb", 2), ("sr", 3), ("srt", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrtBpBridgingMode.setStatus('mandatory')
mscVrPpSrtBpBridgePortConfig = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 16, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("access", 1), ("backbone", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrtBpBridgePortConfig.setStatus('mandatory')
mscVrPpSrtBpBridgePortType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 16, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22))).clone(namedValues=NamedValues(("ethernetBridgePort", 0), ("tokenRingBridgePort", 1), ("fddiBridgePort", 2), ("tokenBusBridgePort", 3), ("fddiEncapsulatingBridgePort", 4), ("vnsBridgePort", 5), ("frameRelayBridgePort", 6), ("pppBridgePort", 7), ("smdsBridgePort", 8), ("vcpBridgePort", 9), ("x25BridgePort", 10), ("srEncapSrBridgePort", 11), ("srEncapTbBridgePort", 12), ("sourceGateSrBridgePort", 13), ("sourceGateTbBridgePort", 14), ("srtbBridgePort", 15), ("tbsrBridgePort", 16), ("clusterSrBridgePort", 17), ("clusterTbBridgePort", 18), ("unknown", 19), ("atmMpeBridgePort", 20), ("snaDlrBridgePort", 21), ("lanEmulationClientEthernet", 22)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrtBpBridgePortType.setStatus('mandatory')
mscVrPpSrtBpIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 16, 1, 8), InterfaceIndex().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrtBpIfIndex.setStatus('mandatory')
mscVrPpSrtBpDelayExceededDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 16, 1, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrtBpDelayExceededDiscards.setStatus('mandatory')
mscVrPpSrtBpMtuExceededDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 16, 1, 11), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrtBpMtuExceededDiscards.setStatus('mandatory')
# --- SRT bridge port (Bp): transparent-bridging oper, STP oper, and stats tables ---
# TB oper table (…3.9.17): transparent-bridging counters (frame sizes, learning
# and forwarding-database discard counters, in/out frame totals). Read-only.
mscVrPpSrtBpTbOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 17), )
if mibBuilder.loadTexts: mscVrPpSrtBpTbOperTable.setStatus('mandatory')
mscVrPpSrtBpTbOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 17, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrtBpIndex"))
if mibBuilder.loadTexts: mscVrPpSrtBpTbOperEntry.setStatus('mandatory')
mscVrPpSrtBpMaxInfo = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 17, 1, 2), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrtBpMaxInfo.setStatus('mandatory')
mscVrPpSrtBpBadVerifyDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 17, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrtBpBadVerifyDiscards.setStatus('mandatory')
mscVrPpSrtBpUnicastNoMatches = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 17, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrtBpUnicastNoMatches.setStatus('mandatory')
mscVrPpSrtBpStaticEntryDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 17, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrtBpStaticEntryDiscards.setStatus('mandatory')
mscVrPpSrtBpDynamicEntryDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 17, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrtBpDynamicEntryDiscards.setStatus('mandatory')
mscVrPpSrtBpLearningDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 17, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrtBpLearningDiscards.setStatus('mandatory')
mscVrPpSrtBpInDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 17, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrtBpInDiscards.setStatus('mandatory')
mscVrPpSrtBpInFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 17, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrtBpInFrames.setStatus('mandatory')
mscVrPpSrtBpOutFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 17, 1, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrtBpOutFrames.setStatus('mandatory')
# STP oper table (…3.9.18): spanning-tree operational view of the port
# (port state, STP variant, designated root/bridge/port, path costs, transitions).
mscVrPpSrtBpStpOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 18), )
if mibBuilder.loadTexts: mscVrPpSrtBpStpOperTable.setStatus('mandatory')
mscVrPpSrtBpStpOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 18, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrtBpIndex"))
if mibBuilder.loadTexts: mscVrPpSrtBpStpOperEntry.setStatus('mandatory')
mscVrPpSrtBpStpPortState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 18, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("disabled", 1), ("blocking", 2), ("listening", 3), ("learning", 4), ("forwarding", 5), ("broken", 6))).clone('disabled')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrtBpStpPortState.setStatus('mandatory')
mscVrPpSrtBpStpTypeOper = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 18, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 2, 3))).clone(namedValues=NamedValues(("none", 0), ("dec", 2), ("ieee8021", 3))).clone('ieee8021')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrtBpStpTypeOper.setStatus('mandatory')
mscVrPpSrtBpDesignatedCost = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 18, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrtBpDesignatedCost.setStatus('mandatory')
mscVrPpSrtBpPathCostOper = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 18, 1, 5), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrtBpPathCostOper.setStatus('mandatory')
# BridgeId values are fixed-length 8-octet strings (priority + MAC address).
mscVrPpSrtBpDesignatedBridge = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 18, 1, 6), BridgeId().subtype(subtypeSpec=ValueSizeConstraint(8, 8)).setFixedLength(8)).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrtBpDesignatedBridge.setStatus('mandatory')
mscVrPpSrtBpDesignatedPort = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 18, 1, 7), Hex().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrtBpDesignatedPort.setStatus('mandatory')
mscVrPpSrtBpForwardTransitions = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 18, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrtBpForwardTransitions.setStatus('mandatory')
mscVrPpSrtBpBlockingDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 18, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrtBpBlockingDiscards.setStatus('mandatory')
mscVrPpSrtBpDesignatedRoot = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 18, 1, 10), BridgeId().subtype(subtypeSpec=ValueSizeConstraint(8, 8)).setFixedLength(8)).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrtBpDesignatedRoot.setStatus('mandatory')
# Stats table (…3.9.19): misc per-port discard/frame counters. Read-only.
mscVrPpSrtBpStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 19), )
if mibBuilder.loadTexts: mscVrPpSrtBpStatsTable.setStatus('mandatory')
mscVrPpSrtBpStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 19, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrtBpIndex"))
if mibBuilder.loadTexts: mscVrPpSrtBpStatsEntry.setStatus('mandatory')
mscVrPpSrtBpBadAbstractDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 19, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrtBpBadAbstractDiscards.setStatus('mandatory')
mscVrPpSrtBpTinygramFramesIn = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 19, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrtBpTinygramFramesIn.setStatus('mandatory')
mscVrPpSrtBpTinygramFramesOut = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 19, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrtBpTinygramFramesOut.setStatus('mandatory')
mscVrPpSrtBpInFilterDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 19, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrtBpInFilterDiscards.setStatus('mandatory')
mscVrPpSrtBpOutFilterDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 19, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrtBpOutFilterDiscards.setStatus('mandatory')
# --- SRT bridge port (Bp): source-routing provisioning and statistics ---
# SR prov table (…3.9.20): read-write source-route bridging configuration
# (hop count, LAN/bridge numbers, largest-frame code, explorer handling, RD limits).
mscVrPpSrtBpSrProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 20), )
if mibBuilder.loadTexts: mscVrPpSrtBpSrProvTable.setStatus('mandatory')
mscVrPpSrtBpSrProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 20, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrtBpIndex"))
if mibBuilder.loadTexts: mscVrPpSrtBpSrProvEntry.setStatus('mandatory')
mscVrPpSrtBpHopCount = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 20, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 7)).clone(7)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrtBpHopCount.setStatus('mandatory')
mscVrPpSrtBpExploreFrameTreatment = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 20, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("encap", 0), ("xlate", 1))).clone('encap')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrtBpExploreFrameTreatment.setStatus('mandatory')
mscVrPpSrtBpLanId = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 20, 1, 3), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4095))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrtBpLanId.setStatus('mandatory')
mscVrPpSrtBpInternalLanId = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 20, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4095))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrtBpInternalLanId.setStatus('mandatory')
mscVrPpSrtBpBridgeNum = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 20, 1, 5), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 15)).clone(7)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrtBpBridgeNum.setStatus('mandatory')
# Largest-frame is restricted to the discrete sizes enumerated below
# (each modelled as a degenerate single-value range in the constraint union).
mscVrPpSrtBpLargestFrame = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 20, 1, 6), Unsigned32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(516, 516), ValueRangeConstraint(1470, 1470), ValueRangeConstraint(2052, 2052), ValueRangeConstraint(4399, 4399), ValueRangeConstraint(8130, 8130), ValueRangeConstraint(11407, 11407), ValueRangeConstraint(17749, 17749), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrtBpLargestFrame.setStatus('mandatory')
mscVrPpSrtBpSteSpanMode = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 20, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("autoSpan", 1), ("disabled", 2), ("forced", 3))).clone('autoSpan')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrtBpSteSpanMode.setStatus('mandatory')
mscVrPpSrtBpAreRdLimit = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 20, 1, 8), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 8)).clone(8)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrtBpAreRdLimit.setStatus('mandatory')
mscVrPpSrtBpSteRdLimit = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 20, 1, 9), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 8)).clone(8)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrtBpSteRdLimit.setStatus('mandatory')
# SR stats table (…3.9.21): read-only source-route counters — SRF/ARE/STE
# frame totals plus routing-descriptor error discards.
mscVrPpSrtBpSrStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 21), )
if mibBuilder.loadTexts: mscVrPpSrtBpSrStatsTable.setStatus('mandatory')
mscVrPpSrtBpSrStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 21, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrtBpIndex"))
if mibBuilder.loadTexts: mscVrPpSrtBpSrStatsEntry.setStatus('mandatory')
mscVrPpSrtBpSpecInFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 21, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrtBpSpecInFrames.setStatus('mandatory')
mscVrPpSrtBpSpecOutFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 21, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrtBpSpecOutFrames.setStatus('mandatory')
mscVrPpSrtBpApeInFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 21, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrtBpApeInFrames.setStatus('mandatory')
mscVrPpSrtBpApeOutFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 21, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrtBpApeOutFrames.setStatus('mandatory')
mscVrPpSrtBpSteInFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 21, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrtBpSteInFrames.setStatus('mandatory')
mscVrPpSrtBpSteOutFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 21, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrtBpSteOutFrames.setStatus('mandatory')
mscVrPpSrtBpSegmentMismatchDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 21, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrtBpSegmentMismatchDiscards.setStatus('mandatory')
mscVrPpSrtBpDupSegmentDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 21, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrtBpDupSegmentDiscards.setStatus('mandatory')
mscVrPpSrtBpHopCountExceededDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 21, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrtBpHopCountExceededDiscards.setStatus('mandatory')
mscVrPpSrtBpDupLanIdOrTreeErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 21, 1, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrtBpDupLanIdOrTreeErrors.setStatus('mandatory')
mscVrPpSrtBpLanIdMismatches = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 21, 1, 11), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrtBpLanIdMismatches.setStatus('mandatory')
mscVrPpSrtBpStaticDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 21, 1, 12), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrtBpStaticDiscards.setStatus('mandatory')
mscVrPpSrtBpDynamicDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 21, 1, 13), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrtBpDynamicDiscards.setStatus('mandatory')
# --- SRT bridge port sub-component "Ns" (…3.9.2) ---
# Row-status table (…3.9.2.1) follows the Passport pattern: RowStatus for
# creation/deletion, read-only component name and storage type, and a
# NonReplicated index column; the prov table (…3.9.2.10) carries two
# read-write filter-name strings (0-32 chars).
mscVrPpSrtBpNs = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 2))
mscVrPpSrtBpNsRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 2, 1), )
if mibBuilder.loadTexts: mscVrPpSrtBpNsRowStatusTable.setStatus('mandatory')
mscVrPpSrtBpNsRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 2, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrtBpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrtBpNsIndex"))
if mibBuilder.loadTexts: mscVrPpSrtBpNsRowStatusEntry.setStatus('mandatory')
mscVrPpSrtBpNsRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 2, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrtBpNsRowStatus.setStatus('mandatory')
mscVrPpSrtBpNsComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 2, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrtBpNsComponentName.setStatus('mandatory')
mscVrPpSrtBpNsStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 2, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrtBpNsStorageType.setStatus('mandatory')
mscVrPpSrtBpNsIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 2, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscVrPpSrtBpNsIndex.setStatus('mandatory')
mscVrPpSrtBpNsProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 2, 10), )
if mibBuilder.loadTexts: mscVrPpSrtBpNsProvTable.setStatus('mandatory')
mscVrPpSrtBpNsProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 2, 10, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrtBpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrtBpNsIndex"))
if mibBuilder.loadTexts: mscVrPpSrtBpNsProvEntry.setStatus('mandatory')
mscVrPpSrtBpNsIncomingFilter = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 2, 10, 1, 2), AsciiString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrtBpNsIncomingFilter.setStatus('mandatory')
mscVrPpSrtBpNsOutgoingFilter = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 9, 2, 10, 1, 3), AsciiString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrtBpNsOutgoingFilter.setStatus('mandatory')
# --- "Srse" sub-component of the protocol port (…3.10) ---
# Same Passport component layout as the Bp subtree: a row-status table for
# lifecycle, then provisioning tables.
mscVrPpSrse = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10))
mscVrPpSrseRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 1), )
if mibBuilder.loadTexts: mscVrPpSrseRowStatusTable.setStatus('mandatory')
mscVrPpSrseRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrseIndex"))
if mibBuilder.loadTexts: mscVrPpSrseRowStatusEntry.setStatus('mandatory')
mscVrPpSrseRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrseRowStatus.setStatus('mandatory')
mscVrPpSrseComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrseComponentName.setStatus('mandatory')
mscVrPpSrseStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrseStorageType.setStatus('mandatory')
mscVrPpSrseIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscVrPpSrseIndex.setStatus('mandatory')
# Prov table (…3.10.10): read-write protocol translation / fragmentation /
# service-class options plus a read-only port number.
mscVrPpSrseProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 10), )
if mibBuilder.loadTexts: mscVrPpSrseProvTable.setStatus('mandatory')
mscVrPpSrseProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 10, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrseIndex"))
if mibBuilder.loadTexts: mscVrPpSrseProvEntry.setStatus('mandatory')
mscVrPpSrseTranslateIpx = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 10, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("off", 0), ("ieee8023", 1), ("ethernet", 2), ("snap", 3), ("sap", 4))).clone('off')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrseTranslateIpx.setStatus('mandatory')
mscVrPpSrseFragmentIp = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 10, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("off", 0), ("on", 1))).clone('off')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrseFragmentIp.setStatus('mandatory')
mscVrPpSrseServiceClass = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 10, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("ordered", 0), ("notOrdered", 1))).clone('notOrdered')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrseServiceClass.setStatus('mandatory')
mscVrPpSrseConvertArpMacAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 10, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("off", 0), ("on", 1))).clone('off')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrseConvertArpMacAddress.setStatus('mandatory')
mscVrPpSrsePortNum = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 10, 1, 5), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrsePortNum.setStatus('mandatory')
# STP prov table (…3.10.12): read-write spanning-tree provisioning
# (admin status, STP control/variant, port priority, path cost and its method).
mscVrPpSrseStpProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 12), )
if mibBuilder.loadTexts: mscVrPpSrseStpProvTable.setStatus('mandatory')
mscVrPpSrseStpProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 12, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrseIndex"))
if mibBuilder.loadTexts: mscVrPpSrseStpProvEntry.setStatus('mandatory')
mscVrPpSrseAdminStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 12, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("up", 1), ("down", 2))).clone('up')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrseAdminStatus.setStatus('mandatory')
mscVrPpSrsePortStateStpControl = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 12, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("off", 0), ("on", 1))).clone('on')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrsePortStateStpControl.setStatus('mandatory')
mscVrPpSrseStpTypeProv = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 12, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2, 3))).clone(namedValues=NamedValues(("dec", 2), ("ieee8021", 3))).clone('ieee8021')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrseStpTypeProv.setStatus('mandatory')
mscVrPpSrsePortPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 12, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535)).clone(128)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrsePortPriority.setStatus('mandatory')
mscVrPpSrsePathCost = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 12, 1, 5), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535)).clone(10)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrsePathCost.setStatus('mandatory')
mscVrPpSrsePathCostMethod = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 12, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("off", 1), ("dec", 2), ("ieee8021", 3))).clone('ieee8021')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrsePathCostMethod.setStatus('mandatory')
# DI prov table (…3.10.13): read-write domain number / domain-preservation flag.
mscVrPpSrseDIProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 13), )
if mibBuilder.loadTexts: mscVrPpSrseDIProvTable.setStatus('mandatory')
mscVrPpSrseDIProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 13, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrseIndex"))
if mibBuilder.loadTexts: mscVrPpSrseDIProvEntry.setStatus('mandatory')
mscVrPpSrseDomainNum = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 13, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967293)).clone(1)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrseDomainNum.setStatus('mandatory')
mscVrPpSrsePreserveDomain = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 13, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("off", 0), ("on", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrsePreserveDomain.setStatus('mandatory')
mscVrPpSrseStateTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 14), )
if mibBuilder.loadTexts: mscVrPpSrseStateTable.setStatus('mandatory')
mscVrPpSrseStateEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 14, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrseIndex"))
if mibBuilder.loadTexts: mscVrPpSrseStateEntry.setStatus('mandatory')
mscVrPpSrseAdminState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 14, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("locked", 0), ("unlocked", 1), ("shuttingDown", 2))).clone('unlocked')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrseAdminState.setStatus('mandatory')
mscVrPpSrseOperationalState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 14, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1))).clone('disabled')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrseOperationalState.setStatus('mandatory')
mscVrPpSrseUsageState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 14, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("idle", 0), ("active", 1), ("busy", 2))).clone('idle')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrseUsageState.setStatus('mandatory')
mscVrPpSrseOperStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 15), )
if mibBuilder.loadTexts: mscVrPpSrseOperStatusTable.setStatus('mandatory')
mscVrPpSrseOperStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 15, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrseIndex"))
if mibBuilder.loadTexts: mscVrPpSrseOperStatusEntry.setStatus('mandatory')
mscVrPpSrseSnmpOperStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 15, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("testing", 3))).clone('up')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrseSnmpOperStatus.setStatus('mandatory')
mscVrPpSrseOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 16), )
if mibBuilder.loadTexts: mscVrPpSrseOperTable.setStatus('mandatory')
mscVrPpSrseOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 16, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrseIndex"))
if mibBuilder.loadTexts: mscVrPpSrseOperEntry.setStatus('mandatory')
mscVrPpSrsePortName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 16, 1, 1), AsciiString().subtype(subtypeSpec=ValueSizeConstraint(4, 29))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrsePortName.setStatus('mandatory')
# NOTE(review): this file looks like pysmi-generated pysnmp MIB code; edit the
# source MIB, not this file, for substantive changes.
# Remaining read-only columns of the Srse operational table (OID subtree
# ...562.36.2.1.100.3.10.16); the table/row objects precede this chunk.
mscVrPpSrseUpTime = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 16, 1, 3), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrseUpTime.setStatus('mandatory')
mscVrPpSrseDownTime = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 16, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrseDownTime.setStatus('mandatory')
mscVrPpSrseBridgingMode = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 16, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("unknown", 1), ("tb", 2), ("sr", 3), ("srt", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrseBridgingMode.setStatus('mandatory')
mscVrPpSrseBridgePortConfig = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 16, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("access", 1), ("backbone", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrseBridgePortConfig.setStatus('mandatory')
mscVrPpSrseBridgePortType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 16, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22))).clone(namedValues=NamedValues(("ethernetBridgePort", 0), ("tokenRingBridgePort", 1), ("fddiBridgePort", 2), ("tokenBusBridgePort", 3), ("fddiEncapsulatingBridgePort", 4), ("vnsBridgePort", 5), ("frameRelayBridgePort", 6), ("pppBridgePort", 7), ("smdsBridgePort", 8), ("vcpBridgePort", 9), ("x25BridgePort", 10), ("srEncapSrBridgePort", 11), ("srEncapTbBridgePort", 12), ("sourceGateSrBridgePort", 13), ("sourceGateTbBridgePort", 14), ("srtbBridgePort", 15), ("tbsrBridgePort", 16), ("clusterSrBridgePort", 17), ("clusterTbBridgePort", 18), ("unknown", 19), ("atmMpeBridgePort", 20), ("snaDlrBridgePort", 21), ("lanEmulationClientEthernet", 22)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrseBridgePortType.setStatus('mandatory')
mscVrPpSrseIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 16, 1, 8), InterfaceIndex().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrseIfIndex.setStatus('mandatory')
# Discard counters; note column sub-identifiers jump from 8 to 10 (9 unused).
mscVrPpSrseDelayExceededDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 16, 1, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrseDelayExceededDiscards.setStatus('mandatory')
mscVrPpSrseMtuExceededDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 16, 1, 11), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrseMtuExceededDiscards.setStatus('mandatory')
# Srse spanning-tree operational table (OID subtree ...3.10.18): read-only
# per-port STP state, indexed by (mscVrIndex, mscVrPpIndex, mscVrPpSrseIndex).
mscVrPpSrseStpOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 18), )
if mibBuilder.loadTexts: mscVrPpSrseStpOperTable.setStatus('mandatory')
mscVrPpSrseStpOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 18, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrseIndex"))
if mibBuilder.loadTexts: mscVrPpSrseStpOperEntry.setStatus('mandatory')
mscVrPpSrseStpPortState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 18, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("disabled", 1), ("blocking", 2), ("listening", 3), ("learning", 4), ("forwarding", 5), ("broken", 6))).clone('disabled')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrseStpPortState.setStatus('mandatory')
mscVrPpSrseStpTypeOper = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 18, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 2, 3))).clone(namedValues=NamedValues(("none", 0), ("dec", 2), ("ieee8021", 3))).clone('ieee8021')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrseStpTypeOper.setStatus('mandatory')
mscVrPpSrseDesignatedCost = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 18, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrseDesignatedCost.setStatus('mandatory')
mscVrPpSrsePathCostOper = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 18, 1, 5), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrsePathCostOper.setStatus('mandatory')
# BridgeId columns are fixed-length 8-octet values (priority + MAC address).
mscVrPpSrseDesignatedBridge = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 18, 1, 6), BridgeId().subtype(subtypeSpec=ValueSizeConstraint(8, 8)).setFixedLength(8)).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrseDesignatedBridge.setStatus('mandatory')
mscVrPpSrseDesignatedPort = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 18, 1, 7), Hex().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrseDesignatedPort.setStatus('mandatory')
mscVrPpSrseForwardTransitions = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 18, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrseForwardTransitions.setStatus('mandatory')
mscVrPpSrseBlockingDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 18, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrseBlockingDiscards.setStatus('mandatory')
mscVrPpSrseDesignatedRoot = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 18, 1, 10), BridgeId().subtype(subtypeSpec=ValueSizeConstraint(8, 8)).setFixedLength(8)).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrseDesignatedRoot.setStatus('mandatory')
# Srse statistics table (OID subtree ...3.10.19): read-only Counter32 columns,
# same three-part index as the other Srse tables.
mscVrPpSrseStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 19), )
if mibBuilder.loadTexts: mscVrPpSrseStatsTable.setStatus('mandatory')
mscVrPpSrseStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 19, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrseIndex"))
if mibBuilder.loadTexts: mscVrPpSrseStatsEntry.setStatus('mandatory')
mscVrPpSrseBadAbstractDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 19, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrseBadAbstractDiscards.setStatus('mandatory')
mscVrPpSrseTinygramFramesIn = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 19, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrseTinygramFramesIn.setStatus('mandatory')
mscVrPpSrseTinygramFramesOut = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 19, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrseTinygramFramesOut.setStatus('mandatory')
mscVrPpSrseInFilterDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 19, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrseInFilterDiscards.setStatus('mandatory')
mscVrPpSrseOutFilterDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 19, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrseOutFilterDiscards.setStatus('mandatory')
# Srse source-route provisioning table (OID subtree ...3.10.20): read-write
# columns; .clone(x) on the syntax object sets the column's default value.
mscVrPpSrseSrProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 20), )
if mibBuilder.loadTexts: mscVrPpSrseSrProvTable.setStatus('mandatory')
mscVrPpSrseSrProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 20, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrseIndex"))
if mibBuilder.loadTexts: mscVrPpSrseSrProvEntry.setStatus('mandatory')
mscVrPpSrseHopCount = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 20, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 7)).clone(7)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrseHopCount.setStatus('mandatory')
mscVrPpSrseExploreFrameTreatment = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 20, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("encap", 0), ("xlate", 1))).clone('encap')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrseExploreFrameTreatment.setStatus('mandatory')
mscVrPpSrseLanId = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 20, 1, 3), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4095))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrseLanId.setStatus('mandatory')
mscVrPpSrseInternalLanId = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 20, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4095))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrseInternalLanId.setStatus('mandatory')
mscVrPpSrseBridgeNum = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 20, 1, 5), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 15)).clone(7)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrseBridgeNum.setStatus('mandatory')
# Largest-frame sizes are restricted to a discrete set via a union of
# single-point ranges (one ValueRangeConstraint per permitted value).
mscVrPpSrseLargestFrame = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 20, 1, 6), Unsigned32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(516, 516), ValueRangeConstraint(1470, 1470), ValueRangeConstraint(2052, 2052), ValueRangeConstraint(4399, 4399), ValueRangeConstraint(8130, 8130), ValueRangeConstraint(11407, 11407), ValueRangeConstraint(17749, 17749), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrseLargestFrame.setStatus('mandatory')
mscVrPpSrseSteSpanMode = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 20, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("autoSpan", 1), ("disabled", 2), ("forced", 3))).clone('autoSpan')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrseSteSpanMode.setStatus('mandatory')
mscVrPpSrseAreRdLimit = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 20, 1, 8), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 8)).clone(8)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrseAreRdLimit.setStatus('mandatory')
mscVrPpSrseSteRdLimit = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 20, 1, 9), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 8)).clone(8)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrseSteRdLimit.setStatus('mandatory')
# Srse source-route statistics table (OID subtree ...3.10.21): read-only
# Counter32 columns for frame counts and discard causes.
mscVrPpSrseSrStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 21), )
if mibBuilder.loadTexts: mscVrPpSrseSrStatsTable.setStatus('mandatory')
mscVrPpSrseSrStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 21, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrseIndex"))
if mibBuilder.loadTexts: mscVrPpSrseSrStatsEntry.setStatus('mandatory')
mscVrPpSrseSpecInFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 21, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrseSpecInFrames.setStatus('mandatory')
mscVrPpSrseSpecOutFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 21, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrseSpecOutFrames.setStatus('mandatory')
mscVrPpSrseApeInFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 21, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrseApeInFrames.setStatus('mandatory')
mscVrPpSrseApeOutFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 21, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrseApeOutFrames.setStatus('mandatory')
mscVrPpSrseSteInFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 21, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrseSteInFrames.setStatus('mandatory')
mscVrPpSrseSteOutFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 21, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrseSteOutFrames.setStatus('mandatory')
mscVrPpSrseSegmentMismatchDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 21, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrseSegmentMismatchDiscards.setStatus('mandatory')
mscVrPpSrseDupSegmentDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 21, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrseDupSegmentDiscards.setStatus('mandatory')
mscVrPpSrseHopCountExceededDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 21, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrseHopCountExceededDiscards.setStatus('mandatory')
mscVrPpSrseDupLanIdOrTreeErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 21, 1, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrseDupLanIdOrTreeErrors.setStatus('mandatory')
mscVrPpSrseLanIdMismatches = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 21, 1, 11), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrseLanIdMismatches.setStatus('mandatory')
mscVrPpSrseStaticDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 21, 1, 12), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrseStaticDiscards.setStatus('mandatory')
mscVrPpSrseDynamicDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 10, 21, 1, 13), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrseDynamicDiscards.setStatus('mandatory')
# mscVrPpTbse component subtree (OID ...562.36.2.1.100.3.11) and its
# RowStatus table, which controls creation/deletion of Tbse instances.
mscVrPpTbse = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11))
mscVrPpTbseRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 1), )
if mibBuilder.loadTexts: mscVrPpTbseRowStatusTable.setStatus('mandatory')
mscVrPpTbseRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpTbseIndex"))
if mibBuilder.loadTexts: mscVrPpTbseRowStatusEntry.setStatus('mandatory')
mscVrPpTbseRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpTbseRowStatus.setStatus('mandatory')
mscVrPpTbseComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbseComponentName.setStatus('mandatory')
mscVrPpTbseStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbseStorageType.setStatus('mandatory')
# Index column: NonReplicated, registered without a max-access setting.
mscVrPpTbseIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscVrPpTbseIndex.setStatus('mandatory')
# Tbse provisioning table (OID subtree ...3.11.10) — mostly read-write
# enumerated options — followed by the TB provisioning table (...3.11.11).
mscVrPpTbseProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 10), )
if mibBuilder.loadTexts: mscVrPpTbseProvTable.setStatus('mandatory')
mscVrPpTbseProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 10, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpTbseIndex"))
if mibBuilder.loadTexts: mscVrPpTbseProvEntry.setStatus('mandatory')
mscVrPpTbseTranslateIpx = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 10, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("off", 0), ("ieee8023", 1), ("ethernet", 2), ("snap", 3), ("sap", 4))).clone('off')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpTbseTranslateIpx.setStatus('mandatory')
mscVrPpTbseFragmentIp = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 10, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("off", 0), ("on", 1))).clone('off')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpTbseFragmentIp.setStatus('mandatory')
mscVrPpTbseServiceClass = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 10, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("ordered", 0), ("notOrdered", 1))).clone('notOrdered')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpTbseServiceClass.setStatus('mandatory')
mscVrPpTbseConvertArpMacAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 10, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("off", 0), ("on", 1))).clone('off')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpTbseConvertArpMacAddress.setStatus('mandatory')
mscVrPpTbsePortNum = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 10, 1, 5), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbsePortNum.setStatus('mandatory')
mscVrPpTbseTbProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 11), )
if mibBuilder.loadTexts: mscVrPpTbseTbProvTable.setStatus('mandatory')
mscVrPpTbseTbProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 11, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpTbseIndex"))
if mibBuilder.loadTexts: mscVrPpTbseTbProvEntry.setStatus('mandatory')
mscVrPpTbseSecureOption = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 11, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1))).clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpTbseSecureOption.setStatus('mandatory')
# Tbse spanning-tree provisioning table (OID subtree ...3.11.12) and the
# DI (domain) provisioning table (...3.11.13); all columns read-write.
mscVrPpTbseStpProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 12), )
if mibBuilder.loadTexts: mscVrPpTbseStpProvTable.setStatus('mandatory')
mscVrPpTbseStpProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 12, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpTbseIndex"))
if mibBuilder.loadTexts: mscVrPpTbseStpProvEntry.setStatus('mandatory')
mscVrPpTbseAdminStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 12, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("up", 1), ("down", 2))).clone('up')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpTbseAdminStatus.setStatus('mandatory')
mscVrPpTbsePortStateStpControl = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 12, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("off", 0), ("on", 1))).clone('on')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpTbsePortStateStpControl.setStatus('mandatory')
mscVrPpTbseStpTypeProv = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 12, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2, 3))).clone(namedValues=NamedValues(("dec", 2), ("ieee8021", 3))).clone('ieee8021')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpTbseStpTypeProv.setStatus('mandatory')
mscVrPpTbsePortPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 12, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535)).clone(128)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpTbsePortPriority.setStatus('mandatory')
mscVrPpTbsePathCost = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 12, 1, 5), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535)).clone(10)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpTbsePathCost.setStatus('mandatory')
mscVrPpTbsePathCostMethod = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 12, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("off", 1), ("dec", 2), ("ieee8021", 3))).clone('ieee8021')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpTbsePathCostMethod.setStatus('mandatory')
mscVrPpTbseDIProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 13), )
if mibBuilder.loadTexts: mscVrPpTbseDIProvTable.setStatus('mandatory')
mscVrPpTbseDIProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 13, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpTbseIndex"))
if mibBuilder.loadTexts: mscVrPpTbseDIProvEntry.setStatus('mandatory')
mscVrPpTbseDomainNum = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 13, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967293)).clone(1)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpTbseDomainNum.setStatus('mandatory')
# Note: PreserveDomain declares no default value (no trailing .clone()).
mscVrPpTbsePreserveDomain = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 13, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("off", 0), ("on", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpTbsePreserveDomain.setStatus('mandatory')
# Tbse state table (OID subtree ...3.11.14) — OSI-style admin/operational/
# usage states — and the SNMP operational-status table (...3.11.15).
mscVrPpTbseStateTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 14), )
if mibBuilder.loadTexts: mscVrPpTbseStateTable.setStatus('mandatory')
mscVrPpTbseStateEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 14, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpTbseIndex"))
if mibBuilder.loadTexts: mscVrPpTbseStateEntry.setStatus('mandatory')
mscVrPpTbseAdminState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 14, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("locked", 0), ("unlocked", 1), ("shuttingDown", 2))).clone('unlocked')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbseAdminState.setStatus('mandatory')
mscVrPpTbseOperationalState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 14, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1))).clone('disabled')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbseOperationalState.setStatus('mandatory')
mscVrPpTbseUsageState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 14, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("idle", 0), ("active", 1), ("busy", 2))).clone('idle')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbseUsageState.setStatus('mandatory')
mscVrPpTbseOperStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 15), )
if mibBuilder.loadTexts: mscVrPpTbseOperStatusTable.setStatus('mandatory')
mscVrPpTbseOperStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 15, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpTbseIndex"))
if mibBuilder.loadTexts: mscVrPpTbseOperStatusEntry.setStatus('mandatory')
mscVrPpTbseSnmpOperStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 15, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("testing", 3))).clone('up')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbseSnmpOperStatus.setStatus('mandatory')
# Tbse operational table (OID subtree ...3.11.16): read-only port identity,
# timers, bridging-mode enumerations and discard counters. Mirrors the Srse
# operational table at ...3.10.16.
mscVrPpTbseOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 16), )
if mibBuilder.loadTexts: mscVrPpTbseOperTable.setStatus('mandatory')
mscVrPpTbseOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 16, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpTbseIndex"))
if mibBuilder.loadTexts: mscVrPpTbseOperEntry.setStatus('mandatory')
mscVrPpTbsePortName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 16, 1, 1), AsciiString().subtype(subtypeSpec=ValueSizeConstraint(4, 29))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbsePortName.setStatus('mandatory')
mscVrPpTbseUpTime = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 16, 1, 3), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbseUpTime.setStatus('mandatory')
mscVrPpTbseDownTime = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 16, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbseDownTime.setStatus('mandatory')
mscVrPpTbseBridgingMode = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 16, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("unknown", 1), ("tb", 2), ("sr", 3), ("srt", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbseBridgingMode.setStatus('mandatory')
mscVrPpTbseBridgePortConfig = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 16, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("access", 1), ("backbone", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbseBridgePortConfig.setStatus('mandatory')
mscVrPpTbseBridgePortType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 16, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22))).clone(namedValues=NamedValues(("ethernetBridgePort", 0), ("tokenRingBridgePort", 1), ("fddiBridgePort", 2), ("tokenBusBridgePort", 3), ("fddiEncapsulatingBridgePort", 4), ("vnsBridgePort", 5), ("frameRelayBridgePort", 6), ("pppBridgePort", 7), ("smdsBridgePort", 8), ("vcpBridgePort", 9), ("x25BridgePort", 10), ("srEncapSrBridgePort", 11), ("srEncapTbBridgePort", 12), ("sourceGateSrBridgePort", 13), ("sourceGateTbBridgePort", 14), ("srtbBridgePort", 15), ("tbsrBridgePort", 16), ("clusterSrBridgePort", 17), ("clusterTbBridgePort", 18), ("unknown", 19), ("atmMpeBridgePort", 20), ("snaDlrBridgePort", 21), ("lanEmulationClientEthernet", 22)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbseBridgePortType.setStatus('mandatory')
mscVrPpTbseIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 16, 1, 8), InterfaceIndex().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbseIfIndex.setStatus('mandatory')
# Discard counters; column sub-identifiers jump from 8 to 10 (9 unused).
mscVrPpTbseDelayExceededDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 16, 1, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbseDelayExceededDiscards.setStatus('mandatory')
mscVrPpTbseMtuExceededDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 16, 1, 11), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbseMtuExceededDiscards.setStatus('mandatory')
# Tbse transparent-bridging operational table (OID subtree ...3.11.17):
# read-only max-frame info and forwarding/discard counters.
mscVrPpTbseTbOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 17), )
if mibBuilder.loadTexts: mscVrPpTbseTbOperTable.setStatus('mandatory')
mscVrPpTbseTbOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 17, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpTbseIndex"))
if mibBuilder.loadTexts: mscVrPpTbseTbOperEntry.setStatus('mandatory')
mscVrPpTbseMaxInfo = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 17, 1, 2), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbseMaxInfo.setStatus('mandatory')
mscVrPpTbseBadVerifyDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 17, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbseBadVerifyDiscards.setStatus('mandatory')
mscVrPpTbseUnicastNoMatches = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 17, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbseUnicastNoMatches.setStatus('mandatory')
mscVrPpTbseStaticEntryDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 17, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbseStaticEntryDiscards.setStatus('mandatory')
mscVrPpTbseDynamicEntryDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 17, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbseDynamicEntryDiscards.setStatus('mandatory')
mscVrPpTbseLearningDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 17, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbseLearningDiscards.setStatus('mandatory')
mscVrPpTbseInDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 17, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbseInDiscards.setStatus('mandatory')
mscVrPpTbseInFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 17, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbseInFrames.setStatus('mandatory')
mscVrPpTbseOutFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 17, 1, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbseOutFrames.setStatus('mandatory')
# Tbse spanning-tree operational table (OID subtree ...3.11.18): read-only
# per-port STP state; structurally parallel to the Srse table at ...3.10.18.
mscVrPpTbseStpOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 18), )
if mibBuilder.loadTexts: mscVrPpTbseStpOperTable.setStatus('mandatory')
mscVrPpTbseStpOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 18, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpTbseIndex"))
if mibBuilder.loadTexts: mscVrPpTbseStpOperEntry.setStatus('mandatory')
mscVrPpTbseStpPortState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 18, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("disabled", 1), ("blocking", 2), ("listening", 3), ("learning", 4), ("forwarding", 5), ("broken", 6))).clone('disabled')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbseStpPortState.setStatus('mandatory')
mscVrPpTbseStpTypeOper = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 18, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 2, 3))).clone(namedValues=NamedValues(("none", 0), ("dec", 2), ("ieee8021", 3))).clone('ieee8021')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbseStpTypeOper.setStatus('mandatory')
mscVrPpTbseDesignatedCost = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 18, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbseDesignatedCost.setStatus('mandatory')
mscVrPpTbsePathCostOper = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 18, 1, 5), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbsePathCostOper.setStatus('mandatory')
# BridgeId columns are fixed-length 8-octet values (priority + MAC address).
mscVrPpTbseDesignatedBridge = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 18, 1, 6), BridgeId().subtype(subtypeSpec=ValueSizeConstraint(8, 8)).setFixedLength(8)).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbseDesignatedBridge.setStatus('mandatory')
mscVrPpTbseDesignatedPort = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 18, 1, 7), Hex().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbseDesignatedPort.setStatus('mandatory')
mscVrPpTbseForwardTransitions = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 18, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbseForwardTransitions.setStatus('mandatory')
mscVrPpTbseBlockingDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 18, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbseBlockingDiscards.setStatus('mandatory')
mscVrPpTbseDesignatedRoot = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 18, 1, 10), BridgeId().subtype(subtypeSpec=ValueSizeConstraint(8, 8)).setFixedLength(8)).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbseDesignatedRoot.setStatus('mandatory')
mscVrPpTbseStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 19), )
if mibBuilder.loadTexts: mscVrPpTbseStatsTable.setStatus('mandatory')
mscVrPpTbseStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 19, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpTbseIndex"))
if mibBuilder.loadTexts: mscVrPpTbseStatsEntry.setStatus('mandatory')
mscVrPpTbseBadAbstractDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 19, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbseBadAbstractDiscards.setStatus('mandatory')
mscVrPpTbseTinygramFramesIn = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 19, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbseTinygramFramesIn.setStatus('mandatory')
mscVrPpTbseTinygramFramesOut = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 19, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbseTinygramFramesOut.setStatus('mandatory')
mscVrPpTbseInFilterDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 19, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbseInFilterDiscards.setStatus('mandatory')
mscVrPpTbseOutFilterDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 11, 19, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbseOutFilterDiscards.setStatus('mandatory')
# =========================================================================
# Srsg subtree, OID ...100.3.12 — pysnmp objects generated from the Nortel
# Passport Bridge MIB. Each column is followed by its conditional loadTexts
# status line; keep the pairs together if regenerating or editing.
# =========================================================================
mscVrPpSrsg = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12))
# --- Srsg row-status table (...12.1): row creation/deletion + component id ---
mscVrPpSrsgRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 1), )
if mibBuilder.loadTexts: mscVrPpSrsgRowStatusTable.setStatus('mandatory')
mscVrPpSrsgRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrsgIndex"))
if mibBuilder.loadTexts: mscVrPpSrsgRowStatusEntry.setStatus('mandatory')
mscVrPpSrsgRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrsgRowStatus.setStatus('mandatory')
mscVrPpSrsgComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrsgComponentName.setStatus('mandatory')
mscVrPpSrsgStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrsgStorageType.setStatus('mandatory')
mscVrPpSrsgIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscVrPpSrsgIndex.setStatus('mandatory')
# --- Srsg provisioning table (...12.10): read-write bridge-port options ---
mscVrPpSrsgProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 10), )
if mibBuilder.loadTexts: mscVrPpSrsgProvTable.setStatus('mandatory')
mscVrPpSrsgProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 10, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrsgIndex"))
if mibBuilder.loadTexts: mscVrPpSrsgProvEntry.setStatus('mandatory')
mscVrPpSrsgTranslateIpx = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 10, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("off", 0), ("ieee8023", 1), ("ethernet", 2), ("snap", 3), ("sap", 4))).clone('off')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrsgTranslateIpx.setStatus('mandatory')
mscVrPpSrsgFragmentIp = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 10, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("off", 0), ("on", 1))).clone('off')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrsgFragmentIp.setStatus('mandatory')
mscVrPpSrsgServiceClass = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 10, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("ordered", 0), ("notOrdered", 1))).clone('notOrdered')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrsgServiceClass.setStatus('mandatory')
mscVrPpSrsgConvertArpMacAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 10, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("off", 0), ("on", 1))).clone('off')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrsgConvertArpMacAddress.setStatus('mandatory')
mscVrPpSrsgPortNum = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 10, 1, 5), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrsgPortNum.setStatus('mandatory')
# --- Srsg spanning-tree provisioning table (...12.12) ---
mscVrPpSrsgStpProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 12), )
if mibBuilder.loadTexts: mscVrPpSrsgStpProvTable.setStatus('mandatory')
mscVrPpSrsgStpProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 12, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrsgIndex"))
if mibBuilder.loadTexts: mscVrPpSrsgStpProvEntry.setStatus('mandatory')
mscVrPpSrsgAdminStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 12, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("up", 1), ("down", 2))).clone('up')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrsgAdminStatus.setStatus('mandatory')
mscVrPpSrsgPortStateStpControl = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 12, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("off", 0), ("on", 1))).clone('on')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrsgPortStateStpControl.setStatus('mandatory')
mscVrPpSrsgStpTypeProv = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 12, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2, 3))).clone(namedValues=NamedValues(("dec", 2), ("ieee8021", 3))).clone('ieee8021')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrsgStpTypeProv.setStatus('mandatory')
mscVrPpSrsgPortPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 12, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535)).clone(128)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrsgPortPriority.setStatus('mandatory')
mscVrPpSrsgPathCost = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 12, 1, 5), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535)).clone(10)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrsgPathCost.setStatus('mandatory')
mscVrPpSrsgPathCostMethod = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 12, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("off", 1), ("dec", 2), ("ieee8021", 3))).clone('ieee8021')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrsgPathCostMethod.setStatus('mandatory')
# --- Srsg domain-information provisioning table (...12.13) ---
mscVrPpSrsgDIProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 13), )
if mibBuilder.loadTexts: mscVrPpSrsgDIProvTable.setStatus('mandatory')
mscVrPpSrsgDIProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 13, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrsgIndex"))
if mibBuilder.loadTexts: mscVrPpSrsgDIProvEntry.setStatus('mandatory')
mscVrPpSrsgDomainNum = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 13, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967293)).clone(1)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrsgDomainNum.setStatus('mandatory')
mscVrPpSrsgPreserveDomain = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 13, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("off", 0), ("on", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrsgPreserveDomain.setStatus('mandatory')
# --- Srsg component state table (...12.14): admin/operational/usage state ---
mscVrPpSrsgStateTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 14), )
if mibBuilder.loadTexts: mscVrPpSrsgStateTable.setStatus('mandatory')
mscVrPpSrsgStateEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 14, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrsgIndex"))
if mibBuilder.loadTexts: mscVrPpSrsgStateEntry.setStatus('mandatory')
mscVrPpSrsgAdminState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 14, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("locked", 0), ("unlocked", 1), ("shuttingDown", 2))).clone('unlocked')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrsgAdminState.setStatus('mandatory')
mscVrPpSrsgOperationalState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 14, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1))).clone('disabled')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrsgOperationalState.setStatus('mandatory')
mscVrPpSrsgUsageState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 14, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("idle", 0), ("active", 1), ("busy", 2))).clone('idle')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrsgUsageState.setStatus('mandatory')
# --- Srsg SNMP operational-status table (...12.15) ---
mscVrPpSrsgOperStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 15), )
if mibBuilder.loadTexts: mscVrPpSrsgOperStatusTable.setStatus('mandatory')
mscVrPpSrsgOperStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 15, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrsgIndex"))
if mibBuilder.loadTexts: mscVrPpSrsgOperStatusEntry.setStatus('mandatory')
mscVrPpSrsgSnmpOperStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 15, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("testing", 3))).clone('up')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrsgSnmpOperStatus.setStatus('mandatory')
# --- Srsg operational table (...12.16): port identity and runtime info ---
mscVrPpSrsgOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 16), )
if mibBuilder.loadTexts: mscVrPpSrsgOperTable.setStatus('mandatory')
mscVrPpSrsgOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 16, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrsgIndex"))
if mibBuilder.loadTexts: mscVrPpSrsgOperEntry.setStatus('mandatory')
mscVrPpSrsgPortName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 16, 1, 1), AsciiString().subtype(subtypeSpec=ValueSizeConstraint(4, 29))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrsgPortName.setStatus('mandatory')
mscVrPpSrsgUpTime = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 16, 1, 3), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrsgUpTime.setStatus('mandatory')
mscVrPpSrsgDownTime = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 16, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrsgDownTime.setStatus('mandatory')
mscVrPpSrsgBridgingMode = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 16, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("unknown", 1), ("tb", 2), ("sr", 3), ("srt", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrsgBridgingMode.setStatus('mandatory')
mscVrPpSrsgBridgePortConfig = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 16, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("access", 1), ("backbone", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrsgBridgePortConfig.setStatus('mandatory')
# 23-way enumeration of every bridge-port flavour the MIB defines.
mscVrPpSrsgBridgePortType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 16, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22))).clone(namedValues=NamedValues(("ethernetBridgePort", 0), ("tokenRingBridgePort", 1), ("fddiBridgePort", 2), ("tokenBusBridgePort", 3), ("fddiEncapsulatingBridgePort", 4), ("vnsBridgePort", 5), ("frameRelayBridgePort", 6), ("pppBridgePort", 7), ("smdsBridgePort", 8), ("vcpBridgePort", 9), ("x25BridgePort", 10), ("srEncapSrBridgePort", 11), ("srEncapTbBridgePort", 12), ("sourceGateSrBridgePort", 13), ("sourceGateTbBridgePort", 14), ("srtbBridgePort", 15), ("tbsrBridgePort", 16), ("clusterSrBridgePort", 17), ("clusterTbBridgePort", 18), ("unknown", 19), ("atmMpeBridgePort", 20), ("snaDlrBridgePort", 21), ("lanEmulationClientEthernet", 22)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrsgBridgePortType.setStatus('mandatory')
mscVrPpSrsgIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 16, 1, 8), InterfaceIndex().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrsgIfIndex.setStatus('mandatory')
mscVrPpSrsgDelayExceededDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 16, 1, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrsgDelayExceededDiscards.setStatus('mandatory')
mscVrPpSrsgMtuExceededDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 16, 1, 11), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrsgMtuExceededDiscards.setStatus('mandatory')
# --- Srsg spanning-tree operational table (...12.18) ---
mscVrPpSrsgStpOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 18), )
if mibBuilder.loadTexts: mscVrPpSrsgStpOperTable.setStatus('mandatory')
mscVrPpSrsgStpOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 18, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrsgIndex"))
if mibBuilder.loadTexts: mscVrPpSrsgStpOperEntry.setStatus('mandatory')
mscVrPpSrsgStpPortState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 18, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("disabled", 1), ("blocking", 2), ("listening", 3), ("learning", 4), ("forwarding", 5), ("broken", 6))).clone('disabled')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrsgStpPortState.setStatus('mandatory')
mscVrPpSrsgStpTypeOper = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 18, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 2, 3))).clone(namedValues=NamedValues(("none", 0), ("dec", 2), ("ieee8021", 3))).clone('ieee8021')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrsgStpTypeOper.setStatus('mandatory')
mscVrPpSrsgDesignatedCost = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 18, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrsgDesignatedCost.setStatus('mandatory')
mscVrPpSrsgPathCostOper = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 18, 1, 5), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrsgPathCostOper.setStatus('mandatory')
mscVrPpSrsgDesignatedBridge = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 18, 1, 6), BridgeId().subtype(subtypeSpec=ValueSizeConstraint(8, 8)).setFixedLength(8)).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrsgDesignatedBridge.setStatus('mandatory')
mscVrPpSrsgDesignatedPort = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 18, 1, 7), Hex().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrsgDesignatedPort.setStatus('mandatory')
mscVrPpSrsgForwardTransitions = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 18, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrsgForwardTransitions.setStatus('mandatory')
mscVrPpSrsgBlockingDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 18, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrsgBlockingDiscards.setStatus('mandatory')
mscVrPpSrsgDesignatedRoot = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 18, 1, 10), BridgeId().subtype(subtypeSpec=ValueSizeConstraint(8, 8)).setFixedLength(8)).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrsgDesignatedRoot.setStatus('mandatory')
# --- Srsg statistics table (...12.19): frame/discard counters ---
mscVrPpSrsgStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 19), )
if mibBuilder.loadTexts: mscVrPpSrsgStatsTable.setStatus('mandatory')
mscVrPpSrsgStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 19, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrsgIndex"))
if mibBuilder.loadTexts: mscVrPpSrsgStatsEntry.setStatus('mandatory')
mscVrPpSrsgBadAbstractDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 19, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrsgBadAbstractDiscards.setStatus('mandatory')
mscVrPpSrsgTinygramFramesIn = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 19, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrsgTinygramFramesIn.setStatus('mandatory')
mscVrPpSrsgTinygramFramesOut = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 19, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrsgTinygramFramesOut.setStatus('mandatory')
mscVrPpSrsgInFilterDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 19, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrsgInFilterDiscards.setStatus('mandatory')
mscVrPpSrsgOutFilterDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 19, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrsgOutFilterDiscards.setStatus('mandatory')
# --- Srsg source-route provisioning table (...12.20) ---
mscVrPpSrsgSrProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 20), )
if mibBuilder.loadTexts: mscVrPpSrsgSrProvTable.setStatus('mandatory')
mscVrPpSrsgSrProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 20, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrsgIndex"))
if mibBuilder.loadTexts: mscVrPpSrsgSrProvEntry.setStatus('mandatory')
mscVrPpSrsgHopCount = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 20, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 7)).clone(7)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrsgHopCount.setStatus('mandatory')
mscVrPpSrsgExploreFrameTreatment = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 20, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("encap", 0), ("xlate", 1))).clone('encap')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrsgExploreFrameTreatment.setStatus('mandatory')
mscVrPpSrsgLanId = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 20, 1, 3), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4095))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrsgLanId.setStatus('mandatory')
mscVrPpSrsgInternalLanId = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 20, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4095))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrsgInternalLanId.setStatus('mandatory')
mscVrPpSrsgBridgeNum = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 20, 1, 5), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 15)).clone(7)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrsgBridgeNum.setStatus('mandatory')
# Discrete legal frame sizes expressed as degenerate single-value ranges.
mscVrPpSrsgLargestFrame = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 20, 1, 6), Unsigned32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(516, 516), ValueRangeConstraint(1470, 1470), ValueRangeConstraint(2052, 2052), ValueRangeConstraint(4399, 4399), ValueRangeConstraint(8130, 8130), ValueRangeConstraint(11407, 11407), ValueRangeConstraint(17749, 17749), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrsgLargestFrame.setStatus('mandatory')
mscVrPpSrsgSteSpanMode = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 20, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("autoSpan", 1), ("disabled", 2), ("forced", 3))).clone('autoSpan')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrsgSteSpanMode.setStatus('mandatory')
mscVrPpSrsgAreRdLimit = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 20, 1, 8), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 8)).clone(8)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrsgAreRdLimit.setStatus('mandatory')
mscVrPpSrsgSteRdLimit = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 20, 1, 9), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 8)).clone(8)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrsgSteRdLimit.setStatus('mandatory')
# --- Srsg source-route statistics table (...12.21) ---
mscVrPpSrsgSrStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 21), )
if mibBuilder.loadTexts: mscVrPpSrsgSrStatsTable.setStatus('mandatory')
mscVrPpSrsgSrStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 21, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrsgIndex"))
if mibBuilder.loadTexts: mscVrPpSrsgSrStatsEntry.setStatus('mandatory')
mscVrPpSrsgSpecInFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 21, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrsgSpecInFrames.setStatus('mandatory')
mscVrPpSrsgSpecOutFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 21, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrsgSpecOutFrames.setStatus('mandatory')
mscVrPpSrsgApeInFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 21, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrsgApeInFrames.setStatus('mandatory')
mscVrPpSrsgApeOutFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 21, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrsgApeOutFrames.setStatus('mandatory')
mscVrPpSrsgSteInFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 21, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrsgSteInFrames.setStatus('mandatory')
mscVrPpSrsgSteOutFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 21, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrsgSteOutFrames.setStatus('mandatory')
mscVrPpSrsgSegmentMismatchDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 21, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrsgSegmentMismatchDiscards.setStatus('mandatory')
mscVrPpSrsgDupSegmentDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 21, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrsgDupSegmentDiscards.setStatus('mandatory')
mscVrPpSrsgHopCountExceededDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 21, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrsgHopCountExceededDiscards.setStatus('mandatory')
mscVrPpSrsgDupLanIdOrTreeErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 21, 1, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrsgDupLanIdOrTreeErrors.setStatus('mandatory')
mscVrPpSrsgLanIdMismatches = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 21, 1, 11), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrsgLanIdMismatches.setStatus('mandatory')
mscVrPpSrsgStaticDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 21, 1, 12), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrsgStaticDiscards.setStatus('mandatory')
mscVrPpSrsgDynamicDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 12, 21, 1, 13), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrsgDynamicDiscards.setStatus('mandatory')
mscVrPpTbsg = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13))
mscVrPpTbsgRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 1), )
if mibBuilder.loadTexts: mscVrPpTbsgRowStatusTable.setStatus('mandatory')
mscVrPpTbsgRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpTbsgIndex"))
if mibBuilder.loadTexts: mscVrPpTbsgRowStatusEntry.setStatus('mandatory')
mscVrPpTbsgRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpTbsgRowStatus.setStatus('mandatory')
mscVrPpTbsgComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbsgComponentName.setStatus('mandatory')
mscVrPpTbsgStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbsgStorageType.setStatus('mandatory')
mscVrPpTbsgIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscVrPpTbsgIndex.setStatus('mandatory')
mscVrPpTbsgProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 10), )
if mibBuilder.loadTexts: mscVrPpTbsgProvTable.setStatus('mandatory')
mscVrPpTbsgProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 10, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpTbsgIndex"))
if mibBuilder.loadTexts: mscVrPpTbsgProvEntry.setStatus('mandatory')
mscVrPpTbsgTranslateIpx = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 10, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("off", 0), ("ieee8023", 1), ("ethernet", 2), ("snap", 3), ("sap", 4))).clone('off')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpTbsgTranslateIpx.setStatus('mandatory')
mscVrPpTbsgFragmentIp = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 10, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("off", 0), ("on", 1))).clone('off')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpTbsgFragmentIp.setStatus('mandatory')
mscVrPpTbsgServiceClass = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 10, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("ordered", 0), ("notOrdered", 1))).clone('notOrdered')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpTbsgServiceClass.setStatus('mandatory')
mscVrPpTbsgConvertArpMacAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 10, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("off", 0), ("on", 1))).clone('off')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpTbsgConvertArpMacAddress.setStatus('mandatory')
mscVrPpTbsgPortNum = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 10, 1, 5), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbsgPortNum.setStatus('mandatory')
# --- Tbsg transparent-bridge provisioning table (OID ...100.3.13.11) ---
# Each row is indexed by (mscVrIndex, mscVrPpIndex, mscVrPpTbsgIndex); the same
# three-part index is reused by every Tbsg table below.
mscVrPpTbsgTbProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 11), )
if mibBuilder.loadTexts: mscVrPpTbsgTbProvTable.setStatus('mandatory')
mscVrPpTbsgTbProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 11, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpTbsgIndex"))
if mibBuilder.loadTexts: mscVrPpTbsgTbProvEntry.setStatus('mandatory')
mscVrPpTbsgSecureOption = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 11, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1))).clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpTbsgSecureOption.setStatus('mandatory')
# --- Tbsg spanning-tree provisioning table (OID ...100.3.13.12) ---
mscVrPpTbsgStpProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 12), )
if mibBuilder.loadTexts: mscVrPpTbsgStpProvTable.setStatus('mandatory')
mscVrPpTbsgStpProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 12, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpTbsgIndex"))
if mibBuilder.loadTexts: mscVrPpTbsgStpProvEntry.setStatus('mandatory')
mscVrPpTbsgAdminStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 12, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("up", 1), ("down", 2))).clone('up')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpTbsgAdminStatus.setStatus('mandatory')
mscVrPpTbsgPortStateStpControl = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 12, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("off", 0), ("on", 1))).clone('on')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpTbsgPortStateStpControl.setStatus('mandatory')
mscVrPpTbsgStpTypeProv = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 12, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2, 3))).clone(namedValues=NamedValues(("dec", 2), ("ieee8021", 3))).clone('ieee8021')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpTbsgStpTypeProv.setStatus('mandatory')
mscVrPpTbsgPortPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 12, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535)).clone(128)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpTbsgPortPriority.setStatus('mandatory')
mscVrPpTbsgPathCost = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 12, 1, 5), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535)).clone(10)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpTbsgPathCost.setStatus('mandatory')
mscVrPpTbsgPathCostMethod = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 12, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("off", 1), ("dec", 2), ("ieee8021", 3))).clone('ieee8021')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpTbsgPathCostMethod.setStatus('mandatory')
# --- Tbsg domain-identifier provisioning table (OID ...100.3.13.13) ---
mscVrPpTbsgDIProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 13), )
if mibBuilder.loadTexts: mscVrPpTbsgDIProvTable.setStatus('mandatory')
mscVrPpTbsgDIProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 13, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpTbsgIndex"))
if mibBuilder.loadTexts: mscVrPpTbsgDIProvEntry.setStatus('mandatory')
mscVrPpTbsgDomainNum = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 13, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967293)).clone(1)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpTbsgDomainNum.setStatus('mandatory')
mscVrPpTbsgPreserveDomain = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 13, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("off", 0), ("on", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpTbsgPreserveDomain.setStatus('mandatory')
# --- Tbsg state table (OID ...100.3.13.14): admin/operational/usage state ---
# All columns here are readonly.
mscVrPpTbsgStateTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 14), )
if mibBuilder.loadTexts: mscVrPpTbsgStateTable.setStatus('mandatory')
mscVrPpTbsgStateEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 14, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpTbsgIndex"))
if mibBuilder.loadTexts: mscVrPpTbsgStateEntry.setStatus('mandatory')
mscVrPpTbsgAdminState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 14, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("locked", 0), ("unlocked", 1), ("shuttingDown", 2))).clone('unlocked')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbsgAdminState.setStatus('mandatory')
mscVrPpTbsgOperationalState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 14, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1))).clone('disabled')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbsgOperationalState.setStatus('mandatory')
mscVrPpTbsgUsageState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 14, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("idle", 0), ("active", 1), ("busy", 2))).clone('idle')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbsgUsageState.setStatus('mandatory')
# --- Tbsg SNMP operational-status table (OID ...100.3.13.15) ---
mscVrPpTbsgOperStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 15), )
if mibBuilder.loadTexts: mscVrPpTbsgOperStatusTable.setStatus('mandatory')
mscVrPpTbsgOperStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 15, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpTbsgIndex"))
if mibBuilder.loadTexts: mscVrPpTbsgOperStatusEntry.setStatus('mandatory')
mscVrPpTbsgSnmpOperStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 15, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("testing", 3))).clone('up')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbsgSnmpOperStatus.setStatus('mandatory')
# --- Tbsg general operational table (OID ...100.3.13.16) ---
# NOTE(review): column sub-ids 2 and 9 are absent, matching gaps in the source
# MIB definition — presumably deprecated/removed objects; do not renumber.
mscVrPpTbsgOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 16), )
if mibBuilder.loadTexts: mscVrPpTbsgOperTable.setStatus('mandatory')
mscVrPpTbsgOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 16, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpTbsgIndex"))
if mibBuilder.loadTexts: mscVrPpTbsgOperEntry.setStatus('mandatory')
mscVrPpTbsgPortName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 16, 1, 1), AsciiString().subtype(subtypeSpec=ValueSizeConstraint(4, 29))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbsgPortName.setStatus('mandatory')
mscVrPpTbsgUpTime = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 16, 1, 3), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbsgUpTime.setStatus('mandatory')
mscVrPpTbsgDownTime = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 16, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbsgDownTime.setStatus('mandatory')
mscVrPpTbsgBridgingMode = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 16, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("unknown", 1), ("tb", 2), ("sr", 3), ("srt", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbsgBridgingMode.setStatus('mandatory')
mscVrPpTbsgBridgePortConfig = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 16, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("access", 1), ("backbone", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbsgBridgePortConfig.setStatus('mandatory')
mscVrPpTbsgBridgePortType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 16, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22))).clone(namedValues=NamedValues(("ethernetBridgePort", 0), ("tokenRingBridgePort", 1), ("fddiBridgePort", 2), ("tokenBusBridgePort", 3), ("fddiEncapsulatingBridgePort", 4), ("vnsBridgePort", 5), ("frameRelayBridgePort", 6), ("pppBridgePort", 7), ("smdsBridgePort", 8), ("vcpBridgePort", 9), ("x25BridgePort", 10), ("srEncapSrBridgePort", 11), ("srEncapTbBridgePort", 12), ("sourceGateSrBridgePort", 13), ("sourceGateTbBridgePort", 14), ("srtbBridgePort", 15), ("tbsrBridgePort", 16), ("clusterSrBridgePort", 17), ("clusterTbBridgePort", 18), ("unknown", 19), ("atmMpeBridgePort", 20), ("snaDlrBridgePort", 21), ("lanEmulationClientEthernet", 22)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbsgBridgePortType.setStatus('mandatory')
mscVrPpTbsgIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 16, 1, 8), InterfaceIndex().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbsgIfIndex.setStatus('mandatory')
mscVrPpTbsgDelayExceededDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 16, 1, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbsgDelayExceededDiscards.setStatus('mandatory')
mscVrPpTbsgMtuExceededDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 16, 1, 11), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbsgMtuExceededDiscards.setStatus('mandatory')
# --- Tbsg transparent-bridge operational counters (OID ...100.3.13.17) ---
mscVrPpTbsgTbOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 17), )
if mibBuilder.loadTexts: mscVrPpTbsgTbOperTable.setStatus('mandatory')
mscVrPpTbsgTbOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 17, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpTbsgIndex"))
if mibBuilder.loadTexts: mscVrPpTbsgTbOperEntry.setStatus('mandatory')
mscVrPpTbsgMaxInfo = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 17, 1, 2), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbsgMaxInfo.setStatus('mandatory')
mscVrPpTbsgBadVerifyDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 17, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbsgBadVerifyDiscards.setStatus('mandatory')
mscVrPpTbsgUnicastNoMatches = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 17, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbsgUnicastNoMatches.setStatus('mandatory')
mscVrPpTbsgStaticEntryDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 17, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbsgStaticEntryDiscards.setStatus('mandatory')
mscVrPpTbsgDynamicEntryDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 17, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbsgDynamicEntryDiscards.setStatus('mandatory')
mscVrPpTbsgLearningDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 17, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbsgLearningDiscards.setStatus('mandatory')
mscVrPpTbsgInDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 17, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbsgInDiscards.setStatus('mandatory')
mscVrPpTbsgInFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 17, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbsgInFrames.setStatus('mandatory')
mscVrPpTbsgOutFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 17, 1, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbsgOutFrames.setStatus('mandatory')
# --- Tbsg spanning-tree operational table (OID ...100.3.13.18) ---
mscVrPpTbsgStpOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 18), )
if mibBuilder.loadTexts: mscVrPpTbsgStpOperTable.setStatus('mandatory')
mscVrPpTbsgStpOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 18, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpTbsgIndex"))
if mibBuilder.loadTexts: mscVrPpTbsgStpOperEntry.setStatus('mandatory')
mscVrPpTbsgStpPortState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 18, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("disabled", 1), ("blocking", 2), ("listening", 3), ("learning", 4), ("forwarding", 5), ("broken", 6))).clone('disabled')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbsgStpPortState.setStatus('mandatory')
mscVrPpTbsgStpTypeOper = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 18, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 2, 3))).clone(namedValues=NamedValues(("none", 0), ("dec", 2), ("ieee8021", 3))).clone('ieee8021')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbsgStpTypeOper.setStatus('mandatory')
mscVrPpTbsgDesignatedCost = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 18, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbsgDesignatedCost.setStatus('mandatory')
mscVrPpTbsgPathCostOper = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 18, 1, 5), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbsgPathCostOper.setStatus('mandatory')
mscVrPpTbsgDesignatedBridge = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 18, 1, 6), BridgeId().subtype(subtypeSpec=ValueSizeConstraint(8, 8)).setFixedLength(8)).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbsgDesignatedBridge.setStatus('mandatory')
mscVrPpTbsgDesignatedPort = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 18, 1, 7), Hex().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbsgDesignatedPort.setStatus('mandatory')
mscVrPpTbsgForwardTransitions = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 18, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbsgForwardTransitions.setStatus('mandatory')
mscVrPpTbsgBlockingDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 18, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbsgBlockingDiscards.setStatus('mandatory')
mscVrPpTbsgDesignatedRoot = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 18, 1, 10), BridgeId().subtype(subtypeSpec=ValueSizeConstraint(8, 8)).setFixedLength(8)).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbsgDesignatedRoot.setStatus('mandatory')
# --- Tbsg statistics table (OID ...100.3.13.19) ---
mscVrPpTbsgStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 19), )
if mibBuilder.loadTexts: mscVrPpTbsgStatsTable.setStatus('mandatory')
mscVrPpTbsgStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 19, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpTbsgIndex"))
if mibBuilder.loadTexts: mscVrPpTbsgStatsEntry.setStatus('mandatory')
mscVrPpTbsgBadAbstractDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 19, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbsgBadAbstractDiscards.setStatus('mandatory')
mscVrPpTbsgTinygramFramesIn = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 19, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbsgTinygramFramesIn.setStatus('mandatory')
mscVrPpTbsgTinygramFramesOut = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 19, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbsgTinygramFramesOut.setStatus('mandatory')
mscVrPpTbsgInFilterDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 19, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbsgInFilterDiscards.setStatus('mandatory')
mscVrPpTbsgOutFilterDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 13, 19, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpTbsgOutFilterDiscards.setStatus('mandatory')
# --- Srcl component subtree (OID ...100.3.14) ---
# Mirrors the Tbsg table layout above, but indexed by mscVrPpSrclIndex.
mscVrPpSrcl = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14))
# Row-status/administration table (OID ...100.3.14.1).
mscVrPpSrclRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 1), )
if mibBuilder.loadTexts: mscVrPpSrclRowStatusTable.setStatus('mandatory')
mscVrPpSrclRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrclIndex"))
if mibBuilder.loadTexts: mscVrPpSrclRowStatusEntry.setStatus('mandatory')
mscVrPpSrclRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrclRowStatus.setStatus('mandatory')
mscVrPpSrclComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrclComponentName.setStatus('mandatory')
mscVrPpSrclStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrclStorageType.setStatus('mandatory')
mscVrPpSrclIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscVrPpSrclIndex.setStatus('mandatory')
# Srcl provisioning table (OID ...100.3.14.10).
mscVrPpSrclProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 10), )
if mibBuilder.loadTexts: mscVrPpSrclProvTable.setStatus('mandatory')
mscVrPpSrclProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 10, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrclIndex"))
if mibBuilder.loadTexts: mscVrPpSrclProvEntry.setStatus('mandatory')
mscVrPpSrclTranslateIpx = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 10, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("off", 0), ("ieee8023", 1), ("ethernet", 2), ("snap", 3), ("sap", 4))).clone('off')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrclTranslateIpx.setStatus('mandatory')
mscVrPpSrclFragmentIp = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 10, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("off", 0), ("on", 1))).clone('off')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrclFragmentIp.setStatus('mandatory')
mscVrPpSrclServiceClass = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 10, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("ordered", 0), ("notOrdered", 1))).clone('notOrdered')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrclServiceClass.setStatus('mandatory')
mscVrPpSrclConvertArpMacAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 10, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("off", 0), ("on", 1))).clone('off')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrclConvertArpMacAddress.setStatus('mandatory')
mscVrPpSrclPortNum = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 10, 1, 5), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrclPortNum.setStatus('mandatory')
# Srcl spanning-tree provisioning table (OID ...100.3.14.12).
# NOTE(review): sub-id 11 is skipped (no TbProv table under Srcl), matching
# the source MIB — do not "fix" the numbering.
mscVrPpSrclStpProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 12), )
if mibBuilder.loadTexts: mscVrPpSrclStpProvTable.setStatus('mandatory')
mscVrPpSrclStpProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 12, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrclIndex"))
if mibBuilder.loadTexts: mscVrPpSrclStpProvEntry.setStatus('mandatory')
mscVrPpSrclAdminStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 12, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("up", 1), ("down", 2))).clone('up')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrclAdminStatus.setStatus('mandatory')
mscVrPpSrclPortStateStpControl = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 12, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("off", 0), ("on", 1))).clone('on')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrclPortStateStpControl.setStatus('mandatory')
mscVrPpSrclStpTypeProv = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 12, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2, 3))).clone(namedValues=NamedValues(("dec", 2), ("ieee8021", 3))).clone('ieee8021')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrclStpTypeProv.setStatus('mandatory')
mscVrPpSrclPortPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 12, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535)).clone(128)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrclPortPriority.setStatus('mandatory')
mscVrPpSrclPathCost = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 12, 1, 5), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535)).clone(10)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrclPathCost.setStatus('mandatory')
mscVrPpSrclPathCostMethod = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 12, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("off", 1), ("dec", 2), ("ieee8021", 3))).clone('ieee8021')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrclPathCostMethod.setStatus('mandatory')
# --- Srcl domain-identifier provisioning table (OID ...100.3.14.13) ---
mscVrPpSrclDIProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 13), )
if mibBuilder.loadTexts: mscVrPpSrclDIProvTable.setStatus('mandatory')
mscVrPpSrclDIProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 13, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrclIndex"))
if mibBuilder.loadTexts: mscVrPpSrclDIProvEntry.setStatus('mandatory')
mscVrPpSrclDomainNum = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 13, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967293)).clone(1)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrclDomainNum.setStatus('mandatory')
mscVrPpSrclPreserveDomain = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 13, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("off", 0), ("on", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrclPreserveDomain.setStatus('mandatory')
# --- Srcl state table (OID ...100.3.14.14): admin/operational/usage state ---
mscVrPpSrclStateTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 14), )
if mibBuilder.loadTexts: mscVrPpSrclStateTable.setStatus('mandatory')
mscVrPpSrclStateEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 14, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrclIndex"))
if mibBuilder.loadTexts: mscVrPpSrclStateEntry.setStatus('mandatory')
mscVrPpSrclAdminState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 14, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("locked", 0), ("unlocked", 1), ("shuttingDown", 2))).clone('unlocked')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrclAdminState.setStatus('mandatory')
mscVrPpSrclOperationalState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 14, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1))).clone('disabled')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrclOperationalState.setStatus('mandatory')
mscVrPpSrclUsageState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 14, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("idle", 0), ("active", 1), ("busy", 2))).clone('idle')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrclUsageState.setStatus('mandatory')
# --- Srcl SNMP operational-status table (OID ...100.3.14.15) ---
mscVrPpSrclOperStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 15), )
if mibBuilder.loadTexts: mscVrPpSrclOperStatusTable.setStatus('mandatory')
mscVrPpSrclOperStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 15, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrclIndex"))
if mibBuilder.loadTexts: mscVrPpSrclOperStatusEntry.setStatus('mandatory')
mscVrPpSrclSnmpOperStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 15, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("testing", 3))).clone('up')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrclSnmpOperStatus.setStatus('mandatory')
# --- Srcl general operational table (OID ...100.3.14.16) ---
# NOTE(review): column sub-ids 2 and 9 are absent, matching the Tbsg oper
# table above — presumably removed objects in the source MIB.
mscVrPpSrclOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 16), )
if mibBuilder.loadTexts: mscVrPpSrclOperTable.setStatus('mandatory')
mscVrPpSrclOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 16, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrclIndex"))
if mibBuilder.loadTexts: mscVrPpSrclOperEntry.setStatus('mandatory')
mscVrPpSrclPortName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 16, 1, 1), AsciiString().subtype(subtypeSpec=ValueSizeConstraint(4, 29))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrclPortName.setStatus('mandatory')
mscVrPpSrclUpTime = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 16, 1, 3), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrclUpTime.setStatus('mandatory')
mscVrPpSrclDownTime = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 16, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrclDownTime.setStatus('mandatory')
mscVrPpSrclBridgingMode = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 16, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("unknown", 1), ("tb", 2), ("sr", 3), ("srt", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrclBridgingMode.setStatus('mandatory')
mscVrPpSrclBridgePortConfig = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 16, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("access", 1), ("backbone", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrclBridgePortConfig.setStatus('mandatory')
mscVrPpSrclBridgePortType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 16, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22))).clone(namedValues=NamedValues(("ethernetBridgePort", 0), ("tokenRingBridgePort", 1), ("fddiBridgePort", 2), ("tokenBusBridgePort", 3), ("fddiEncapsulatingBridgePort", 4), ("vnsBridgePort", 5), ("frameRelayBridgePort", 6), ("pppBridgePort", 7), ("smdsBridgePort", 8), ("vcpBridgePort", 9), ("x25BridgePort", 10), ("srEncapSrBridgePort", 11), ("srEncapTbBridgePort", 12), ("sourceGateSrBridgePort", 13), ("sourceGateTbBridgePort", 14), ("srtbBridgePort", 15), ("tbsrBridgePort", 16), ("clusterSrBridgePort", 17), ("clusterTbBridgePort", 18), ("unknown", 19), ("atmMpeBridgePort", 20), ("snaDlrBridgePort", 21), ("lanEmulationClientEthernet", 22)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrclBridgePortType.setStatus('mandatory')
mscVrPpSrclIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 16, 1, 8), InterfaceIndex().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrclIfIndex.setStatus('mandatory')
mscVrPpSrclDelayExceededDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 16, 1, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrclDelayExceededDiscards.setStatus('mandatory')
mscVrPpSrclMtuExceededDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 16, 1, 11), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrclMtuExceededDiscards.setStatus('mandatory')
# --- mscVrPpSrclStpOper table (OID ...100.3.14.18) ---------------------------
# Per-port spanning-tree operational state for the Srcl component.  Rows are
# indexed by virtual router, protocol port, and Srcl indices (index objects
# live in the VirtualRouterMIB / BridgeMIB modules named below).  All columns
# are read-only operational values.
mscVrPpSrclStpOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 18), )
if mibBuilder.loadTexts: mscVrPpSrclStpOperTable.setStatus('mandatory')
mscVrPpSrclStpOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 18, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrclIndex"))
if mibBuilder.loadTexts: mscVrPpSrclStpOperEntry.setStatus('mandatory')
# STP port state enum mirrors the standard bridge-MIB state set
# (disabled/blocking/listening/learning/forwarding/broken); default 'disabled'.
mscVrPpSrclStpPortState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 18, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("disabled", 1), ("blocking", 2), ("listening", 3), ("learning", 4), ("forwarding", 5), ("broken", 6))).clone('disabled')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrclStpPortState.setStatus('mandatory')
mscVrPpSrclStpTypeOper = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 18, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 2, 3))).clone(namedValues=NamedValues(("none", 0), ("dec", 2), ("ieee8021", 3))).clone('ieee8021')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrclStpTypeOper.setStatus('mandatory')
mscVrPpSrclDesignatedCost = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 18, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrclDesignatedCost.setStatus('mandatory')
mscVrPpSrclPathCostOper = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 18, 1, 5), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrclPathCostOper.setStatus('mandatory')
# BridgeId columns are fixed-length 8-octet values (bridge priority + MAC).
mscVrPpSrclDesignatedBridge = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 18, 1, 6), BridgeId().subtype(subtypeSpec=ValueSizeConstraint(8, 8)).setFixedLength(8)).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrclDesignatedBridge.setStatus('mandatory')
mscVrPpSrclDesignatedPort = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 18, 1, 7), Hex().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrclDesignatedPort.setStatus('mandatory')
mscVrPpSrclForwardTransitions = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 18, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrclForwardTransitions.setStatus('mandatory')
mscVrPpSrclBlockingDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 18, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrclBlockingDiscards.setStatus('mandatory')
mscVrPpSrclDesignatedRoot = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 18, 1, 10), BridgeId().subtype(subtypeSpec=ValueSizeConstraint(8, 8)).setFixedLength(8)).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrclDesignatedRoot.setStatus('mandatory')
# --- mscVrPpSrclStats table (OID ...100.3.14.19) -----------------------------
# Read-only frame/discard counters per (virtual router, protocol port, Srcl)
# tuple; same three-part index as the other Srcl tables in this file.
mscVrPpSrclStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 19), )
if mibBuilder.loadTexts: mscVrPpSrclStatsTable.setStatus('mandatory')
mscVrPpSrclStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 19, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrclIndex"))
if mibBuilder.loadTexts: mscVrPpSrclStatsEntry.setStatus('mandatory')
mscVrPpSrclBadAbstractDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 19, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrclBadAbstractDiscards.setStatus('mandatory')
mscVrPpSrclTinygramFramesIn = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 19, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrclTinygramFramesIn.setStatus('mandatory')
mscVrPpSrclTinygramFramesOut = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 19, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrclTinygramFramesOut.setStatus('mandatory')
mscVrPpSrclInFilterDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 19, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrclInFilterDiscards.setStatus('mandatory')
mscVrPpSrclOutFilterDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 19, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrclOutFilterDiscards.setStatus('mandatory')
# --- mscVrPpSrclSrProv table (OID ...100.3.14.20) ----------------------------
# Source-routing provisioning parameters; columns are read-write and most
# carry MIB DEFVALs encoded via .clone(...).
mscVrPpSrclSrProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 20), )
if mibBuilder.loadTexts: mscVrPpSrclSrProvTable.setStatus('mandatory')
mscVrPpSrclSrProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 20, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrclIndex"))
if mibBuilder.loadTexts: mscVrPpSrclSrProvEntry.setStatus('mandatory')
# Hop count limited to 1..7, default 7 (classic source-route RIF limit).
mscVrPpSrclHopCount = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 20, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 7)).clone(7)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrclHopCount.setStatus('mandatory')
mscVrPpSrclExploreFrameTreatment = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 20, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("encap", 0), ("xlate", 1))).clone('encap')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrclExploreFrameTreatment.setStatus('mandatory')
# LAN identifiers are 12-bit values (0..4095).
mscVrPpSrclLanId = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 20, 1, 3), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4095))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrclLanId.setStatus('mandatory')
mscVrPpSrclInternalLanId = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 20, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4095))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrclInternalLanId.setStatus('mandatory')
mscVrPpSrclBridgeNum = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 20, 1, 5), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 15)).clone(7)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrclBridgeNum.setStatus('mandatory')
# largestFrame is restricted to a discrete set of standard token-ring/FDDI
# frame sizes, expressed as a union of single-value ranges.
mscVrPpSrclLargestFrame = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 20, 1, 6), Unsigned32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(516, 516), ValueRangeConstraint(1470, 1470), ValueRangeConstraint(2052, 2052), ValueRangeConstraint(4399, 4399), ValueRangeConstraint(8130, 8130), ValueRangeConstraint(11407, 11407), ValueRangeConstraint(17749, 17749), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrclLargestFrame.setStatus('mandatory')
mscVrPpSrclSteSpanMode = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 20, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("autoSpan", 1), ("disabled", 2), ("forced", 3))).clone('autoSpan')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrclSteSpanMode.setStatus('mandatory')
mscVrPpSrclAreRdLimit = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 20, 1, 8), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 8)).clone(8)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrclAreRdLimit.setStatus('mandatory')
mscVrPpSrclSteRdLimit = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 20, 1, 9), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 8)).clone(8)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrclSteRdLimit.setStatus('mandatory')
# --- mscVrPpSrclSrStats table (OID ...100.3.14.21) ---------------------------
# Source-routing traffic statistics: specifically-routed (Spec), all-paths
# explorer (Ape) and spanning-tree explorer (Ste) frame counters plus
# discard/error counters.  All columns are read-only Counter32.
mscVrPpSrclSrStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 21), )
if mibBuilder.loadTexts: mscVrPpSrclSrStatsTable.setStatus('mandatory')
mscVrPpSrclSrStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 21, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrclIndex"))
if mibBuilder.loadTexts: mscVrPpSrclSrStatsEntry.setStatus('mandatory')
mscVrPpSrclSpecInFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 21, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrclSpecInFrames.setStatus('mandatory')
mscVrPpSrclSpecOutFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 21, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrclSpecOutFrames.setStatus('mandatory')
mscVrPpSrclApeInFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 21, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrclApeInFrames.setStatus('mandatory')
mscVrPpSrclApeOutFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 21, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrclApeOutFrames.setStatus('mandatory')
mscVrPpSrclSteInFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 21, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrclSteInFrames.setStatus('mandatory')
mscVrPpSrclSteOutFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 21, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrclSteOutFrames.setStatus('mandatory')
mscVrPpSrclSegmentMismatchDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 21, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrclSegmentMismatchDiscards.setStatus('mandatory')
mscVrPpSrclDupSegmentDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 21, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrclDupSegmentDiscards.setStatus('mandatory')
mscVrPpSrclHopCountExceededDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 21, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrclHopCountExceededDiscards.setStatus('mandatory')
mscVrPpSrclDupLanIdOrTreeErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 21, 1, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrclDupLanIdOrTreeErrors.setStatus('mandatory')
mscVrPpSrclLanIdMismatches = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 21, 1, 11), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrclLanIdMismatches.setStatus('mandatory')
mscVrPpSrclStaticDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 21, 1, 12), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrclStaticDiscards.setStatus('mandatory')
mscVrPpSrclDynamicDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 21, 1, 13), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrclDynamicDiscards.setStatus('mandatory')
# --- mscVrPpSrclNs sub-component (OID ...100.3.14.2) -------------------------
# "Ns" sub-component under Srcl: a RowStatus table for row lifecycle control
# plus a provisioning table holding incoming/outgoing filter names.  Note the
# extra fourth index (mscVrPpSrclNsIndex, NonReplicated) on these rows.
mscVrPpSrclNs = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 2))
mscVrPpSrclNsRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 2, 1), )
if mibBuilder.loadTexts: mscVrPpSrclNsRowStatusTable.setStatus('mandatory')
mscVrPpSrclNsRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 2, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrclIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrclNsIndex"))
if mibBuilder.loadTexts: mscVrPpSrclNsRowStatusEntry.setStatus('mandatory')
mscVrPpSrclNsRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 2, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrclNsRowStatus.setStatus('mandatory')
mscVrPpSrclNsComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 2, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrclNsComponentName.setStatus('mandatory')
mscVrPpSrclNsStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 2, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrPpSrclNsStorageType.setStatus('mandatory')
mscVrPpSrclNsIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 2, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscVrPpSrclNsIndex.setStatus('mandatory')
mscVrPpSrclNsProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 2, 10), )
if mibBuilder.loadTexts: mscVrPpSrclNsProvTable.setStatus('mandatory')
mscVrPpSrclNsProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 2, 10, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrPpIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrclIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrPpSrclNsIndex"))
if mibBuilder.loadTexts: mscVrPpSrclNsProvEntry.setStatus('mandatory')
# Filter names are ASCII strings up to 32 chars; empty string permitted.
mscVrPpSrclNsIncomingFilter = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 2, 10, 1, 2), AsciiString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrclNsIncomingFilter.setStatus('mandatory')
mscVrPpSrclNsOutgoingFilter = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 3, 14, 2, 10, 1, 3), AsciiString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrPpSrclNsOutgoingFilter.setStatus('mandatory')
# --- mscVrBr component (OID ...100.5) ----------------------------------------
# Top-level Bridge component under the virtual router: row lifecycle
# (RowStatus), admin control, state, SNMP operational status, and an
# operational table exposing bridge address / port count / bridge type.
# Rows are indexed by (mscVrIndex, mscVrBrIndex).
mscVrBr = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5))
mscVrBrRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 1), )
if mibBuilder.loadTexts: mscVrBrRowStatusTable.setStatus('mandatory')
mscVrBrRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrIndex"))
if mibBuilder.loadTexts: mscVrBrRowStatusEntry.setStatus('mandatory')
mscVrBrRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrBrRowStatus.setStatus('mandatory')
mscVrBrComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrComponentName.setStatus('mandatory')
mscVrBrStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrStorageType.setStatus('mandatory')
mscVrBrIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscVrBrIndex.setStatus('mandatory')
mscVrBrAdminControlTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 10), )
if mibBuilder.loadTexts: mscVrBrAdminControlTable.setStatus('mandatory')
mscVrBrAdminControlEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 10, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrIndex"))
if mibBuilder.loadTexts: mscVrBrAdminControlEntry.setStatus('mandatory')
# The only writable control: up(1)/down(2), default up.
mscVrBrAdminStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 10, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("up", 1), ("down", 2))).clone('up')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrBrAdminStatus.setStatus('mandatory')
mscVrBrStateTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 12), )
if mibBuilder.loadTexts: mscVrBrStateTable.setStatus('mandatory')
mscVrBrStateEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 12, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrIndex"))
if mibBuilder.loadTexts: mscVrBrStateEntry.setStatus('mandatory')
# OSI-style state triple: administrative / operational / usage state.
mscVrBrAdminState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 12, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("locked", 0), ("unlocked", 1), ("shuttingDown", 2))).clone('unlocked')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrAdminState.setStatus('mandatory')
mscVrBrOperationalState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 12, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1))).clone('disabled')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrOperationalState.setStatus('mandatory')
mscVrBrUsageState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 12, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("idle", 0), ("active", 1), ("busy", 2))).clone('idle')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrUsageState.setStatus('mandatory')
mscVrBrOperStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 13), )
if mibBuilder.loadTexts: mscVrBrOperStatusTable.setStatus('mandatory')
mscVrBrOperStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 13, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrIndex"))
if mibBuilder.loadTexts: mscVrBrOperStatusEntry.setStatus('mandatory')
mscVrBrSnmpOperStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 13, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("testing", 3))).clone('up')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrSnmpOperStatus.setStatus('mandatory')
mscVrBrOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 14), )
if mibBuilder.loadTexts: mscVrBrOperTable.setStatus('mandatory')
mscVrBrOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 14, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrIndex"))
if mibBuilder.loadTexts: mscVrBrOperEntry.setStatus('mandatory')
# Bridge MAC address is a fixed 6-octet value.
mscVrBrBridgeAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 14, 1, 1), MacAddress().subtype(subtypeSpec=ValueSizeConstraint(6, 6)).setFixedLength(6)).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrBridgeAddress.setStatus('mandatory')
mscVrBrNumPorts = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 14, 1, 2), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrNumPorts.setStatus('mandatory')
mscVrBrType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 14, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("unknown", 1), ("transparentOnly", 2), ("sourceRouteOnly", 3), ("srt", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrType.setStatus('mandatory')
# --- mscVrBrPte sub-component (OID ...100.5.2) -------------------------------
# Bridge port-table entry (Pte): rows are identified by a five-part index
# (virtual router, bridge, domain number, port name, bridging mode).  All
# columns here are read-only operational data; even RowStatus is readonly,
# so these rows cannot be created via SNMP.
mscVrBrPte = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 2))
mscVrBrPteRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 2, 1), )
if mibBuilder.loadTexts: mscVrBrPteRowStatusTable.setStatus('mandatory')
mscVrBrPteRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 2, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrPteDomainNumIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrPtePortNameIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrPteModeIndex"))
if mibBuilder.loadTexts: mscVrBrPteRowStatusEntry.setStatus('mandatory')
mscVrBrPteRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 2, 1, 1, 1), RowStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrPteRowStatus.setStatus('mandatory')
mscVrBrPteComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 2, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrPteComponentName.setStatus('mandatory')
mscVrBrPteStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 2, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrPteStorageType.setStatus('mandatory')
mscVrBrPteDomainNumIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 2, 1, 1, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 268435455)))
if mibBuilder.loadTexts: mscVrBrPteDomainNumIndex.setStatus('mandatory')
mscVrBrPtePortNameIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 2, 1, 1, 11), AsciiStringIndex().subtype(subtypeSpec=ValueSizeConstraint(0, 29)))
if mibBuilder.loadTexts: mscVrBrPtePortNameIndex.setStatus('mandatory')
# Bridging mode of the port: transparent (tb), source-route (sr), or both (srt).
mscVrBrPteModeIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 2, 1, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2, 3, 4))).clone(namedValues=NamedValues(("tb", 2), ("sr", 3), ("srt", 4))))
if mibBuilder.loadTexts: mscVrBrPteModeIndex.setStatus('mandatory')
mscVrBrPteOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 2, 10), )
if mibBuilder.loadTexts: mscVrBrPteOperTable.setStatus('mandatory')
mscVrBrPteOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 2, 10, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrPteDomainNumIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrPtePortNameIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrPteModeIndex"))
if mibBuilder.loadTexts: mscVrBrPteOperEntry.setStatus('mandatory')
# Enumerates every MAC/encapsulation type the port may report (0..22).
mscVrBrPteMacType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 2, 10, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22))).clone(namedValues=NamedValues(("ethernet", 0), ("tokenRing", 1), ("fddiBridgePort", 2), ("tokenBus", 3), ("fddiEncapsulating", 4), ("vns", 5), ("frameRelay", 6), ("ppp", 7), ("smds", 8), ("vcp", 9), ("x25", 10), ("srEncapSr", 11), ("srEncapTb", 12), ("sourceGateSr", 13), ("sourceGateTb", 14), ("srtb", 15), ("tbsr", 16), ("clusterSr", 17), ("clusterTb", 18), ("unknown", 19), ("atmMpe", 20), ("snaDlr", 21), ("lecEthernet", 22)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrPteMacType.setStatus('mandatory')
mscVrBrPteStpState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 2, 10, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("disabled", 1), ("blocking", 2), ("listening", 3), ("learning", 4), ("forwarding", 5), ("broken", 6))).clone('disabled')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrPteStpState.setStatus('mandatory')
mscVrBrPteStpType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 2, 10, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("unknown", 1), ("dec", 2), ("ieee8021", 3))).clone('ieee8021')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrPteStpType.setStatus('mandatory')
mscVrBrPteFilterPoints = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 2, 10, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("in", 1), ("out", 2), ("inOut", 3), ("none", 4))).clone('none')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrPteFilterPoints.setStatus('mandatory')
mscVrBrPtePortPointsTo = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 2, 10, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("none", 1), ("rootBridge", 2), ("designatedBridge", 3))).clone('none')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrPtePortPointsTo.setStatus('mandatory')
mscVrBrPteSpOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 2, 11), )
if mibBuilder.loadTexts: mscVrBrPteSpOperTable.setStatus('mandatory')
mscVrBrPteSpOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 2, 11, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrPteDomainNumIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrPtePortNameIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrPteModeIndex"))
if mibBuilder.loadTexts: mscVrBrPteSpOperEntry.setStatus('mandatory')
# LAN/bridge ids allow 65535 as an out-of-band value alongside the normal
# range, encoded as a union with a single-value range.
mscVrBrPteLanId = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 2, 11, 1, 1), Unsigned32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 4095), ValueRangeConstraint(65535, 65535), ))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrPteLanId.setStatus('mandatory')
mscVrBrPteInternalLanId = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 2, 11, 1, 2), Unsigned32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 4095), ValueRangeConstraint(65535, 65535), ))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrPteInternalLanId.setStatus('mandatory')
mscVrBrPteBridgeNum = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 2, 11, 1, 3), Unsigned32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 15), ValueRangeConstraint(65535, 65535), ))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrPteBridgeNum.setStatus('mandatory')
# --- mscVrBrNs sub-component (OID ...100.5.3) --------------------------------
# Bridge-level "Ns" sub-component: RowStatus table plus a provisioning table
# naming the first/last filters applied at the bridge level.  Structure
# parallels mscVrPpSrclNs above, but indexed by (mscVrIndex, mscVrBrIndex,
# mscVrBrNsIndex).
mscVrBrNs = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 3))
mscVrBrNsRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 3, 1), )
if mibBuilder.loadTexts: mscVrBrNsRowStatusTable.setStatus('mandatory')
mscVrBrNsRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 3, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrNsIndex"))
if mibBuilder.loadTexts: mscVrBrNsRowStatusEntry.setStatus('mandatory')
mscVrBrNsRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 3, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrBrNsRowStatus.setStatus('mandatory')
mscVrBrNsComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 3, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrNsComponentName.setStatus('mandatory')
mscVrBrNsStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 3, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrNsStorageType.setStatus('mandatory')
mscVrBrNsIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 3, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscVrBrNsIndex.setStatus('mandatory')
mscVrBrNsProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 3, 10), )
if mibBuilder.loadTexts: mscVrBrNsProvTable.setStatus('mandatory')
mscVrBrNsProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 3, 10, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrNsIndex"))
if mibBuilder.loadTexts: mscVrBrNsProvEntry.setStatus('mandatory')
mscVrBrNsFirstFilter = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 3, 10, 1, 1), AsciiString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrBrNsFirstFilter.setStatus('mandatory')
mscVrBrNsLastFilter = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 3, 10, 1, 2), AsciiString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrBrNsLastFilter.setStatus('mandatory')
# --- mscVrBrNsAte sub-component (OID ...100.5.3.2) ---------------------------
# Address-table entries (Ate) under mscVrBrNs: each numbered entry (0..255)
# defines a MAC address match rule — two address/mask pairs, a direction,
# and the name of the filter to apply.  Writable via RowStatus rows.
mscVrBrNsAte = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 3, 2))
mscVrBrNsAteRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 3, 2, 1), )
if mibBuilder.loadTexts: mscVrBrNsAteRowStatusTable.setStatus('mandatory')
mscVrBrNsAteRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 3, 2, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrNsIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrNsAteEntryNumberIndex"))
if mibBuilder.loadTexts: mscVrBrNsAteRowStatusEntry.setStatus('mandatory')
mscVrBrNsAteRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 3, 2, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrBrNsAteRowStatus.setStatus('mandatory')
mscVrBrNsAteComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 3, 2, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrNsAteComponentName.setStatus('mandatory')
mscVrBrNsAteStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 3, 2, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrNsAteStorageType.setStatus('mandatory')
mscVrBrNsAteEntryNumberIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 3, 2, 1, 1, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255)))
if mibBuilder.loadTexts: mscVrBrNsAteEntryNumberIndex.setStatus('mandatory')
mscVrBrNsAteProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 3, 2, 10), )
if mibBuilder.loadTexts: mscVrBrNsAteProvTable.setStatus('mandatory')
mscVrBrNsAteProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 3, 2, 10, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrNsIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrNsAteEntryNumberIndex"))
if mibBuilder.loadTexts: mscVrBrNsAteProvEntry.setStatus('mandatory')
mscVrBrNsAteDomainNum = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 3, 2, 10, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967293))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrBrNsAteDomainNum.setStatus('mandatory')
# MAC address defaults: address all-zero, mask all-ones (exact match).
mscVrBrNsAteFirstMacAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 3, 2, 10, 1, 2), MacAddress().subtype(subtypeSpec=ValueSizeConstraint(6, 6)).setFixedLength(6).clone(hexValue="000000000000")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrBrNsAteFirstMacAddress.setStatus('mandatory')
mscVrBrNsAteFirstMacAddressMask = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 3, 2, 10, 1, 3), MacAddress().subtype(subtypeSpec=ValueSizeConstraint(6, 6)).setFixedLength(6).clone(hexValue="ffffffffffff")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrBrNsAteFirstMacAddressMask.setStatus('mandatory')
mscVrBrNsAteSecondMacAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 3, 2, 10, 1, 4), MacAddress().subtype(subtypeSpec=ValueSizeConstraint(6, 6)).setFixedLength(6).clone(hexValue="000000000000")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrBrNsAteSecondMacAddress.setStatus('mandatory')
mscVrBrNsAteSecondMacAddressMask = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 3, 2, 10, 1, 5), MacAddress().subtype(subtypeSpec=ValueSizeConstraint(6, 6)).setFixedLength(6).clone(hexValue="ffffffffffff")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrBrNsAteSecondMacAddressMask.setStatus('mandatory')
mscVrBrNsAteDirection = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 3, 2, 10, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("to", 0), ("from", 1), ("toFrom", 2))).clone('toFrom')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrBrNsAteDirection.setStatus('mandatory')
mscVrBrNsAteFilterName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 3, 2, 10, 1, 7), AsciiString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrBrNsAteFilterName.setStatus('mandatory')
# ======================================================================
# mscVrBrTb subtree (OID ...100.5.4).  "Tb" presumably stands for
# "transparent bridge" -- TODO confirm against the original Nortel MIB.
# Rows are indexed by (mscVrIndex, mscVrBrIndex, mscVrBrTbIndex).
# ======================================================================
mscVrBrTb = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4))
# --- RowStatus table (.1): component lifecycle / identification columns ---
mscVrBrTbRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 1), )
if mibBuilder.loadTexts: mscVrBrTbRowStatusTable.setStatus('mandatory')
mscVrBrTbRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrTbIndex"))
if mibBuilder.loadTexts: mscVrBrTbRowStatusEntry.setStatus('mandatory')
mscVrBrTbRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrBrTbRowStatus.setStatus('mandatory')
mscVrBrTbComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrTbComponentName.setStatus('mandatory')
mscVrBrTbStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrTbStorageType.setStatus('mandatory')
# Index column: NonReplicated (single-instance component index).
mscVrBrTbIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscVrBrTbIndex.setStatus('mandatory')
# --- Provisioning table (.10): writable configuration of the Tb component ---
mscVrBrTbProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 10), )
if mibBuilder.loadTexts: mscVrBrTbProvTable.setStatus('mandatory')
mscVrBrTbProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 10, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrTbIndex"))
if mibBuilder.loadTexts: mscVrBrTbProvEntry.setStatus('mandatory')
# Maximum number of forwarding-table entries; 0..65535, default 500.
mscVrBrTbFwdTableNumEntries = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 10, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535)).clone(500)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrBrTbFwdTableNumEntries.setStatus('mandatory')
# Aging time for learned entries; 10..1000000, default 300 (units not stated here).
mscVrBrTbAgingTime = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 10, 1, 2), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(10, 1000000)).clone(300)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrBrTbAgingTime.setStatus('mandatory')
# --- Statistics table (.11): read-only counters/gauges ---
mscVrBrTbStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 11), )
if mibBuilder.loadTexts: mscVrBrTbStatsTable.setStatus('mandatory')
mscVrBrTbStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 11, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrTbIndex"))
if mibBuilder.loadTexts: mscVrBrTbStatsEntry.setStatus('mandatory')
mscVrBrTbLearnedEntryDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 11, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrTbLearnedEntryDiscards.setStatus('mandatory')
mscVrBrTbTotalForwardingTableEntries = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 11, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrTbTotalForwardingTableEntries.setStatus('mandatory')
mscVrBrTbNumFtEntriesFree = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 11, 1, 3), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrTbNumFtEntriesFree.setStatus('mandatory')
mscVrBrTbNumFtEntriesDenied = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 11, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrTbNumFtEntriesDenied.setStatus('mandatory')
# ======================================================================
# mscVrBrTbStp subtree (OID ...100.5.4.2): spanning-tree protocol
# parameters and operational state for the Tb component.  The column set
# (priority, max-age, hello-time, forward-delay, designated root, etc.)
# mirrors the classic dot1dStp objects of the standard BRIDGE-MIB.
# Rows add mscVrBrTbStpIndex to the Tb row index.
# ======================================================================
mscVrBrTbStp = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 2))
# --- RowStatus table (.1) ---
mscVrBrTbStpRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 2, 1), )
if mibBuilder.loadTexts: mscVrBrTbStpRowStatusTable.setStatus('mandatory')
mscVrBrTbStpRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 2, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrTbIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrTbStpIndex"))
if mibBuilder.loadTexts: mscVrBrTbStpRowStatusEntry.setStatus('mandatory')
mscVrBrTbStpRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 2, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrBrTbStpRowStatus.setStatus('mandatory')
mscVrBrTbStpComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 2, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrTbStpComponentName.setStatus('mandatory')
mscVrBrTbStpStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 2, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrTbStpStorageType.setStatus('mandatory')
mscVrBrTbStpIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 2, 1, 1, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 268435455)))
if mibBuilder.loadTexts: mscVrBrTbStpIndex.setStatus('mandatory')
# --- Provisioning table (.10): writable STP configuration ---
mscVrBrTbStpProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 2, 10), )
if mibBuilder.loadTexts: mscVrBrTbStpProvTable.setStatus('mandatory')
mscVrBrTbStpProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 2, 10, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrTbIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrTbStpIndex"))
if mibBuilder.loadTexts: mscVrBrTbStpProvEntry.setStatus('mandatory')
# Mode: access(1) or backbone(2); default access.
mscVrBrTbStpStpMode = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 2, 10, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("access", 1), ("backbone", 2))).clone('access')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrBrTbStpStpMode.setStatus('mandatory')
# STP variant: both(1), dec(2) or ieee8021d(3); default ieee8021d.
mscVrBrTbStpProtocolSpec = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 2, 10, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("both", 1), ("dec", 2), ("ieee8021d", 3))).clone('ieee8021d')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrBrTbStpProtocolSpec.setStatus('mandatory')
# Bridge priority 0..65535, default 32768 (the 802.1D default).
mscVrBrTbStpPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 2, 10, 1, 3), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535)).clone(32768)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrBrTbStpPriority.setStatus('mandatory')
# Timer values below are scaled integers (e.g. default 2000 for max-age,
# 200 for hello) -- presumably hundredths of a second; TODO confirm.
mscVrBrTbStpBridgeMaxAge = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 2, 10, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(600, 4000)).clone(2000)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrBrTbStpBridgeMaxAge.setStatus('mandatory')
mscVrBrTbStpBridgeHelloTime = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 2, 10, 1, 5), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(100, 1000)).clone(200)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrBrTbStpBridgeHelloTime.setStatus('mandatory')
mscVrBrTbStpBridgeForwardDelay = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 2, 10, 1, 6), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(400, 3000)).clone(1500)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrBrTbStpBridgeForwardDelay.setStatus('mandatory')
# --- Operational table (.11): read-only STP runtime state ---
mscVrBrTbStpOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 2, 11), )
if mibBuilder.loadTexts: mscVrBrTbStpOperTable.setStatus('mandatory')
mscVrBrTbStpOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 2, 11, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrTbIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrTbStpIndex"))
if mibBuilder.loadTexts: mscVrBrTbStpOperEntry.setStatus('mandatory')
mscVrBrTbStpBridgeId = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 2, 11, 1, 1), BridgeId().subtype(subtypeSpec=ValueSizeConstraint(8, 8)).setFixedLength(8)).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrTbStpBridgeId.setStatus('mandatory')
mscVrBrTbStpRootPortName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 2, 11, 1, 2), AsciiString().subtype(subtypeSpec=ValueSizeConstraint(4, 29))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrTbStpRootPortName.setStatus('mandatory')
mscVrBrTbStpTimeSinceTopologyChange = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 2, 11, 1, 3), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrTbStpTimeSinceTopologyChange.setStatus('mandatory')
mscVrBrTbStpTopologyChangeDetect = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 2, 11, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("false", 0), ("true", 1))).clone('false')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrTbStpTopologyChangeDetect.setStatus('mandatory')
mscVrBrTbStpTopologyChanges = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 2, 11, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrTbStpTopologyChanges.setStatus('mandatory')
mscVrBrTbStpDesignatedRoot = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 2, 11, 1, 6), BridgeId().subtype(subtypeSpec=ValueSizeConstraint(8, 8)).setFixedLength(8)).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrTbStpDesignatedRoot.setStatus('mandatory')
# NOTE: column .7 is absent in this subtree; numbering jumps to .8.
mscVrBrTbStpRootCost = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 2, 11, 1, 8), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrTbStpRootCost.setStatus('mandatory')
mscVrBrTbStpMaxAge = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 2, 11, 1, 9), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(600, 4000)).clone(2000)).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrTbStpMaxAge.setStatus('mandatory')
mscVrBrTbStpAgingTimeOper = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 2, 11, 1, 10), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(10, 1000000)).clone(300)).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrTbStpAgingTimeOper.setStatus('mandatory')
mscVrBrTbStpHelloTime = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 2, 11, 1, 11), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(100, 1000)).clone(200)).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrTbStpHelloTime.setStatus('mandatory')
# Hold time is fixed: range (100, 100) permits only the value 100.
mscVrBrTbStpHoldTime = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 2, 11, 1, 12), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(100, 100)).clone(100)).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrTbStpHoldTime.setStatus('mandatory')
mscVrBrTbStpFwdDelay = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 2, 11, 1, 13), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(400, 3000)).clone(1500)).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrTbStpFwdDelay.setStatus('mandatory')
# ======================================================================
# mscVrBrTbSte subtree (OID ...100.5.4.3).  "Ste" presumably abbreviates
# "static (forwarding-table) entry" -- the only Status value is
# permanent(3); TODO confirm against the original Nortel MIB.
# Rows are keyed by a 6-octet MAC address plus a receive-port name.
# ======================================================================
mscVrBrTbSte = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 3))
# --- RowStatus table (.1) ---
mscVrBrTbSteRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 3, 1), )
if mibBuilder.loadTexts: mscVrBrTbSteRowStatusTable.setStatus('mandatory')
mscVrBrTbSteRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 3, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrTbIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrTbSteAddressIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrTbSteReceivePortIndex"))
if mibBuilder.loadTexts: mscVrBrTbSteRowStatusEntry.setStatus('mandatory')
mscVrBrTbSteRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 3, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrBrTbSteRowStatus.setStatus('mandatory')
mscVrBrTbSteComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 3, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrTbSteComponentName.setStatus('mandatory')
mscVrBrTbSteStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 3, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrTbSteStorageType.setStatus('mandatory')
# Index: destination MAC address, fixed 6 octets (dashed-hex rendering).
mscVrBrTbSteAddressIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 3, 1, 1, 10), DashedHexString().subtype(subtypeSpec=ValueSizeConstraint(6, 6)).setFixedLength(6))
if mibBuilder.loadTexts: mscVrBrTbSteAddressIndex.setStatus('mandatory')
# Index: name of the port the frame was received on (1..29 ASCII chars).
mscVrBrTbSteReceivePortIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 3, 1, 1, 11), AsciiStringIndex().subtype(subtypeSpec=ValueSizeConstraint(1, 29)))
if mibBuilder.loadTexts: mscVrBrTbSteReceivePortIndex.setStatus('mandatory')
# --- Provisioning table (.10) ---
mscVrBrTbSteProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 3, 10), )
if mibBuilder.loadTexts: mscVrBrTbSteProvTable.setStatus('mandatory')
mscVrBrTbSteProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 3, 10, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrTbIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrTbSteAddressIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrTbSteReceivePortIndex"))
if mibBuilder.loadTexts: mscVrBrTbSteProvEntry.setStatus('mandatory')
# Status: only permanent(3) is allowed (single-value constraint).
mscVrBrTbSteStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 3, 10, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(3))).clone(namedValues=NamedValues(("permanent", 3))).clone('permanent')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrBrTbSteStatus.setStatus('mandatory')
# --- Atgt list table (.271): additional per-entry values keyed by
# --- mscVrBrTbSteAtgtValue; its RowStatus is write-only (create/delete only).
mscVrBrTbSteAtgtTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 3, 271), )
if mibBuilder.loadTexts: mscVrBrTbSteAtgtTable.setStatus('mandatory')
mscVrBrTbSteAtgtEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 3, 271, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrTbIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrTbSteAddressIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrTbSteReceivePortIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrTbSteAtgtValue"))
if mibBuilder.loadTexts: mscVrBrTbSteAtgtEntry.setStatus('mandatory')
mscVrBrTbSteAtgtValue = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 3, 271, 1, 1), AsciiString().subtype(subtypeSpec=ValueSizeConstraint(4, 29))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrBrTbSteAtgtValue.setStatus('mandatory')
mscVrBrTbSteAtgtRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 3, 271, 1, 2), RowStatus()).setMaxAccess("writeonly")
if mibBuilder.loadTexts: mscVrBrTbSteAtgtRowStatus.setStatus('mandatory')
# ======================================================================
# mscVrBrTbFte subtree (OID ...100.5.4.4).  "Fte" presumably abbreviates
# "forwarding table entry" (the only Status value is learned(3)) --
# TODO confirm against the original Nortel MIB.  Entirely read-only:
# rows reflect dynamically learned addresses, keyed by MAC address and
# a domain number.
# ======================================================================
mscVrBrTbFte = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 4))
# --- RowStatus table (.1): read-only here (rows are agent-created) ---
mscVrBrTbFteRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 4, 1), )
if mibBuilder.loadTexts: mscVrBrTbFteRowStatusTable.setStatus('mandatory')
mscVrBrTbFteRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 4, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrTbIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrTbFteAddressIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrTbFteDomainNumIndex"))
if mibBuilder.loadTexts: mscVrBrTbFteRowStatusEntry.setStatus('mandatory')
mscVrBrTbFteRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 4, 1, 1, 1), RowStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrTbFteRowStatus.setStatus('mandatory')
mscVrBrTbFteComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 4, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrTbFteComponentName.setStatus('mandatory')
mscVrBrTbFteStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 4, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrTbFteStorageType.setStatus('mandatory')
# Index: learned MAC address, fixed 6 octets.
mscVrBrTbFteAddressIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 4, 1, 1, 10), DashedHexString().subtype(subtypeSpec=ValueSizeConstraint(6, 6)).setFixedLength(6))
if mibBuilder.loadTexts: mscVrBrTbFteAddressIndex.setStatus('mandatory')
# Index: domain number, 1..268435455.
mscVrBrTbFteDomainNumIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 4, 1, 1, 11), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 268435455)))
if mibBuilder.loadTexts: mscVrBrTbFteDomainNumIndex.setStatus('mandatory')
# --- Operational table (.10): learned-entry details ---
mscVrBrTbFteOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 4, 10), )
if mibBuilder.loadTexts: mscVrBrTbFteOperTable.setStatus('mandatory')
mscVrBrTbFteOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 4, 10, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrTbIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrTbFteAddressIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrTbFteDomainNumIndex"))
if mibBuilder.loadTexts: mscVrBrTbFteOperEntry.setStatus('mandatory')
mscVrBrTbFtePort = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 4, 10, 1, 3), AsciiString().subtype(subtypeSpec=ValueSizeConstraint(4, 29))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrTbFtePort.setStatus('mandatory')
mscVrBrTbFteAgeOfEntry = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 4, 10, 1, 4), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 1000000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrTbFteAgeOfEntry.setStatus('mandatory')
mscVrBrTbFtePeerAddressInfo = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 4, 10, 1, 5), BridgeId().subtype(subtypeSpec=ValueSizeConstraint(8, 8)).setFixedLength(8)).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrTbFtePeerAddressInfo.setStatus('mandatory')
# Status: only learned(3) is representable.
mscVrBrTbFteStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 4, 10, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(3))).clone(namedValues=NamedValues(("learned", 3))).clone('learned')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrTbFteStatus.setStatus('mandatory')
# ======================================================================
# mscVrBrTbNcFte subtree (OID ...100.5.4.5).  Structurally identical to
# the mscVrBrTbFte subtree above (same column layout, same read-only
# access, same learned(3)-only Status); "Nc" prefix meaning is not
# stated in this chunk -- TODO confirm against the original Nortel MIB.
# ======================================================================
mscVrBrTbNcFte = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 5))
# --- RowStatus table (.1): read-only (agent-created rows) ---
mscVrBrTbNcFteRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 5, 1), )
if mibBuilder.loadTexts: mscVrBrTbNcFteRowStatusTable.setStatus('mandatory')
mscVrBrTbNcFteRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 5, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrTbIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrTbNcFteAddressIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrTbNcFteDomainNumIndex"))
if mibBuilder.loadTexts: mscVrBrTbNcFteRowStatusEntry.setStatus('mandatory')
mscVrBrTbNcFteRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 5, 1, 1, 1), RowStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrTbNcFteRowStatus.setStatus('mandatory')
mscVrBrTbNcFteComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 5, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrTbNcFteComponentName.setStatus('mandatory')
mscVrBrTbNcFteStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 5, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrTbNcFteStorageType.setStatus('mandatory')
# Index: MAC address (6 octets) and domain number (1..268435455).
mscVrBrTbNcFteAddressIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 5, 1, 1, 10), DashedHexString().subtype(subtypeSpec=ValueSizeConstraint(6, 6)).setFixedLength(6))
if mibBuilder.loadTexts: mscVrBrTbNcFteAddressIndex.setStatus('mandatory')
mscVrBrTbNcFteDomainNumIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 5, 1, 1, 11), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 268435455)))
if mibBuilder.loadTexts: mscVrBrTbNcFteDomainNumIndex.setStatus('mandatory')
# --- Operational table (.10) ---
mscVrBrTbNcFteOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 5, 10), )
if mibBuilder.loadTexts: mscVrBrTbNcFteOperTable.setStatus('mandatory')
mscVrBrTbNcFteOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 5, 10, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrTbIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrTbNcFteAddressIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrTbNcFteDomainNumIndex"))
if mibBuilder.loadTexts: mscVrBrTbNcFteOperEntry.setStatus('mandatory')
mscVrBrTbNcFtePort = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 5, 10, 1, 3), AsciiString().subtype(subtypeSpec=ValueSizeConstraint(4, 29))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrTbNcFtePort.setStatus('mandatory')
mscVrBrTbNcFteAgeOfEntry = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 5, 10, 1, 4), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 1000000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrTbNcFteAgeOfEntry.setStatus('mandatory')
mscVrBrTbNcFtePeerAddressInfo = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 5, 10, 1, 5), BridgeId().subtype(subtypeSpec=ValueSizeConstraint(8, 8)).setFixedLength(8)).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrTbNcFtePeerAddressInfo.setStatus('mandatory')
# Status: only learned(3) is representable.
mscVrBrTbNcFteStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 4, 5, 10, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(3))).clone(namedValues=NamedValues(("learned", 3))).clone('learned')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrTbNcFteStatus.setStatus('mandatory')
# ======================================================================
# mscVrBrSrb subtree (OID ...100.5.5).  "Srb" presumably stands for
# "source route bridge" (the LanId table and LfMode columns fit that
# reading) -- TODO confirm against the original Nortel MIB.
# Rows are indexed by (mscVrIndex, mscVrBrIndex, mscVrBrSrbIndex).
# ======================================================================
mscVrBrSrb = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 5))
# --- RowStatus table (.1) ---
mscVrBrSrbRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 5, 1), )
if mibBuilder.loadTexts: mscVrBrSrbRowStatusTable.setStatus('mandatory')
mscVrBrSrbRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 5, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrSrbIndex"))
if mibBuilder.loadTexts: mscVrBrSrbRowStatusEntry.setStatus('mandatory')
mscVrBrSrbRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 5, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrBrSrbRowStatus.setStatus('mandatory')
mscVrBrSrbComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 5, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrSrbComponentName.setStatus('mandatory')
mscVrBrSrbStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 5, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrSrbStorageType.setStatus('mandatory')
mscVrBrSrbIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 5, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscVrBrSrbIndex.setStatus('mandatory')
# --- Provisioning table (.10) ---
mscVrBrSrbProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 5, 10), )
if mibBuilder.loadTexts: mscVrBrSrbProvTable.setStatus('mandatory')
mscVrBrSrbProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 5, 10, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrSrbIndex"))
if mibBuilder.loadTexts: mscVrBrSrbProvEntry.setStatus('mandatory')
# Size of the LAN-ID table; 500..65535, default 500.
mscVrBrSrbLanIdTableNumEntries = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 5, 10, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(500, 65535)).clone(500)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrBrSrbLanIdTableNumEntries.setStatus('mandatory')
# Aging time; 10..1000000, default 300 (units not stated here).
mscVrBrSrbAgingTime = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 5, 10, 1, 2), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(10, 1000000)).clone(300)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrBrSrbAgingTime.setStatus('mandatory')
# Largest-frame mode: mode3(1) or mode6(2); read-only, default mode3.
mscVrBrSrbBridgeLfMode = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 5, 10, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("mode3", 1), ("mode6", 2))).clone('mode3')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrSrbBridgeLfMode.setStatus('mandatory')
# --- Statistics table (.11): LAN-ID table usage counters ---
mscVrBrSrbStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 5, 11), )
if mibBuilder.loadTexts: mscVrBrSrbStatsTable.setStatus('mandatory')
mscVrBrSrbStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 5, 11, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrSrbIndex"))
if mibBuilder.loadTexts: mscVrBrSrbStatsEntry.setStatus('mandatory')
mscVrBrSrbTotalLanIdTableEntries = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 5, 11, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrSrbTotalLanIdTableEntries.setStatus('mandatory')
mscVrBrSrbNumLanIdtEntriesFree = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 5, 11, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrSrbNumLanIdtEntriesFree.setStatus('mandatory')
mscVrBrSrbNumLanIdtEntriesDenied = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 5, 11, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrSrbNumLanIdtEntriesDenied.setStatus('mandatory')
mscVrBrSrbStp = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 5, 2))
mscVrBrSrbStpRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 5, 2, 1), )
if mibBuilder.loadTexts: mscVrBrSrbStpRowStatusTable.setStatus('mandatory')
mscVrBrSrbStpRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 5, 2, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrSrbIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrSrbStpIndex"))
if mibBuilder.loadTexts: mscVrBrSrbStpRowStatusEntry.setStatus('mandatory')
mscVrBrSrbStpRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 5, 2, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrBrSrbStpRowStatus.setStatus('mandatory')
mscVrBrSrbStpComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 5, 2, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrSrbStpComponentName.setStatus('mandatory')
mscVrBrSrbStpStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 5, 2, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrSrbStpStorageType.setStatus('mandatory')
mscVrBrSrbStpIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 5, 2, 1, 1, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 268435455)))
if mibBuilder.loadTexts: mscVrBrSrbStpIndex.setStatus('mandatory')
mscVrBrSrbStpProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 5, 2, 10), )
if mibBuilder.loadTexts: mscVrBrSrbStpProvTable.setStatus('mandatory')
mscVrBrSrbStpProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 5, 2, 10, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrSrbIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrSrbStpIndex"))
if mibBuilder.loadTexts: mscVrBrSrbStpProvEntry.setStatus('mandatory')
mscVrBrSrbStpStpMode = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 5, 2, 10, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("access", 1), ("backbone", 2))).clone('access')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrBrSrbStpStpMode.setStatus('mandatory')
mscVrBrSrbStpProtocolSpec = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 5, 2, 10, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("both", 1), ("dec", 2), ("ieee8021d", 3))).clone('ieee8021d')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrBrSrbStpProtocolSpec.setStatus('mandatory')
mscVrBrSrbStpPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 5, 2, 10, 1, 3), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535)).clone(32768)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrBrSrbStpPriority.setStatus('mandatory')
mscVrBrSrbStpBridgeMaxAge = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 5, 2, 10, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(600, 4000)).clone(2000)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrBrSrbStpBridgeMaxAge.setStatus('mandatory')
mscVrBrSrbStpBridgeHelloTime = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 5, 2, 10, 1, 5), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(100, 1000)).clone(200)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrBrSrbStpBridgeHelloTime.setStatus('mandatory')
mscVrBrSrbStpBridgeForwardDelay = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 5, 2, 10, 1, 6), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(400, 3000)).clone(1500)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscVrBrSrbStpBridgeForwardDelay.setStatus('mandatory')
# --- mscVrBrSrbStpOperTable ---
# Read-only operational state of the SRB spanning-tree instance, indexed by
# virtual router / bridge / SRB / STP instance. All columns are read-only
# snapshots of the running protocol.
mscVrBrSrbStpOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 5, 2, 11), )
if mibBuilder.loadTexts: mscVrBrSrbStpOperTable.setStatus('mandatory')
mscVrBrSrbStpOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 5, 2, 11, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrSrbIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrSrbStpIndex"))
if mibBuilder.loadTexts: mscVrBrSrbStpOperEntry.setStatus('mandatory')
# 8-octet bridge identifier (priority + MAC), fixed length per BridgeId TC.
mscVrBrSrbStpBridgeId = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 5, 2, 11, 1, 1), BridgeId().subtype(subtypeSpec=ValueSizeConstraint(8, 8)).setFixedLength(8)).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrSrbStpBridgeId.setStatus('mandatory')
mscVrBrSrbStpRootPortName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 5, 2, 11, 1, 2), AsciiString().subtype(subtypeSpec=ValueSizeConstraint(4, 29))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrSrbStpRootPortName.setStatus('mandatory')
mscVrBrSrbStpTimeSinceTopologyChange = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 5, 2, 11, 1, 3), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrSrbStpTimeSinceTopologyChange.setStatus('mandatory')
mscVrBrSrbStpTopologyChangeDetect = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 5, 2, 11, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("false", 0), ("true", 1))).clone('false')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrSrbStpTopologyChangeDetect.setStatus('mandatory')
mscVrBrSrbStpTopologyChanges = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 5, 2, 11, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrSrbStpTopologyChanges.setStatus('mandatory')
mscVrBrSrbStpDesignatedRoot = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 5, 2, 11, 1, 6), BridgeId().subtype(subtypeSpec=ValueSizeConstraint(8, 8)).setFixedLength(8)).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrSrbStpDesignatedRoot.setStatus('mandatory')
# NOTE: column 7 is absent from the OID sequence here; this mirrors the
# source MIB (generated code) and is not an error to repair by hand.
mscVrBrSrbStpRootCost = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 5, 2, 11, 1, 8), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrSrbStpRootCost.setStatus('mandatory')
mscVrBrSrbStpMaxAge = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 5, 2, 11, 1, 9), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(600, 4000)).clone(2000)).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrSrbStpMaxAge.setStatus('mandatory')
mscVrBrSrbStpAgingTimeOper = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 5, 2, 11, 1, 10), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(10, 1000000)).clone(300)).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrSrbStpAgingTimeOper.setStatus('mandatory')
mscVrBrSrbStpHelloTime = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 5, 2, 11, 1, 11), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(100, 1000)).clone(200)).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrSrbStpHelloTime.setStatus('mandatory')
# HoldTime is constrained to the single value 100 in the MIB.
mscVrBrSrbStpHoldTime = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 5, 2, 11, 1, 12), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(100, 100)).clone(100)).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrSrbStpHoldTime.setStatus('mandatory')
mscVrBrSrbStpFwdDelay = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 5, 2, 11, 1, 13), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(400, 3000)).clone(1500)).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrSrbStpFwdDelay.setStatus('mandatory')
# --- mscVrBrSrbLte: SRB LAN-ID table entry subtree ---
# Row-status table keyed by (VR, bridge, SRB, LAN id, domain number).
# RowStatus/StorageType columns here are read-only, so rows are presumably
# created by the device rather than via SNMP -- confirm against the MIB text.
mscVrBrSrbLte = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 5, 3))
mscVrBrSrbLteRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 5, 3, 1), )
if mibBuilder.loadTexts: mscVrBrSrbLteRowStatusTable.setStatus('mandatory')
mscVrBrSrbLteRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 5, 3, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrSrbIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrSrbLteLanIdIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrSrbLteDomainNumIndex"))
if mibBuilder.loadTexts: mscVrBrSrbLteRowStatusEntry.setStatus('mandatory')
mscVrBrSrbLteRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 5, 3, 1, 1, 1), RowStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrSrbLteRowStatus.setStatus('mandatory')
mscVrBrSrbLteComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 5, 3, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrSrbLteComponentName.setStatus('mandatory')
mscVrBrSrbLteStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 5, 3, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrSrbLteStorageType.setStatus('mandatory')
# LAN id index: 0..4095 plus the special value 65535 (union constraint).
mscVrBrSrbLteLanIdIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 5, 3, 1, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 4095), ValueRangeConstraint(65535, 65535), )))
if mibBuilder.loadTexts: mscVrBrSrbLteLanIdIndex.setStatus('mandatory')
mscVrBrSrbLteDomainNumIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 5, 3, 1, 1, 11), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 268435455)))
if mibBuilder.loadTexts: mscVrBrSrbLteDomainNumIndex.setStatus('mandatory')
# --- mscVrBrSrbLteOperTable ---
# Read-only operational data for each SRB LAN-id table entry: the port it
# was learned on, entry age, the peer's MAC address, and whether the entry
# is static or dynamically learned.
mscVrBrSrbLteOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 5, 3, 10), )
if mibBuilder.loadTexts: mscVrBrSrbLteOperTable.setStatus('mandatory')
mscVrBrSrbLteOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 5, 3, 10, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-VirtualRouterMIB", "mscVrIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrSrbIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrSrbLteLanIdIndex"), (0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscVrBrSrbLteDomainNumIndex"))
if mibBuilder.loadTexts: mscVrBrSrbLteOperEntry.setStatus('mandatory')
mscVrBrSrbLtePortName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 5, 3, 10, 1, 1), AsciiString().subtype(subtypeSpec=ValueSizeConstraint(4, 29))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrSrbLtePortName.setStatus('mandatory')
mscVrBrSrbLteAgeOfEntry = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 5, 3, 10, 1, 2), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrSrbLteAgeOfEntry.setStatus('mandatory')
# 6-octet MAC address, rendered in dashed-hex notation per the MIB TC.
mscVrBrSrbLtePeerMACAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 5, 3, 10, 1, 3), DashedHexString().subtype(subtypeSpec=ValueSizeConstraint(6, 6)).setFixedLength(6)).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrSrbLtePeerMACAddress.setStatus('mandatory')
mscVrBrSrbLteTypeOfEntry = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 100, 5, 5, 3, 10, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("static", 0), ("dynamic", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscVrBrSrbLteTypeOfEntry.setStatus('mandatory')
# --- mscCB component (OID ...562.36.2.1.103) ---
# Standard Passport component scaffolding: row-status table, admin control,
# ifEntry mapping, media/protocol-port link, operational attributes, OSI-style
# state (admin/operational/usage), and SNMP operational status.
mscCB = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 103))
mscCBRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 103, 1), )
if mibBuilder.loadTexts: mscCBRowStatusTable.setStatus('mandatory')
mscCBRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 103, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscCBIndex"))
if mibBuilder.loadTexts: mscCBRowStatusEntry.setStatus('mandatory')
# RowStatus is read-write here: component instances can be created/deleted
# via SNMP (contrast with the read-only RowStatus in the Lte tables above).
mscCBRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 103, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscCBRowStatus.setStatus('mandatory')
mscCBComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 103, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscCBComponentName.setStatus('mandatory')
mscCBStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 103, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscCBStorageType.setStatus('mandatory')
mscCBIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 103, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscCBIndex.setStatus('mandatory')
mscCBAdminControlTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 103, 10), )
if mibBuilder.loadTexts: mscCBAdminControlTable.setStatus('mandatory')
mscCBAdminControlEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 103, 10, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscCBIndex"))
if mibBuilder.loadTexts: mscCBAdminControlEntry.setStatus('mandatory')
# Marked 'obsolete' in the MIB; superseded objects are kept for decoding.
mscCBSnmpAdminStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 103, 10, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("testing", 3))).clone('up')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscCBSnmpAdminStatus.setStatus('obsolete')
mscCBIfEntryTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 103, 11), )
if mibBuilder.loadTexts: mscCBIfEntryTable.setStatus('mandatory')
mscCBIfEntryEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 103, 11, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscCBIndex"))
if mibBuilder.loadTexts: mscCBIfEntryEntry.setStatus('mandatory')
mscCBIfAdminStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 103, 11, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("testing", 3))).clone('up')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscCBIfAdminStatus.setStatus('mandatory')
# Maps this component to its ifTable ifIndex.
mscCBIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 103, 11, 1, 2), InterfaceIndex().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscCBIfIndex.setStatus('mandatory')
mscCBMpTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 103, 12), )
if mibBuilder.loadTexts: mscCBMpTable.setStatus('mandatory')
mscCBMpEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 103, 12, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscCBIndex"))
if mibBuilder.loadTexts: mscCBMpEntry.setStatus('mandatory')
mscCBLinkToProtocolPort = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 103, 12, 1, 1), Link()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscCBLinkToProtocolPort.setStatus('mandatory')
mscCBOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 103, 13), )
if mibBuilder.loadTexts: mscCBOperTable.setStatus('mandatory')
mscCBOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 103, 13, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscCBIndex"))
if mibBuilder.loadTexts: mscCBOperEntry.setStatus('mandatory')
mscCBMacAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 103, 13, 1, 1), MacAddress().subtype(subtypeSpec=ValueSizeConstraint(6, 6)).setFixedLength(6)).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscCBMacAddress.setStatus('mandatory')
mscCBStateTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 103, 14), )
if mibBuilder.loadTexts: mscCBStateTable.setStatus('mandatory')
mscCBStateEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 103, 14, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscCBIndex"))
if mibBuilder.loadTexts: mscCBStateEntry.setStatus('mandatory')
# OSI state model: admin (locked/unlocked/shuttingDown), operational
# (disabled/enabled), usage (idle/active/busy).
mscCBAdminState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 103, 14, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("locked", 0), ("unlocked", 1), ("shuttingDown", 2))).clone('unlocked')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscCBAdminState.setStatus('mandatory')
mscCBOperationalState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 103, 14, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1))).clone('disabled')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscCBOperationalState.setStatus('mandatory')
mscCBUsageState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 103, 14, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("idle", 0), ("active", 1), ("busy", 2))).clone('idle')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscCBUsageState.setStatus('mandatory')
mscCBOperStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 103, 15), )
if mibBuilder.loadTexts: mscCBOperStatusTable.setStatus('mandatory')
mscCBOperStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 103, 15, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscCBIndex"))
if mibBuilder.loadTexts: mscCBOperStatusEntry.setStatus('mandatory')
mscCBSnmpOperStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 103, 15, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("testing", 3))).clone('up')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscCBSnmpOperStatus.setStatus('mandatory')
# --- mscPB component (OID ...562.36.2.1.104) ---
# Structurally parallel to mscCB above (row status, admin control, ifEntry,
# protocol-port link, oper, state, operStatus); only the component arc (104
# vs 103) and the MacAddress column type differ.
mscPB = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 104))
mscPBRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 104, 1), )
if mibBuilder.loadTexts: mscPBRowStatusTable.setStatus('mandatory')
mscPBRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 104, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscPBIndex"))
if mibBuilder.loadTexts: mscPBRowStatusEntry.setStatus('mandatory')
mscPBRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 104, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscPBRowStatus.setStatus('mandatory')
mscPBComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 104, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscPBComponentName.setStatus('mandatory')
mscPBStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 104, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscPBStorageType.setStatus('mandatory')
mscPBIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 104, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscPBIndex.setStatus('mandatory')
mscPBAdminControlTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 104, 10), )
if mibBuilder.loadTexts: mscPBAdminControlTable.setStatus('mandatory')
mscPBAdminControlEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 104, 10, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscPBIndex"))
if mibBuilder.loadTexts: mscPBAdminControlEntry.setStatus('mandatory')
# Marked 'obsolete' in the MIB, same as mscCBSnmpAdminStatus.
mscPBSnmpAdminStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 104, 10, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("testing", 3))).clone('up')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscPBSnmpAdminStatus.setStatus('obsolete')
mscPBIfEntryTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 104, 11), )
if mibBuilder.loadTexts: mscPBIfEntryTable.setStatus('mandatory')
mscPBIfEntryEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 104, 11, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscPBIndex"))
if mibBuilder.loadTexts: mscPBIfEntryEntry.setStatus('mandatory')
mscPBIfAdminStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 104, 11, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("testing", 3))).clone('up')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscPBIfAdminStatus.setStatus('mandatory')
mscPBIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 104, 11, 1, 2), InterfaceIndex().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscPBIfIndex.setStatus('mandatory')
mscPBMpTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 104, 12), )
if mibBuilder.loadTexts: mscPBMpTable.setStatus('mandatory')
mscPBMpEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 104, 12, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscPBIndex"))
if mibBuilder.loadTexts: mscPBMpEntry.setStatus('mandatory')
mscPBLinkToProtocolPort = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 104, 12, 1, 1), Link()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscPBLinkToProtocolPort.setStatus('mandatory')
mscPBOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 104, 13), )
if mibBuilder.loadTexts: mscPBOperTable.setStatus('mandatory')
mscPBOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 104, 13, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscPBIndex"))
if mibBuilder.loadTexts: mscPBOperEntry.setStatus('mandatory')
# NOTE(review): declared as HexString here, whereas mscCBMacAddress uses
# MacAddress -- this mirrors the source MIB's textual conventions; do not
# "normalize" generated code by hand.
mscPBMacAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 104, 13, 1, 1), HexString().subtype(subtypeSpec=ValueSizeConstraint(6, 6)).setFixedLength(6)).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscPBMacAddress.setStatus('mandatory')
mscPBStateTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 104, 14), )
if mibBuilder.loadTexts: mscPBStateTable.setStatus('mandatory')
mscPBStateEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 104, 14, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscPBIndex"))
if mibBuilder.loadTexts: mscPBStateEntry.setStatus('mandatory')
mscPBAdminState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 104, 14, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("locked", 0), ("unlocked", 1), ("shuttingDown", 2))).clone('unlocked')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscPBAdminState.setStatus('mandatory')
mscPBOperationalState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 104, 14, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1))).clone('disabled')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscPBOperationalState.setStatus('mandatory')
mscPBUsageState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 104, 14, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("idle", 0), ("active", 1), ("busy", 2))).clone('idle')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscPBUsageState.setStatus('mandatory')
mscPBOperStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 104, 15), )
if mibBuilder.loadTexts: mscPBOperStatusTable.setStatus('mandatory')
mscPBOperStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 104, 15, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-BridgeMIB", "mscPBIndex"))
if mibBuilder.loadTexts: mscPBOperStatusEntry.setStatus('mandatory')
mscPBSnmpOperStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 104, 15, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("testing", 3))).clone('up')).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscPBSnmpOperStatus.setStatus('mandatory')
# --- Conformance / capability identifier nodes ---
# Bare OID registration points under ...562.36.2.2.25 (no columns attached
# here); the CA02/CA02A arcs presumably denote capability-set revisions --
# confirm against the MIB's conformance section.
bridgeGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 25, 1))
bridgeGroupCA = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 25, 1, 1))
bridgeGroupCA02 = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 25, 1, 1, 3))
bridgeGroupCA02A = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 25, 1, 1, 3, 2))
bridgeCapabilities = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 25, 3))
bridgeCapabilitiesCA = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 25, 3, 1))
bridgeCapabilitiesCA02 = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 25, 3, 1, 3))
bridgeCapabilitiesCA02A = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 25, 3, 1, 3, 2))
# Register the module's symbols with the MIB builder so other compiled
# modules can import them by name. pysmi splits the export across several
# exportSymbols() calls to stay within CPython's argument-count limits;
# this is the first such call.
mibBuilder.exportSymbols("Nortel-MsCarrier-MscPassport-BridgeMIB", mscVrPpTbclTbOperTable=mscVrPpTbclTbOperTable, mscVrPpTbsgDesignatedBridge=mscVrPpTbsgDesignatedBridge, mscVrPpFddiETBOperTable=mscVrPpFddiETBOperTable, mscVrPpTbseDesignatedPort=mscVrPpTbseDesignatedPort, mscVrPpTbsgTinygramFramesIn=mscVrPpTbsgTinygramFramesIn, mscVrPpTbpBlockingDiscards=mscVrPpTbpBlockingDiscards, mscVrPpSrBpPathCostMethod=mscVrPpSrBpPathCostMethod, mscVrPpSrseStpOperTable=mscVrPpSrseStpOperTable, mscVrPpSrseSrProvEntry=mscVrPpSrseSrProvEntry, mscVrPpSrclPreserveDomain=mscVrPpSrclPreserveDomain, mscVrPpSrclSegmentMismatchDiscards=mscVrPpSrclSegmentMismatchDiscards, mscVrBrTbNcFteComponentName=mscVrBrTbNcFteComponentName, mscVrPpFddiETBNsStorageType=mscVrPpFddiETBNsStorageType, mscVrPpTbpNs=mscVrPpTbpNs, mscVrPpSrseSteSpanMode=mscVrPpSrseSteSpanMode, mscVrPpSrtBpDynamicDiscards=mscVrPpSrtBpDynamicDiscards, mscVrPpSrclAreRdLimit=mscVrPpSrclAreRdLimit, mscVrBrTbStpBridgeMaxAge=mscVrBrTbStpBridgeMaxAge, mscVrBrTbNcFteOperTable=mscVrBrTbNcFteOperTable, mscVrPpTbclStpOperEntry=mscVrPpTbclStpOperEntry, mscVrBrTbStpRootPortName=mscVrBrTbStpRootPortName, mscVrPpSrclAdminStatus=mscVrPpSrclAdminStatus, mscVrBrTbNcFteOperEntry=mscVrBrTbNcFteOperEntry, mscVrPpTbpForwardTransitions=mscVrPpTbpForwardTransitions, mscVrPpSrtBpTbOperTable=mscVrPpSrtBpTbOperTable, mscVrPpTbsgDesignatedCost=mscVrPpTbsgDesignatedCost, mscVrPpFddiETBStpTypeOper=mscVrPpFddiETBStpTypeOper, mscVrPpSrtBpNsIndex=mscVrPpSrtBpNsIndex, mscVrBrOperStatusEntry=mscVrBrOperStatusEntry, mscCBLinkToProtocolPort=mscCBLinkToProtocolPort, mscCBSnmpAdminStatus=mscCBSnmpAdminStatus, mscVrPpSrclBlockingDiscards=mscVrPpSrclBlockingDiscards, mscCB=mscCB, mscVrPpSrBpStpTypeOper=mscVrPpSrBpStpTypeOper, mscVrPpSrsePortName=mscVrPpSrsePortName, mscVrPpSrclExploreFrameTreatment=mscVrPpSrclExploreFrameTreatment, mscVrPpSrsgPortNum=mscVrPpSrsgPortNum, mscVrBrPteBridgeNum=mscVrBrPteBridgeNum, mscVrPpTbsgBridgePortConfig=mscVrPpTbsgBridgePortConfig, 
mscVrPpFddiETBStpOperTable=mscVrPpFddiETBStpOperTable, mscVrPpSrtBpOperTable=mscVrPpSrtBpOperTable, mscVrPpTbseStateEntry=mscVrPpTbseStateEntry, mscVrBrTbStpBridgeId=mscVrBrTbStpBridgeId, mscVrPpSrBpComponentName=mscVrPpSrBpComponentName, mscVrBrPteInternalLanId=mscVrBrPteInternalLanId, mscVrPpSrBpSteOutFrames=mscVrPpSrBpSteOutFrames, mscVrPpSrtBpPathCostMethod=mscVrPpSrtBpPathCostMethod, mscVrPpTbseDesignatedRoot=mscVrPpTbseDesignatedRoot, mscVrPpSrseServiceClass=mscVrPpSrseServiceClass, mscVrPpFddiETBTinygramFramesIn=mscVrPpFddiETBTinygramFramesIn, mscVrPpSrsePortStateStpControl=mscVrPpSrsePortStateStpControl, mscVrPpSrclStatsEntry=mscVrPpSrclStatsEntry, mscCBStateEntry=mscCBStateEntry, mscVrPpFddiETBConvertArpMacAddress=mscVrPpFddiETBConvertArpMacAddress, mscVrPpTbclStpTypeProv=mscVrPpTbclStpTypeProv, mscVrPpTbsgProvEntry=mscVrPpTbsgProvEntry, mscVrPpSrsgStpOperEntry=mscVrPpSrsgStpOperEntry, mscVrPpSrseDupLanIdOrTreeErrors=mscVrPpSrseDupLanIdOrTreeErrors, mscVrPpSrclApeInFrames=mscVrPpSrclApeInFrames, mscVrBrPteOperEntry=mscVrBrPteOperEntry, mscVrPpTbpNsRowStatusEntry=mscVrPpTbpNsRowStatusEntry, mscVrPpTbseOperationalState=mscVrPpTbseOperationalState, mscVrPpTbpNsComponentName=mscVrPpTbpNsComponentName, mscVrPpTbsgMtuExceededDiscards=mscVrPpTbsgMtuExceededDiscards, mscVrPpSrclStpTypeProv=mscVrPpSrclStpTypeProv, mscVrBrNsIndex=mscVrBrNsIndex, mscVrPpSrseSnmpOperStatus=mscVrPpSrseSnmpOperStatus, mscVrBrSrbStatsEntry=mscVrBrSrbStatsEntry, bridgeCapabilitiesCA02=bridgeCapabilitiesCA02, mscVrPpTbpBridgePortConfig=mscVrPpTbpBridgePortConfig, mscVrPpSrtBpStaticEntryDiscards=mscVrPpSrtBpStaticEntryDiscards, mscVrPpSrBpStpOperTable=mscVrPpSrBpStpOperTable, mscVrPpSrseConvertArpMacAddress=mscVrPpSrseConvertArpMacAddress, mscVrPpSrclDesignatedRoot=mscVrPpSrclDesignatedRoot, mscVrBrNsAteEntryNumberIndex=mscVrBrNsAteEntryNumberIndex, mscVrPpTbclTbOperEntry=mscVrPpTbclTbOperEntry, mscVrPpSrBpDIProvTable=mscVrPpSrBpDIProvTable, mscVrPpTbseTbOperTable=mscVrPpTbseTbOperTable, 
mscVrPpSrclFragmentIp=mscVrPpSrclFragmentIp, mscVrPpSrseDesignatedBridge=mscVrPpSrseDesignatedBridge, mscVrBrPteStorageType=mscVrBrPteStorageType, mscVrBrSrbStpProvEntry=mscVrBrSrbStpProvEntry, mscCBIndex=mscCBIndex, mscVrPpTbsgTbOperEntry=mscVrPpTbsgTbOperEntry, mscVrPpSrBpAreRdLimit=mscVrPpSrBpAreRdLimit, mscVrPpTbpTinygramFramesOut=mscVrPpTbpTinygramFramesOut, mscVrPpSrclOperStatusEntry=mscVrPpSrclOperStatusEntry, mscVrBrTbNcFteRowStatusTable=mscVrBrTbNcFteRowStatusTable, mscVrPpSrBpNs=mscVrPpSrBpNs, mscVrPpTbclDelayExceededDiscards=mscVrPpTbclDelayExceededDiscards, mscVrPpTbpOperStatusTable=mscVrPpTbpOperStatusTable, mscVrPpTbpNsRowStatus=mscVrPpTbpNsRowStatus, mscVrPpFddiETB=mscVrPpFddiETB, mscVrPpTbpStateTable=mscVrPpTbpStateTable, mscVrPpSrtBpTbProvTable=mscVrPpSrtBpTbProvTable, mscVrPpTbseMtuExceededDiscards=mscVrPpTbseMtuExceededDiscards, mscVrPpTbseBadAbstractDiscards=mscVrPpTbseBadAbstractDiscards, mscVrPpSrsgInFilterDiscards=mscVrPpSrsgInFilterDiscards, mscVrPpSrclDomainNum=mscVrPpSrclDomainNum, mscVrPpFddiETBNsRowStatus=mscVrPpFddiETBNsRowStatus, mscVrPpFddiETBPathCostOper=mscVrPpFddiETBPathCostOper, mscVrBrTbNcFtePort=mscVrBrTbNcFtePort, mscVrBrSrbStpBridgeForwardDelay=mscVrBrSrbStpBridgeForwardDelay, mscCBIfAdminStatus=mscCBIfAdminStatus, mscVrPpSrtBpMtuExceededDiscards=mscVrPpSrtBpMtuExceededDiscards, mscVrBrSrbStpDesignatedRoot=mscVrBrSrbStpDesignatedRoot, mscVrPpTbseAdminState=mscVrPpTbseAdminState, mscVrPpTbclSecureOption=mscVrPpTbclSecureOption, mscVrPpTbsgPreserveDomain=mscVrPpTbsgPreserveDomain, mscVrBrPteStpState=mscVrBrPteStpState, mscVrBrTbFtePeerAddressInfo=mscVrBrTbFtePeerAddressInfo, mscVrPpTbsgStpTypeProv=mscVrPpTbsgStpTypeProv, mscVrBrSrbStpTopologyChangeDetect=mscVrBrSrbStpTopologyChangeDetect, mscVrPpSrsgTinygramFramesOut=mscVrPpSrsgTinygramFramesOut, mscVrPpTbclNsProvEntry=mscVrPpTbclNsProvEntry, mscVrBrNsAteFirstMacAddressMask=mscVrBrNsAteFirstMacAddressMask, mscVrBrTbStpOperEntry=mscVrBrTbStpOperEntry, 
mscVrPpSrseBridgePortConfig=mscVrPpSrseBridgePortConfig, mscVrBrTbAgingTime=mscVrBrTbAgingTime, mscVrBrTbStpPriority=mscVrBrTbStpPriority, mscVrBrSrbLteTypeOfEntry=mscVrBrSrbLteTypeOfEntry, mscVrPpFddiETBSecureOption=mscVrPpFddiETBSecureOption, mscVrPpTbpPortStateStpControl=mscVrPpTbpPortStateStpControl, mscVrPpSrseLanId=mscVrPpSrseLanId, mscVrPpSrtBpSegmentMismatchDiscards=mscVrPpSrtBpSegmentMismatchDiscards, mscVrPpSrseApeInFrames=mscVrPpSrseApeInFrames, mscVrBrTbStpRowStatusTable=mscVrBrTbStpRowStatusTable, mscVrPpSrtBpTinygramFramesIn=mscVrPpSrtBpTinygramFramesIn, mscVrBrSrbComponentName=mscVrBrSrbComponentName, mscVrBrSrbTotalLanIdTableEntries=mscVrBrSrbTotalLanIdTableEntries, mscVrBrSrbLteRowStatusTable=mscVrBrSrbLteRowStatusTable, mscVrPpTbpOperStatusEntry=mscVrPpTbpOperStatusEntry, mscVrPpSrtBp=mscVrPpSrtBp, mscVrPpFddiETBProvTable=mscVrPpFddiETBProvTable, mscVrBrPtePortPointsTo=mscVrBrPtePortPointsTo, mscPBIfAdminStatus=mscPBIfAdminStatus, mscVrPpSrtBpNsRowStatus=mscVrPpSrtBpNsRowStatus, mscVrPpSrtBpOutFilterDiscards=mscVrPpSrtBpOutFilterDiscards, mscVrPpSrtBpDelayExceededDiscards=mscVrPpSrtBpDelayExceededDiscards, mscVrBrTbNumFtEntriesFree=mscVrBrTbNumFtEntriesFree, mscVrPpFddiETBStateTable=mscVrPpFddiETBStateTable, mscVrPpSrclDupSegmentDiscards=mscVrPpSrclDupSegmentDiscards, mscVrPpTbseTbProvTable=mscVrPpTbseTbProvTable, mscVrPpTbclForwardTransitions=mscVrPpTbclForwardTransitions, mscVrPpFddiETBUsageState=mscVrPpFddiETBUsageState, mscVrPpTbpStpTypeProv=mscVrPpTbpStpTypeProv, mscVrPpTbclBridgePortType=mscVrPpTbclBridgePortType, mscVrPpSrtBpDesignatedBridge=mscVrPpSrtBpDesignatedBridge, mscVrBrTbStpFwdDelay=mscVrBrTbStpFwdDelay, mscVrPpTbsgDomainNum=mscVrPpTbsgDomainNum, mscVrPpSrclConvertArpMacAddress=mscVrPpSrclConvertArpMacAddress, mscVrBrAdminControlTable=mscVrBrAdminControlTable, mscPBLinkToProtocolPort=mscPBLinkToProtocolPort, mscVrPpSrseAdminState=mscVrPpSrseAdminState, mscVrPpTbsgConvertArpMacAddress=mscVrPpTbsgConvertArpMacAddress, 
mscVrPpSrBpNsRowStatusTable=mscVrPpSrBpNsRowStatusTable, mscVrBrTbStpIndex=mscVrBrTbStpIndex, mscVrBrSrbStpTopologyChanges=mscVrBrSrbStpTopologyChanges, mscVrPpSrBpDomainNum=mscVrPpSrBpDomainNum, mscVrPpTbsgComponentName=mscVrPpTbsgComponentName, mscVrBrTbNcFteStatus=mscVrBrTbNcFteStatus, mscVrPpFddiETBDIProvEntry=mscVrPpFddiETBDIProvEntry, mscVrBrTbSteRowStatusEntry=mscVrBrTbSteRowStatusEntry, mscVrPpSrsgPortPriority=mscVrPpSrsgPortPriority, mscVrPpFddiETBDesignatedBridge=mscVrPpFddiETBDesignatedBridge, mscVrPpSrsePathCost=mscVrPpSrsePathCost, mscVrPpSrsgHopCount=mscVrPpSrsgHopCount, mscVrPpSrBpBridgePortConfig=mscVrPpSrBpBridgePortConfig, mscPBStorageType=mscPBStorageType, mscVrPpSrtBpNsProvEntry=mscVrPpSrtBpNsProvEntry, mscVrPpSrBpTranslateIpx=mscVrPpSrBpTranslateIpx, mscVrPpTbclStatsEntry=mscVrPpTbclStatsEntry, mscVrPpSrtBpPortStateStpControl=mscVrPpSrtBpPortStateStpControl, mscVrPpTbsgInDiscards=mscVrPpTbsgInDiscards, mscPBRowStatus=mscPBRowStatus, mscVrPpSrBpDupLanIdOrTreeErrors=mscVrPpSrBpDupLanIdOrTreeErrors, mscVrPpSrsgRowStatusEntry=mscVrPpSrsgRowStatusEntry, mscVrBrNsAteFirstMacAddress=mscVrBrNsAteFirstMacAddress, mscVrPpTbsgAdminStatus=mscVrPpTbsgAdminStatus, mscVrBrTbStpTimeSinceTopologyChange=mscVrBrTbStpTimeSinceTopologyChange, mscVrPpSrclPortNum=mscVrPpSrclPortNum, mscVrPpSrtBpStpPortState=mscVrPpSrtBpStpPortState, mscVrPpSrseHopCountExceededDiscards=mscVrPpSrseHopCountExceededDiscards, mscVrPpTbpProvTable=mscVrPpTbpProvTable, mscVrPpSrclStorageType=mscVrPpSrclStorageType, mscVrBrTbStatsTable=mscVrBrTbStatsTable, mscVrPpTbclComponentName=mscVrPpTbclComponentName, mscVrPpTbclMaxInfo=mscVrPpTbclMaxInfo, mscVrPpSrseInternalLanId=mscVrPpSrseInternalLanId, mscVrPpSrseApeOutFrames=mscVrPpSrseApeOutFrames, mscVrPpTbseStpProvTable=mscVrPpTbseStpProvTable, mscVrPpSrsgSpecOutFrames=mscVrPpSrsgSpecOutFrames, mscVrPpTbclNsIncomingFilter=mscVrPpTbclNsIncomingFilter, mscVrPpSrsgBridgeNum=mscVrPpSrsgBridgeNum, 
mscVrPpTbpStaticEntryDiscards=mscVrPpTbpStaticEntryDiscards, mscVrPpTbseUnicastNoMatches=mscVrPpTbseUnicastNoMatches, mscVrPpSrclHopCount=mscVrPpSrclHopCount, mscVrPpSrtBpConvertArpMacAddress=mscVrPpSrtBpConvertArpMacAddress, mscVrPpSrclApeOutFrames=mscVrPpSrclApeOutFrames, mscVrPpSrBpNsIncomingFilter=mscVrPpSrBpNsIncomingFilter, mscVrPpTbseOutFrames=mscVrPpTbseOutFrames, mscVrPpSrsgStpTypeProv=mscVrPpSrsgStpTypeProv, mscVrPpSrsgIfIndex=mscVrPpSrsgIfIndex, mscVrPpTbsgUpTime=mscVrPpTbsgUpTime, mscVrPpSrsgDesignatedRoot=mscVrPpSrsgDesignatedRoot, mscVrPpSrBpBridgePortType=mscVrPpSrBpBridgePortType, mscVrPpSrclNsProvEntry=mscVrPpSrclNsProvEntry, mscVrBrTbStp=mscVrBrTbStp, mscVrPpTbseOperStatusEntry=mscVrPpTbseOperStatusEntry, mscVrPpSrtBpSteSpanMode=mscVrPpSrtBpSteSpanMode, mscVrBrNsAteFilterName=mscVrBrNsAteFilterName, mscVrBrTbStpHoldTime=mscVrBrTbStpHoldTime, mscVrBrPtePortNameIndex=mscVrBrPtePortNameIndex, mscVrPpSrclPortStateStpControl=mscVrPpSrclPortStateStpControl, mscVrBrTbFteRowStatusTable=mscVrBrTbFteRowStatusTable, mscVrPpTbpPathCostOper=mscVrPpTbpPathCostOper, mscVrPpTbpMtuExceededDiscards=mscVrPpTbpMtuExceededDiscards, mscVrPpTbpNsProvTable=mscVrPpTbpNsProvTable, mscVrPpSrclDownTime=mscVrPpSrclDownTime, mscVrBrTbSteAddressIndex=mscVrBrTbSteAddressIndex, mscVrBrSrbLteAgeOfEntry=mscVrBrSrbLteAgeOfEntry, mscVrPpSrsgSrStatsTable=mscVrPpSrsgSrStatsTable, mscVrPpSrseAreRdLimit=mscVrPpSrseAreRdLimit, mscVrPpSrclHopCountExceededDiscards=mscVrPpSrclHopCountExceededDiscards, mscVrPpTbclNsIndex=mscVrPpTbclNsIndex, mscVrBrTbFtePort=mscVrBrTbFtePort, mscVrPpSrBpPathCost=mscVrPpSrBpPathCost, mscVrPpSrclStpPortState=mscVrPpSrclStpPortState, mscVrPpSrBpUsageState=mscVrPpSrBpUsageState, mscVrBrNsStorageType=mscVrBrNsStorageType, mscVrPpTbclStpOperTable=mscVrPpTbclStpOperTable, mscVrBrNumPorts=mscVrBrNumPorts, mscVrBrComponentName=mscVrBrComponentName, mscVrPpFddiETBBridgePortConfig=mscVrPpFddiETBBridgePortConfig, mscVrPpTbpSnmpOperStatus=mscVrPpTbpSnmpOperStatus, 
mscVrPpTbseTinygramFramesIn=mscVrPpTbseTinygramFramesIn, mscVrPpTbsgOperTable=mscVrPpTbsgOperTable, mscVrPpTbsePreserveDomain=mscVrPpTbsePreserveDomain, mscVrBrNsRowStatusTable=mscVrBrNsRowStatusTable, mscVrPpFddiETBOperStatusEntry=mscVrPpFddiETBOperStatusEntry, mscVrBrStorageType=mscVrBrStorageType, mscVrBrSrbStpBridgeId=mscVrBrSrbStpBridgeId, mscVrPpTbsgDIProvEntry=mscVrPpTbsgDIProvEntry, mscVrPpTbclPortStateStpControl=mscVrPpTbclPortStateStpControl, mscVrPpSrsgPathCost=mscVrPpSrsgPathCost, mscVrPpSrBpNsRowStatus=mscVrPpSrBpNsRowStatus, mscVrBrPteSpOperEntry=mscVrBrPteSpOperEntry, mscVrBrSrbStpMaxAge=mscVrBrSrbStpMaxAge, mscVrPpSrsgStaticDiscards=mscVrPpSrsgStaticDiscards, mscVrPpTbpSecureOption=mscVrPpTbpSecureOption, mscCBAdminControlEntry=mscCBAdminControlEntry, mscVrPpSrtBpOperEntry=mscVrPpSrtBpOperEntry, mscVrPpTbpTbOperEntry=mscVrPpTbpTbOperEntry, mscVrPpTbpStpProvEntry=mscVrPpTbpStpProvEntry)
# Auto-generated MIB registration: exports this batch of Bridge-MIB managed-object
# symbols (tables, entries, columnar objects) under the module name
# "Nortel-MsCarrier-MscPassport-BridgeMIB" so other MIB modules and the SNMP engine
# can resolve them via mibBuilder.importSymbols().
# NOTE(review): the export appears split across several exportSymbols() calls,
# presumably because the generator caps keyword-argument counts per call — confirm
# against the pysmi/libsmi generator before consolidating. Do not hand-edit the
# symbol list; regenerate from the source MIB instead.
mibBuilder.exportSymbols("Nortel-MsCarrier-MscPassport-BridgeMIB", mscVrPpTbpDynamicEntryDiscards=mscVrPpTbpDynamicEntryDiscards, mscVrPpTbseMaxInfo=mscVrPpTbseMaxInfo, mscVrPpSrBpFragmentIp=mscVrPpSrBpFragmentIp, mscVrPpTbpPathCostMethod=mscVrPpTbpPathCostMethod, mscVrPpSrseDIProvEntry=mscVrPpSrseDIProvEntry, mscVrPpSrtBpComponentName=mscVrPpSrtBpComponentName, mscVrPpTbpRowStatus=mscVrPpTbpRowStatus, mscVrPpTbpTbOperTable=mscVrPpTbpTbOperTable, mscVrPpTbclUpTime=mscVrPpTbclUpTime, mscVrBrTbStpRowStatusEntry=mscVrBrTbStpRowStatusEntry, mscVrBrTbNcFteAgeOfEntry=mscVrBrTbNcFteAgeOfEntry, mscVrPpSrtBpUsageState=mscVrPpSrtBpUsageState, mscVrPpSrBpSrStatsEntry=mscVrPpSrBpSrStatsEntry, mscVrBrTbStpComponentName=mscVrBrTbStpComponentName, mscVrPpTbclNsComponentName=mscVrPpTbclNsComponentName, mscVrPpFddiETBInFilterDiscards=mscVrPpFddiETBInFilterDiscards, mscVrPpFddiETBBadAbstractDiscards=mscVrPpFddiETBBadAbstractDiscards, mscVrPpSrBpSpecInFrames=mscVrPpSrBpSpecInFrames, mscVrBrSrbStpBridgeMaxAge=mscVrBrSrbStpBridgeMaxAge, mscVrPpSrseStpOperEntry=mscVrPpSrseStpOperEntry, mscVrPpFddiETBBridgingMode=mscVrPpFddiETBBridgingMode, mscVrBrTbSteStatus=mscVrBrTbSteStatus, mscVrPpTbpUnicastNoMatches=mscVrPpTbpUnicastNoMatches, mscVrPpSrseFragmentIp=mscVrPpSrseFragmentIp, mscVrPpSrsePortPriority=mscVrPpSrsePortPriority, mscVrPpSrtBpApeInFrames=mscVrPpSrtBpApeInFrames, mscVrPpTbsgForwardTransitions=mscVrPpTbsgForwardTransitions, mscCBOperEntry=mscCBOperEntry, mscPBIfEntryEntry=mscPBIfEntryEntry, mscVrBrNsProvTable=mscVrBrNsProvTable, mscVrPpSrclComponentName=mscVrPpSrclComponentName, mscVrPpFddiETBStatsTable=mscVrPpFddiETBStatsTable, mscVrPpFddiETBPortNum=mscVrPpFddiETBPortNum, mscPBStateEntry=mscPBStateEntry, mscVrPpSrtBpTbProvEntry=mscVrPpSrtBpTbProvEntry, mscVrPpSrtBpLargestFrame=mscVrPpSrtBpLargestFrame, mscVrPpSrtBpStaticDiscards=mscVrPpSrtBpStaticDiscards, mscVrPpTbseDIProvEntry=mscVrPpTbseDIProvEntry, mscVrPpTbsgIndex=mscVrPpTbsgIndex, 
mscVrPpSrclStaticDiscards=mscVrPpSrclStaticDiscards, mscCBRowStatusEntry=mscCBRowStatusEntry, mscVrPpTbsgPortName=mscVrPpTbsgPortName, mscVrBrTbLearnedEntryDiscards=mscVrBrTbLearnedEntryDiscards, mscVrPpSrsgUpTime=mscVrPpSrsgUpTime, mscVrPpSrclStpTypeOper=mscVrPpSrclStpTypeOper, mscCBIfIndex=mscCBIfIndex, mscVrBrPteRowStatusTable=mscVrBrPteRowStatusTable, mscVrPpTbp=mscVrPpTbp, mscVrPpSrsePreserveDomain=mscVrPpSrsePreserveDomain, mscVrBrTbFteOperTable=mscVrBrTbFteOperTable, mscVrPpSrsgDesignatedPort=mscVrPpSrsgDesignatedPort, mscVrPpTbseDynamicEntryDiscards=mscVrPpTbseDynamicEntryDiscards, mscVrBrTbStpTopologyChangeDetect=mscVrBrTbStpTopologyChangeDetect, mscVrPpFddiETBBadVerifyDiscards=mscVrPpFddiETBBadVerifyDiscards, mscVrPpTbclOutFilterDiscards=mscVrPpTbclOutFilterDiscards, mscVrPpSrclOperStatusTable=mscVrPpSrclOperStatusTable, mscVrPpTbpBadVerifyDiscards=mscVrPpTbpBadVerifyDiscards, mscVrPpSrsgDupLanIdOrTreeErrors=mscVrPpSrsgDupLanIdOrTreeErrors, mscVrPpSrclBridgePortType=mscVrPpSrclBridgePortType, mscVrPpSrtBpFragmentIp=mscVrPpSrtBpFragmentIp, mscVrBrStateEntry=mscVrBrStateEntry, mscVrPpSrtBpTbOperEntry=mscVrPpSrtBpTbOperEntry, mscVrPpTbpDelayExceededDiscards=mscVrPpTbpDelayExceededDiscards, mscVrBr=mscVrBr, mscVrPpSrclSrProvEntry=mscVrPpSrclSrProvEntry, mscVrBrTbRowStatusTable=mscVrBrTbRowStatusTable, mscPBAdminControlTable=mscPBAdminControlTable, mscPBRowStatusEntry=mscPBRowStatusEntry, bridgeGroup=bridgeGroup, mscVrPpFddiETBOutFrames=mscVrPpFddiETBOutFrames, mscVrBrTb=mscVrBrTb, mscVrPpTbclNsRowStatusTable=mscVrPpTbclNsRowStatusTable, mscVrBrPteRowStatus=mscVrBrPteRowStatus, mscVrPpSrseDownTime=mscVrPpSrseDownTime, mscVrPpSrseAdminStatus=mscVrPpSrseAdminStatus, mscVrBrTbFte=mscVrBrTbFte, mscVrPpTbpTranslateIpx=mscVrPpTbpTranslateIpx, mscVrPpTbsgRowStatus=mscVrPpTbsgRowStatus, mscVrPpSrtBpSpecInFrames=mscVrPpSrtBpSpecInFrames, mscVrPpSrseRowStatus=mscVrPpSrseRowStatus, mscVrBrPteFilterPoints=mscVrBrPteFilterPoints, 
mscVrPpTbclDesignatedRoot=mscVrPpTbclDesignatedRoot, mscVrPpSrBpBridgeNum=mscVrPpSrBpBridgeNum, mscVrPpSrtBpStpOperEntry=mscVrPpSrtBpStpOperEntry, mscVrPpTbclNsProvTable=mscVrPpTbclNsProvTable, mscVrPpSrsgSrProvEntry=mscVrPpSrsgSrProvEntry, mscCBOperStatusEntry=mscCBOperStatusEntry, mscVrPpSrclSrStatsEntry=mscVrPpSrclSrStatsEntry, mscVrBrPteComponentName=mscVrBrPteComponentName, mscVrPpTbsgDesignatedPort=mscVrPpTbsgDesignatedPort, mscVrPpTbclInFilterDiscards=mscVrPpTbclInFilterDiscards, mscVrPpSrseDynamicDiscards=mscVrPpSrseDynamicDiscards, mscVrPpTbsgInFilterDiscards=mscVrPpTbsgInFilterDiscards, mscVrPpTbclOperEntry=mscVrPpTbclOperEntry, mscVrPpTbpStpOperTable=mscVrPpTbpStpOperTable, mscVrPpSrBpBlockingDiscards=mscVrPpSrBpBlockingDiscards, mscVrPpFddiETBDesignatedPort=mscVrPpFddiETBDesignatedPort, mscVrPpTbpStpProvTable=mscVrPpTbpStpProvTable, mscVrPpSrsgOperStatusEntry=mscVrPpSrsgOperStatusEntry, mscVrPpSrclForwardTransitions=mscVrPpSrclForwardTransitions, mscVrPpSrclDynamicDiscards=mscVrPpSrclDynamicDiscards, mscVrPpSrseSpecOutFrames=mscVrPpSrseSpecOutFrames, mscVrPpTbseDomainNum=mscVrPpTbseDomainNum, mscVrPpFddiETBTranslateIpx=mscVrPpFddiETBTranslateIpx, mscVrPpTbsgLearningDiscards=mscVrPpTbsgLearningDiscards, mscVrPpSrseProvEntry=mscVrPpSrseProvEntry, mscVrBrNsAteProvTable=mscVrBrNsAteProvTable, mscVrBrSrbLanIdTableNumEntries=mscVrBrSrbLanIdTableNumEntries, mscVrPpTbclInDiscards=mscVrPpTbclInDiscards, mscPBOperationalState=mscPBOperationalState, mscVrPpSrsgPathCostMethod=mscVrPpSrsgPathCostMethod, mscVrBrAdminControlEntry=mscVrBrAdminControlEntry, mscVrPpFddiETBPortName=mscVrPpFddiETBPortName, mscVrPpTbclTranslateIpx=mscVrPpTbclTranslateIpx, mscVrPpSrclSteOutFrames=mscVrPpSrclSteOutFrames, mscVrPpFddiETBPathCostMethod=mscVrPpFddiETBPathCostMethod, mscVrPpSrtBpLanId=mscVrPpSrtBpLanId, mscVrPpSrclDesignatedPort=mscVrPpSrclDesignatedPort, mscVrPpTbsgSnmpOperStatus=mscVrPpTbsgSnmpOperStatus, mscVrPpTbclTinygramFramesOut=mscVrPpTbclTinygramFramesOut, 
mscVrPpSrclRowStatusTable=mscVrPpSrclRowStatusTable, mscVrPpSrBpInternalLanId=mscVrPpSrBpInternalLanId, mscPBIfIndex=mscPBIfIndex, mscVrPpTbpAdminState=mscVrPpTbpAdminState, mscVrPpTbpNsProvEntry=mscVrPpTbpNsProvEntry, mscVrBrSrbRowStatus=mscVrBrSrbRowStatus, mscVrPpTbclBadVerifyDiscards=mscVrPpTbclBadVerifyDiscards, mscVrPpFddiETBNsProvTable=mscVrPpFddiETBNsProvTable, mscVrPpTbclRowStatusTable=mscVrPpTbclRowStatusTable, mscVrPpTbclMtuExceededDiscards=mscVrPpTbclMtuExceededDiscards, mscVrPpTbsgStateTable=mscVrPpTbsgStateTable, mscVrPpSrsgStateTable=mscVrPpSrsgStateTable, mscVrBrTbStpOperTable=mscVrBrTbStpOperTable, mscVrBrSrbAgingTime=mscVrBrSrbAgingTime, mscVrPpTbclTinygramFramesIn=mscVrPpTbclTinygramFramesIn, mscVrPpSrtBpBridgeNum=mscVrPpSrtBpBridgeNum, mscVrPpTbseOperTable=mscVrPpTbseOperTable, mscVrPpTbclBridgePortConfig=mscVrPpTbclBridgePortConfig, mscVrPpSrtBpAdminState=mscVrPpSrtBpAdminState, mscVrPpSrclOperTable=mscVrPpSrclOperTable, mscVrPpSrclNsRowStatusTable=mscVrPpSrclNsRowStatusTable, bridgeCapabilitiesCA=bridgeCapabilitiesCA, mscVrPpSrseSteOutFrames=mscVrPpSrseSteOutFrames, mscVrBrPteOperTable=mscVrBrPteOperTable, mscVrPpTbpProvEntry=mscVrPpTbpProvEntry, mscVrBrSrbStpFwdDelay=mscVrBrSrbStpFwdDelay, mscVrPpSrseTinygramFramesIn=mscVrPpSrseTinygramFramesIn, mscVrBrAdminState=mscVrBrAdminState, mscVrPpTbsgUsageState=mscVrPpTbsgUsageState, mscVrPpTbseRowStatusEntry=mscVrPpTbseRowStatusEntry, mscVrPpTbpDownTime=mscVrPpTbpDownTime, mscVrPpTbsgOperEntry=mscVrPpTbsgOperEntry, mscVrPpTbsgInFrames=mscVrPpTbsgInFrames, mscVrPpSrclMtuExceededDiscards=mscVrPpSrclMtuExceededDiscards, mscVrPpTbseTinygramFramesOut=mscVrPpTbseTinygramFramesOut, mscCBIfEntryTable=mscCBIfEntryTable, mscVrPpSrsePathCostOper=mscVrPpSrsePathCostOper, mscVrPpTbseBridgePortConfig=mscVrPpTbseBridgePortConfig, mscVrPpSrsgUsageState=mscVrPpSrsgUsageState, mscVrBrNsAteDirection=mscVrBrNsAteDirection, mscVrBrTbStpAgingTimeOper=mscVrBrTbStpAgingTimeOper, mscVrBrTbNcFte=mscVrBrTbNcFte, 
mscCBComponentName=mscCBComponentName, mscVrPpSrtBpIfIndex=mscVrPpSrtBpIfIndex, mscVrPpTbsgStatsEntry=mscVrPpTbsgStatsEntry, mscVrPpSrseStatsEntry=mscVrPpSrseStatsEntry, mscVrPpSrclNsStorageType=mscVrPpSrclNsStorageType, mscVrPpTbclStpPortState=mscVrPpTbclStpPortState, mscVrPpSrBpSrStatsTable=mscVrPpSrBpSrStatsTable, mscVrPpSrBpInFilterDiscards=mscVrPpSrBpInFilterDiscards, mscVrPpSrtBpSpecOutFrames=mscVrPpSrtBpSpecOutFrames, mscVrPpTbsgPortStateStpControl=mscVrPpTbsgPortStateStpControl, mscVrBrPteMacType=mscVrBrPteMacType, mscVrPpTbclPortPriority=mscVrPpTbclPortPriority, mscVrBrTbNcFteDomainNumIndex=mscVrBrTbNcFteDomainNumIndex, mscVrPpTbclPathCost=mscVrPpTbclPathCost, mscVrPpTbpStpPortState=mscVrPpTbpStpPortState, mscVrPpSrBpStateEntry=mscVrPpSrBpStateEntry, mscVrPpSrtBpDIProvEntry=mscVrPpSrtBpDIProvEntry, mscVrPpSrseOperEntry=mscVrPpSrseOperEntry, mscVrBrSrbLteStorageType=mscVrBrSrbLteStorageType, mscVrPpTbpPortPriority=mscVrPpTbpPortPriority, mscVrPpSrseStatsTable=mscVrPpSrseStatsTable, mscVrPpFddiETBAdminStatus=mscVrPpFddiETBAdminStatus, mscVrPpSrtBpNsProvTable=mscVrPpSrtBpNsProvTable, mscVrBrTbRowStatus=mscVrBrTbRowStatus, mscVrBrTbIndex=mscVrBrTbIndex, mscCBStorageType=mscCBStorageType, mscVrPpTbclUnicastNoMatches=mscVrPpTbclUnicastNoMatches, mscPBStateTable=mscPBStateTable, mscVrPpTbpOperTable=mscVrPpTbpOperTable, mscVrBrTbNumFtEntriesDenied=mscVrBrTbNumFtEntriesDenied, mscVrBrTbStpMaxAge=mscVrBrTbStpMaxAge, mscVrPpSrseDomainNum=mscVrPpSrseDomainNum, mscVrPpTbseStpPortState=mscVrPpTbseStpPortState, mscVrPpSrtBpInFrames=mscVrPpSrtBpInFrames, mscCBMpTable=mscCBMpTable, mscVrPpSrBpStpProvTable=mscVrPpSrBpStpProvTable, mscVrPpFddiETBStpPortState=mscVrPpFddiETBStpPortState, mscVrPpSrtBpStorageType=mscVrPpSrtBpStorageType, mscVrPpTbsgStatsTable=mscVrPpTbsgStatsTable, mscVrBrTbSteStorageType=mscVrBrTbSteStorageType, mscVrPpSrBpOperTable=mscVrPpSrBpOperTable, mscVrPpSrseIndex=mscVrPpSrseIndex, mscVrPpTbpBridgePortType=mscVrPpTbpBridgePortType, 
mscVrPpSrBpApeInFrames=mscVrPpSrBpApeInFrames, mscVrPpFddiETBDomainNum=mscVrPpFddiETBDomainNum, mscVrPpSrBpNsProvEntry=mscVrPpSrBpNsProvEntry, mscVrPpSrsgApeOutFrames=mscVrPpSrsgApeOutFrames, mscVrPpTbclStpProvTable=mscVrPpTbclStpProvTable, mscVrPpTbpStpOperEntry=mscVrPpTbpStpOperEntry, mscVrPpTbclTbProvEntry=mscVrPpTbclTbProvEntry, mscVrPpSrtBpStpTypeProv=mscVrPpSrtBpStpTypeProv, mscVrPpSrtBpInternalLanId=mscVrPpSrtBpInternalLanId, mscVrPpTbpMaxInfo=mscVrPpTbpMaxInfo, mscVrPpSrsgTranslateIpx=mscVrPpSrsgTranslateIpx, mscVrPpTbseBadVerifyDiscards=mscVrPpTbseBadVerifyDiscards, mscVrPpSrtBpBadAbstractDiscards=mscVrPpSrtBpBadAbstractDiscards, mscVrPpTbsgTinygramFramesOut=mscVrPpTbsgTinygramFramesOut, mscPBIndex=mscPBIndex, mscCBStateTable=mscCBStateTable, mscVrPpSrsgDesignatedCost=mscVrPpSrsgDesignatedCost, mscVrPpSrtBpSrStatsEntry=mscVrPpSrtBpSrStatsEntry, mscVrBrSrbStpPriority=mscVrBrSrbStpPriority, mscVrBrOperStatusTable=mscVrBrOperStatusTable, mscVrBrSrbLtePortName=mscVrBrSrbLtePortName, mscCBRowStatus=mscCBRowStatus, mscVrPpSrseSpecInFrames=mscVrPpSrseSpecInFrames, mscVrPpSrsgPortName=mscVrPpSrsgPortName, mscVrPpSrclSrStatsTable=mscVrPpSrclSrStatsTable, mscVrBrTbSteAtgtTable=mscVrBrTbSteAtgtTable, mscVrPpTbclStpProvEntry=mscVrPpTbclStpProvEntry, mscVrPpSrclProvTable=mscVrPpSrclProvTable, mscVrPpFddiETBStorageType=mscVrPpFddiETBStorageType, mscVrPpFddiETBPathCost=mscVrPpFddiETBPathCost, mscVrPpSrseStateEntry=mscVrPpSrseStateEntry, mscVrPpSrseOutFilterDiscards=mscVrPpSrseOutFilterDiscards, mscVrPpSrclLanId=mscVrPpSrclLanId, mscVrPpTbclUsageState=mscVrPpTbclUsageState, mscVrPpSrtBpSnmpOperStatus=mscVrPpSrtBpSnmpOperStatus, mscVrPpSrseBlockingDiscards=mscVrPpSrseBlockingDiscards, mscVrPpFddiETBNsIncomingFilter=mscVrPpFddiETBNsIncomingFilter, mscVrPpTbseSnmpOperStatus=mscVrPpTbseSnmpOperStatus, mscVrPpSrBpNsOutgoingFilter=mscVrPpSrBpNsOutgoingFilter, mscVrPpTbpRowStatusEntry=mscVrPpTbpRowStatusEntry, mscVrPpSrtBpUpTime=mscVrPpSrtBpUpTime, 
mscVrPpTbseBlockingDiscards=mscVrPpTbseBlockingDiscards, mscVrBrTbStatsEntry=mscVrBrTbStatsEntry, mscVrPpTbsgBlockingDiscards=mscVrPpTbsgBlockingDiscards, mscVrPpSrBpStatsEntry=mscVrPpSrBpStatsEntry, mscVrPpSrclNsOutgoingFilter=mscVrPpSrclNsOutgoingFilter, mscVrPpTbclDesignatedCost=mscVrPpTbclDesignatedCost, mscVrPpTbseRowStatusTable=mscVrPpTbseRowStatusTable, mscVrPpTbclAdminStatus=mscVrPpTbclAdminStatus)
# Auto-generated MIB registration (continuation batch): exports another set of
# Bridge-MIB managed-object symbols under "Nortel-MsCarrier-MscPassport-BridgeMIB"
# for resolution via mibBuilder.importSymbols(). Every keyword argument maps an
# exported symbol name to the MIB object defined earlier in this module.
# NOTE(review): generated code — regenerate from the source MIB rather than
# editing the symbol list by hand.
mibBuilder.exportSymbols("Nortel-MsCarrier-MscPassport-BridgeMIB", mscVrPpSrtBpPathCostOper=mscVrPpSrtBpPathCostOper, mscVrPpSrclUpTime=mscVrPpSrclUpTime, mscVrPpTbclPortName=mscVrPpTbclPortName, mscVrPpTbseBridgePortType=mscVrPpTbseBridgePortType, mscVrBrSrbLteOperEntry=mscVrBrSrbLteOperEntry, mscVrPpSrtBpBridgePortType=mscVrPpSrtBpBridgePortType, mscVrPpSrseStorageType=mscVrPpSrseStorageType, mscVrPpTbcl=mscVrPpTbcl, mscVrPpTbsgDelayExceededDiscards=mscVrPpTbsgDelayExceededDiscards, mscVrPpFddiETBDynamicEntryDiscards=mscVrPpFddiETBDynamicEntryDiscards, mscVrPpTbclNsOutgoingFilter=mscVrPpTbclNsOutgoingFilter, mscPBMpEntry=mscPBMpEntry, mscVrPpFddiETBMaxInfo=mscVrPpFddiETBMaxInfo, mscVrPpTbpDIProvEntry=mscVrPpTbpDIProvEntry, mscVrPpSrBpSteSpanMode=mscVrPpSrBpSteSpanMode, mscVrPpFddiETBOperEntry=mscVrPpFddiETBOperEntry, mscVrPpSrBpSpecOutFrames=mscVrPpSrBpSpecOutFrames, mscVrPpTbseStatsEntry=mscVrPpTbseStatsEntry, mscVrBrTbNcFteRowStatusEntry=mscVrBrTbNcFteRowStatusEntry, mscVrPpTbsePathCost=mscVrPpTbsePathCost, mscVrPpSrclDIProvEntry=mscVrPpSrclDIProvEntry, mscVrPpSrsgDesignatedBridge=mscVrPpSrsgDesignatedBridge, mscVrPpTbsgOperStatusTable=mscVrPpTbsgOperStatusTable, mscVrBrPteModeIndex=mscVrBrPteModeIndex, mscVrPpTbsgTbProvEntry=mscVrPpTbsgTbProvEntry, mscVrPpFddiETBPortPriority=mscVrPpFddiETBPortPriority, mscVrPpTbpFragmentIp=mscVrPpTbpFragmentIp, mscVrPpSrsg=mscVrPpSrsg, mscVrPpTbclPathCostOper=mscVrPpTbclPathCostOper, mscVrPpTbpTbProvTable=mscVrPpTbpTbProvTable, mscVrPpTbpDesignatedPort=mscVrPpTbpDesignatedPort, mscVrPpSrBpIfIndex=mscVrPpSrBpIfIndex, mscVrPpTbseDesignatedBridge=mscVrPpTbseDesignatedBridge, mscVrBrNsAteRowStatusTable=mscVrBrNsAteRowStatusTable, mscVrBrSrbStpBridgeHelloTime=mscVrBrSrbStpBridgeHelloTime, mscVrPpTbseAdminStatus=mscVrPpTbseAdminStatus, mscVrPpSrtBpRowStatusTable=mscVrPpSrtBpRowStatusTable, mscVrPpTbsgRowStatusEntry=mscVrPpTbsgRowStatusEntry, mscPB=mscPB, mscVrPpSrsgLanIdMismatches=mscVrPpSrsgLanIdMismatches, 
bridgeCapabilitiesCA02A=bridgeCapabilitiesCA02A, mscVrBrSrbStpHelloTime=mscVrBrSrbStpHelloTime, mscVrBrSnmpOperStatus=mscVrBrSnmpOperStatus, mscVrPpTbsgStpTypeOper=mscVrPpTbsgStpTypeOper, mscVrBrSrbStpAgingTimeOper=mscVrBrSrbStpAgingTimeOper, mscVrBrTbStpRowStatus=mscVrBrTbStpRowStatus, mscVrPpSrclSteSpanMode=mscVrPpSrclSteSpanMode, mscVrPpSrtBpAdminStatus=mscVrPpSrtBpAdminStatus, mscVrBrRowStatus=mscVrBrRowStatus, mscVrPpSrBpStpPortState=mscVrPpSrBpStpPortState, mscVrPpSrsgDIProvTable=mscVrPpSrsgDIProvTable, mscVrPpTbclProvTable=mscVrPpTbclProvTable, mscVrPpFddiETBInFrames=mscVrPpFddiETBInFrames, mscVrPpSrclNsComponentName=mscVrPpSrclNsComponentName, mscVrPpTbpIfIndex=mscVrPpTbpIfIndex, mscVrBrSrbProvTable=mscVrBrSrbProvTable, bridgeCapabilities=bridgeCapabilities, mscVrPpTbseTbOperEntry=mscVrPpTbseTbOperEntry, mscVrBrSrbProvEntry=mscVrBrSrbProvEntry, mscVrPpSrclSrProvTable=mscVrPpSrclSrProvTable, mscVrBrNsAteComponentName=mscVrBrNsAteComponentName, mscVrPpSrBpLargestFrame=mscVrPpSrBpLargestFrame, mscVrBrTbSteAtgtValue=mscVrBrTbSteAtgtValue, mscVrPpTbseProvEntry=mscVrPpTbseProvEntry, mscVrPpTbpDomainNum=mscVrPpTbpDomainNum, mscVrBrSrbLtePeerMACAddress=mscVrBrSrbLtePeerMACAddress, mscVrPpSrsgProvTable=mscVrPpSrsgProvTable, mscVrPpTbclPathCostMethod=mscVrPpTbclPathCostMethod, mscVrPpTbpComponentName=mscVrPpTbpComponentName, mscVrPpSrBpTinygramFramesOut=mscVrPpSrBpTinygramFramesOut, mscVrPpTbsePortStateStpControl=mscVrPpTbsePortStateStpControl, mscVrPpSrtBpBlockingDiscards=mscVrPpSrtBpBlockingDiscards, mscVrPpSrclBridgeNum=mscVrPpSrclBridgeNum, mscVrBrTbStpProvEntry=mscVrBrTbStpProvEntry, mscVrBrTbFteOperEntry=mscVrBrTbFteOperEntry, mscVrPpTbclStateEntry=mscVrPpTbclStateEntry, mscVrPpSrBpRowStatus=mscVrPpSrBpRowStatus, mscVrPpSrsgPathCostOper=mscVrPpSrsgPathCostOper, mscVrPpSrBpSteInFrames=mscVrPpSrBpSteInFrames, mscVrPpSrtBpSteRdLimit=mscVrPpSrtBpSteRdLimit, mscVrPpSrBpSrProvEntry=mscVrPpSrBpSrProvEntry, mscVrBrTbSteRowStatus=mscVrBrTbSteRowStatus, 
mscVrPpTbclDownTime=mscVrPpTbclDownTime, mscVrPpSrtBpStpProvEntry=mscVrPpSrtBpStpProvEntry, mscVrPpFddiETBNsProvEntry=mscVrPpFddiETBNsProvEntry, mscVrPpTbclDesignatedPort=mscVrPpTbclDesignatedPort, mscVrPpSrsgApeInFrames=mscVrPpSrsgApeInFrames, mscVrPpSrtBpTranslateIpx=mscVrPpSrtBpTranslateIpx, mscVrPpSrBpDIProvEntry=mscVrPpSrBpDIProvEntry, mscVrBrTbFteRowStatusEntry=mscVrBrTbFteRowStatusEntry, mscVrBrTbFteRowStatus=mscVrBrTbFteRowStatus, mscVrPpSrtBpDesignatedCost=mscVrPpSrtBpDesignatedCost, mscVrPpSrtBpNsComponentName=mscVrPpSrtBpNsComponentName, mscVrPpSrseMtuExceededDiscards=mscVrPpSrseMtuExceededDiscards, mscVrBrTbFteStatus=mscVrBrTbFteStatus, mscVrPpTbsgBadAbstractDiscards=mscVrPpTbsgBadAbstractDiscards, mscVrPpTbclStateTable=mscVrPpTbclStateTable, mscVrPpSrtBpPortNum=mscVrPpSrtBpPortNum, mscVrPpSrclPathCost=mscVrPpSrclPathCost, mscVrPpTbpStatsEntry=mscVrPpTbpStatsEntry, mscVrPpSrBpDupSegmentDiscards=mscVrPpSrBpDupSegmentDiscards, mscVrPpSrsgBridgePortType=mscVrPpSrsgBridgePortType, mscVrPpSrtBpPortName=mscVrPpSrtBpPortName, mscVrPpTbseIndex=mscVrPpTbseIndex, mscVrPpSrtBpOutFrames=mscVrPpSrtBpOutFrames, mscVrPpSrtBpStpOperTable=mscVrPpSrtBpStpOperTable, mscVrBrSrbStpRootCost=mscVrBrSrbStpRootCost, mscVrPpSrtBpSecureOption=mscVrPpSrtBpSecureOption, mscVrPpTbsePortNum=mscVrPpTbsePortNum, mscVrPpSrsgDIProvEntry=mscVrPpSrsgDIProvEntry, mscVrBrOperTable=mscVrBrOperTable, mscVrPpSrtBpPathCost=mscVrPpSrtBpPathCost, mscVrBrNsAteProvEntry=mscVrBrNsAteProvEntry, mscVrPpSrsgBridgingMode=mscVrPpSrsgBridgingMode, mscVrBrSrbLteOperTable=mscVrBrSrbLteOperTable, mscVrPpFddiETBNs=mscVrPpFddiETBNs, mscVrPpTbseUsageState=mscVrPpTbseUsageState, mscVrPpSrseInFilterDiscards=mscVrPpSrseInFilterDiscards, mscVrPpFddiETBStpOperEntry=mscVrPpFddiETBStpOperEntry, mscVrPpFddiETBNsOutgoingFilter=mscVrPpFddiETBNsOutgoingFilter, mscVrPpSrtBpMaxInfo=mscVrPpSrtBpMaxInfo, mscVrPpSrseSteRdLimit=mscVrPpSrseSteRdLimit, mscVrPpSrsgForwardTransitions=mscVrPpSrsgForwardTransitions, 
mscVrPpTbsgStateEntry=mscVrPpTbsgStateEntry, mscVrPpSrclDelayExceededDiscards=mscVrPpSrclDelayExceededDiscards, mscVrPpSrclStpOperTable=mscVrPpSrclStpOperTable, mscVrPpFddiETBForwardTransitions=mscVrPpFddiETBForwardTransitions, mscVrPpSrsgDupSegmentDiscards=mscVrPpSrsgDupSegmentDiscards, mscVrPpTbsgDownTime=mscVrPpTbsgDownTime, mscVrPpFddiETBComponentName=mscVrPpFddiETBComponentName, mscVrBrRowStatusTable=mscVrBrRowStatusTable, mscVrBrNsProvEntry=mscVrBrNsProvEntry, mscVrPpTbseDownTime=mscVrPpTbseDownTime, mscVrPpSrBpPortPriority=mscVrPpSrBpPortPriority, mscVrPpSrsgInternalLanId=mscVrPpSrsgInternalLanId, mscVrPpSrseRowStatusTable=mscVrPpSrseRowStatusTable, mscVrPpTbpAdminStatus=mscVrPpTbpAdminStatus, mscVrPpTbpPortName=mscVrPpTbpPortName, mscCBAdminState=mscCBAdminState, mscVrPpSrseLargestFrame=mscVrPpSrseLargestFrame, mscVrBrTbNcFteAddressIndex=mscVrBrTbNcFteAddressIndex, mscVrPpSrclNsRowStatus=mscVrPpSrclNsRowStatus, mscVrPpTbclProvEntry=mscVrPpTbclProvEntry, mscVrPpTbseStpProvEntry=mscVrPpTbseStpProvEntry, mscVrPpSrBpStpOperEntry=mscVrPpSrBpStpOperEntry, mscVrPpSrclUsageState=mscVrPpSrclUsageState, mscVrPpSrtBpDynamicEntryDiscards=mscVrPpSrtBpDynamicEntryDiscards, mscVrPpSrsgOperEntry=mscVrPpSrsgOperEntry, mscVrPpSrclSnmpOperStatus=mscVrPpSrclSnmpOperStatus, mscVrBrNsRowStatusEntry=mscVrBrNsRowStatusEntry, mscVrPpSrseBridgeNum=mscVrPpSrseBridgeNum, mscVrPpSrtBpDIProvTable=mscVrPpSrtBpDIProvTable, mscVrPpSrsgConvertArpMacAddress=mscVrPpSrsgConvertArpMacAddress, mscVrPpTbsgMaxInfo=mscVrPpTbsgMaxInfo, mscVrPpSrclRowStatus=mscVrPpSrclRowStatus, mscVrPpSrseComponentName=mscVrPpSrseComponentName, mscVrPpTbpPathCost=mscVrPpTbpPathCost, mscVrPpTbclBadAbstractDiscards=mscVrPpTbclBadAbstractDiscards, mscVrPpSrclPathCostMethod=mscVrPpSrclPathCostMethod, mscVrBrTbProvTable=mscVrBrTbProvTable, mscVrPpTbsgStpOperEntry=mscVrPpTbsgStpOperEntry, mscVrPpSrsgHopCountExceededDiscards=mscVrPpSrsgHopCountExceededDiscards, mscVrBrSrbStatsTable=mscVrBrSrbStatsTable, 
mscVrBrTbFteAddressIndex=mscVrBrTbFteAddressIndex, mscVrBrTbNcFtePeerAddressInfo=mscVrBrTbNcFtePeerAddressInfo, mscPBRowStatusTable=mscPBRowStatusTable, mscVrPpSrtBpServiceClass=mscVrPpSrtBpServiceClass, mscVrPpSrtBpStpTypeOper=mscVrPpSrtBpStpTypeOper, mscVrPpSrtBpTinygramFramesOut=mscVrPpSrtBpTinygramFramesOut, mscVrBrAdminStatus=mscVrBrAdminStatus, mscVrBrOperEntry=mscVrBrOperEntry, mscVrPpSrclProvEntry=mscVrPpSrclProvEntry, mscVrPpTbclPreserveDomain=mscVrPpTbclPreserveDomain, mscVrPpSrBpAdminStatus=mscVrPpSrBpAdminStatus, mscCBIfEntryEntry=mscCBIfEntryEntry, mscVrPpFddiETBStatsEntry=mscVrPpFddiETBStatsEntry, mscVrPpSrtBpSteOutFrames=mscVrPpSrtBpSteOutFrames, mscVrPpFddiETBIfIndex=mscVrPpFddiETBIfIndex, mscPBUsageState=mscPBUsageState, mscVrPpSrtBpProvEntry=mscVrPpSrtBpProvEntry, mscVrPpSrseForwardTransitions=mscVrPpSrseForwardTransitions, mscVrPpTbpNsStorageType=mscVrPpTbpNsStorageType, mscVrPpFddiETBAdminState=mscVrPpFddiETBAdminState, mscVrPpSrseStpProvTable=mscVrPpSrseStpProvTable, mscVrPpSrseDesignatedPort=mscVrPpSrseDesignatedPort, mscVrPpTbclTbProvTable=mscVrPpTbclTbProvTable, mscVrPpFddiETBUnicastNoMatches=mscVrPpFddiETBUnicastNoMatches, mscVrPpTbseStpOperEntry=mscVrPpTbseStpOperEntry, mscVrPpSrsgComponentName=mscVrPpSrsgComponentName, mscVrPpSrsgStpProvTable=mscVrPpSrsgStpProvTable, mscVrBrOperationalState=mscVrBrOperationalState, mscVrBrSrbLteRowStatusEntry=mscVrBrSrbLteRowStatusEntry, mscPBOperTable=mscPBOperTable, mscVrBrSrbLteRowStatus=mscVrBrSrbLteRowStatus, mscVrPpTbsgDynamicEntryDiscards=mscVrPpTbsgDynamicEntryDiscards, mscVrPpTbseSecureOption=mscVrPpTbseSecureOption, mscVrBrPteRowStatusEntry=mscVrBrPteRowStatusEntry, mscVrPpSrseSteInFrames=mscVrPpSrseSteInFrames, mscPBIfEntryTable=mscPBIfEntryTable, mscVrBrTbNcFteRowStatus=mscVrBrTbNcFteRowStatus, mscVrPpTbclPortNum=mscVrPpTbclPortNum, mscVrPpSrtBpPreserveDomain=mscVrPpSrtBpPreserveDomain, mscVrBrSrbStpComponentName=mscVrBrSrbStpComponentName, mscVrBrTbStpHelloTime=mscVrBrTbStpHelloTime, 
mscVrPpTbpStpTypeOper=mscVrPpTbpStpTypeOper, mscVrPpSrseTranslateIpx=mscVrPpSrseTranslateIpx, mscVrPpSrtBpStateTable=mscVrPpSrtBpStateTable, mscVrPpSrBpOperationalState=mscVrPpSrBpOperationalState, mscVrPpSrseBridgingMode=mscVrPpSrseBridgingMode, mscVrPpTbsgOutFilterDiscards=mscVrPpTbsgOutFilterDiscards, mscVrPpSrseIfIndex=mscVrPpSrseIfIndex, mscVrPpSrBpNsComponentName=mscVrPpSrBpNsComponentName, mscVrPpTbsgOutFrames=mscVrPpTbsgOutFrames, mscVrPpTbclDIProvEntry=mscVrPpTbclDIProvEntry, mscVrPpTbpTinygramFramesIn=mscVrPpTbpTinygramFramesIn, mscVrPpSrsgOperTable=mscVrPpSrsgOperTable, mscVrPpTbclDIProvTable=mscVrPpTbclDIProvTable, mscVrBrTbSteRowStatusTable=mscVrBrTbSteRowStatusTable, mscVrPpSrtBpNsIncomingFilter=mscVrPpSrtBpNsIncomingFilter, mscVrBrTbStorageType=mscVrBrTbStorageType, mscVrPpSrseLanIdMismatches=mscVrPpSrseLanIdMismatches, mscVrPpTbclOperStatusEntry=mscVrPpTbclOperStatusEntry, mscVrPpFddiETBServiceClass=mscVrPpFddiETBServiceClass, mscVrPpFddiETBOperationalState=mscVrPpFddiETBOperationalState, mscVrPpSrtBpStateEntry=mscVrPpSrtBpStateEntry, mscVrPpSrBpStorageType=mscVrPpSrBpStorageType, mscVrPpTbpInDiscards=mscVrPpTbpInDiscards, mscVrPpSrtBpNsStorageType=mscVrPpSrtBpNsStorageType, mscVrPpFddiETBNsRowStatusEntry=mscVrPpFddiETBNsRowStatusEntry, mscVrBrSrbStpRowStatusEntry=mscVrBrSrbStpRowStatusEntry, mscVrPpSrclStateEntry=mscVrPpSrclStateEntry, mscVrPpTbseDIProvTable=mscVrPpTbseDIProvTable, mscVrPpTbclAdminState=mscVrPpTbclAdminState, mscVrPpSrtBpNs=mscVrPpSrtBpNs, mscVrPpTbsePathCostMethod=mscVrPpTbsePathCostMethod, mscVrPpSrsgDomainNum=mscVrPpSrsgDomainNum, mscVrPpTbclIndex=mscVrPpTbclIndex, mscVrPpSrBpServiceClass=mscVrPpSrBpServiceClass, mscVrPpTbclStatsTable=mscVrPpTbclStatsTable, mscVrPpTbclDomainNum=mscVrPpTbclDomainNum, mscVrPpTbpStateEntry=mscVrPpTbpStateEntry, mscVrPpSrtBpDomainNum=mscVrPpSrtBpDomainNum, mscVrPpTbsgTranslateIpx=mscVrPpTbsgTranslateIpx, mscVrBrSrbLteComponentName=mscVrBrSrbLteComponentName, 
mscVrPpTbseForwardTransitions=mscVrPpTbseForwardTransitions, mscVrPpSrclNsRowStatusEntry=mscVrPpSrclNsRowStatusEntry, mscVrPpSrsgDynamicDiscards=mscVrPpSrsgDynamicDiscards, mscVrPpFddiETBStpTypeProv=mscVrPpFddiETBStpTypeProv, mscVrPpTbsgPathCost=mscVrPpTbsgPathCost, mscVrPpTbse=mscVrPpTbse, mscVrPpFddiETBOperStatusTable=mscVrPpFddiETBOperStatusTable, mscVrPpTbpNsIncomingFilter=mscVrPpTbpNsIncomingFilter, mscVrPpSrBpLanId=mscVrPpSrBpLanId, mscVrPpTbseOperStatusTable=mscVrPpTbseOperStatusTable)
mibBuilder.exportSymbols("Nortel-MsCarrier-MscPassport-BridgeMIB", mscVrBrStateTable=mscVrBrStateTable, mscVrBrType=mscVrBrType, mscVrPpSrclStatsTable=mscVrPpSrclStatsTable, mscVrPpSrsgSegmentMismatchDiscards=mscVrPpSrsgSegmentMismatchDiscards, mscVrPpTbpDIProvTable=mscVrPpTbpDIProvTable, mscVrPpSrBpHopCountExceededDiscards=mscVrPpSrBpHopCountExceededDiscards, mscVrPpSrsgBridgePortConfig=mscVrPpSrsgBridgePortConfig, mscCBOperStatusTable=mscCBOperStatusTable, mscVrPpSrseStateTable=mscVrPpSrseStateTable, mscVrPpFddiETBNsComponentName=mscVrPpFddiETBNsComponentName, mscVrPpSrse=mscVrPpSrse, mscVrPpTbsgIfIndex=mscVrPpTbsgIfIndex, mscVrBrTbStpBridgeForwardDelay=mscVrBrTbStpBridgeForwardDelay, mscVrBrTbSteProvEntry=mscVrBrTbSteProvEntry, mscVrPpSrclRowStatusEntry=mscVrPpSrclRowStatusEntry, mscVrBrSrbIndex=mscVrBrSrbIndex, mscVrPpTbclFragmentIp=mscVrPpTbclFragmentIp, mscVrPpSrsePortNum=mscVrPpSrsePortNum, mscVrPpSrtBpStatsTable=mscVrPpSrtBpStatsTable, mscVrBrSrb=mscVrBrSrb, mscVrPpFddiETBDownTime=mscVrPpFddiETBDownTime, mscVrBrSrbLteLanIdIndex=mscVrBrSrbLteLanIdIndex, mscVrPpSrseSrStatsTable=mscVrPpSrseSrStatsTable, mscVrBrSrbStpHoldTime=mscVrBrSrbStpHoldTime, mscVrPpTbsg=mscVrPpTbsg, mscVrPpTbclDesignatedBridge=mscVrPpTbclDesignatedBridge, mscVrPpSrsgSnmpOperStatus=mscVrPpSrsgSnmpOperStatus, mscVrPpFddiETBBridgePortType=mscVrPpFddiETBBridgePortType, mscVrPpFddiETBOutFilterDiscards=mscVrPpFddiETBOutFilterDiscards, mscVrPpSrclNsProvTable=mscVrPpSrclNsProvTable, mscVrPpTbsgStaticEntryDiscards=mscVrPpTbsgStaticEntryDiscards, mscVrPpSrseDesignatedRoot=mscVrPpSrseDesignatedRoot, mscVrPpTbseComponentName=mscVrPpTbseComponentName, mscVrBrPteLanId=mscVrBrPteLanId, mscVrPpTbclOperationalState=mscVrPpTbclOperationalState, mscVrPpSrclDupLanIdOrTreeErrors=mscVrPpSrclDupLanIdOrTreeErrors, mscVrPpSrBpAdminState=mscVrPpSrBpAdminState, mscVrBrSrbLte=mscVrBrSrbLte, mscVrPpSrtBpDupSegmentDiscards=mscVrPpSrtBpDupSegmentDiscards, mscVrPpTbseStpTypeOper=mscVrPpTbseStpTypeOper, 
mscVrPpSrclPortPriority=mscVrPpSrclPortPriority, mscPBAdminState=mscPBAdminState, mscVrPpSrsgStateEntry=mscVrPpSrsgStateEntry, mscVrPpSrtBpOperationalState=mscVrPpSrtBpOperationalState, mscVrPpSrBpPathCostOper=mscVrPpSrBpPathCostOper, mscVrPpTbclIfIndex=mscVrPpTbclIfIndex, mscVrPpSrtBpSrProvTable=mscVrPpSrtBpSrProvTable, mscVrBrBridgeAddress=mscVrBrBridgeAddress, mscVrBrRowStatusEntry=mscVrBrRowStatusEntry, mscVrPpSrBpStpProvEntry=mscVrPpSrBpStpProvEntry, mscVrPpSrtBpStatsEntry=mscVrPpSrtBpStatsEntry, mscCBAdminControlTable=mscCBAdminControlTable, mscVrPpTbsgTbOperTable=mscVrPpTbsgTbOperTable, mscVrPpTbseServiceClass=mscVrPpTbseServiceClass, mscVrPpSrseStpProvEntry=mscVrPpSrseStpProvEntry, mscVrPpSrsgOperationalState=mscVrPpSrsgOperationalState, mscPBSnmpOperStatus=mscPBSnmpOperStatus, mscVrPpSrBpDownTime=mscVrPpSrBpDownTime, mscVrPpSrBpSegmentMismatchDiscards=mscVrPpSrBpSegmentMismatchDiscards, mscVrPpSrBpProvEntry=mscVrPpSrBpProvEntry, mscVrPpTbpBridgingMode=mscVrPpTbpBridgingMode, mscVrPpSrBpExploreFrameTreatment=mscVrPpSrBpExploreFrameTreatment, mscVrPpTbsgStpProvEntry=mscVrPpTbsgStpProvEntry, mscVrPpTbpServiceClass=mscVrPpTbpServiceClass, mscPBOperEntry=mscPBOperEntry, mscVrPpFddiETBFragmentIp=mscVrPpFddiETBFragmentIp, mscVrPpSrseHopCount=mscVrPpSrseHopCount, mscVrPpSrsgDelayExceededDiscards=mscVrPpSrsgDelayExceededDiscards, mscVrPpSrtBpForwardTransitions=mscVrPpSrtBpForwardTransitions, mscVrPpFddiETBStpProvTable=mscVrPpFddiETBStpProvTable, mscVrPpSrBpPreserveDomain=mscVrPpSrBpPreserveDomain, mscVrPpSrBpOutFilterDiscards=mscVrPpSrBpOutFilterDiscards, mscVrPpTbsgPathCostMethod=mscVrPpTbsgPathCostMethod, mscVrPpSrclNs=mscVrPpSrclNs, mscVrBrTbProvEntry=mscVrBrTbProvEntry, mscVrBrTbStpStpMode=mscVrBrTbStpStpMode, mscVrPpSrBpHopCount=mscVrPpSrBpHopCount, mscVrBrTbStpDesignatedRoot=mscVrBrTbStpDesignatedRoot, mscVrPpSrsgBadAbstractDiscards=mscVrPpSrsgBadAbstractDiscards, mscVrPpTbsgStpPortState=mscVrPpTbsgStpPortState, 
mscVrPpSrclSpecOutFrames=mscVrPpSrclSpecOutFrames, mscVrPpTbpPortNum=mscVrPpTbpPortNum, mscVrPpSrsgTinygramFramesIn=mscVrPpSrsgTinygramFramesIn, mscCBOperTable=mscCBOperTable, mscVrPpSrBpRowStatusTable=mscVrPpSrBpRowStatusTable, mscCBMpEntry=mscCBMpEntry, mscVrPpTbseLearningDiscards=mscVrPpTbseLearningDiscards, mscVrPpSrcl=mscVrPpSrcl, mscVrPpSrBpSrProvTable=mscVrPpSrBpSrProvTable, mscVrPpSrtBpRowStatus=mscVrPpSrtBpRowStatus, mscVrPpSrtBpOperStatusTable=mscVrPpSrtBpOperStatusTable, mscCBRowStatusTable=mscCBRowStatusTable, mscVrBrSrbStpTimeSinceTopologyChange=mscVrBrSrbStpTimeSinceTopologyChange, mscVrPpSrclOperEntry=mscVrPpSrclOperEntry, mscVrPpSrclTranslateIpx=mscVrPpSrclTranslateIpx, mscVrPpFddiETBDesignatedRoot=mscVrPpFddiETBDesignatedRoot, mscVrPpSrtBpHopCountExceededDiscards=mscVrPpSrtBpHopCountExceededDiscards, mscVrPpSrseDIProvTable=mscVrPpSrseDIProvTable, mscVrPpSrBpDesignatedRoot=mscVrPpSrBpDesignatedRoot, mscVrPpTbsgDesignatedRoot=mscVrPpTbsgDesignatedRoot, mscVrBrSrbStpRowStatus=mscVrBrSrbStpRowStatus, mscVrPpSrBpNsStorageType=mscVrPpSrBpNsStorageType, mscVrPpSrseDelayExceededDiscards=mscVrPpSrseDelayExceededDiscards, mscVrBrSrbStorageType=mscVrBrSrbStorageType, mscVrBrTbFteComponentName=mscVrBrTbFteComponentName, mscVrPpSrclServiceClass=mscVrPpSrclServiceClass, mscVrPpSrBpSnmpOperStatus=mscVrPpSrBpSnmpOperStatus, mscVrPpSrBp=mscVrPpSrBp, mscVrPpSrsgSrProvTable=mscVrPpSrsgSrProvTable, mscVrPpSrclStpOperEntry=mscVrPpSrclStpOperEntry, mscVrPpSrtBpInFilterDiscards=mscVrPpSrtBpInFilterDiscards, mscVrPpSrclOutFilterDiscards=mscVrPpSrclOutFilterDiscards, mscVrPpSrsgMtuExceededDiscards=mscVrPpSrsgMtuExceededDiscards, mscVrBrPteSpOperTable=mscVrBrPteSpOperTable, mscVrPpTbpNsRowStatusTable=mscVrPpTbpNsRowStatusTable, mscVrBrTbStpTopologyChanges=mscVrBrTbStpTopologyChanges, mscVrPpSrsgStatsEntry=mscVrPpSrsgStatsEntry, mscVrPpSrtBpDupLanIdOrTreeErrors=mscVrPpSrtBpDupLanIdOrTreeErrors, mscVrBrTbTotalForwardingTableEntries=mscVrBrTbTotalForwardingTableEntries, 
mscVrPpTbclSnmpOperStatus=mscVrPpTbclSnmpOperStatus, mscVrPpSrseBridgePortType=mscVrPpSrseBridgePortType, mscVrPpTbsgPortNum=mscVrPpTbsgPortNum, mscVrPpSrclBridgePortConfig=mscVrPpSrclBridgePortConfig, mscVrPpTbpBadAbstractDiscards=mscVrPpTbpBadAbstractDiscards, mscVrBrTbFteAgeOfEntry=mscVrBrTbFteAgeOfEntry, mscVrPpTbpDesignatedBridge=mscVrPpTbpDesignatedBridge, mscVrPpTbseRowStatus=mscVrPpTbseRowStatus, mscVrPpSrsgSteInFrames=mscVrPpSrsgSteInFrames, mscVrPpTbseOperEntry=mscVrPpTbseOperEntry, mscVrPpSrsgAdminState=mscVrPpSrsgAdminState, mscVrPpTbclNsRowStatusEntry=mscVrPpTbclNsRowStatusEntry, mscVrPpSrseStpPortState=mscVrPpSrseStpPortState, mscVrPpSrBpPortNum=mscVrPpSrBpPortNum, mscVrPpSrBpSteRdLimit=mscVrPpSrBpSteRdLimit, mscVrPpSrtBpSteInFrames=mscVrPpSrtBpSteInFrames, mscVrPpTbseInDiscards=mscVrPpTbseInDiscards, mscVrBrTbSteAtgtRowStatus=mscVrBrTbSteAtgtRowStatus, mscVrPpSrtBpStpProvTable=mscVrPpSrtBpStpProvTable, mscVrPpSrsgLanId=mscVrPpSrsgLanId, mscVrBrTbSte=mscVrBrTbSte, mscVrPpTbpNsIndex=mscVrPpTbpNsIndex, mscVrPpSrtBpApeOutFrames=mscVrPpSrtBpApeOutFrames, mscVrPpSrclStpProvTable=mscVrPpSrclStpProvTable, mscVrPpFddiETBDesignatedCost=mscVrPpFddiETBDesignatedCost, mscVrPpSrclTinygramFramesIn=mscVrPpSrclTinygramFramesIn, mscVrPpSrseDesignatedCost=mscVrPpSrseDesignatedCost, mscVrPpSrtBpDesignatedRoot=mscVrPpSrtBpDesignatedRoot, mscVrPpFddiETBRowStatus=mscVrPpFddiETBRowStatus, mscVrBrTbStpProvTable=mscVrBrTbStpProvTable, mscVrPpSrtBpBadVerifyDiscards=mscVrPpSrtBpBadVerifyDiscards, bridgeGroupCA02=bridgeGroupCA02, mscVrPpSrsgPortStateStpControl=mscVrPpSrsgPortStateStpControl, mscVrPpFddiETBTbOperEntry=mscVrPpFddiETBTbOperEntry, mscVrPpTbpOutFilterDiscards=mscVrPpTbpOutFilterDiscards, mscVrPpTbseConvertArpMacAddress=mscVrPpTbseConvertArpMacAddress, mscVrBrNsAteStorageType=mscVrBrNsAteStorageType, mscVrPpSrsgLargestFrame=mscVrPpSrsgLargestFrame, mscVrPpSrclInFilterDiscards=mscVrPpSrclInFilterDiscards, mscVrPpSrclIfIndex=mscVrPpSrclIfIndex, 
mscVrPpFddiETBDIProvTable=mscVrPpFddiETBDIProvTable, mscVrPpFddiETBNsRowStatusTable=mscVrPpFddiETBNsRowStatusTable, mscVrPpSrBpProvTable=mscVrPpSrBpProvTable, mscVrPpSrsgDownTime=mscVrPpSrsgDownTime, mscVrPpSrBpDelayExceededDiscards=mscVrPpSrBpDelayExceededDiscards, mscVrPpTbseProvTable=mscVrPpTbseProvTable, mscVrPpTbpRowStatusTable=mscVrPpTbpRowStatusTable, mscVrPpTbclInFrames=mscVrPpTbclInFrames, mscVrPpFddiETBDelayExceededDiscards=mscVrPpFddiETBDelayExceededDiscards, mscPBSnmpAdminStatus=mscPBSnmpAdminStatus, mscVrPpFddiETBTbProvTable=mscVrPpFddiETBTbProvTable, mscVrPpSrsgStpProvEntry=mscVrPpSrsgStpProvEntry, mscVrPpSrclSteInFrames=mscVrPpSrclSteInFrames, mscVrPpSrseSegmentMismatchDiscards=mscVrPpSrseSegmentMismatchDiscards, mscVrPpSrtBpSrStatsTable=mscVrPpSrtBpSrStatsTable, mscVrPpTbclStpTypeOper=mscVrPpTbclStpTypeOper, mscVrPpSrBpNsRowStatusEntry=mscVrPpSrBpNsRowStatusEntry, mscVrPpTbpIndex=mscVrPpTbpIndex, mscPBMpTable=mscPBMpTable, mscVrPpTbseOutFilterDiscards=mscVrPpTbseOutFilterDiscards, mscVrPpSrseExploreFrameTreatment=mscVrPpSrseExploreFrameTreatment, mscVrBrNsAteSecondMacAddress=mscVrBrNsAteSecondMacAddress, mscVrPpTbclBlockingDiscards=mscVrPpTbclBlockingDiscards, mscVrPpSrBpNsIndex=mscVrPpSrBpNsIndex, mscVrPpTbseDelayExceededDiscards=mscVrPpTbseDelayExceededDiscards, mscVrPpSrsgBlockingDiscards=mscVrPpSrsgBlockingDiscards, mscVrPpTbsgStorageType=mscVrPpTbsgStorageType, mscCBOperationalState=mscCBOperationalState, mscVrPpFddiETBBlockingDiscards=mscVrPpFddiETBBlockingDiscards, bridgeMIB=bridgeMIB, mscVrPpTbclConvertArpMacAddress=mscVrPpTbclConvertArpMacAddress, mscVrPpSrseBadAbstractDiscards=mscVrPpSrseBadAbstractDiscards, mscVrPpSrseUsageState=mscVrPpSrseUsageState, mscVrPpFddiETBSnmpOperStatus=mscVrPpFddiETBSnmpOperStatus, mscVrPpSrBpBadAbstractDiscards=mscVrPpSrBpBadAbstractDiscards, mscVrPpTbseDesignatedCost=mscVrPpTbseDesignatedCost, mscVrPpTbseUpTime=mscVrPpTbseUpTime, mscVrPpSrclDIProvTable=mscVrPpSrclDIProvTable, 
mscVrBrNsAteSecondMacAddressMask=mscVrBrNsAteSecondMacAddressMask, mscVrPpSrclNsIncomingFilter=mscVrPpSrclNsIncomingFilter, mscVrBrTbSteAtgtEntry=mscVrBrTbSteAtgtEntry, mscVrBrSrbStpOperTable=mscVrBrSrbStpOperTable, mscVrPpSrBpPortName=mscVrPpSrBpPortName, mscVrPpSrBpForwardTransitions=mscVrPpSrBpForwardTransitions, mscVrPpSrBpOperStatusEntry=mscVrPpSrBpOperStatusEntry, mscVrPpSrseStpTypeProv=mscVrPpSrseStpTypeProv, mscVrPpSrseProvTable=mscVrPpSrseProvTable, mscVrPpSrtBpProvTable=mscVrPpSrtBpProvTable, mscVrPpTbsgUnicastNoMatches=mscVrPpTbsgUnicastNoMatches, mscVrPpSrBpNsProvTable=mscVrPpSrBpNsProvTable, mscVrPpFddiETBRowStatusTable=mscVrPpFddiETBRowStatusTable, mscVrPpSrsgRowStatus=mscVrPpSrsgRowStatus, mscVrBrNs=mscVrBrNs, mscVrPpSrBpOperStatusTable=mscVrPpSrBpOperStatusTable, mscVrBrNsComponentName=mscVrBrNsComponentName, mscVrPpTbclStaticEntryDiscards=mscVrPpTbclStaticEntryDiscards, mscVrPpTbpConvertArpMacAddress=mscVrPpTbpConvertArpMacAddress, mscVrPpTbclServiceClass=mscVrPpTbclServiceClass, mscVrPpSrBpConvertArpMacAddress=mscVrPpSrBpConvertArpMacAddress, mscVrPpSrtBpNsOutgoingFilter=mscVrPpSrtBpNsOutgoingFilter, mscVrPpTbsgDIProvTable=mscVrPpTbsgDIProvTable, mscVrPpSrsgSpecInFrames=mscVrPpSrsgSpecInFrames, mscVrBrPteDomainNumIndex=mscVrBrPteDomainNumIndex, mscVrPpSrBpBridgingMode=mscVrPpSrBpBridgingMode, mscVrBrTbRowStatusEntry=mscVrBrTbRowStatusEntry, mscPBOperStatusEntry=mscPBOperStatusEntry, mscVrPpSrtBpUnicastNoMatches=mscVrPpSrtBpUnicastNoMatches, mscVrPpSrsgSrStatsEntry=mscVrPpSrsgSrStatsEntry, mscVrPpFddiETBTbOperTable=mscVrPpFddiETBTbOperTable, mscVrPpSrBpUpTime=mscVrPpSrBpUpTime, mscVrPpSrBpDesignatedPort=mscVrPpSrBpDesignatedPort, mscVrBrUsageState=mscVrBrUsageState, mscVrPpFddiETBPreserveDomain=mscVrPpFddiETBPreserveDomain, mscVrPpSrsgSteSpanMode=mscVrPpSrsgSteSpanMode, mscVrPpTbsgStpProvTable=mscVrPpTbsgStpProvTable, mscVrBrSrbStpStpMode=mscVrBrSrbStpStpMode, mscVrPpSrclLanIdMismatches=mscVrPpSrclLanIdMismatches, 
mscVrBrSrbLteDomainNumIndex=mscVrBrSrbLteDomainNumIndex, mscVrPpTbpUsageState=mscVrPpTbpUsageState, mscVrBrNsAteDomainNum=mscVrBrNsAteDomainNum, mscVrPpSrBpStpTypeProv=mscVrPpSrBpStpTypeProv, mscVrPpSrclSteRdLimit=mscVrPpSrclSteRdLimit, mscVrPpSrseOperStatusTable=mscVrPpSrseOperStatusTable, mscVrPpSrtBpHopCount=mscVrPpSrtBpHopCount, mscVrPpSrsgStatsTable=mscVrPpSrsgStatsTable, mscVrPpFddiETBPortStateStpControl=mscVrPpFddiETBPortStateStpControl, mscVrPpSrsgSteRdLimit=mscVrPpSrsgSteRdLimit, mscVrPpSrclNsIndex=mscVrPpSrclNsIndex, mscVrPpSrsgStpPortState=mscVrPpSrsgStpPortState, mscVrPpSrsgStpTypeOper=mscVrPpSrsgStpTypeOper, mscVrPpTbpNsOutgoingFilter=mscVrPpTbpNsOutgoingFilter, mscVrPpSrBpPortStateStpControl=mscVrPpSrBpPortStateStpControl, mscVrPpSrclPathCostOper=mscVrPpSrclPathCostOper, mscVrPpTbsgOperationalState=mscVrPpTbsgOperationalState, mscVrPpSrtBpBridgingMode=mscVrPpSrtBpBridgingMode)
mibBuilder.exportSymbols("Nortel-MsCarrier-MscPassport-BridgeMIB", mscVrPpSrsePathCostMethod=mscVrPpSrsePathCostMethod, mscVrPpSrseSrProvTable=mscVrPpSrseSrProvTable, mscVrPpTbseIfIndex=mscVrPpTbseIfIndex, mscVrPpSrtBpOperStatusEntry=mscVrPpSrtBpOperStatusEntry, mscVrPpSrseRowStatusEntry=mscVrPpSrseRowStatusEntry, mscVrPpSrseStaticDiscards=mscVrPpSrseStaticDiscards, mscVrPpTbseStpTypeProv=mscVrPpTbseStpTypeProv, mscVrPpTbseStateTable=mscVrPpTbseStateTable, mscVrPpTbsgProvTable=mscVrPpTbsgProvTable, mscVrPpTbsgOperStatusEntry=mscVrPpTbsgOperStatusEntry, mscVrBrPte=mscVrBrPte, mscVrBrSrbStpStorageType=mscVrBrSrbStpStorageType, mscVrPpSrBpLanIdMismatches=mscVrPpSrBpLanIdMismatches, mscVrPpFddiETBUpTime=mscVrPpFddiETBUpTime, mscPBMacAddress=mscPBMacAddress, mscVrPpFddiETBNsIndex=mscVrPpFddiETBNsIndex, mscVrPpTbpTbProvEntry=mscVrPpTbpTbProvEntry, mscCBMacAddress=mscCBMacAddress, mscVrPpTbclStorageType=mscVrPpTbclStorageType, mscVrPpSrclStateTable=mscVrPpSrclStateTable, mscVrPpSrclBridgingMode=mscVrPpSrclBridgingMode, mscVrPpSrseUpTime=mscVrPpSrseUpTime, mscVrPpTbpStorageType=mscVrPpTbpStorageType, mscVrPpSrtBpAreRdLimit=mscVrPpSrtBpAreRdLimit, mscVrPpSrseOperStatusEntry=mscVrPpSrseOperStatusEntry, mscVrPpTbsgBridgePortType=mscVrPpTbsgBridgePortType, mscVrBrSrbStpRootPortName=mscVrBrSrbStpRootPortName, mscVrPpTbclOperStatusTable=mscVrPpTbclOperStatusTable, mscVrPpSrtBpBridgePortConfig=mscVrPpSrtBpBridgePortConfig, mscVrPpSrsgIndex=mscVrPpSrsgIndex, mscVrBrTbStpProtocolSpec=mscVrBrTbStpProtocolSpec, mscVrPpFddiETBInDiscards=mscVrPpFddiETBInDiscards, mscVrPpSrtBpIndex=mscVrPpSrtBpIndex, mscVrBrNsFirstFilter=mscVrBrNsFirstFilter, mscVrPpSrclPortName=mscVrPpSrclPortName, mscVrBrTbFwdTableNumEntries=mscVrBrTbFwdTableNumEntries, mscVrBrTbSteComponentName=mscVrBrTbSteComponentName, mscCBUsageState=mscCBUsageState, mscVrPpFddiETBRowStatusEntry=mscVrPpFddiETBRowStatusEntry, mscVrPpSrBpApeOutFrames=mscVrPpSrBpApeOutFrames, 
mscVrPpFddiETBLearningDiscards=mscVrPpFddiETBLearningDiscards, mscVrPpSrclIndex=mscVrPpSrclIndex, mscVrBrTbComponentName=mscVrBrTbComponentName, mscVrPpSrsgStorageType=mscVrPpSrsgStorageType, mscVrBrSrbRowStatusEntry=mscVrBrSrbRowStatusEntry, mscVrPpTbsgAdminState=mscVrPpTbsgAdminState, mscVrBrTbStpBridgeHelloTime=mscVrBrTbStpBridgeHelloTime, mscVrPpSrtBpSrProvEntry=mscVrPpSrtBpSrProvEntry, mscVrPpTbpLearningDiscards=mscVrPpTbpLearningDiscards, mscVrBrNsAte=mscVrBrNsAte, mscVrPpSrclBadAbstractDiscards=mscVrPpSrclBadAbstractDiscards, mscVrBrSrbBridgeLfMode=mscVrBrSrbBridgeLfMode, mscVrBrTbStpRootCost=mscVrBrTbStpRootCost, mscVrBrTbFteDomainNumIndex=mscVrBrTbFteDomainNumIndex, mscVrPpTbsgFragmentIp=mscVrPpTbsgFragmentIp, mscVrBrSrbStpRowStatusTable=mscVrBrSrbStpRowStatusTable, mscVrBrNsRowStatus=mscVrBrNsRowStatus, mscVrPpSrclTinygramFramesOut=mscVrPpSrclTinygramFramesOut, mscVrBrTbNcFteStorageType=mscVrBrTbNcFteStorageType, mscVrBrTbStpStorageType=mscVrBrTbStpStorageType, mscVrPpSrsgExploreFrameTreatment=mscVrPpSrsgExploreFrameTreatment, mscVrPpFddiETBProvEntry=mscVrPpFddiETBProvEntry, mscVrPpSrBpDynamicDiscards=mscVrPpSrBpDynamicDiscards, mscVrPpTbsgPortPriority=mscVrPpTbsgPortPriority, mscVrPpTbsgBadVerifyDiscards=mscVrPpTbsgBadVerifyDiscards, mscVrPpSrclDesignatedBridge=mscVrPpSrclDesignatedBridge, mscVrPpFddiETBTinygramFramesOut=mscVrPpFddiETBTinygramFramesOut, mscVrPpSrsgStpOperTable=mscVrPpSrsgStpOperTable, mscVrBrSrbNumLanIdtEntriesFree=mscVrBrSrbNumLanIdtEntriesFree, mscVrPpTbsgRowStatusTable=mscVrPpTbsgRowStatusTable, mscVrPpFddiETBMtuExceededDiscards=mscVrPpFddiETBMtuExceededDiscards, mscVrPpSrtBpInDiscards=mscVrPpSrtBpInDiscards, mscVrPpSrseSrStatsEntry=mscVrPpSrseSrStatsEntry, mscVrPpTbseFragmentIp=mscVrPpTbseFragmentIp, mscVrPpTbclDynamicEntryDiscards=mscVrPpTbclDynamicEntryDiscards, mscPBComponentName=mscPBComponentName, mscVrPpSrsgOutFilterDiscards=mscVrPpSrsgOutFilterDiscards, mscVrPpSrBpRowStatusEntry=mscVrPpSrBpRowStatusEntry, 
mscVrPpSrsgPreserveDomain=mscVrPpSrsgPreserveDomain, mscVrPpSrtBpPortPriority=mscVrPpSrtBpPortPriority, mscVrPpTbseStaticEntryDiscards=mscVrPpTbseStaticEntryDiscards, mscVrPpFddiETBStateEntry=mscVrPpFddiETBStateEntry, mscVrPpSrtBpLanIdMismatches=mscVrPpSrtBpLanIdMismatches, mscVrBrSrbNumLanIdtEntriesDenied=mscVrBrSrbNumLanIdtEntriesDenied, mscVrBrSrbStpIndex=mscVrBrSrbStpIndex, mscPBAdminControlEntry=mscPBAdminControlEntry, mscVrPpFddiETBStaticEntryDiscards=mscVrPpFddiETBStaticEntryDiscards, mscVrPpSrBpDesignatedBridge=mscVrPpSrBpDesignatedBridge, mscVrPpTbclNsRowStatus=mscVrPpTbclNsRowStatus, mscVrPpTbsgStpOperTable=mscVrPpTbsgStpOperTable, mscVrPpTbclRowStatusEntry=mscVrPpTbclRowStatusEntry, mscVrBrIndex=mscVrBrIndex, mscVrBrNsAteRowStatusEntry=mscVrBrNsAteRowStatusEntry, mscVrPpSrtBpNsRowStatusEntry=mscVrPpSrtBpNsRowStatusEntry, mscVrPpTbsgServiceClass=mscVrPpTbsgServiceClass, mscVrPpSrclSpecInFrames=mscVrPpSrclSpecInFrames, mscVrPpTbpInFrames=mscVrPpTbpInFrames, mscVrPpSrBpStateTable=mscVrPpSrBpStateTable, mscVrPpSrsgProvEntry=mscVrPpSrsgProvEntry, mscVrPpSrBpIndex=mscVrPpSrBpIndex, mscVrPpSrtBpDownTime=mscVrPpSrtBpDownTime, mscVrBrSrbRowStatusTable=mscVrBrSrbRowStatusTable, mscVrPpSrBpMtuExceededDiscards=mscVrPpSrBpMtuExceededDiscards, mscVrPpSrtBpNsRowStatusTable=mscVrPpSrtBpNsRowStatusTable, mscVrPpSrseTinygramFramesOut=mscVrPpSrseTinygramFramesOut, mscVrPpSrclStpProvEntry=mscVrPpSrclStpProvEntry, mscVrBrTbSteReceivePortIndex=mscVrBrTbSteReceivePortIndex, mscVrBrPteStpType=mscVrBrPteStpType, mscVrPpTbpStatsTable=mscVrPpTbpStatsTable, mscVrPpTbclLearningDiscards=mscVrPpTbclLearningDiscards, mscVrPpSrsgAreRdLimit=mscVrPpSrsgAreRdLimit, mscVrPpTbseTranslateIpx=mscVrPpTbseTranslateIpx, mscVrPpSrsgOperStatusTable=mscVrPpSrsgOperStatusTable, mscVrPpFddiETBStpProvEntry=mscVrPpFddiETBStpProvEntry, mscVrPpSrclLargestFrame=mscVrPpSrclLargestFrame, mscVrPpTbseInFrames=mscVrPpTbseInFrames, mscVrPpSrsgFragmentIp=mscVrPpSrsgFragmentIp, 
mscVrPpTbsgTbProvTable=mscVrPpTbsgTbProvTable, mscVrPpSrtBpDesignatedPort=mscVrPpSrtBpDesignatedPort, mscVrPpSrBpDesignatedCost=mscVrPpSrBpDesignatedCost, mscVrBrNsAteRowStatus=mscVrBrNsAteRowStatus, mscVrPpSrtBpLearningDiscards=mscVrPpSrtBpLearningDiscards, mscVrPpTbpUpTime=mscVrPpTbpUpTime, mscVrPpSrsgSteOutFrames=mscVrPpSrsgSteOutFrames, mscVrPpSrseOperTable=mscVrPpSrseOperTable, mscVrPpTbpOperationalState=mscVrPpTbpOperationalState, mscVrBrTbFteStorageType=mscVrBrTbFteStorageType, mscVrPpTbpOperEntry=mscVrPpTbpOperEntry, mscVrPpTbpDesignatedRoot=mscVrPpTbpDesignatedRoot, bridgeGroupCA=bridgeGroupCA, mscVrBrSrbStpOperEntry=mscVrBrSrbStpOperEntry, mscVrPpFddiETBTbProvEntry=mscVrPpFddiETBTbProvEntry, mscVrPpSrsgServiceClass=mscVrPpSrsgServiceClass, mscVrPpTbsgSecureOption=mscVrPpTbsgSecureOption, mscVrPpSrBpTinygramFramesIn=mscVrPpSrBpTinygramFramesIn, mscVrPpSrclAdminState=mscVrPpSrclAdminState, mscVrBrSrbStp=mscVrBrSrbStp, mscVrPpTbseTbProvEntry=mscVrPpTbseTbProvEntry, mscVrPpTbsePathCostOper=mscVrPpTbsePathCostOper, mscVrPpTbclNs=mscVrPpTbclNs, mscVrPpTbclBridgingMode=mscVrPpTbclBridgingMode, mscVrPpSrseOperationalState=mscVrPpSrseOperationalState, mscVrPpTbclOutFrames=mscVrPpTbclOutFrames, mscPBOperStatusTable=mscPBOperStatusTable, bridgeGroupCA02A=bridgeGroupCA02A, mscVrPpFddiETBIndex=mscVrPpFddiETBIndex, mscVrPpSrBpStaticDiscards=mscVrPpSrBpStaticDiscards, mscVrPpTbseStatsTable=mscVrPpTbseStatsTable, mscVrPpTbclNsStorageType=mscVrPpTbclNsStorageType, mscVrPpSrclOperationalState=mscVrPpSrclOperationalState, mscVrPpTbseStpOperTable=mscVrPpTbseStpOperTable, mscVrPpTbsgBridgingMode=mscVrPpTbsgBridgingMode, mscVrBrNsLastFilter=mscVrBrNsLastFilter, mscVrPpTbpInFilterDiscards=mscVrPpTbpInFilterDiscards, mscVrPpSrBpStatsTable=mscVrPpSrBpStatsTable, mscVrPpSrsgRowStatusTable=mscVrPpSrsgRowStatusTable, mscVrPpSrclInternalLanId=mscVrPpSrclInternalLanId, mscVrPpTbpOutFrames=mscVrPpTbpOutFrames, mscVrPpTbsgPathCostOper=mscVrPpTbsgPathCostOper, 
mscVrPpTbclOperTable=mscVrPpTbclOperTable, mscVrPpTbpDesignatedCost=mscVrPpTbpDesignatedCost, mscVrPpSrseStpTypeOper=mscVrPpSrseStpTypeOper, mscVrPpTbseBridgingMode=mscVrPpTbseBridgingMode, mscVrPpTbseStorageType=mscVrPpTbseStorageType, mscVrBrTbSteProvTable=mscVrBrTbSteProvTable, mscVrBrSrbStpProtocolSpec=mscVrBrSrbStpProtocolSpec, mscVrPpSrtBpExploreFrameTreatment=mscVrPpSrtBpExploreFrameTreatment, mscVrPpTbsePortPriority=mscVrPpTbsePortPriority, mscVrBrSrbStpProvTable=mscVrBrSrbStpProvTable, mscVrPpTbclRowStatus=mscVrPpTbclRowStatus, mscVrPpSrclDesignatedCost=mscVrPpSrclDesignatedCost, mscVrPpTbseInFilterDiscards=mscVrPpTbseInFilterDiscards, mscVrPpSrtBpRowStatusEntry=mscVrPpSrtBpRowStatusEntry, mscVrPpTbsePortName=mscVrPpTbsePortName, mscVrPpSrseDupSegmentDiscards=mscVrPpSrseDupSegmentDiscards, mscVrPpTbpPreserveDomain=mscVrPpTbpPreserveDomain, mscVrPpSrsgAdminStatus=mscVrPpSrsgAdminStatus, mscVrPpSrBpOperEntry=mscVrPpSrBpOperEntry, mscCBSnmpOperStatus=mscCBSnmpOperStatus)
| 162.710615 | 12,802 | 0.766016 |
79559e50125c505e05043c46f3ec6ecc79536803 | 11,191 | py | Python | main.py | Mythrillo/discord-bot | 2663c83bc7466ef9d5a1be1916879a942b74dca7 | [
"MIT"
] | null | null | null | main.py | Mythrillo/discord-bot | 2663c83bc7466ef9d5a1be1916879a942b74dca7 | [
"MIT"
] | null | null | null | main.py | Mythrillo/discord-bot | 2663c83bc7466ef9d5a1be1916879a942b74dca7 | [
"MIT"
] | null | null | null | import os
import discord
import io
import aiohttp
from dotenv import load_dotenv
import random
from discord.ext import commands
import bs4 as bs
from urllib.request import urlopen
load_dotenv()  # pull DISCORD_TOKEN (and friends) from a local .env file into the environment
TOKEN = os.getenv("DISCORD_TOKEN")  # bot auth token; None if the .env entry is missing
intents = discord.Intents().all()  # request every gateway intent (members, message content, ...)
bot = commands.Bot(command_prefix="!", intents=intents)  # commands are invoked as "!<name>"
@bot.command(name="olek")
async def on_message(ctx):
    # Joke command: replies and auto-deletes the reply after 5 seconds.
    await ctx.send("to cwel", delete_after=5)
@bot.command(name="besty")
async def on_message(ctx):
    """Fetch a random image from besty.pl and post it to the channel."""
    page = urlopen("https://besty.pl/losuj")
    parsed = bs.BeautifulSoup(page, "html.parser")
    # The second matching <img> on the page is the actual post image.
    images = parsed.find_all("img", {"class": "img-responsive"}, limit=2)
    url = images[1].get("src")
    extension = url.split(".")[-1]
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as resp:
            if resp.status != 200:
                await ctx.send("Nie da się ściagnąć ;(")
                return
            payload = io.BytesIO(await resp.read())
            await ctx.send(file=discord.File(payload, "Super-obrazek." + extension))
@bot.command(name="role")
async def on_message(ctx):
    """Randomly assign the five LoL roles to the members of the caller's voice channel.

    Requires exactly five non-bot members in the channel; each member is
    mentioned with a unique role.
    """
    roles = ["Top", "Mid", "Jungle", "Support", "Marksman"]
    if ctx.author.voice and ctx.author.voice.channel:
        channel = ctx.guild.get_channel(ctx.author.voice.channel.id)
        # Filter bots in a single pass. The old pop-while-indexing loop
        # skipped members after a removal and could raise IndexError.
        channel_members = [member for member in channel.members if not member.bot]
        if len(channel_members) != 5:
            await ctx.send("Nie ma pięciu osób na kanale.")
            return
        for member in channel_members:
            # randrange excludes len(roles); the old randint(0, len(roles))
            # included it and could index past the end of the list.
            r = random.randrange(len(roles))
            await ctx.send(member.mention + ": " + roles[r] + "\n")
            roles.pop(r)
    else:
        await ctx.send("Debilu nie jesteś w kanale dźwiękowym. Weź sie ogarnij.")
@bot.command(name="rule34")
async def on_message(ctx, *args):
    """Post a random image from rule34.xxx, optionally restricted to a tag.

    With no args a random post id is fetched; otherwise the words are joined
    into one tag (spaces -> "_", "&" -> "%20") and a random matching post is
    chosen. If the tag yields no hits, a fuzzy ("~") lookup is attempted.
    """
    if not args:
        r = random.randint(0, 999501)
        query = urlopen("https://rule34.xxx/index.php?page=dapi&s=post&q=index&id=" + str(r))
        soup = bs.BeautifulSoup(query, "html.parser")
        post = soup.find("post")
        if post is None:
            # The randomly chosen id may not exist (e.g. a deleted post).
            await ctx.send("Nie da się ściagnąć ;(")
            return
        image = post.get("file_url")
    else:
        # Normalise the tag identically for one or many words; the old code
        # skipped lower() and "&" escaping in the single-word case.
        tag = "_".join(args).lower()
        tag = tag.replace("&", "%20")
        query = urlopen("https://rule34.xxx/index.php?page=dapi&s=post&q=index&limit=1&tags=" + tag)
        soup = bs.BeautifulSoup(query, "html.parser")
        count = int(soup.find("posts").get("count"))
        if count == 0:
            # Try to find a similar tag with the fuzzy-match suffix.
            await ctx.send("Szukam podobnego tagu.\n")
            tag += "~"
            query = urlopen("https://rule34.xxx/index.php?page=dapi&s=post&q=index&limit=1&tags=" + tag)
            soup = bs.BeautifulSoup(query, "html.parser")
            count = int(soup.find("posts").get("count"))
            if count == 0:
                await ctx.send("Nie ma obrazka z tym tagiem.")
                return
        # Pages hold 100 posts, so the last valid pid is (count - 1) // 100.
        # The old int(count / 100) pointed one page past the end on exact
        # multiples of 100, fetching an empty page and crashing on randint.
        # rule34.xxx does not cap the paging depth, so no extra limit needed.
        pid = random.randint(0, (count - 1) // 100)
        query = urlopen("https://rule34.xxx/index.php?page=dapi&s=post&q=index&pid=" + str(pid)
                        + "&limit=100&tags=" + tag)
        soup = bs.BeautifulSoup(query, "html.parser")
        posts = soup.find_all("post")
        post = random.choice(posts)
        image = post.get("file_url")
    file_type = image.split(".")[-1]
    async with aiohttp.ClientSession() as session:
        async with session.get(image) as resp:
            if resp.status != 200:
                await ctx.send("Nie da się ściagnąć ;(")
                return
            data = io.BytesIO(await resp.read())
            await ctx.send(file=discord.File(data, "Super-obrazek." + file_type))
@bot.command(name="anime")
async def on_message(ctx, *args):
    """Post a random image from gelbooru.com, optionally restricted to a tag.

    With no args gelbooru's own random-post page is scraped; otherwise the
    words are joined into one tag (spaces -> "_", "&" -> "%20") and a random
    matching post is chosen. A "-loli-underage" exclusion is always appended.
    """
    if not args:
        query = urlopen("https://gelbooru.com/index.php?page=post&s=random")
        soup = bs.BeautifulSoup(query, "html.parser")
        image = soup.find(id="image").get("src")
    else:
        # Normalise the tag identically for one or many words; the old code
        # skipped lower() and "&" escaping in the single-word case.
        tag = "_".join(args).lower()
        tag = tag.replace("&", "%20")
        query = urlopen("https://gelbooru.com/index.php?page=dapi&s=post&limit=1&q=index&tags=" + tag + "%20-loli-underage")
        soup = bs.BeautifulSoup(query, "html.parser")
        count = int(soup.find("posts").get("count"))
        if count == 0:
            # Try to find a similar tag with the fuzzy-match suffix.
            await ctx.send("Szukam podobnego tagu.\n")
            tag += "~"
            query = urlopen("https://gelbooru.com/index.php?page=dapi&s=post&limit=1&q=index&tags=" + tag + "%20-loli-underage")
            soup = bs.BeautifulSoup(query, "html.parser")
            count = int(soup.find("posts").get("count"))
            if count == 0:
                await ctx.send("Nie ma obrazka z tym tagiem.")
                return
        # Pages hold 100 posts, so the last valid pid is (count - 1) // 100,
        # capped at 200 because gelbooru limits paging depth. The old
        # int(count / 100) pointed one page past the end on exact multiples
        # of 100, fetching an empty page and crashing on randint.
        pid = random.randint(0, min(200, (count - 1) // 100))
        query = urlopen("https://gelbooru.com/index.php?page=dapi&s=post&q=index&limit=100&pid=" + str(pid)
                        + "&tags=" + tag + "%20-loli-underage")
        soup = bs.BeautifulSoup(query, "html.parser")
        posts = soup.find_all("post")
        post = random.choice(posts)
        image = post.get("file_url")
    file_type = image.split(".")[-1]
    async with aiohttp.ClientSession() as session:
        async with session.get(image) as resp:
            if resp.status != 200:
                await ctx.send("Nie da się ściagnąć ;(")
                return
            data = io.BytesIO(await resp.read())
            await ctx.send(file=discord.File(data, "Super-obrazek." + file_type))
@bot.command(name="safeAnime")
async def on_message(ctx, *args):
    """Post a random (SFW) image from safebooru.org, optionally by tag.

    With no args a random post id is fetched; otherwise the words are joined
    into one tag (spaces -> "_", "&" -> "%20") and a random matching post is
    chosen. If the tag yields no hits, a fuzzy ("~") lookup is attempted.
    """
    if not args:
        r = random.randint(0, 2000000)
        query = urlopen("https://safebooru.org/index.php?page=dapi&s=post&q=index&id=" + str(r))
        soup = bs.BeautifulSoup(query, "html.parser")
        post = soup.find("post")
        if post is None:
            # The randomly chosen id may not exist (e.g. a deleted post).
            await ctx.send("Nie da się ściagnąć ;(")
            return
        image = post.get("file_url")
    else:
        # Normalise the tag identically for one or many words; the old code
        # skipped lower() and "&" escaping in the single-word case.
        tag = "_".join(args).lower()
        tag = tag.replace("&", "%20")
        query = urlopen("https://safebooru.org/index.php?page=dapi&s=post&q=index&limit=1&tags=" + tag)
        soup = bs.BeautifulSoup(query, "html.parser")
        count = int(soup.find("posts").get("count"))
        if count == 0:
            # Try to find a similar tag with the fuzzy-match suffix.
            await ctx.send("Szukam podobnego tagu.\n")
            tag += "~"
            query = urlopen("https://safebooru.org/index.php?page=dapi&s=post&q=index&limit=1&tags=" + tag)
            soup = bs.BeautifulSoup(query, "html.parser")
            count = int(soup.find("posts").get("count"))
            if count == 0:
                await ctx.send("Nie ma obrazka z tym tagiem.")
                return
        # Pages hold 100 posts, so the last valid pid is (count - 1) // 100,
        # capped at 200 because the API refuses deeper pages. The old
        # int(count / 100) pointed one page past the end on exact multiples
        # of 100, fetching an empty page and crashing on randint.
        pid = random.randint(0, min(200, (count - 1) // 100))
        query = urlopen("https://safebooru.org/index.php?page=dapi&s=post&q=index&limit=100&pid=" + str(pid)
                        + "&tags=" + tag)
        soup = bs.BeautifulSoup(query, "html.parser")
        posts = soup.find_all("post")
        post = random.choice(posts)
        image = post.get("file_url")
    file_type = image.split(".")[-1]
    async with aiohttp.ClientSession() as session:
        async with session.get(image) as resp:
            if resp.status != 200:
                await ctx.send("Nie da się ściagnąć ;(")
                return
            data = io.BytesIO(await resp.read())
            await ctx.send(file=discord.File(data, "Super-obrazek." + file_type))
@bot.command(name="generuj")
async def on_message(ctx, number_of_teams=2, game=None):
    """Split the caller's voice-channel members into random teams.

    number_of_teams: how many teams to draw (>= 2).
    game: optional "cs" or "lol" theme for the team names (two teams only);
          otherwise teams are numbered.
    """
    number_of_teams = int(number_of_teams)
    if number_of_teams < 2:
        await ctx.send("Debilu daj więcej drużyn")
    elif ctx.author.voice and ctx.author.voice.channel:
        channel = ctx.guild.get_channel(ctx.author.voice.channel.id)
        # Filter bots in a single pass. The old code collected bot indices
        # and popped them one by one, which shifted the remaining positions
        # and removed the wrong members.
        channel_members = [member for member in channel.members if not member.bot]
        number_of_members = len(channel_members)
        if number_of_teams > number_of_members:
            await ctx.send("Debilu za dużo drużyn a za mało zawodników")
            return
        teams = []
        # team_size members per team; the first r teams get one extra member.
        # When the split is even, r == 0 and no extras are handed out, which
        # matches the original even-split branch exactly.
        team_size, r = divmod(number_of_members, number_of_teams)
        for i in range(number_of_teams):
            team = random.sample(channel_members, team_size)
            channel_members = list(set(channel_members) - set(team))
            if r > 0:
                tmp = random.choice(channel_members)
                team.append(tmp)
                channel_members.remove(tmp)
                r -= 1
            teams.append(team)
        k = 0
        for team in teams:
            if game is None:
                await ctx.send(
                    "Drużyna: " + str(k + 1) + "\n" + "\n".join([i.mention for i in team])
                )
                k += 1
            elif game == "cs" and number_of_teams == 2:
                # k distinguishes the first (T) from the second (CT) team.
                if k == 0:
                    k += 1
                    await ctx.send(
                        "Terroryści: " + "\n" + "\n".join([i.mention for i in team])
                    )
                else:
                    await ctx.send(
                        "Antyterroryści: " + "\n" + "\n".join([i.mention for i in team])
                    )
            elif game == "lol" and number_of_teams == 2:
                if k == 0:
                    k += 1
                    await ctx.send(
                        "Blue side: " + "\n" + "\n".join([i.mention for i in team])
                    )
                else:
                    await ctx.send(
                        "Red side: " + "\n" + "\n".join([i.mention for i in team])
                    )
    else:
        await ctx.send("Debilu nie jesteś w kanale dźwiękowym. Weź sie ogarnij.")
bot.run(TOKEN)  # blocking call: connect to Discord and dispatch events until shutdown
| 40.400722 | 128 | 0.552587 |
79559eafbaf05bc04d0059822f790e2e0f6175ca | 880 | py | Python | LoginPanelTkinter.py | Btt1996/LoginTkinter | 5a09fc698536e01818befe2b4a7ba90c6a5a87c7 | [
"Apache-2.0"
] | null | null | null | LoginPanelTkinter.py | Btt1996/LoginTkinter | 5a09fc698536e01818befe2b4a7ba90c6a5a87c7 | [
"Apache-2.0"
] | null | null | null | LoginPanelTkinter.py | Btt1996/LoginTkinter | 5a09fc698536e01818befe2b4a7ba90c6a5a87c7 | [
"Apache-2.0"
] | null | null | null | from tkinter import *
from functools import partial


def validateLogin(username, password):
    """Print the credentials currently typed into the login form.

    username / password are tkinter StringVars bound to the entry widgets.
    """
    print("username entered :", username.get())
    print("password entered :", password.get())
    return


# window
tkWindow = Tk()
tkWindow.geometry('400x150')
# Fixed: the original closed the string with a typographic quote
# ('Login’), which is a SyntaxError — the script could not even be parsed.
tkWindow.title('Login')

# username label and text entry box
usernameLabel = Label(tkWindow, text="User Name").grid(row=0, column=0)
username = StringVar()
usernameEntry = Entry(tkWindow, textvariable=username).grid(row=0, column=1)

# password label and password entry box (input masked with '*')
passwordLabel = Label(tkWindow, text="Password").grid(row=1, column=0)
password = StringVar()
passwordEntry = Entry(tkWindow, textvariable=password, show='*').grid(row=1, column=1)

# Bind the StringVars into the callback so the button needs no arguments.
validateLogin = partial(validateLogin, username, password)

loginButton = Button(tkWindow, text="Login", command=validateLogin).grid(row=4, column=0)

tkWindow.mainloop()
| 30.344828 | 91 | 0.75 |
79559ebb0f473d00526b66e722668ab527d3da17 | 3,598 | py | Python | tests/python/gaia-ui-tests/gaiatest/tests/functional/clock/test_clock_create_new_alarm.py | DouglasSherk/gaia | ca471df16080c97d0b7c0b830fdbfab3ab065e56 | [
"Apache-2.0"
] | null | null | null | tests/python/gaia-ui-tests/gaiatest/tests/functional/clock/test_clock_create_new_alarm.py | DouglasSherk/gaia | ca471df16080c97d0b7c0b830fdbfab3ab065e56 | [
"Apache-2.0"
] | null | null | null | tests/python/gaia-ui-tests/gaiatest/tests/functional/clock/test_clock_create_new_alarm.py | DouglasSherk/gaia | ca471df16080c97d0b7c0b830fdbfab3ab065e56 | [
"Apache-2.0"
] | null | null | null | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from gaiatest import GaiaTestCase
from gaiatest.apps.clock.app import Clock
from gaiatest.apps.clock.regions.alarm_alert import AlarmAlertScreen
from gaiatest.apps.homescreen.app import Homescreen
class TestClockCreateNewAlarm(GaiaTestCase):
    """UI test: create, label, edit, toggle and trigger a Clock alarm."""

    def setUp(self):
        # Launch the Clock app fresh for every test run.
        GaiaTestCase.setUp(self)
        self.clock = Clock(self.marionette)
        self.clock.launch()

    def test_clock_create_new_alarm(self):
        """ Add an alarm and set label of the new alarm
        https://moztrap.mozilla.org/manage/case/1772/
        https://moztrap.mozilla.org/manage/case/1775/
        """
        # Set the time on the device to 01:00 today, so the +1 minute edit
        # below lands in the near future and the alarm actually fires.
        _seconds_since_epoch = self.marionette.execute_script("""
                var today = new Date();
                var yr = today.getFullYear();
                var mth = today.getMonth();
                var day = today.getDate();
                return new Date(yr, mth, day, 1, 0, 0).getTime();""")
        alarm_label_text = "test4321"

        # get the number of alarms set, before adding the new alarm
        initial_alarms_count = len(self.clock.alarms)

        self.data_layer.set_time(_seconds_since_epoch)

        # create a new alarm with the default values that are available
        new_alarm = self.clock.tap_new_alarm()

        # Ensure label has the default placeholder and text
        self.assertEquals(new_alarm.alarm_label_placeholder, 'Alarm name')

        # set label
        new_alarm.type_alarm_label(alarm_label_text)
        self.clock = new_alarm.tap_done()

        # verify the banner-countdown message appears
        alarm_msg = self.clock.banner_notification
        self.assertTrue('The alarm is set for' in alarm_msg, 'Actual banner message was: "' + alarm_msg + '"')
        self.clock.dismiss_banner()

        # ensure the new alarm has been added and it is displayed
        self.assertTrue(initial_alarms_count < len(self.clock.alarms),
                        'Alarms count did not increment')

        # verify the label of alarm
        alarms = self.clock.alarms
        self.assertEqual(len(alarms), 1)
        self.assertEqual(alarms[0].label, alarm_label_text)

        alarm_time = self.clock.alarms[0].time()

        # tap to Edit alarm
        edit_alarm = alarms[0].tap()

        # Set the alarm time to 1 min more than the current time
        time_picker = edit_alarm.tap_time()
        time_picker.add_minute()
        time_picker.tap_done()

        edit_alarm.tap_done()
        self.clock.dismiss_banner()
        # NOTE(review): this compares the bound method object `.time` against
        # the earlier `.time()` result, so it is always unequal — likely a
        # missing "()"; confirm against the Clock page-object API.
        self.assertNotEqual(self.clock.alarms[0].time, alarm_time)

        # Toggle the alarm off and back on, dismissing the re-arm banner.
        self.clock.alarms[0].disable()
        self.assertFalse(self.clock.alarms[0].is_alarm_active, 'user should be able to turn on the alarm.')
        self.clock.alarms[0].enable()
        self.clock.dismiss_banner()
        self.assertTrue(self.clock.alarms[0].is_alarm_active, 'user should be able to turn off the alarm.')

        # Leave the app and wait for the alarm to fire system-wide.
        self.device.touch_home_button()
        self.marionette.switch_to_frame()

        self.alarm_alert = AlarmAlertScreen(self.marionette)
        self.alarm_alert.wait_for_alarm_to_trigger()

        # Check that the alarm name is the one we set
        self.assertEqual(self.alarm_alert.alarm_label, alarm_label_text)
        self.alarm_alert.tap_stop_alarm()

        # Switch back to top level now that Clock app is gone
        Homescreen(self.marionette).wait_to_be_displayed()
| 36.343434 | 110 | 0.66398 |
79559f203367ac7c1cc09a437ebf20c19f7ce22c | 6,491 | py | Python | qnarre/prep/convert/roberta.py | quantapix/qnarre.com | f51d5945c20ef8182c4aa11f1b407d064c190c70 | [
"MIT"
] | null | null | null | qnarre/prep/convert/roberta.py | quantapix/qnarre.com | f51d5945c20ef8182c4aa11f1b407d064c190c70 | [
"MIT"
] | null | null | null | qnarre/prep/convert/roberta.py | quantapix/qnarre.com | f51d5945c20ef8182c4aa11f1b407d064c190c70 | [
"MIT"
] | null | null | null | # Copyright 2022 Quantapix Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
import pathlib
import torch
from argparse import ArgumentParser
from fairseq.models.roberta import RobertaModel as FairseqRobertaModel
from fairseq.modules import TransformerSentenceEncLayer
from transformers.utils import logging
from ..config.bert import PreTrained
from ...models.bert import ForMasked, ForSeqClassifier
logging.set_verbosity_info()  # have the transformers logger report progress at info level
log = logging.get_logger(__name__)
SAMPLE_TEXT = "Hello world! cécé herlolip"  # probe sentence used to compare both models' outputs
def to_pytorch(src_path, save_path, classification_head):
    """Convert a fairseq RoBERTa checkpoint into this project's BERT-style model.

    src_path: directory containing the fairseq checkpoint.
    save_path: directory the converted model is written to (created if absent).
    classification_head: if True, also copy the "mnli" classification head and
        build a ForSeqClassifier; otherwise build a ForMasked model with the LM head.

    Raises Exception if the converted model's outputs do not match the
    original's within atol=1e-3 on SAMPLE_TEXT.
    """
    roberta = FairseqRobertaModel.from_pretrained(src_path)
    roberta.eval()  # disable dropout so both models produce deterministic outputs
    roberta_sent_encoder = roberta.model.encoder.sentence_encoder
    # Derive the target config from the source checkpoint's dimensions.
    # n_pos=514 / n_typ=1 / norm_eps=1e-5 are RoBERTa's fixed conventions.
    cfg = PreTrained(
        s_vocab=roberta_sent_encoder.embed_tokens.num_embeddings,
        d_hidden=roberta.args.encoder_embed_dim,
        n_lays=roberta.args.n_enc_lays,
        n_heads=roberta.args.n_enc_heads,
        d_ffnet=roberta.args.encoder_ffn_embed_dim,
        n_pos=514,
        n_typ=1,
        norm_eps=1e-5,
    )
    if classification_head:
        # Number of labels comes from the checkpoint's MNLI head output size.
        cfg.n_labels = roberta.model.classification_heads["mnli"].out_proj.weight.shape[0]
    print("Our BERT config:", cfg)
    m = ForSeqClassifier(cfg) if classification_head else ForMasked(cfg)
    m.eval()
    # --- Embeddings -------------------------------------------------------
    m.roberta.embeddings.tok_embed.weight = roberta_sent_encoder.embed_tokens.weight
    m.roberta.embeddings.pos_embed.weight = roberta_sent_encoder.embed_positions.weight
    # RoBERTa does not use token types; zero the table (just to be sure).
    m.roberta.embeddings.token_type_embeddings.weight.data = torch.zeros_like(
        m.roberta.embeddings.token_type_embeddings.weight
    )
    m.roberta.embeddings.LayerNorm.weight = roberta_sent_encoder.emb_layer_norm.weight
    m.roberta.embeddings.LayerNorm.bias = roberta_sent_encoder.emb_layer_norm.bias
    # --- Encoder layers: copy q/k/v, attention output, FFN per layer ------
    for i in range(cfg.n_lays):
        layer = m.roberta.encoder.layer[i]
        roberta_layer: TransformerSentenceEncLayer = roberta_sent_encoder.layers[i]
        self_attn = layer.attention.self
        # Sanity-check the projections are square and equally sized.
        assert (
            roberta_layer.self_attn.k_proj.weight.data.shape
            == roberta_layer.self_attn.q_proj.weight.data.shape
            == roberta_layer.self_attn.v_proj.weight.data.shape
            == torch.Size((cfg.d_hidden, cfg.d_hidden))
        )
        self_attn.query.weight.data = roberta_layer.self_attn.q_proj.weight
        self_attn.query.bias.data = roberta_layer.self_attn.q_proj.bias
        self_attn.key.weight.data = roberta_layer.self_attn.k_proj.weight
        self_attn.key.bias.data = roberta_layer.self_attn.k_proj.bias
        self_attn.value.weight.data = roberta_layer.self_attn.v_proj.weight
        self_attn.value.bias.data = roberta_layer.self_attn.v_proj.bias
        # Attention output projection + its LayerNorm.
        self_output = layer.attention.output
        assert self_output.dense.weight.shape == roberta_layer.self_attn.out_proj.weight.shape
        self_output.dense.weight = roberta_layer.self_attn.out_proj.weight
        self_output.dense.bias = roberta_layer.self_attn.out_proj.bias
        self_output.LayerNorm.weight = roberta_layer.self_attn_layer_norm.weight
        self_output.LayerNorm.bias = roberta_layer.self_attn_layer_norm.bias
        # Feed-forward: fc1 is the expansion, fc2 the contraction.
        intermediate = layer.intermediate
        assert intermediate.dense.weight.shape == roberta_layer.fc1.weight.shape
        intermediate.dense.weight = roberta_layer.fc1.weight
        intermediate.dense.bias = roberta_layer.fc1.bias
        bert_output = layer.output
        assert bert_output.dense.weight.shape == roberta_layer.fc2.weight.shape
        bert_output.dense.weight = roberta_layer.fc2.weight
        bert_output.dense.bias = roberta_layer.fc2.bias
        bert_output.LayerNorm.weight = roberta_layer.final_layer_norm.weight
        bert_output.LayerNorm.bias = roberta_layer.final_layer_norm.bias
    # --- Task head --------------------------------------------------------
    if classification_head:
        m.classifier.dense.weight = roberta.model.classification_heads["mnli"].dense.weight
        m.classifier.dense.bias = roberta.model.classification_heads["mnli"].dense.bias
        m.classifier.out_proj.weight = roberta.model.classification_heads["mnli"].out_proj.weight
        m.classifier.out_proj.bias = roberta.model.classification_heads["mnli"].out_proj.bias
    else:
        m.lm_head.dense.weight = roberta.model.encoder.lm_head.dense.weight
        m.lm_head.dense.bias = roberta.model.encoder.lm_head.dense.bias
        m.lm_head.layer_norm.weight = roberta.model.encoder.lm_head.layer_norm.weight
        m.lm_head.layer_norm.bias = roberta.model.encoder.lm_head.layer_norm.bias
        m.lm_head.decoder.weight = roberta.model.encoder.lm_head.weight
        m.lm_head.decoder.bias = roberta.model.encoder.lm_head.bias
    # --- Verify the conversion on a sample sentence -----------------------
    input_ids = roberta.encode(SAMPLE_TEXT).unsqueeze(0)  # batch of size 1
    our_output = m(input_ids)[0]
    if classification_head:
        their_output = roberta.model.classification_heads["mnli"](
            roberta.extract_features(input_ids)
        )
    else:
        their_output = roberta.model(input_ids)[0]
    print(our_output.shape, their_output.shape)
    max_absolute_diff = torch.max(torch.abs(our_output - their_output)).item()
    print(f"max_absolute_diff = {max_absolute_diff}")  # ~ 1e-7
    success = torch.allclose(our_output, their_output, atol=1e-3)
    print("Do both models output the same tensors?", "🔥" if success else "💩")
    if not success:
        raise Exception("Something went wRoNg")
    pathlib.Path(save_path).mkdir(parents=True, exist_ok=True)
    print(f"Saving model to {save_path}")
    m.save_pretrained(save_path)
if __name__ == "__main__":
    # Command-line entry point: convert a fairseq RoBERTa checkpoint.
    parser = ArgumentParser()
    parser.add_argument("--roberta_checkpoint_path", default=None, type=str, required=True)
    parser.add_argument("--save_path", default=None, type=str, required=True)
    parser.add_argument("--classification_head", action="store_true")
    args = parser.parse_args()
    to_pytorch(args.roberta_checkpoint_path, args.save_path, args.classification_head)
| 48.804511 | 97 | 0.731474 |
79559f3898e21181d66e3ed3de41db078e4c4af2 | 10,871 | py | Python | home-assistant/custom_components/climate/intesishome.py | grischard/pyIntesisHome | ad6958ccf65d49715a6a72b9d2e10c25043a0eee | [
"MIT"
] | null | null | null | home-assistant/custom_components/climate/intesishome.py | grischard/pyIntesisHome | ad6958ccf65d49715a6a72b9d2e10c25043a0eee | [
"MIT"
] | null | null | null | home-assistant/custom_components/climate/intesishome.py | grischard/pyIntesisHome | ad6958ccf65d49715a6a72b9d2e10c25043a0eee | [
"MIT"
] | null | null | null | """
Support for IntesisHome Smart AC Controllers
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/intesishome/
"""
import logging
import voluptuous as vol
from custom_components import intesishome
from homeassistant.util import Throttle
from datetime import timedelta
from homeassistant.components.climate import ( ClimateDevice,
PLATFORM_SCHEMA, ATTR_TARGET_TEMP_HIGH, ATTR_TARGET_TEMP_LOW,
ATTR_TEMPERATURE, ATTR_OPERATION_MODE)
from homeassistant.const import (TEMP_CELSIUS, CONF_SCAN_INTERVAL, STATE_UNKNOWN)
DEPENDENCIES = ['intesishome']
_LOGGER = logging.getLogger(__name__)
STATE_FAN = 'Fan'
STATE_HEAT = 'Heat'
STATE_COOL = 'Cool'
STATE_DRY = 'Dry'
STATE_AUTO = 'Auto'
STATE_QUIET = 'Quiet'
STATE_LOW = 'Low'
STATE_MEDIUM = 'Medium'
STATE_HIGH = 'High'
STATE_OFF = 'Off'
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Optional(CONF_SCAN_INTERVAL):
vol.All(vol.Coerce(int), vol.Range(min=1)),
})
# Return cached results if last scan time was less than this value.
# If a persistent connection is established for the controller, changes to values are in realtime.
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=180)
try:
from asyncio import ensure_future
except ImportError:
# Python 3.4.3 and ealier has this as async
# pylint: disable=unused-import
from asyncio import async
ensure_future = async
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Setup the Nest thermostat."""
add_devices([IntesisAC(deviceid, device)
for deviceid, device in intesishome.get_devices().items()])
class IntesisAC(ClimateDevice):
def __init__(self, deviceid, device):
"""Initialize the thermostat"""
_LOGGER.info('Added climate device with state: %s',repr(device))
self._deviceid = deviceid
self._devicename = device['name']
self._max_temp = None
self._min_temp = None
self._target_temp = None
self._current_temp = None
self._run_hours = None
self._rssi = None
self._swing = None
self._has_swing_control = False
self._power = STATE_UNKNOWN
self._fan_speed = STATE_UNKNOWN
self._current_operation = STATE_UNKNOWN
self._operation_list = [STATE_AUTO, STATE_COOL, STATE_HEAT, STATE_DRY, STATE_FAN, STATE_OFF]
self._fan_list = [STATE_AUTO, STATE_QUIET, STATE_LOW, STATE_MEDIUM, STATE_HIGH]
self._swing_list = ["Auto/Stop","Swing","Middle"]
# Best guess as which widget represents vertical swing control
if 42 in device.get('widgets'):
self._has_swing_control = True
intesishome.controller.add_update_callback(self.update_callback)
self.update()
@property
def name(self):
"""Return the name of the AC device"""
return self._devicename
@property
def temperature_unit(self):
"""IntesisHome API uses Celsius on the backend - let Home Assistant convert"""
return TEMP_CELSIUS
@property
def device_state_attributes(self):
"""Return the device specific state attributes."""
if intesishome.controller.is_connected:
update_type = 'Push'
else:
update_type = 'Poll'
return {
"run_hours": self._run_hours,
"rssi": self._rssi,
"temperature": self._target_temp,
"ha_update_type": update_type,
}
def set_temperature(self, **kwargs):
"""Set new target temperature."""
_LOGGER.debug("IntesisHome Set Temperature=%s")
temperature = kwargs.get(ATTR_TEMPERATURE)
operation_mode = kwargs.get(ATTR_OPERATION_MODE)
if operation_mode:
self._target_temp = temperature
self.set_operation_mode(operation_mode)
else:
if temperature:
intesishome.controller.set_temperature(self._deviceid, temperature)
def set_operation_mode(self, operation_mode):
"""Set operation mode."""
_LOGGER.debug("IntesisHome Set Mode=%s", operation_mode)
if operation_mode == STATE_OFF:
intesishome.controller.set_power_off(self._deviceid)
else:
if intesishome.controller.get_power_state(self._deviceid) == 'off':
intesishome.controller.set_power_on(self._deviceid)
if operation_mode == STATE_HEAT:
intesishome.controller.set_mode_heat(self._deviceid)
elif operation_mode == STATE_COOL:
intesishome.controller.set_mode_cool(self._deviceid)
elif operation_mode == STATE_AUTO:
intesishome.controller.set_mode_auto(self._deviceid)
elif operation_mode == STATE_FAN:
intesishome.controller.set_mode_fan(self._deviceid)
self._target_temp = None
elif operation_mode == STATE_DRY:
intesishome.controller.set_mode_dry(self._deviceid)
if self._target_temp:
intesishome.controller.set_temperature(self._deviceid, self._target_temp)
def set_fan_mode(self, fan):
"""Set fan mode (from quiet, low, medium, high, auto)"""
intesishome.controller.set_fan_speed(self._deviceid, fan.lower())
def set_swing_mode(self, swing):
"""Set the vertical vane."""
if swing == "Auto/Stop":
intesishome.controller.set_vertical_vane(self._deviceid, 'auto/stop')
intesishome.controller.set_horizontal_vane(self._deviceid, 'auto/stop')
elif swing == "Swing":
intesishome.controller.set_vertical_vane(self._deviceid, 'swing')
intesishome.controller.set_horizontal_vane(self._deviceid, 'swing')
elif swing == "Middle":
intesishome.controller.set_vertical_vane(self._deviceid, 'manual3')
intesishome.controller.set_horizontal_vane(self._deviceid, 'swing')
def update(self):
if intesishome.controller.is_disconnected:
self._poll_status(False)
self._current_temp = intesishome.controller.get_temperature(self._deviceid)
self._min_temp = intesishome.controller.get_min_setpoint(self._deviceid)
self._max_temp = intesishome.controller.get_max_setpoint(self._deviceid)
self._rssi = intesishome.controller.get_rssi(self._deviceid)
self._run_hours = intesishome.controller.get_run_hours(self._deviceid)
# Operation mode
mode = intesishome.controller.get_mode(self._deviceid)
if intesishome.controller.get_power_state(self._deviceid) == 'off':
self._current_operation = STATE_OFF
self._fan_speed = None
self._swing = None
elif mode == 'auto':
self._current_operation = STATE_AUTO
elif mode == 'fan':
self._current_operation = STATE_FAN
elif mode == 'heat':
self._current_operation = STATE_HEAT
elif mode == 'dry':
self._current_operation = STATE_DRY
elif mode == 'cool':
self._current_operation = STATE_COOL
else:
self._current_operation = STATE_UNKNOWN
# Target temperature
if self._current_operation in [STATE_OFF,STATE_FAN]:
self._target_temp = None
else:
self._target_temp = intesishome.controller.get_setpoint(self._deviceid)
# Fan speed
fan_speed = intesishome.controller.get_fan_speed(self._deviceid)
if fan_speed:
# Capitalize fan speed from pyintesishome
self._fan_speed = fan_speed[:1].upper() + fan_speed[1:]
# Swing mode
# Climate module only supports one swing setting, so use vertical swing
swing = intesishome.controller.get_vertical_swing(self._deviceid)
if not self._has_swing_control:
# Device doesn't support swing
self._swing = None
elif swing == 'auto/stop':
self._swing = "Auto/Stop"
elif swing == 'swing':
self._swing = "Swing"
elif swing == 'manual3':
self._swing = "Middle"
else:
self._swing = STATE_UNKNOWN
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def _poll_status(self, shouldCallback):
_LOGGER.info("Polling IntesisHome Status via HTTP")
intesishome.controller.poll_status(shouldCallback)
@property
def icon(self):
icon = None
if self._current_operation == STATE_HEAT:
icon = 'mdi:white-balance-sunny'
elif self._current_operation == STATE_FAN:
icon = 'mdi:fan'
elif self._current_operation == STATE_DRY:
icon = 'mdi:water-off'
elif self._current_operation == STATE_COOL:
icon = 'mdi:nest-thermostat'
elif self._current_operation == STATE_AUTO:
icon = 'mdi:cached'
return icon
def update_callback(self):
"""Called when data is received by pyIntesishome"""
_LOGGER.info("IntesisHome sent a status update.")
self.hass.async_add_job(self.update_ha_state,True)
@property
def min_temp(self):
"""Return the minimum temperature from the IntesisHome interface"""
return self._min_temp
@property
def max_temp(self):
"""Return the maximum temperature from the IntesisHome interface"""
return self._max_temp
@property
def should_poll(self):
"""Poll for updates if pyIntesisHome doesn't have a socket open"""
if intesishome.controller.is_connected:
return False
else:
return True
@property
def operation_list(self):
"""List of available operation modes."""
return self._operation_list
@property
def current_fan_mode(self):
"""Return whether the fan is on."""
return self._fan_speed
@property
def current_swing_mode(self):
"""Return current swing mode."""
return self._swing
@property
def fan_list(self):
"""List of available fan modes."""
return self._fan_list
@property
def swing_list(self):
"""List of available swing positions."""
return self._swing_list
@property
def current_temperature(self):
"""Return the current temperature."""
return self._current_temp
@property
def current_operation(self):
return self._current_operation
@property
def target_temperature(self):
return self._target_temp
@property
def target_temperature_low(self):
return None
@property
def target_temperature_high(self):
return None
@property
def is_away_mode_on(self):
"""Return if away mode is on."""
return None | 33.866044 | 100 | 0.65339 |
79559fa0947ad295dad6a922c84c6ab514769216 | 403 | py | Python | poke_dual_emu/wsgi.py | jacob975/poke_dual_emu | a172bf7763144b28b87ce5a493d4ccbff038fbe7 | [
"MIT"
] | null | null | null | poke_dual_emu/wsgi.py | jacob975/poke_dual_emu | a172bf7763144b28b87ce5a493d4ccbff038fbe7 | [
"MIT"
] | null | null | null | poke_dual_emu/wsgi.py | jacob975/poke_dual_emu | a172bf7763144b28b87ce5a493d4ccbff038fbe7 | [
"MIT"
] | null | null | null | """
WSGI config for poke_dual_emu project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'poke_dual_emu.settings')
application = get_wsgi_application()
| 23.705882 | 78 | 0.791563 |
7955a0948378640590504acd9636e6003d586daa | 1,657 | py | Python | car5.py | maahi07m/Hackerrank | 6aa984690b34ccc9cd8119d42aa75c0b049afbbc | [
"MIT"
] | null | null | null | car5.py | maahi07m/Hackerrank | 6aa984690b34ccc9cd8119d42aa75c0b049afbbc | [
"MIT"
] | null | null | null | car5.py | maahi07m/Hackerrank | 6aa984690b34ccc9cd8119d42aa75c0b049afbbc | [
"MIT"
] | 3 | 2020-04-04T11:13:20.000Z | 2020-05-18T15:58:54.000Z | """
>>> car = Car(color="Red", max_speed=250, acceleration=10, tyre_friction=3)
>>> car.start_engine()
>>> car.accelerate()
>>> car.current_speed
10
>>> car.apply_brakes()
>>> car.current_speed
7
>>> car.apply_brakes()
>>> car.current_speed
4
>>> car.apply_brakes()
>>> car.current_speed
1
>>> car.apply_brakes()
>>> car.current_speed
0
"""
class Car:
# TODO: write your code here
def __init__(self,color,max_speed,acceleration,tyre_friction):
self.color = color
self.max_speed = max_speed
self.current_speed = acceleration
self.tyre_friction = tyre_friction
self.engine = 0
def accelerate(self):
if self.engine == 0:
pass
else:
self.current_speed = acceleration
def start_engine(self):
self.engine = 1
def apply_brakes(self):
self.current_speed -= tyre_friction
if self.current_speed < 0:
self.current_speed = 0
else:
self.current_speed
if __name__ == "__main__":
import json
detail = json.loads(input())
color = detail["color"]
max_speed = float(detail["max_speed"])
acceleration = float(detail["acceleration"])
tyre_friction = float(detail["tyre_friction"])
car = Car(color=color, max_speed=max_speed, acceleration=acceleration,
tyre_friction=tyre_friction)
car.start_engine()
car.accelerate()
print(car.current_speed)
car.apply_brakes()
print(car.current_speed)
car.apply_brakes()
print(car.current_speed)
car.apply_brakes()
print(car.current_speed)
car.apply_brakes()
print(car.current_speed)
| 23.671429 | 75 | 0.642124 |
7955a1224ada62d4ff1b8466f376e248ad53633f | 788 | py | Python | caelum-py-14-cap4/forca.py | herculeshssj/python | c1a0134c405cd7e68fd6d9103894c83d002daa7e | [
"MIT"
] | null | null | null | caelum-py-14-cap4/forca.py | herculeshssj/python | c1a0134c405cd7e68fd6d9103894c83d002daa7e | [
"MIT"
] | null | null | null | caelum-py-14-cap4/forca.py | herculeshssj/python | c1a0134c405cd7e68fd6d9103894c83d002daa7e | [
"MIT"
] | null | null | null | print('*********************************')
print('***Bem vindo ao jogo da Forca!***')
print('*********************************')
palavra_secreta = 'banana'
letras_acertadas = ['_', '_', '_', '_', '_', '_']
acertou = False
enforcou = False
erros = 0
print(letras_acertadas)
while (not acertou and not enforcou):
chute = input('Qual letra? ')
if (chute in palavra_secreta):
posicao = 0
for letra in palavra_secreta:
if (chute.upper() == letra.upper()):
letras_acertadas[posicao] = letra
posicao += 1
else:
erros += 1
acertou = '_' not in letras_acertadas
enforcou = erros == 6
print(letras_acertadas)
if(acertou):
print('Você ganhou!!')
else:
print('Você perdeu!!')
print('Fim do jogo') | 22.514286 | 49 | 0.53934 |
7955a1c28681bff4c9f154e05d02d16a973ceb94 | 5,133 | py | Python | search/storage/storage.py | JinHai-CN/phantoscope | 1148a30bd379691220e46520248f76615f1d86d3 | [
"Apache-2.0"
] | null | null | null | search/storage/storage.py | JinHai-CN/phantoscope | 1148a30bd379691220e46520248f76615f1d86d3 | [
"Apache-2.0"
] | null | null | null | search/storage/storage.py | JinHai-CN/phantoscope | 1148a30bd379691220e46520248f76615f1d86d3 | [
"Apache-2.0"
] | null | null | null | import os
import json
import logging
from milvus import Milvus, MetricType
from minio import Minio
from common.config import MILVUS_ADDR, MILVUS_PORT
from common.error import MilvusError, S3Error
from common.const import MINIO_BUCKET_PUBLIC_POLICY
from common.config import MINIO_ADDR, MINIO_ACCESS_KEY, MINIO_SECRET_KEY
logger = logging.getLogger(__name__)
class Storage:
def __init__(self):
pass
# code blow this comment need to be rewrite
type_mapping = {
"l2": MetricType.L2
}
class MilvusIns:
@staticmethod
def new_milvus_collection(name, dimension, index_file_size, metric_type):
metric_type = type_mapping.get(metric_type, MetricType.L2)
milvus = Milvus()
try:
milvus.connect(MILVUS_ADDR, MILVUS_PORT)
parma = {
"collection_name": name,
"dimension": dimension,
"index_file_size": index_file_size,
"metric_type": metric_type
}
res = milvus.create_collection(parma)
if not res.OK():
raise MilvusError("There has some error when create milvus collection", res)
except Exception as e:
raise MilvusError("There has some error when create milvus collection", e)
@staticmethod
def del_milvus_collection(name):
milvus = Milvus()
try:
milvus.connect(MILVUS_ADDR, MILVUS_PORT)
res = milvus.drop_collection(collection_name=name)
if not res.OK():
raise MilvusError("There has some error when drop milvus collection", res)
except Exception as e:
raise MilvusError("There has some error when delete milvus collection", e)
@staticmethod
def insert_vectors(name, vectors):
milvus = Milvus()
try:
milvus.connect(MILVUS_ADDR, MILVUS_PORT)
res, ids = milvus.insert(collection_name=name, records=vectors)
if not res.OK():
raise MilvusError("There has some error when insert vectors", res)
return ids
except Exception as e:
logger.error("There has some error when insert vectors", exc_info=True)
raise MilvusError("There has some error when insert vectors", e)
@staticmethod
def search_vectors(name, vector, topk, nprobe):
milvus = Milvus()
search_param = {'nprobe': nprobe}
try:
milvus.connect(MILVUS_ADDR, MILVUS_PORT)
res, ids = milvus.search(collection_name=name, query_records=vector, top_k=topk, params=search_param)
if not res.OK():
raise MilvusError("There has some error when insert vectors", res)
return ids
except Exception as e:
raise MilvusError("There has some error when insert vectors", e)
@staticmethod
def del_vectors(collection_name, ids):
milvus = Milvus()
try:
milvus.connect(MILVUS_ADDR, MILVUS_PORT)
milvus.delete_by_id(collection_name=collection_name, id_array=ids)
except Exception as e:
raise MilvusError("There has some error when delete vectors", e)
class S3Ins:
@classmethod
def new_minio_client(cls):
return Minio(
MINIO_ADDR,
access_key=MINIO_ACCESS_KEY,
secret_key=MINIO_SECRET_KEY,
secure=False
)
@classmethod
def new_s3_buckets(cls, names, region=None):
try:
minioClient = cls.new_minio_client()
for x in names:
minioClient.make_bucket(x)
minioClient.set_bucket_policy(x, json.dumps(gen_public_policy(x)))
except Exception as e:
logger.error("There has some error when create s3 buckets", exc_info=True)
raise S3Error("There has some error when create s3 buckets", e)
@classmethod
def del_s3_buckets(cls, names):
try:
minioClient = cls.new_minio_client()
for x in names:
minioClient.remove_bucket(x)
except Exception as e:
raise S3Error("There has some error when delete s3 buckets", e)
@classmethod
def upload2bucket(cls, bucket_name, file_path, file_name):
try:
minioClient = cls.new_minio_client()
with open(file_path, 'rb') as f:
file_stat = os.stat(file_path)
minioClient.put_object(bucket_name, file_name, f, file_stat.st_size)
except Exception as e:
raise S3Error("There has some error when put file to s3 bucket", e)
@classmethod
def del_object(cls, bucket_name, object_name):
try:
minioClient = cls.new_minio_client()
minioClient.remove_object(bucket_name, object_name)
except Exception as e:
raise S3Error("There has some error when delete object", e)
def gen_public_policy(name):
prefix = "arn:aws:s3:::{}{}"
policy = MINIO_BUCKET_PUBLIC_POLICY.copy()
policy["Statement"][0]["Resource"] = [prefix.format(name, ""), prefix.format(name, "/*")]
return policy
| 35.645833 | 113 | 0.631794 |
7955a21717825a0432fb75fbc15b773a035449c1 | 95 | py | Python | mlserve/__init__.py | jettify/mlserve | 571152e4475738e0b01fcbde166d95a3636b3c5f | [
"Apache-2.0"
] | 17 | 2018-08-06T09:38:17.000Z | 2018-08-14T10:55:58.000Z | mlserve/__init__.py | ml-libs/mlserve | 571152e4475738e0b01fcbde166d95a3636b3c5f | [
"Apache-2.0"
] | 63 | 2018-09-07T21:40:16.000Z | 2022-02-10T17:11:13.000Z | mlserve/__init__.py | jettify/mlserve | 571152e4475738e0b01fcbde166d95a3636b3c5f | [
"Apache-2.0"
] | 1 | 2019-05-06T10:18:59.000Z | 2019-05-06T10:18:59.000Z | from .schema_builder import build_schema
__all__ = ('build_schema',)
__version__ = '0.0.1a6'
| 15.833333 | 40 | 0.747368 |
7955a3c681bd43d12a26a2746a4d4f81640aebf9 | 8,809 | py | Python | III godina/Zimski semestar/UVI/Kolokvijumi/Inteligentni sistemi 1/I_Kolokvijum/2018-2019/Popravni/Resenja_studenata/CLIPS/Mihajlo_Jankovic_44-19/Zadatak_CLIPS_Experta.py | TodorovicSrdjan/PMF | d3deee21697dad300177fca6090c56d3b8d0cb76 | [
"MIT"
] | null | null | null | III godina/Zimski semestar/UVI/Kolokvijumi/Inteligentni sistemi 1/I_Kolokvijum/2018-2019/Popravni/Resenja_studenata/CLIPS/Mihajlo_Jankovic_44-19/Zadatak_CLIPS_Experta.py | TodorovicSrdjan/PMF | d3deee21697dad300177fca6090c56d3b8d0cb76 | [
"MIT"
] | null | null | null | III godina/Zimski semestar/UVI/Kolokvijumi/Inteligentni sistemi 1/I_Kolokvijum/2018-2019/Popravni/Resenja_studenata/CLIPS/Mihajlo_Jankovic_44-19/Zadatak_CLIPS_Experta.py | TodorovicSrdjan/PMF | d3deee21697dad300177fca6090c56d3b8d0cb76 | [
"MIT"
] | null | null | null | from experta import *
from experta.utils import freeze
import schema
#watch('ACTIVATIONS','RULES','FACTS')
class Faza(Fact):
pass
class Laptop(Fact):
model = Field(str)
procesor = Field(schema.Or("i3","i5","i7"))
ssd = Field(schema.Or("DA","NE"))
ram = Field(schema.Or(4,8,12,16))
gpu = Field(schema.Or(1,2,3,4))
dijagonala_ekrana = Field(float)
cena = Field(lambda x: isinstance(x,int) and x > 0)
cf = Field(lambda x: isinstance(x,int) and -100 <= x <= 100, default = 0)
flag = Field(list, False, freeze([]))
class ZeljeKorisnika(Laptop):
zeljena_cena = Field(lambda x: isinstance(x,int) and x > 0)
max_cena = Field(lambda x: isinstance(x,int) and x > 0)
class Engine(KnowledgeEngine):
laptopovi = list()
@DefFacts()
def ucitavanje_cinjenica(self):
yield Faza("Unos")
yield Laptop(model = "Asus",
procesor = "i7",
ssd = "DA",
ram = 16,
gpu = 4,
dijagonala_ekrana = 19.5,
cena = 110000)
yield Laptop(model = "Acer",
procesor = "i5",
ssd = "NE",
ram = 8,
gpu = 2,
dijagonala_ekrana = 18.7,
cena = 75000)
yield Laptop(model = "Lenovo",
procesor = "i3",
ssd = "NE",
ram = 8,
gpu = 2,
dijagonala_ekrana = 16.7,
cena = 85000)
# Unos korisnika
@Rule(Faza("Unos"))
def unos_korisnika(self):
self.declare(ZeljeKorisnika(procesor = "i7",
ssd = "DA",
ram = 16,
gpu = 4,
dijagonala_ekrana = 18.7,
zeljena_cena = 60000,
max_cena = 85000))
'''
self.declare(ZeljeKorisnika(procesor = str(input()),
ssd = str(input()),
ram = int(input()),
gpu = int(input()),
dijagonala_ekrana = float(input()),
zeljena_cena = int(input()),
max_cena = int(input())))
'''
# Kontrola toka: Unos -> Brisi
@Rule(
AS.faza << Faza("Unos"),
salience = -10
)
def kontrola_unos_brisi(self,faza):
self.retract(faza)
self.declare(Faza("Brisi"))
@Rule(
Faza("Brisi"),
AS.laptop << Laptop(cena = MATCH.cena),
ZeljeKorisnika(max_cena = MATCH.max),
TEST(lambda cena,max: cena > max)
)
def obrisi_preskupe(self,laptop):
print("Izbrisan je model {}".format(laptop["model"]))
self.retract(laptop)
# Kontrola toka: Brisi -> CF
@Rule(
AS.faza << Faza("Brisi"),
salience = -10
)
def kontrola_brisi_cf(self,faza):
self.retract(faza)
self.declare(Faza("CF"))
@Rule(
Faza("CF"),
AS.laptop << Laptop(cena = MATCH.cena, cf = MATCH.cf, flag = MATCH.flag),
ZeljeKorisnika(zeljena_cena = MATCH.zeljena),
TEST(lambda cena, zeljena, flag: cena + 10000 < zeljena and flag.count("jeftiniji") == 0)
)
def jeftiniji(self,laptop,cf,flag):
self.modify(laptop, cf = cf + 25, flag = list(flag) + ["jeftiniji"])
@Rule(
Faza("CF"),
AS.laptop << Laptop(cena = MATCH.cena, cf = MATCH.cf, flag = MATCH.flag),
ZeljeKorisnika(zeljena_cena = MATCH.zeljena),
TEST(lambda cena,zeljena,flag: cena - 10000 > zeljena and flag.count("skuplji") == 0)
)
def skuplji(self,laptop,cf,flag):
self.modify(laptop, cf = cf - 20, flag = list(flag) + ["skuplji"])
@Rule(
Faza("CF"),
AS.laptop << Laptop(procesor = MATCH.procesor, cf = MATCH.cf, flag = MATCH.flag),
ZeljeKorisnika(procesor = MATCH.zeljeni_procesor),
TEST(lambda procesor, zeljeni_procesor, flag: procesor != zeljeni_procesor and flag.count("tip_procesora") == 0)
)
def tip_procesora(self,laptop,cf,flag):
self.modify(laptop, cf = cf - 20, flag = list(flag) + ["tip_procesora"])
@Rule(
Faza("CF"),
AS.laptop << Laptop(ssd = "NE", cf = MATCH.cf, flag = MATCH.flag),
ZeljeKorisnika(ssd = "DA"),
TEST(lambda flag: flag.count("zeli_ssd") == 0)
)
def zeli_ssd(self,laptop,cf,flag):
self.modify(laptop, cf = cf - 15, flag = list(flag) + ["zeli_ssd"])
@Rule(
Faza("CF"),
AS.laptop << Laptop(ssd = "NE", cf = MATCH.cf, flag = MATCH.flag),
ZeljeKorisnika(ssd = "DA"),
TEST(lambda flag: flag.count("zeli_ssd") == 0)
)
def zeli_ssd(self,laptop,cf,flag):
self.modify(laptop, cf = cf - 15, flag = list(flag) + ["zeli_ssd"])
@Rule(
Faza("CF"),
AS.laptop << Laptop(ssd = "DA", cf = MATCH.cf, flag = MATCH.flag),
ZeljeKorisnika(ssd = "NE"),
TEST(lambda flag: flag.count("ne_zeli_ssd") == 0)
)
def ne_zeli_ssd(self,laptop,cf,flag):
self.modify(laptop, cf = cf + 10, flag = list(flag) + ["ne_zeli_ssd"])
@Rule(
Faza("CF"),
AS.laptop << Laptop(ram = MATCH.ram, cf = MATCH.cf, flag = MATCH.flag),
ZeljeKorisnika(ram = MATCH.zeljeni_ram),
TEST(lambda ram, zeljeni_ram, flag: ram < zeljeni_ram and flag.count("manje_rama") == 0)
)
def manje_rama(self,laptop,cf,flag):
self.modify(laptop, cf = cf - 15, flag = list(flag) + ["manje_rama"])
@Rule(
Faza("CF"),
AS.laptop << Laptop(ram = MATCH.ram, cf = MATCH.cf, flag = MATCH.flag),
ZeljeKorisnika(ram = MATCH.zeljeni_ram),
TEST(lambda ram, zeljeni_ram, flag: ram > zeljeni_ram and flag.count("vise_rama") == 0)
)
def vise_rama(self,laptop,cf,ram,zeleni_ram,flag):
koef = (ram - zeleni_ram) / 4
self.modify(laptop, cf = cf + 10 * koef, flag = list(flag) + ["vise_rama"])
@Rule(
Faza("CF"),
AS.laptop << Laptop(gpu = MATCH.gpu, cf = MATCH.cf, flag = MATCH.flag),
ZeljeKorisnika(gpu = MATCH.zeljeni_gpu),
TEST(lambda gpu, zeljeni_gpu, flag: gpu < zeljeni_gpu and flag.count("manje_gpu") == 0)
)
def manje_gpu(self,laptop,cf,flag):
self.modify(laptop, cf = cf - 15, flag = list(flag) + ["manje_gpu"])
@Rule(
Faza("CF"),
AS.laptop << Laptop(gpu = MATCH.gpu, cf = MATCH.cf, flag = MATCH.flag),
ZeljeKorisnika(gpu = MATCH.zeljeni_gpu),
TEST(lambda gpu, zeljeni_gpu, flag: gpu > zeljeni_gpu and flag.count("vise_gpu") == 0)
)
def vise_gpu(self,laptop,gpu,zeleni_gpu,cf,flag):
koef = (gpu - zeleni_gpu) / 4
self.modify(laptop, cf = cf + 10 * koef, flag = list(flag) + ["vise_gpu"])
@Rule(
Faza("CF"),
AS.laptop << Laptop(dijagonala_ekrana = MATCH.dijagonala, cf = MATCH.cf, flag = MATCH.flag),
ZeljeKorisnika(dijagonala_ekrana = MATCH.zeljena_dijagonala),
TEST(lambda dijagonala, zeljena_dijagonala, flag: dijagonala == zeljena_dijagonala and flag.count("odgovarajuca_dijagonala") == 0)
)
def odgovarajuca_dijagonala(self,laptop,cf,flag):
self.modify(laptop, cf = cf + 15, flag = list(flag) + ["odgovarajuca_dijagonala"])
# Kontrola toka:
@Rule(
AS.faza << Faza("CF"),
salience = -10
)
def kontrola_cf_zavrsna(self,faza):
self.retract(faza)
self.declare(Faza("Zavrsna"))
@Rule(
Faza("Zavrsna"),
AS.laptop << Laptop(model = MATCH.model, cf = MATCH.cf, cena = MATCH.cena)
)
def lista_laptopova(self,laptop,model,cf,cena):
self.laptopovi.append({"model":model,"cf":cf,"cena":cena})
self.retract(laptop)
engine = Engine()
engine.reset()
engine.run()
lista = engine.laptopovi
def sortiraj_cena(x):
return x['cena']
def sortiraj_cf(x):
return x['cf']
lista.sort(key=sortiraj_cf)
print("Sortirano po CF u opadajucem redosledu:")
for el in lista:
print("Model = {}, CF = {}, Cena = {}".format(el['model'],el['cf'],el['cena']))
lista.sort(key=sortiraj_cena)
print("Sortirano po ceni u rastucem redosledu:")
for el in lista:
print("Model = {}, CF = {}, Cena = {}".format(el['model'],el['cf'],el['cena']))
| 36.102459 | 139 | 0.522534 |
7955a421980e8acf92e869dfd770221c6489268d | 7,022 | py | Python | lbrynet/utils.py | walidmujahid/lbry | e4c3e038b613f8e84fbe6e9227913c9c42146eaa | [
"MIT"
] | null | null | null | lbrynet/utils.py | walidmujahid/lbry | e4c3e038b613f8e84fbe6e9227913c9c42146eaa | [
"MIT"
] | null | null | null | lbrynet/utils.py | walidmujahid/lbry | e4c3e038b613f8e84fbe6e9227913c9c42146eaa | [
"MIT"
] | null | null | null | import base64
import codecs
import datetime
import random
import socket
import string
import json
import traceback
import functools
import logging
import pkg_resources
from twisted.python.failure import Failure
from twisted.internet import defer
from lbrynet.schema.claim import ClaimDict
from lbrynet.cryptoutils import get_lbry_hash_obj
log = logging.getLogger(__name__)
# defining these time functions here allows for easier overriding in testing
def now():
return datetime.datetime.now()
def utcnow():
return datetime.datetime.utcnow()
def isonow():
"""Return utc now in isoformat with timezone"""
return utcnow().isoformat() + 'Z'
def today():
return datetime.datetime.today()
def timedelta(**kwargs):
return datetime.timedelta(**kwargs)
def datetime_obj(*args, **kwargs):
return datetime.datetime(*args, **kwargs)
def call_later(delay, func, *args, **kwargs):
# Import here to ensure that it gets called after installing a reactor
# see: http://twistedmatrix.com/documents/current/core/howto/choosing-reactor.html
from twisted.internet import reactor
return reactor.callLater(delay, func, *args, **kwargs)
def safe_start_looping_call(looping_call, interval_sec):
if not looping_call.running:
looping_call.start(interval_sec)
def safe_stop_looping_call(looping_call):
if looping_call.running:
looping_call.stop()
def generate_id(num=None):
h = get_lbry_hash_obj()
if num is not None:
h.update(str(num).encode())
else:
h.update(str(random.getrandbits(512)).encode())
return h.digest()
def version_is_greater_than(a, b):
"""Returns True if version a is more recent than version b"""
return pkg_resources.parse_version(a) > pkg_resources.parse_version(b)
def rot13(some_str):
return codecs.encode(some_str, 'rot_13')
def deobfuscate(obfustacated):
return base64.b64decode(rot13(obfustacated)).decode()
def obfuscate(plain):
return rot13(base64.b64encode(plain).decode())
def check_connection(server="lbry.io", port=80, timeout=5):
"""Attempts to open a socket to server:port and returns True if successful."""
log.debug('Checking connection to %s:%s', server, port)
try:
server = socket.gethostbyname(server)
conn = socket.create_connection((server, port), timeout)
conn.close()
log.debug('Connection successful')
return True
except (socket.gaierror, socket.herror) as ex:
log.warning("Failed to connect to %s:%s. Unable to resolve domain. Trying to bypass DNS",
server, port)
try:
server = "8.8.8.8"
port = 53
socket.create_connection((server, port), timeout)
log.debug('Connection successful')
return True
except Exception as ex:
log.error("Failed to connect to %s:%s. Maybe the internet connection is not working",
server, port)
return False
except Exception as ex:
log.error("Failed to connect to %s:%s. Maybe the internet connection is not working",
server, port)
return False
def random_string(length=10, chars=string.ascii_lowercase):
return ''.join([random.choice(chars) for _ in range(length)])
def short_hash(hash_str):
return hash_str[:6]
def get_sd_hash(stream_info):
if not stream_info:
return None
if isinstance(stream_info, ClaimDict):
return stream_info.source_hash
result = stream_info.get('claim', {}).\
get('value', {}).\
get('stream', {}).\
get('source', {}).\
get('source')
if not result:
log.warning("Unable to get sd_hash")
return result
def json_dumps_pretty(obj, **kwargs):
return json.dumps(obj, sort_keys=True, indent=2, separators=(',', ': '), **kwargs)
class DeferredLockContextManager:
def __init__(self, lock):
self._lock = lock
def __enter__(self):
yield self._lock.acquire()
def __exit__(self, exc_type, exc_val, exc_tb):
yield self._lock.release()
@defer.inlineCallbacks
def DeferredDict(d, consumeErrors=False):
keys = []
dl = []
response = {}
for k, v in d.items():
keys.append(k)
dl.append(v)
results = yield defer.DeferredList(dl, consumeErrors=consumeErrors)
for k, (success, result) in zip(keys, results):
if success:
response[k] = result
defer.returnValue(response)
class DeferredProfiler:
def __init__(self):
self.profile_results = {}
def add_result(self, fn, start_time, finished_time, stack, success):
self.profile_results[fn].append((start_time, finished_time, stack, success))
def show_profile_results(self, fn):
profile_results = list(self.profile_results[fn])
call_counts = {
caller: [(start, finished, finished - start, success)
for (start, finished, _caller, success) in profile_results
if _caller == caller]
for caller in {result[2] for result in profile_results}
}
log.info("called %s %i times from %i sources\n", fn.__name__, len(profile_results), len(call_counts))
for caller in sorted(list(call_counts.keys()), key=lambda c: len(call_counts[c]), reverse=True):
call_info = call_counts[caller]
times = [r[2] for r in call_info]
own_time = sum(times)
times.sort()
longest = 0 if not times else times[-1]
shortest = 0 if not times else times[0]
log.info(
"%i successes and %i failures\nlongest %f, shortest %f, avg %f\ncaller:\n%s",
len([r for r in call_info if r[3]]),
len([r for r in call_info if not r[3]]),
longest, shortest, own_time / float(len(call_info)), caller
)
def profiled_deferred(self, reactor=None):
if not reactor:
from twisted.internet import reactor
def _cb(result, fn, start, caller_info):
got_error = isinstance(result, (Failure, Exception))
self.add_result(fn, start, reactor.seconds(), caller_info, not got_error)
if got_error:
raise result
else:
return result
def _profiled_deferred(fn):
reactor.addSystemEventTrigger("after", "shutdown", self.show_profile_results, fn)
self.profile_results[fn] = []
@functools.wraps(fn)
def _wrapper(*args, **kwargs):
caller_info = "".join(traceback.format_list(traceback.extract_stack()[-3:-1]))
start = reactor.seconds()
d = defer.maybeDeferred(fn, *args, **kwargs)
d.addBoth(_cb, fn, start, caller_info)
return d
return _wrapper
return _profiled_deferred
_profiler = DeferredProfiler()
profile_deferred = _profiler.profiled_deferred
| 30.530435 | 109 | 0.638992 |
7955a45dbfb9cfbccc5bc5e47aebdada6818b1fd | 6,400 | py | Python | python/runSlomo.py | lotress/MoePhoto | 6f47515d2cf236773a46413f57839565fa665796 | [
"Apache-2.0"
] | 9 | 2019-03-27T15:01:11.000Z | 2021-02-23T15:21:57.000Z | python/runSlomo.py | lotress/MoePhoto | 6f47515d2cf236773a46413f57839565fa665796 | [
"Apache-2.0"
] | null | null | null | python/runSlomo.py | lotress/MoePhoto | 6f47515d2cf236773a46413f57839565fa665796 | [
"Apache-2.0"
] | 3 | 2019-03-27T15:01:31.000Z | 2021-01-26T03:18:09.000Z | '''
super slomo
code refered from https://github.com/avinashpaliwal/Super-SloMo.git
'''
# pylint: disable=E1101
import logging
import torch
from slomo import UNet, backWarp
from imageProcess import initModel, getStateDict, getPadBy32, doCrop, identity, Option, extend
from config import config
log = logging.getLogger('Moe')
modelPath = './model/slomo/SuperSloMo.ckpt'
# NOTE(review): RefTime and WindowSize appear unused in this module — confirm
# before removing.
RefTime = 2
WindowSize = 2
# Flat table of RAM coefficients, indexed per runtime type and per module in
# getOptS (ramCoef[runType * len(modules) + i]).
ramCoef = [.95 / x for x in (8100., 2484., 8100., 2466., 4014., 1080.)]
# Factories for the two UNet stages and the backwarp module.
getFlowComp = lambda *_: UNet(6, 4)
getFlowIntrp = lambda *_: UNet(20, 5)
getFlowBack = lambda opt: backWarp(opt.width, opt.height, config.device(), config.dtype())
# Largest batch that fits into currently free memory for a given per-item load.
getBatchSize = lambda load, ramCoef: max(1, int((config.calcFreeMem() / load) * ramCoef))
# Module specs: weight key in the checkpoint, constructor, and output-shape
# template (batch, C, H-factor, W-factor) consumed by setOutShape.
modules = dict(
  flowComp={'weight': 'state_dictFC', 'f': getFlowComp, 'outShape': (1, 4, 1, 1)},
  ArbTimeFlowIntrp={'weight': 'state_dictAT', 'f': getFlowIntrp, 'outShape': (1, 5, 1, 1)})
def newOpt(func, ramCoef, align=32, padding=45, scale=1, **_):
  """Wrap a cached model callable in an Option carrying its tiling params.

  Extra keyword arguments are accepted and ignored so module specs can be
  splatted in directly.
  """
  opt = Option()
  opt.modelCached = func
  for attr, value in (('ramCoef', ramCoef), ('align', align),
                      ('padding', padding), ('scale', scale),
                      ('squeeze', identity), ('unsqueeze', identity)):
    setattr(opt, attr, value)
  return opt
def getOptS(modelPath, modules, ramCoef):
  """Load checkpoint weights and build an Option holding every sub-module.

  *modules* maps module name -> spec dict (weight key, optional constructor
  'f', optional outShape/ramCoef/align/padding/scale).  *ramCoef* is the flat
  per-runtime coefficient table.
  """
  opt = Option(modelPath)
  weights = getStateDict(modelPath)
  opt.modules = modules
  # Offset into the flat ramCoef table for the current runtime (CPU/GPU/...).
  opt.ramOffset = config.getRunType() * len(modules)
  for i, key in enumerate(modules):
    m = modules[key]
    wKey = m['weight']
    constructor = m.get('f', 0)
    # A per-module 'ramCoef' override wins over the global table.
    rc = m['ramCoef'][config.getRunType()] if 'ramCoef' in m else ramCoef[opt.ramOffset + i]
    o = dict((k, m[k]) for k in ('align', 'padding', 'scale') if k in m)
    model = initModel(opt, weights[wKey], key, constructor)
    if 'outShape' in m:
      # Modules with a declared output shape get their own Option wrapper.
      opt.__dict__[key] = newOpt(model, rc, **o)
    else:
      model.ramCoef = rc
      opt.__dict__[key] = model
  return opt
def setOutShape(opt, height, width):
  """Compute batch size and output shape for every module in *opt* given the
  input spatial size (height, width)."""
  load = width * height
  od = opt.__dict__
  for key, o in opt.modules.items():
    batchSize = opt.bf(load, od[key].ramCoef)
    if 'outShape' in o:
      q = o['outShape']
      # Scale the template's trailing (H, W) factors by the input size.
      od[key].outShape = [batchSize, *q[1:-2], int(height * q[-2]), int(width * q[-1])]
      if 'staticDims' in o:
        # Some dimensions stay fixed regardless of the input size.
        for i in o['staticDims']:
          od[key].outShape[i] = q[i]
    if 'streams' in o and (not 0 in o.get('staticDims', {})):
      # Propagate the computed batch size to dependent stream generators
      # (skipped when the batch dimension is declared static).
      for name in o['streams']:
        od[name].send((None, batchSize))
  return opt
def getOptP(opt, bf=getBatchSize):
  """Initialise streaming-state counters on *opt* and attach the
  batch-size function *bf*; returns the same Option for chaining."""
  opt.startPadding, opt.i, opt.bf = 0, 0, bf
  return opt
def extendRes(res, item):
    """Merge *item* into list *res*: a list (exact type) is spliced in,
    None is dropped, anything else is appended.  Always returns None."""
    if type(item) == list:  # deliberate exact-type check, not isinstance
        res.extend(item)
    elif item is not None:
        res.append(item)
def makeStreamFunc(func, node, opt, nodes, name, padStates, initFunc, pushFunc):
  """Build the per-frame driver for a streaming pipeline.

  The returned closure feeds one frame at a time (or None to signal the end
  of input), manages temporal padding state, and collects whatever the
  pipeline generator ``opt.out`` emits, post-processed by *func*.
  """
  for n in nodes:
    node.append(n)
  def f(x):
    node.reset()
    node.trace(0, p='{} start'.format(name))
    if not opt.i:
      # First frame: derive module output shapes from the incoming frame.
      setOutShape(opt, *initFunc(opt, x))
    if opt.end:
      for s in padStates:
        s.setPadding(opt.end)
      opt.end = 0
    if opt.start:
      opt.startPadding = opt.start
      for s in padStates:
        s.setPadding(opt.start)
      opt.start = 0
    # x is None marks end-of-stream; ``last`` doubles as the generator flag.
    last = True if x is None else None
    if not last:
      pushFunc(opt.pad(x.unsqueeze(0)))
      opt.i += 1
    out = []
    extend(out, opt.out.send(last))
    node.trace()
    while last:
      # Input exhausted: drain the pipeline until the generator stops.
      try:
        extend(out, opt.out.send(last))
      except StopIteration: break
    res = []
    for item in out:
      extendRes(res, func(opt.unpad(item)))
    return res
  return f
def getOpt(option):
  """Build the slomo Option from the user *option* dict.

  Requires option['sf'] (the slow-motion factor, i.e. output frames per
  input frame pair) to be at least 2.
  """
  opt = getOptS(modelPath, modules, ramCoef)
  opt.flowBackWarp = None  # created lazily once the frame size is known
  opt.outStart = 0
  opt.batchSize = 0
  opt.sf = option['sf']
  opt.bf = getBatchSize
  if opt.sf < 2:
    raise RuntimeError('Error: --sf/slomo factor has to be at least 2')
  return opt
def doSlomo(func, node, opt):
  # Temporary fix for issue #7 https://github.com/avinashpaliwal/Super-SloMo/issues/7 -
  # - Removed per channel mean subtraction for CPU.
  def f(data):
    """Interpolate ``opt.sf - 1`` intermediate frames for every (frame0,
    frame1) pair in *data* and feed every output frame through *func*.

    *data* is a batch of frame pairs; returns the accumulated results of
    *func* over the interleaved output sequence.
    """
    node.reset()
    node.trace(0, p='slomo start')
    batchSize = len(data)
    if not batchSize or len(data[0]) < 2:
      return
    if opt.flowBackWarp is None:
      # Lazy init: pad helpers and the backwarp module need the frame size.
      width, height, opt.pad, opt.unpad = getPadBy32(data[0][0], opt)
      opt.width = width
      opt.height = height
      opt.flowBackWarp = initModel(opt, None, None, getFlowBack)
      setOutShape(opt, height, width)
      opt.batchSize = opt.flowComp.outShape[0]
      log.info('Slomo batch size={}'.format(opt.batchSize))
    flowBackWarp = opt.flowBackWarp
    opt.flowComp.outShape[0] = batchSize
    opt.ArbTimeFlowIntrp.outShape[0] = batchSize
    sf = opt.sf
    # Output slots: sf slots per input pair plus the very first reference frame.
    tempOut = [0 for _ in range(batchSize * sf + 1)]
    # Save reference frames
    tempOut[0] = data[0][0]
    for i, frames in enumerate(data):
      tempOut[(i + 1) * sf] = frames[1]
    # Load data
    I0 = opt.pad(torch.stack([frames[0] for frames in data]))
    I1 = opt.pad(torch.stack([frames[1] for frames in data]))
    # Bidirectional optical flow between the two reference frames.
    flowOut = doCrop(opt.flowComp, torch.cat((I0, I1), dim=1))
    F_0_1 = flowOut[:,:2,:,:]
    F_1_0 = flowOut[:,2:,:,:]
    node.trace()
    # Generate intermediate frames
    for intermediateIndex in range(1, sf):
      t = intermediateIndex / sf
      temp = -t * (1 - t)
      fCoeff = (temp, t * t, (1 - t) * (1 - t), temp)
      wCoeff = (1 - t, t)
      # Approximate flows from time t to each reference frame.
      F_t_0 = fCoeff[0] * F_0_1 + fCoeff[1] * F_1_0
      F_t_1 = fCoeff[2] * F_0_1 + fCoeff[3] * F_1_0
      g_I0_F_t_0 = flowBackWarp(I0, F_t_0)
      g_I1_F_t_1 = flowBackWarp(I1, F_t_1)
      # Refine flows and predict the visibility map.
      intrpOut = doCrop(opt.ArbTimeFlowIntrp, torch.cat((I0, I1, F_0_1, F_1_0, F_t_1, F_t_0, g_I1_F_t_1, g_I0_F_t_0), dim=1))
      F_t_0_f = intrpOut[:, :2, :, :] + F_t_0
      F_t_1_f = intrpOut[:, 2:4, :, :] + F_t_1
      V_t_0 = torch.sigmoid(intrpOut[:, 4:5, :, :])
      V_t_1 = 1 - V_t_0
      g_I0_F_t_0_f = flowBackWarp(I0, F_t_0_f)
      g_I1_F_t_1_f = flowBackWarp(I1, F_t_1_f)
      # Visibility-weighted blend of the two warped predictions.
      Ft_p = (wCoeff[0] * V_t_0 * g_I0_F_t_0_f + wCoeff[1] * V_t_1 * g_I1_F_t_1_f) / (wCoeff[0] * V_t_0 + wCoeff[1] * V_t_1)
      # Save intermediate frame
      for i in range(batchSize):
        tempOut[intermediateIndex + i * sf] = opt.unpad(Ft_p[i].detach())
      node.trace()
    # NOTE(review): ``data is None`` can never be true here — ``len(data)``
    # above would already have raised.  The outEnd trimming looks dead;
    # confirm intended trigger.
    if data is None and opt.outEnd:
      tempOut = tempOut[:opt.outEnd]
      opt.outEnd = 0
    res = []
    for item in tempOut[opt.outStart:]:
      extendRes(res, func(item))
    opt.outStart = max(0, opt.outStart - len(tempOut))
    return res
  return f
7955a5217d15029178525b234480b6f3e7e11385 | 6,212 | py | Python | Athos/TFCompiler/ProcessTFGraph.py | shahakash28/EzPC-Aligned | 1887e5be2b6476ded285c15e134a7de92121a480 | [
"MIT"
] | null | null | null | Athos/TFCompiler/ProcessTFGraph.py | shahakash28/EzPC-Aligned | 1887e5be2b6476ded285c15e134a7de92121a480 | [
"MIT"
] | null | null | null | Athos/TFCompiler/ProcessTFGraph.py | shahakash28/EzPC-Aligned | 1887e5be2b6476ded285c15e134a7de92121a480 | [
"MIT"
] | null | null | null | '''
Authors: Nishant Kumar.
Copyright:
Copyright (c) 2020 Microsoft Research
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
'''
import os, sys
sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'SeeDot')) #Add SeeDot directory to path
import Graph, AST.AST as AST, _pickle as pickle, os
from TFNodesAST import TFNodesAST
from AST.PrintAST import PrintAST
from AST.MtdAST import MtdAST
def checkTFNodeNameForEq(curNodeOp:str, givenOp:str):
    """Return True iff *curNodeOp* equals *givenOp* wrapped in double quotes
    (graph-dump op names carry literal surrounding quotes)."""
    quoted = '"' + givenOp + '"'
    return curNodeOp == quoted
def generateASTForNode(graph, curNode, dictNodeNameToOutVarStr, extraNodeInfoDict):
    """Dispatch *curNode* to its TFNodesAST handler by op name and return the
    handler's (assignedVarAST, curAST) pair."""
    opName = curNode.getOp()[1:-1]  # strip the surrounding quote characters
    handler = getattr(TFNodesAST, opName)
    (assignedVarAST, curAST) = handler(graph, curNode, dictNodeNameToOutVarStr, extraNodeInfoDict)
    return (assignedVarAST, curAST)
#Takes the graph DS and outputs IR in SeeDot for the same
def generateIRCode(graph, extraInfoDict):
    """Fold the topologically sorted graph into one nested AST.Let chain.

    Returns (program, dictNodeNameToOutVarStr), where the dict maps each TF
    node name to the SeeDot variable ("J0", "J1", ...) holding its output,
    or None for nodes that produce no AST.
    """
    program = None
    innerMostLetASTNode = None
    dictNodeNameToOutVarStr = {}
    outVarCt = 0
    outVarPrefix = "J"
    mtdAST = MtdAST()
    for curNode in graph.getAllNodesRef():
        for curInp in curNode.getInputsRef():
            assert(curInp in dictNodeNameToOutVarStr) #Consequence of topological sorting of the TF graph
        (assignedVarAST, curAst) = generateASTForNode(graph, curNode, dictNodeNameToOutVarStr, extraInfoDict)

        # Metadata (TF op and node name) is attached to every emitted AST node.
        mtdForCurAST = {AST.ASTNode.mtdKeyTFOpName : curNode.getOp()[1:-1],
                        AST.ASTNode.mtdKeyTFNodeName : curNode.getName()[1:-1]}

        if (curAst is None):
            # Node produces no AST; record None so inputs still resolve.
            dictNodeNameToOutVarStr[curNode.getName()] = None
            continue
        curOutVarStr = outVarPrefix + str(outVarCt)
        curOutVarAstNode = (assignedVarAST if assignedVarAST else AST.ID(curOutVarStr))
        if program:
            assert(type(innerMostLetASTNode) is AST.Let)
            # Splice a new Let into the innermost expression slot of the chain.
            newNode = AST.Let(curOutVarAstNode, curAst, curOutVarAstNode)
            mtdAST.visit(newNode, mtdForCurAST)
            innerMostLetASTNode.expr = newNode
            innerMostLetASTNode = newNode
        else:
            # First emitted node becomes the root of the program.
            innerMostLetASTNode = AST.Let(AST.ID(curOutVarStr), curAst, curOutVarAstNode)
            mtdAST.visit(innerMostLetASTNode, mtdForCurAST)
            innerMostLetASTNode.depth = 0
            program = innerMostLetASTNode
        dictNodeNameToOutVarStr[curNode.getName()] = curOutVarStr
        outVarCt += 1
    return (program, dictNodeNameToOutVarStr)
def readSizeInfo(fileName):
    """Parse a sizeInfo.mtdata file: each line is ``<nodeName> [dim ...]``.

    Returns a dict mapping node name to its list of integer dimensions;
    a node listed with no dimensions gets the scalar shape [1].
    """
    sizeInfo = {}
    with open(fileName) as f:
        for line in f:
            tokens = line.split()
            nodeName = tokens[0]
            dims = tokens[1:]
            sizeInfo[nodeName] = [int(d) for d in dims] if dims else [1]
    return sizeInfo
def prefixAllPlaceHolderNodes(graph):
    """Reorder the graph so every Placeholder/VariableV2 node comes first.

    Placeholder nodes later become cin statements that must be excluded from
    timing; grouping them first keeps that simple.  Topological order is not
    violated because such nodes are leaves (asserted below).
    """
    leading = []
    trailing = []
    for node in graph.getAllNodesRef():
        if node.getOp() in ("\"Placeholder\"", "\"VariableV2\""):
            # Must be a leaf node for the reordering to be safe.
            assert not node.getInputsRef()
            leading.append(node)
        else:
            trailing.append(node)
    graph.setNodesList(leading + trailing)
def main():
    """Driver: load the dumped TF graph and size metadata from the folder of
    the given TF script, convert to a SeeDot AST, and pickle the result."""
    sys.setrecursionlimit(10000)

    # First read the graph file
    if (len(sys.argv) < 2):
        print("TF python file unspecified.", file=sys.stderr)
        exit(1)

    filename = sys.argv[1]
    folderName = os.path.dirname(filename)
    graphFileName = os.path.join(folderName, 'graphDef.mtdata')
    graph = Graph.Graph()
    with open(graphFileName) as file:
        graph.readFromFilePointer(file)

    # Read the sizeInfo also
    sizeInfoFileName = os.path.join(folderName, 'sizeInfo.mtdata')
    sizeInfo = readSizeInfo(sizeInfoFileName)

    # Place all PlaceHolder nodes together at the beginning
    prefixAllPlaceHolderNodes(graph)

    # Re-format the input names of nodes
    for curNode in graph.getAllNodesRef():
        inputsRef = curNode.getInputsRef()
        for i,curInput in enumerate(inputsRef):
            if (curInput.startswith('"^')):
                # My hypothesis from empirical observation is that inputs which have '^' ahead of the node name
                # denote control flow dependency and not data dependency.
                # For all purposes for this compilation, control and data dependency is considered same.
                # The reasoning being that everything is serial -- and graph execution is done in a
                # a topological sort.
                inputsRef[i] = '"' + curInput.split('^')[-1]

    # Create extra info dict
    # Format : (sizeInfo)
    extraInfoDict = {}
    for k,v in sizeInfo.items():
        # Node names in the graph carry literal surrounding quotes.
        extraInfoDict["\"" + k + "\""] = (v,)
    for curNode in graph.getAllNodesRef():
        if (curNode.getName() not in extraInfoDict):
            extraInfoDict[curNode.getName()] = (None,)

    print("Generating code from TF graph def : ", graphFileName, " ...")
    (program, dictNodeNameToOutVarStr) = generateIRCode(graph, extraInfoDict)

    print("SeeDot AST generation done. Pickling the AST.")
    with open(os.path.join(folderName, 'astOutput.pkl'), 'wb') as f:
        pickle.dump(program, f)
# Script entry point.
if __name__ == "__main__":
    main()
| 37.197605 | 103 | 0.748712 |
7955a64a5fada8d0b9d801437d6666a9d409cf56 | 855 | py | Python | CDD/Lista/18.py | isisestevan/ATIVIDADES-UIT-1-PERIODO | 3274d4ee39349eaa54d333999048b75d33b76b30 | [
"MIT"
] | null | null | null | CDD/Lista/18.py | isisestevan/ATIVIDADES-UIT-1-PERIODO | 3274d4ee39349eaa54d333999048b75d33b76b30 | [
"MIT"
] | null | null | null | CDD/Lista/18.py | isisestevan/ATIVIDADES-UIT-1-PERIODO | 3274d4ee39349eaa54d333999048b75d33b76b30 | [
"MIT"
] | null | null | null | n=int(input("Digite o tamanho da lista"))
# Read n product entries of the form "<name><price>" (e.g. "ovo-7"), sort
# them, then separate product characters from price digits.
lista = []
print("Digite o produto e seu preço respectivamente")  # enter name with the price right after, e.g. "ovo-7"
for i in range(n):
    lista.append(input())

# Sort the entries alphabetically.
lista.sort()
print("Lista Ordenada:", lista)

# Join every entry into a single string for character-wise processing.
Stringlista = " ".join(lista)

# Fix: removed a dead ``Stringlista.split()`` statement — str.split does not
# mutate and its result was discarded, so the call had no effect.  Also
# removed the dead ``listaProduto = []`` store that was immediately rebound
# to a string below.

# Collect every digit character as a "price" entry.
# TODO(review): a multi-digit price is split into individual digit characters
# here — confirm whether whole prices were intended.
listaPreco = []
for caractere in Stringlista:
    if caractere.isdigit():
        listaPreco.append(caractere)

# Every non-digit character forms the product string.
listaProduto = ''.join(x for x in Stringlista if not x.isdigit())
print("Lista Produtos:", listaProduto)
print("Lista preço:", listaPreco)
| 31.666667 | 135 | 0.74386 |
7955a7140be55801c72fcc0a5c9fda6c4e44f88e | 6,805 | py | Python | conda/gateways/connection/session.py | areading314/conda | c9aa50360af308048f57bc2d1c9ae5707e057e3b | [
"BSD-3-Clause"
] | 1 | 2021-03-19T23:50:53.000Z | 2021-03-19T23:50:53.000Z | conda/gateways/connection/session.py | areading314/conda | c9aa50360af308048f57bc2d1c9ae5707e057e3b | [
"BSD-3-Clause"
] | null | null | null | conda/gateways/connection/session.py | areading314/conda | c9aa50360af308048f57bc2d1c9ae5707e057e3b | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright (C) 2012 Anaconda, Inc
# SPDX-License-Identifier: BSD-3-Clause
from __future__ import absolute_import, division, print_function, unicode_literals
from logging import getLogger
from threading import local
from . import (AuthBase, BaseAdapter, HTTPAdapter, Session, _basic_auth_str,
extract_cookies_to_jar, get_auth_from_url, get_netrc_auth)
from .adapters.ftp import FTPAdapter
from .adapters.localfs import LocalFSAdapter
from .adapters.s3 import S3Adapter
from ..anaconda_client import read_binstar_tokens
from ..._vendor.auxlib.ish import dals
from ...base.constants import CONDA_HOMEPAGE_URL
from ...base.context import context
from ...common.compat import iteritems, with_metaclass
from ...common.url import (add_username_and_password, get_proxy_username_and_pass,
split_anaconda_token, urlparse)
from ...exceptions import ProxyError
log = getLogger(__name__)
# NOTE(review): RETRIES appears unused in this chunk; retry counts below come
# from context.remote_max_retries — confirm before removing.
RETRIES = 3
class EnforceUnusedAdapter(BaseAdapter):
    """Adapter mounted on remote schemes in offline mode: any attempt to send
    a request through it is a programming error and raises immediately."""

    def send(self, request, *args, **kwargs):
        # Reaching this point means network access was attempted while conda
        # is in offline mode; fail loudly with the offending URL.
        message = dals("""
        EnforceUnusedAdapter called with url %s
        This command is using a remote connection in offline mode.
        """ % request.url)
        raise RuntimeError(message)

    def close(self):
        raise NotImplementedError()
class CondaSessionType(type):
    """
    Takes advice from https://github.com/requests/requests/issues/1871#issuecomment-33327847
    and creates one Session instance per thread.
    """

    def __new__(mcs, name, bases, dct):
        # Each class built with this metaclass gets its own thread-local
        # storage slot to hold the per-thread session.
        dct['_thread_local'] = local()
        return super(CondaSessionType, mcs).__new__(mcs, name, bases, dct)

    def __call__(cls):
        # Instantiating the class returns the calling thread's cached session,
        # creating it on first use.
        try:
            return cls._thread_local.session
        except AttributeError:
            session = cls._thread_local.session = super(CondaSessionType, cls).__call__()
            return session
@with_metaclass(CondaSessionType)
class CondaSession(Session):
    """Requests session pre-configured from the conda context: auth hook,
    proxies, retries, SSL settings, user agent, and per-scheme transport
    adapters.  One instance is cached per thread (see CondaSessionType)."""

    def __init__(self):
        super(CondaSession, self).__init__()

        self.auth = CondaHttpAuth()  # TODO: should this just be for certain protocol adapters?

        proxies = context.proxy_servers
        if proxies:
            self.proxies = proxies

        if context.offline:
            # Offline mode: every remote scheme gets an adapter that raises
            # on use, so accidental network access fails loudly.
            unused_adapter = EnforceUnusedAdapter()
            self.mount("http://", unused_adapter)
            self.mount("https://", unused_adapter)
            self.mount("ftp://", unused_adapter)
            self.mount("s3://", unused_adapter)
        else:
            # Configure retries
            http_adapter = HTTPAdapter(max_retries=context.remote_max_retries)
            self.mount("http://", http_adapter)
            self.mount("https://", http_adapter)
            self.mount("ftp://", FTPAdapter())
            self.mount("s3://", S3Adapter())

        # Local files are always reachable, offline or not.
        self.mount("file://", LocalFSAdapter())

        self.headers['User-Agent'] = context.user_agent

        self.verify = context.ssl_verify

        if context.client_ssl_cert_key:
            self.cert = (context.client_ssl_cert, context.client_ssl_cert_key)
        elif context.client_ssl_cert:
            self.cert = context.client_ssl_cert
class CondaHttpAuth(AuthBase):
    """Per-request auth hook: injects Anaconda.org tokens and basic auth into
    outgoing URLs and transparently handles HTTP 407 proxy authentication."""
    # TODO: make this class thread-safe by adding some of the requests.auth.HTTPDigestAuth() code

    def __call__(self, request):
        request.url = CondaHttpAuth.add_binstar_token(request.url)
        self._apply_basic_auth(request)
        # Arm the proxy-authentication (407) handler for this request.
        request.register_hook('response', self.handle_407)
        return request

    @staticmethod
    def _apply_basic_auth(request):
        # this logic duplicated from Session.prepare_request and PreparedRequest.prepare_auth
        url_auth = get_auth_from_url(request.url)
        auth = url_auth if any(url_auth) else None

        if auth is None:
            # look for auth information in a .netrc file
            auth = get_netrc_auth(request.url)

        if isinstance(auth, tuple) and len(auth) == 2:
            request.headers['Authorization'] = _basic_auth_str(*auth)

        return request

    @staticmethod
    def add_binstar_token(url):
        """Return *url* with a stored Anaconda.org token inserted when one of
        the saved token URLs prefixes it and token injection is enabled."""
        clean_url, token = split_anaconda_token(url)
        if not token and context.add_anaconda_token:
            for binstar_url, token in iteritems(read_binstar_tokens()):
                if clean_url.startswith(binstar_url):
                    log.debug("Adding anaconda token for url <%s>", clean_url)
                    from ...models.channel import Channel
                    channel = Channel(clean_url)
                    channel.token = token
                    return channel.url(with_credentials=True)
        return url

    @staticmethod
    def handle_407(response, **kwargs):   # pragma: no cover
        """
        Prompts the user for the proxy username and password and modifies the
        proxy in the session object to include it.

        This method is modeled after
          * requests.auth.HTTPDigestAuth.handle_401()
          * requests.auth.HTTPProxyAuth
          * the previous conda.fetch.handle_proxy_407()

        It both adds 'username:password' to the proxy URL, as well as adding a
        'Proxy-Authorization' header.  If any of this is incorrect, please file an issue.
        """
        # kwargs = {'verify': True, 'cert': None, 'proxies': OrderedDict(), 'stream': False,
        #           'timeout': (3.05, 60)}

        if response.status_code != 407:
            return response

        # Consume content and release the original connection
        # to allow our new request to reuse the same one.
        response.content
        response.close()

        proxies = kwargs.pop('proxies')

        proxy_scheme = urlparse(response.url).scheme
        if proxy_scheme not in proxies:
            raise ProxyError(dals("""
            Could not find a proxy for %r. See
            %s/docs/html#configure-conda-for-use-behind-a-proxy-server
            for more information on how to configure proxies.
            """ % (proxy_scheme, CONDA_HOMEPAGE_URL)))

        # fix-up proxy_url with username & password
        proxy_url = proxies[proxy_scheme]
        username, password = get_proxy_username_and_pass(proxy_scheme)
        proxy_url = add_username_and_password(proxy_url, username, password)
        proxy_authorization_header = _basic_auth_str(username, password)
        proxies[proxy_scheme] = proxy_url
        kwargs['proxies'] = proxies

        # Rebuild the original request with the authorization header attached.
        prep = response.request.copy()
        extract_cookies_to_jar(prep._cookies, response.request, response.raw)
        prep.prepare_cookies(prep._cookies)
        prep.headers['Proxy-Authorization'] = proxy_authorization_header

        # Resend and keep the 407 response in the history chain.
        _response = response.connection.send(prep, **kwargs)
        _response.history.append(response)
        _response.request = prep

        return _response
| 36.196809 | 97 | 0.658339 |
7955a8601309bd45485e3bacf7ccdba4879eda92 | 2,073 | py | Python | moto/databrew/responses.py | symroe/moto | 4e106995af6f2820273528fca8a4e9ee288690a5 | [
"Apache-2.0"
] | null | null | null | moto/databrew/responses.py | symroe/moto | 4e106995af6f2820273528fca8a4e9ee288690a5 | [
"Apache-2.0"
] | null | null | null | moto/databrew/responses.py | symroe/moto | 4e106995af6f2820273528fca8a4e9ee288690a5 | [
"Apache-2.0"
] | null | null | null | import json
from urllib.parse import urlparse
from moto.core.responses import BaseResponse
from moto.core.utils import amzn_request_id
from .models import databrew_backends
class DataBrewResponse(BaseResponse):
    """Moto HTTP responses for the AWS Glue DataBrew recipe API."""

    SERVICE_NAME = "databrew"

    @property
    def databrew_backend(self):
        """Return backend instance specific for this region."""
        return databrew_backends[self.region]

    @property
    def parameters(self):
        # Request bodies for this service are JSON documents.
        return json.loads(self.body)

    @amzn_request_id
    def create_recipe(self):
        # https://docs.aws.amazon.com/databrew/latest/dg/API_CreateRecipe.html
        recipe_description = self.parameters.get("Description")
        recipe_steps = self.parameters.get("Steps")
        recipe_name = self.parameters.get("Name")
        tags = self.parameters.get("Tags")
        return json.dumps(
            self.databrew_backend.create_recipe(
                recipe_name, recipe_description, recipe_steps, tags
            ).as_dict()
        )

    @amzn_request_id
    def list_recipes(self):
        # https://docs.aws.amazon.com/databrew/latest/dg/API_ListRecipes.html
        # Accept both parameter casings used by different SDKs.
        next_token = self._get_param("NextToken", self._get_param("nextToken"))
        max_results = self._get_int_param(
            "MaxResults", self._get_int_param("maxResults")
        )

        # pylint: disable=unexpected-keyword-arg, unbalanced-tuple-unpacking
        recipe_list, next_token = self.databrew_backend.list_recipes(
            next_token=next_token, max_results=max_results
        )
        return json.dumps(
            {
                "Recipes": [recipe.as_dict() for recipe in recipe_list],
                "NextToken": next_token,
            }
        )

    @amzn_request_id
    def describe_recipe_response(self, request, full_url, headers):
        # The recipe name is the last path segment of the request URL.
        self.setup_class(request, full_url, headers)
        parsed_url = urlparse(full_url)
        recipe_name = parsed_url.path.rstrip("/").rsplit("/", 1)[1]
        recipe = self.databrew_backend.get_recipe(recipe_name)
        return json.dumps(recipe.as_dict())
| 33.435484 | 79 | 0.66136 |
7955a8f8d30526f4a0465d615b91f123b24e364f | 6,120 | py | Python | app.py | githubwyj/ueditor-for-python | 047db4b4d1aff643476d93cb5d13560f73fba2a4 | [
"Apache-2.0"
] | null | null | null | app.py | githubwyj/ueditor-for-python | 047db4b4d1aff643476d93cb5d13560f73fba2a4 | [
"Apache-2.0"
] | 2 | 2020-03-12T10:44:53.000Z | 2020-03-12T10:44:54.000Z | app.py | githubwyj/ueditor-for-python | 047db4b4d1aff643476d93cb5d13560f73fba2a4 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
import os
import re
import json
from flask import Flask, request, render_template, url_for, make_response
from uploader import Uploader
app = Flask(__name__)


@app.route('/')
def index():
    # Demo page embedding the UEditor rich-text editor.
    return render_template('index.html')
@app.route('/upload/', methods=['GET', 'POST', 'OPTIONS'])
def upload():
    """UEditor server-side upload endpoint.

    Dispatches on the ``action`` query parameter: config handshake,
    image/file/video upload, scrawl (base64 doodle) upload, remote image
    capture, and image/file listing.  Responds with JSON, or JSONP when a
    valid ``callback`` parameter is present.
    """
    mimetype = 'application/json'
    result = {}
    action = request.args.get('action')

    # Parse the JSON-with-comments config file shipped with UEditor.
    with open(os.path.join(app.static_folder, 'ueditor', 'php',
                           'config.json')) as fp:
        try:
            # Strip /* ... */ comments before parsing as JSON.
            CONFIG = json.loads(re.sub(r'\/\*.*\*\/', '', fp.read()))
        except:
            # NOTE(review): bare except silently turns a broken config into {}.
            CONFIG = {}

    if action == 'config':
        # Initial handshake: return the whole config to the editor.
        result = CONFIG

    elif action in ('uploadimage', 'uploadfile', 'uploadvideo'):
        # Image / file / video upload: pick the field name and limits
        # matching the requested action.
        if action == 'uploadimage':
            fieldName = CONFIG.get('imageFieldName')
            config = {
                "pathFormat": CONFIG['imagePathFormat'],
                "maxSize": CONFIG['imageMaxSize'],
                "allowFiles": CONFIG['imageAllowFiles']
            }
        elif action == 'uploadvideo':
            fieldName = CONFIG.get('videoFieldName')
            config = {
                "pathFormat": CONFIG['videoPathFormat'],
                "maxSize": CONFIG['videoMaxSize'],
                "allowFiles": CONFIG['videoAllowFiles']
            }
        else:
            fieldName = CONFIG.get('fileFieldName')
            config = {
                "pathFormat": CONFIG['filePathFormat'],
                "maxSize": CONFIG['fileMaxSize'],
                "allowFiles": CONFIG['fileAllowFiles']
            }

        if fieldName in request.files:
            field = request.files[fieldName]
            uploader = Uploader(field, config, app.static_folder)
            result = uploader.getFileInfo()
        else:
            result['state'] = '上传接口出错'

    elif action in ('uploadscrawl'):
        # Scrawl (doodle) upload: the image arrives base64-encoded.
        # NOTE(review): ('uploadscrawl') is a plain string, not a tuple, so
        # this is a substring test — confirm ('uploadscrawl',) wasn't meant.
        fieldName = CONFIG.get('scrawlFieldName')
        config = {
            "pathFormat": CONFIG.get('scrawlPathFormat'),
            "maxSize": CONFIG.get('scrawlMaxSize'),
            "allowFiles": CONFIG.get('scrawlAllowFiles'),
            "oriName": "scrawl.png"
        }
        if fieldName in request.form:
            field = request.form[fieldName]
            uploader = Uploader(field, config, app.static_folder, 'base64')
            result = uploader.getFileInfo()
        else:
            result['state'] = '上传接口出错'

    elif action in ('catchimage'):
        # Remote image capture: download the submitted URLs server-side.
        config = {
            "pathFormat": CONFIG['catcherPathFormat'],
            "maxSize": CONFIG['catcherMaxSize'],
            "allowFiles": CONFIG['catcherAllowFiles'],
            "oriName": "remote.png"
        }
        fieldName = CONFIG['catcherFieldName']

        if fieldName in request.form:
            # Oddly, remote capture does not submit under this form name...
            source = []
        elif '%s[]' % fieldName in request.form:
            # ...but under this one.
            source = request.form.getlist('%s[]' % fieldName)

        _list = []
        for imgurl in source:
            uploader = Uploader(imgurl, config, app.static_folder, 'remote')
            info = uploader.getFileInfo()
            _list.append({
                'state': info['state'],
                'url': info['url'],
                'original': info['original'],
                'source': imgurl,
            })

        result['state'] = 'SUCCESS' if len(_list) > 0 else 'ERROR'
        result['list'] = _list

    elif action in ('listimage', 'listfile'):
        # Paginated listing of previously uploaded images or files.
        allowFiles = []
        listSize = 20
        path = ""
        if (action == 'listfile'):
            allowFiles = CONFIG['fileManagerAllowFiles']
            listSize = CONFIG['fileManagerListSize']
            path = CONFIG['fileManagerListPath']
        else:
            allowFiles = CONFIG['imageManagerAllowFiles']
            listSize = CONFIG['imageManagerListSize']
            path = CONFIG['imageManagerListPath']
        size = int(request.args.get('size', listSize))
        start = int(request.args.get('start', 0))
        end = start + size
        path = app.static_folder + path
        files = getfiles(app.root_path, path, allowFiles, [])
        lens = len(files)
        # reverse order (disabled)
        # files.reverse()
        # Walk the window [start, end) backwards so newer entries come first.
        i = min(end, lens) - 1
        # NOTE(review): ``list`` shadows the builtin here.
        list = []
        for index in range(len(files)):
            if (i < lens and i >= 0 and i >= start):
                list.append(files[i])
            i = i - 1
        files = []
        # min = min(end, lens)
        # list = files[:min(end, lens)]
        result["state"] = "SUCCESS"
        result["list"] = list
        result["start"] = start
        result["total"] = lens

    else:
        result['state'] = '请求地址出错'

    result = json.dumps(result)

    if 'callback' in request.args:
        # JSONP support; only word-character callback names are allowed.
        callback = request.args.get('callback')
        if re.match(r'^[\w_]+$', callback):
            result = '%s(%s)' % (callback, result)
            mimetype = 'application/javascript'
        else:
            result = json.dumps({'state': 'callback参数不合法'})

    res = make_response(result)
    res.mimetype = mimetype
    res.headers['Access-Control-Allow-Origin'] = '*'
    res.headers['Access-Control-Allow-Headers'] = 'X-Requested-With,X_Requested_With'
    return res
def getfiles(root_path, path, allowFiles, files):
    """Recursively collect files under *path* whose extension is in
    *allowFiles*, appending ``{'url', 'mtime'}`` dicts into *files*.

    The url is the file path relative to *root_path* with forward slashes.
    Returns *files*, or [] when *path* does not exist.
    """
    if not os.path.exists(path):
        return []
    for entry in os.listdir(path):
        full = os.path.join(path, entry)
        if os.path.isdir(full):
            # Recurse; the shared *files* list accumulates the results.
            getfiles(root_path, full, allowFiles, files)
            continue
        ext = os.path.splitext(full)[1]
        if ext not in allowFiles:
            continue
        files.append({
            'url': full.replace(root_path, '').replace("\\", "/"),
            'mtime': os.path.getmtime(full),
        })
    return files
if __name__ == '__main__':
    # NOTE(review): debug=True enables the Werkzeug debugger/reloader — do
    # not use in production.
    app.run(debug=True)
| 31.384615 | 88 | 0.526961 |
7955a97163ab84649b2e6990125fbb4a742b6d3e | 1,072 | py | Python | Auto_clic_react.py | titouan-joseph/AutoClicMessenger | e00f46c7d03f27deb7b7d9c2cc5758d8901227c7 | [
"MIT"
] | null | null | null | Auto_clic_react.py | titouan-joseph/AutoClicMessenger | e00f46c7d03f27deb7b7d9c2cc5758d8901227c7 | [
"MIT"
] | null | null | null | Auto_clic_react.py | titouan-joseph/AutoClicMessenger | e00f46c7d03f27deb7b7d9c2cc5758d8901227c7 | [
"MIT"
] | null | null | null | from selenium import webdriver
from selenium.webdriver.firefox.options import Options
import time
import credentials
if __name__ == '__main__':
    # Launch Firefox headless and muted; the geckodriver path is
    # machine-specific.
    options = Options()
    options.add_argument("--headless")
    profile = webdriver.FirefoxProfile()
    profile.set_preference("media.volume_scale", "0.0")
    driver = webdriver.Firefox(profile, executable_path="Z:\\Projet\\Coucou cliqueur\\geckodriver.exe", options=options)

    # Log into Messenger and open the target conversation.
    driver.get(f"https://www.messenger.com/t/{credentials.GROUPE}")
    driver.find_element_by_name("email").send_keys(credentials.USERNAME)
    driver.find_element_by_name("pass").send_keys(credentials.PASSWORD)
    driver.find_element_by_name("login").click()

    # Poll the conversation forever: when the last message's text differs
    # from credentials.NAME (presumably our own display name — confirm),
    # click the reaction button; otherwise wait longer before polling again.
    while True:
        try:
            if driver.find_elements_by_class_name("_ih3")[-1].text != credentials.NAME:
                driver.find_element_by_class_name("_5j_u").click()
                time.sleep(1)
                print("clic")
            else:
                print("un tour de boucle")
                time.sleep(5)
        except Exception:
            # Fix: the previous bare ``except:`` also swallowed
            # KeyboardInterrupt/SystemExit, making the loop impossible to
            # stop with Ctrl-C.  Only ordinary errors (stale elements,
            # missing nodes, ...) are ignored now, keeping the best-effort
            # behaviour.
            pass
| 34.580645 | 120 | 0.652985 |
7955a9d2bbc215504900b8236b80b6a75b95b7a8 | 1,544 | py | Python | usaspending_api/common/operations_reporter.py | gaybro8777/usaspending-api | fe9d730acd632401bbbefa168e3d86d59560314b | [
"CC0-1.0"
] | null | null | null | usaspending_api/common/operations_reporter.py | gaybro8777/usaspending-api | fe9d730acd632401bbbefa168e3d86d59560314b | [
"CC0-1.0"
] | null | null | null | usaspending_api/common/operations_reporter.py | gaybro8777/usaspending-api | fe9d730acd632401bbbefa168e3d86d59560314b | [
"CC0-1.0"
] | null | null | null | import json
class OpsReporter:
    """Accumulator for operational metrics gathered during a script run.

    Metrics may be seeded as keyword arguments at construction time and
    added or updated later via item assignment.  Once the four
    ``required_keys`` are present, ``json_dump`` serialises everything.
    """

    required_keys = ("job_name", "duration", "iso_start_datetime", "end_status")

    def __init__(self, **kwargs):
        # Seed the metric store with any keyword arguments supplied up front.
        self._internal_dict = kwargs if kwargs else {}

    def _lookup(self, key):
        # Shared lookup for attribute- and item-style access.  Note that a
        # missing key raises AttributeError in BOTH cases (long-standing
        # public behaviour of this class).
        try:
            return self._internal_dict[key]
        except KeyError:
            raise AttributeError(key)

    def __getattr__(self, key):
        return self._lookup(key)

    def __getitem__(self, key):
        return self._lookup(key)

    def __setitem__(self, key, value):
        self._internal_dict[key] = value

    def __str__(self):
        return repr(self)

    def __repr__(self):
        pairs = ("{}:{}".format(k, v) for k, v in self._internal_dict.items())
        return "OpsReporter({})".format(", ".join(pairs))

    def json_dump(self):
        """Serialise the metrics to JSON; raises if required keys are missing."""
        self._verify_required_keys()
        return json.dumps(self._internal_dict)

    def _verify_required_keys(self):
        missing = set(self.required_keys).difference(self._internal_dict)
        if missing:
            raise Exception("Missing required keys: {}".format(missing))
| 32.166667 | 114 | 0.647668 |
7955a9f6e0639d3707f2ae86b36c9e4b1fd8bada | 3,302 | py | Python | kydb/union.py | juggler99/kydb | bd0e1d11989e418c0ac5165b9c4752b42c074e7b | [
"MIT"
] | null | null | null | kydb/union.py | juggler99/kydb | bd0e1d11989e418c0ac5165b9c4752b42c074e7b | [
"MIT"
] | 14 | 2020-09-01T20:19:13.000Z | 2020-11-08T19:45:05.000Z | kydb/union.py | juggler99/kydb | bd0e1d11989e418c0ac5165b9c4752b42c074e7b | [
"MIT"
] | 3 | 2020-09-04T15:17:25.000Z | 2020-10-18T00:36:03.000Z |
from .base import BaseDB
from .interface import KYDBInterface
from typing import Tuple
from contextlib import ExitStack
def front_db_func(self, func_name, *args, **kwargs):
    """Dispatch *func_name* to the front (first) database only — used for
    mutating operations so all writes land on the front DB."""
    method = getattr(self.dbs[0], func_name)
    return method(*args, **kwargs)
def first_success_db_func(self, func_name, *args, **kwargs):
    """Try *func_name* on each member DB in order and return the first
    successful result.  Only KeyError counts as a miss; if every DB misses,
    the FIRST KeyError seen is re-raised."""
    saved_miss = None
    for db in self.dbs:
        method = getattr(db, func_name)
        try:
            return method(*args, **kwargs)
        except KeyError as err:
            if saved_miss is None:
                saved_miss = err
    raise saved_miss
def any_db_func(self, func_name, *args, **kwargs):
    """Return True if calling *func_name* yields a truthy result on at least
    one member DB (short-circuits on the first hit)."""
    for db in self.dbs:
        if getattr(db, func_name)(*args, **kwargs):
            return True
    return False
def all_db_func(self, func_name, *args, **kwargs):
    """Broadcast *func_name* to every member DB; return values are discarded
    (implicitly returns None)."""
    for db in self.dbs:
        method = getattr(db, func_name)
        method(*args, **kwargs)
def create_func(func_prototype, func_name):
    """Bind *func_name* into *func_prototype* and return a method-shaped
    callable.

    A plain closure is used instead of functools.partial because partial
    objects lose ``self`` binding when installed on a class built via
    ``type()``.
    """
    def bound(self, *args, **kwargs):
        return func_prototype(self, func_name, *args, **kwargs)
    return bound
# KYDBInterface method name -> dispatch strategy across the member DBs:
#   front_db_func          - writes/mutations go to the first (front) DB only
#   first_success_db_func  - reads fall through until one DB succeeds
#   any_db_func            - predicate is true if any member DB says so
#   all_db_func            - broadcast to every member DB
UNION_DB_BASE_FUNCS = [
    ('__getitem__', first_success_db_func),
    ('__setitem__', front_db_func),
    ('delete', front_db_func),
    ('rmdir', front_db_func),
    ('rm_tree', front_db_func),
    ('new', front_db_func),
    ('exists', any_db_func),
    ('refresh', all_db_func),
    ('read', first_success_db_func),
    ('mkdir', front_db_func),
    ('is_dir', any_db_func),
    ('upload_objdb_config', front_db_func)
]

# Dynamically build the base class: each (name, strategy) pair above becomes
# a method that forwards through create_func.
UnionDBBase = type(
    'UnionDBBase',
    (KYDBInterface,),
    {k: create_func(v, k) for k, v in UNION_DB_BASE_FUNCS}
)
class UnionDB(UnionDBBase):
    """UnionDB
    The URL used on *connect* can be a semi-colon separated string.
    This would create a Union Database.
    Connecting::
        db = kydb.connect('memory://unittest;s3://my-unittest-fixture')
    OR::
        db = kydb.connect('redis://hotfixes.epythoncloud.io;'
                          'dynamodb://my-prod-src-db')
    Reading and writing::
        db1, db2 = db.dbs
        db1['/foo'] = 1
        db2['/bar'] = 2
        (db['/foo'], db['/bar']) # return (1, 2)
        # Although db2 has /foo, it is db1's /foo that the union returns
        db2['/foo'] = 3
        db['/foo'] # return 1
        # writing always happens on the front db
        db['/foo'] = 4
        db1['/foo'] # returns 4
        db2['/foo'] # returns 3
    """

    def __init__(self, dbs: Tuple[BaseDB]):
        self.dbs = dbs

    def cache_context(self) -> 'KYDBInterface':
        """Return a context manager enabling caching on every member db.

        Bug fix: the previous implementation returned the ExitStack from
        inside its own ``with`` block, so the stack's ``__exit__`` ran on
        return and the caller received an already-closed stack (all member
        cache contexts were torn down immediately).  ``pop_all()``
        transfers ownership of the entered contexts to a fresh stack that
        stays open until the caller exits it.
        """
        with ExitStack() as stack:
            # If a later enter_context() raises, the with-block unwinds
            # the contexts that were already entered.
            for db in self.dbs:
                stack.enter_context(db.cache_context())
            return stack.pop_all()

    def list_dir(self, folder: str, include_dir=True, page_size=200):
        """Yield the union of keys listed by all member dbs.

        A member db raising KeyError (folder missing there) is skipped.
        Duplicates across dbs are yielded once; ordering is unspecified
        because the keys are accumulated in a set.
        """
        res = set()
        for db in self.dbs:
            try:
                res.update(db.list_dir(folder, include_dir, page_size))
            except KeyError:
                pass
        yield from res

    def ls(self, folder: str, include_dir=True):
        """Return the results of :meth:`list_dir` as a list."""
        return list(self.list_dir(folder, include_dir))

    def __repr__(self):
        """
        The representation of the db.
        i.e. <UnionDB redis://my-redis-host/source;
            kydb.S3 s3://my-s3-prod-source>
        """
        return f'<{type(self).__name__} ' + \
            ';'.join(db.url for db in self.dbs) + '>'
| 24.827068 | 74 | 0.604785 |
7955aa1a6fec3e3ec8a2417b226c2dfc5253d75c | 20,205 | py | Python | python/dgl/transform/module.py | lfchener/dgl | 77f4287a4118db64c46f4f413a426e1419a09d53 | [
"Apache-2.0"
] | 1 | 2021-11-07T18:53:42.000Z | 2021-11-07T18:53:42.000Z | python/dgl/transform/module.py | lfchener/dgl | 77f4287a4118db64c46f4f413a426e1419a09d53 | [
"Apache-2.0"
] | null | null | null | python/dgl/transform/module.py | lfchener/dgl | 77f4287a4118db64c46f4f413a426e1419a09d53 | [
"Apache-2.0"
] | null | null | null | ##
# Copyright 2019-2021 Contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Modules for transform"""
from .. import convert
from .. import backend as F
from . import functional
# Public API of this module; kept in sync with the transform classes below.
__all__ = [
    'BaseTransform',
    'AddSelfLoop',
    'RemoveSelfLoop',
    'AddReverse',
    'ToSimple',
    'LineGraph',
    'KHopGraph',
    'AddMetaPaths',
    'Compose'
]
def update_graph_structure(g, data_dict, copy_edata=True):
    r"""
    Description
    -----------
    Build a new graph with an updated structure, carrying over node
    features (and optionally edge features) from ``g``.
    Parameters
    ----------
    g : DGLGraph
        The graph whose features, node counts, idtype and device are reused.
    data_dict : graph data
        The dictionary data for constructing the new heterogeneous graph.
    copy_edata : bool
        If True, edge features are copied to the updated graph as well.
    Returns
    -------
    DGLGraph
        The updated graph.
    """
    num_nodes_dict = {ntype: g.num_nodes(ntype) for ntype in g.ntypes}
    new_g = convert.heterograph(data_dict,
                                num_nodes_dict=num_nodes_dict,
                                idtype=g.idtype, device=g.device)
    # Node identities are preserved, so node features always carry over.
    for ntype in g.ntypes:
        for name, feat in g.nodes[ntype].data.items():
            new_g.nodes[ntype].data[name] = feat
    # Edge features carry over only on request: callers that change edge
    # identities/ordering must not inherit misaligned edge features.
    if copy_edata:
        for rel in g.canonical_etypes:
            for name, feat in g.edges[rel].data.items():
                new_g.edges[rel].data[name] = feat
    return new_g
class BaseTransform:
    r"""
    Description
    -----------
    Abstract base class for graph transforms; subclasses must implement
    :meth:`__call__`.
    """
    def __call__(self, g):
        raise NotImplementedError

    def __repr__(self):
        return '{}()'.format(self.__class__.__name__)
class AddSelfLoop(BaseTransform):
    r"""
    Description
    -----------
    Add self-loops for each node in the graph and return a new graph.

    For heterogeneous graphs, self-loops are added only for edge types with same
    source and destination node types.

    Parameters
    ----------
    allow_duplicate : bool, optional
        If False, it will first remove self-loops to prevent duplicate self-loops.
    new_etypes : bool, optional
        If True, it will add an edge type 'self' per node type, which holds self-loops.

    Example
    -------
    >>> import dgl
    >>> from dgl import AddSelfLoop

    Case1: Add self-loops for a homogeneous graph

    >>> transform = AddSelfLoop()
    >>> g = dgl.graph(([1, 1], [1, 2]))
    >>> new_g = transform(g)
    >>> print(new_g.edges())
    (tensor([1, 0, 1, 2]), tensor([2, 0, 1, 2]))

    Case2: Add self-loops for a heterogeneous graph

    >>> g = dgl.heterograph({
    ...     ('user', 'plays', 'game'): ([0], [1]),
    ...     ('user', 'follows', 'user'): ([1], [2])
    ... })
    >>> new_g = transform(g)
    >>> print(new_g.edges(etype='plays'))
    (tensor([0]), tensor([1]))
    >>> print(new_g.edges(etype='follows'))
    (tensor([1, 0, 1, 2]), tensor([2, 0, 1, 2]))

    Case3: Add self-etypes for a heterogeneous graph

    >>> transform = AddSelfLoop(new_etypes=True)
    >>> new_g = transform(g)
    >>> print(new_g.edges(etype='follows'))
    (tensor([1, 0, 1, 2]), tensor([2, 0, 1, 2]))
    >>> print(new_g.edges(etype=('game', 'self', 'game')))
    (tensor([0, 1]), tensor([0, 1]))
    """
    def __init__(self, allow_duplicate=False, new_etypes=False):
        self.allow_duplicate = allow_duplicate
        self.new_etypes = new_etypes
    def transform_etype(self, c_etype, g):
        r"""
        Description
        -----------
        Transform the graph corresponding to a canonical edge type.

        Self-loops are only meaningful when source and destination node
        types coincide; other relations are returned unchanged.

        Parameters
        ----------
        c_etype : tuple of str
            A canonical edge type.
        g : DGLGraph
            The graph.

        Returns
        -------
        DGLGraph
            The transformed graph.
        """
        utype, _, vtype = c_etype
        if utype != vtype:
            return g
        if not self.allow_duplicate:
            # De-duplicate first so each node ends up with exactly one self-loop.
            g = functional.remove_self_loop(g, etype=c_etype)
        return functional.add_self_loop(g, etype=c_etype)
    def __call__(self, g):
        # First pass: add self-loops to the existing same-type relations.
        for c_etype in g.canonical_etypes:
            g = self.transform_etype(c_etype, g)
        if self.new_etypes:
            device = g.device
            idtype = g.idtype
            data_dict = dict()
            # Add self etypes
            # One (ntype, 'self', ntype) relation per node type, connecting
            # every node to itself.
            for ntype in g.ntypes:
                nids = F.arange(0, g.num_nodes(ntype), idtype, device)
                data_dict[(ntype, 'self', ntype)] = (nids, nids)
            # Copy edges
            # Keep all original relations alongside the new 'self' relations.
            for c_etype in g.canonical_etypes:
                data_dict[c_etype] = g.edges(etype=c_etype)
            g = update_graph_structure(g, data_dict)
        return g
class RemoveSelfLoop(BaseTransform):
    r"""
    Description
    -----------
    Strip all self-loop edges from the graph and return a new graph.

    On heterogeneous graphs only relations whose source and destination
    node types coincide are affected; other relations cannot contain
    self-loops and are left untouched.

    Example
    -------
    >>> import dgl
    >>> from dgl import RemoveSelfLoop

    Case1: Remove self-loops for a homogeneous graph

    >>> transform = RemoveSelfLoop()
    >>> g = dgl.graph(([1, 1], [1, 2]))
    >>> new_g = transform(g)
    >>> print(new_g.edges())
    (tensor([1]), tensor([2]))

    Case2: Remove self-loops for a heterogeneous graph

    >>> g = dgl.heterograph({
    ...     ('user', 'plays', 'game'): ([0, 1], [1, 1]),
    ...     ('user', 'follows', 'user'): ([1, 2], [2, 2])
    ... })
    >>> new_g = transform(g)
    >>> print(new_g.edges(etype='plays'))
    (tensor([0, 1]), tensor([1, 1]))
    >>> print(new_g.edges(etype='follows'))
    (tensor([1]), tensor([2]))
    """
    def transform_etype(self, c_etype, g):
        r"""
        Description
        -----------
        Drop self-loops for one canonical edge type.

        Parameters
        ----------
        c_etype : tuple of str
            A canonical edge type.
        g : DGLGraph
            The graph.

        Returns
        -------
        DGLGraph
            The (possibly unchanged) graph.
        """
        src_type, _, dst_type = c_etype
        if src_type != dst_type:
            return g
        return functional.remove_self_loop(g, etype=c_etype)

    def __call__(self, g):
        for rel in g.canonical_etypes:
            g = self.transform_etype(rel, g)
        return g
class AddReverse(BaseTransform):
    r"""
    Description
    -----------
    Add a reverse edge :math:`(i,j)` for each edge :math:`(j,i)` in the input graph and
    return a new graph.

    For a heterogeneous graph, it adds a "reverse" edge type for each edge type
    to hold the reverse edges. For example, for a canonical edge type ('A', 'r', 'B'),
    it adds a canonical edge type ('B', 'rev_r', 'A').

    Parameters
    ----------
    copy_edata : bool, optional
        If True, the features of the reverse edges will be identical to the original ones.
    sym_new_etype : bool, optional
        If False, it will not add a reverse edge type if the source and destination node type
        in a canonical edge type are identical. Instead, it will directly add edges to the
        original edge type.

    Example
    -------
    The following example uses PyTorch backend.

    >>> import dgl
    >>> import torch
    >>> from dgl import AddReverse

    Case1: Add reverse edges for a homogeneous graph

    >>> transform = AddReverse()
    >>> g = dgl.graph(([0], [1]))
    >>> g.edata['w'] = torch.ones(1, 2)
    >>> new_g = transform(g)
    >>> print(new_g.edges())
    (tensor([0, 1]), tensor([1, 0]))
    >>> print(new_g.edata['w'])
    tensor([[1., 1.],
            [0., 0.]])

    Case2: Add reverse edges for a homogeneous graph and copy edata

    >>> transform = AddReverse(copy_edata=True)
    >>> new_g = transform(g)
    >>> print(new_g.edata['w'])
    tensor([[1., 1.],
            [1., 1.]])

    Case3: Add reverse edges for a heterogeneous graph

    >>> g = dgl.heterograph({
    ...     ('user', 'plays', 'game'): ([0, 1], [1, 1]),
    ...     ('user', 'follows', 'user'): ([1, 2], [2, 2])
    ... })
    >>> new_g = transform(g)
    >>> print(new_g.canonical_etypes)
    [('game', 'rev_plays', 'user'), ('user', 'follows', 'user'), ('user', 'plays', 'game')]
    >>> print(new_g.edges(etype='rev_plays'))
    (tensor([1, 1]), tensor([0, 1]))
    >>> print(new_g.edges(etype='follows'))
    (tensor([1, 2, 2, 2]), tensor([2, 2, 1, 2]))
    """
    def __init__(self, copy_edata=False, sym_new_etype=False):
        self.copy_edata = copy_edata
        self.sym_new_etype = sym_new_etype
    def transform_symmetric_etype(self, c_etype, g, data_dict):
        r"""
        Description
        -----------
        Transform the graph corresponding to a symmetric canonical edge type
        (source node type == destination node type).

        Parameters
        ----------
        c_etype : tuple of str
            A canonical edge type.
        g : DGLGraph
            The graph.
        data_dict : dict
            The edge data to update.
        """
        if self.sym_new_etype:
            # Treat it like an asymmetric relation: a separate 'rev_*' etype.
            self.transform_asymmetric_etype(c_etype, g, data_dict)
        else:
            # Append the reversed pairs to the SAME etype. The concat order
            # (originals first, then reversed) must match the feature
            # expansion order in __call__.
            src, dst = g.edges(etype=c_etype)
            src, dst = F.cat([src, dst], dim=0), F.cat([dst, src], dim=0)
            data_dict[c_etype] = (src, dst)
    def transform_asymmetric_etype(self, c_etype, g, data_dict):
        r"""
        Description
        -----------
        Transform the graph corresponding to an asymmetric canonical edge type:
        keep the original relation and add a '(vtype, rev_<etype>, utype)'
        relation holding the reversed edges.

        Parameters
        ----------
        c_etype : tuple of str
            A canonical edge type.
        g : DGLGraph
            The graph.
        data_dict : dict
            The edge data to update.
        """
        utype, etype, vtype = c_etype
        src, dst = g.edges(etype=c_etype)
        data_dict.update({
            c_etype: (src, dst),
            (vtype, 'rev_{}'.format(etype), utype): (dst, src)
        })
    def transform_etype(self, c_etype, g, data_dict):
        r"""
        Description
        -----------
        Transform the graph corresponding to a canonical edge type, picking
        the symmetric or asymmetric strategy.

        Parameters
        ----------
        c_etype : tuple of str
            A canonical edge type.
        g : DGLGraph
            The graph.
        data_dict : dict
            The edge data to update.
        """
        utype, _, vtype = c_etype
        if utype == vtype:
            self.transform_symmetric_etype(c_etype, g, data_dict)
        else:
            self.transform_asymmetric_etype(c_etype, g, data_dict)
    def __call__(self, g):
        data_dict = dict()
        for c_etype in g.canonical_etypes:
            self.transform_etype(c_etype, g, data_dict)
        # Edge features are handled manually below, so don't copy them here.
        new_g = update_graph_structure(g, data_dict, copy_edata=False)
        # Copy and expand edata
        for c_etype in g.canonical_etypes:
            utype, etype, vtype = c_etype
            if utype != vtype or self.sym_new_etype:
                # Separate 'rev_*' relation: original features stay aligned;
                # the reverse relation gets a copy only when requested.
                rev_c_etype = (vtype, 'rev_{}'.format(etype), utype)
                for key, feat in g.edges[c_etype].data.items():
                    new_g.edges[c_etype].data[key] = feat
                    if self.copy_edata:
                        new_g.edges[rev_c_etype].data[key] = feat
            else:
                # Reversed edges were appended to the same relation, so each
                # feature tensor must be doubled in the same order: original
                # features first, then either a copy or zeros.
                for key, feat in g.edges[c_etype].data.items():
                    new_feat = feat if self.copy_edata else F.zeros(
                        F.shape(feat), F.dtype(feat), F.context(feat))
                    new_g.edges[c_etype].data[key] = F.cat([feat, new_feat], dim=0)
        return new_g
class ToSimple(BaseTransform):
    r"""
    Description
    -----------
    Coalesce parallel edges so the returned graph is simple.

    Parameters
    ----------
    return_counts : str, optional
        Name of the edge feature that records how many parallel edges each
        kept edge stands for in the original graph.
    aggregator : str, optional
        How features of duplicate edges are combined:

        * ``'arbitrary'``: keep the feature of one arbitrary duplicate
        * ``'sum'``: sum the features over the duplicates
        * ``'mean'``: average the features over the duplicates

    Example
    -------
    The following example uses PyTorch backend.

    >>> import dgl
    >>> import torch
    >>> from dgl import ToSimple

    Case1: Convert a homogeneous graph to a simple graph

    >>> transform = ToSimple()
    >>> g = dgl.graph(([0, 1, 1], [1, 2, 2]))
    >>> g.edata['w'] = torch.tensor([[0.1], [0.2], [0.3]])
    >>> sg = transform(g)
    >>> print(sg.edges())
    (tensor([0, 1]), tensor([1, 2]))
    >>> print(sg.edata['count'])
    tensor([1, 2])
    >>> print(sg.edata['w'])
    tensor([[0.1000], [0.2000]])

    Case2: Convert a heterogeneous graph to a simple graph

    >>> g = dgl.heterograph({
    ...     ('user', 'follows', 'user'): ([0, 1, 1], [1, 2, 2]),
    ...     ('user', 'plays', 'game'): ([0, 1, 0], [1, 1, 1])
    ... })
    >>> sg = transform(g)
    >>> print(sg.edges(etype='follows'))
    (tensor([0, 1]), tensor([1, 2]))
    >>> print(sg.edges(etype='plays'))
    (tensor([0, 1]), tensor([1, 1]))
    """
    def __init__(self, return_counts='count', aggregator='arbitrary'):
        self.return_counts = return_counts
        self.aggregator = aggregator

    def __call__(self, g):
        return functional.to_simple(
            g,
            return_counts=self.return_counts,
            copy_edata=True,
            aggregator=self.aggregator)
class LineGraph(BaseTransform):
    r"""
    Description
    -----------
    Return the line graph of the input graph.

    In the line graph :math:`L(G)` of a graph :math:`G`, every node stands
    for an edge of :math:`G`.  For each pair of edges :math:`(u, v)` and
    :math:`(v, w)` in :math:`G`, :math:`L(G)` contains an edge from the node
    representing :math:`(u, v)` to the node representing :math:`(v, w)`.

    This module only works for homogeneous graphs.

    Parameters
    ----------
    backtracking : bool, optional
        If False, the edge from the node for :math:`(u, v)` to the node for
        :math:`(v, u)` is excluded.

    Example
    -------
    The following example uses PyTorch backend.

    >>> import dgl
    >>> import torch
    >>> from dgl import LineGraph

    Case1: Backtracking is True

    >>> transform = LineGraph()
    >>> g = dgl.graph(([0, 1, 1], [1, 0, 2]))
    >>> g.ndata['h'] = torch.tensor([[0.], [1.], [2.]])
    >>> g.edata['w'] = torch.tensor([[0.], [0.1], [0.2]])
    >>> new_g = transform(g)
    >>> print(new_g)
    Graph(num_nodes=3, num_edges=3,
          ndata_schemes={'w': Scheme(shape=(1,), dtype=torch.float32)}
          edata_schemes={})
    >>> print(new_g.edges())
    (tensor([0, 0, 1]), tensor([1, 2, 0]))

    Case2: Backtracking is False

    >>> transform = LineGraph(backtracking=False)
    >>> new_g = transform(g)
    >>> print(new_g.edges())
    (tensor([0]), tensor([2]))
    """
    def __init__(self, backtracking=True):
        self.backtracking = backtracking

    def __call__(self, g):
        return functional.line_graph(
            g, backtracking=self.backtracking, shared=True)
class KHopGraph(BaseTransform):
    r"""
    Description
    -----------
    Return a graph whose edges connect each node to its :math:`k`-hop
    neighbors in the original graph.

    This module only works for homogeneous graphs.

    Parameters
    ----------
    k : int
        The number of hops.

    Example
    -------
    >>> import dgl
    >>> from dgl import KHopGraph

    >>> transform = KHopGraph(2)
    >>> g = dgl.graph(([0, 1], [1, 2]))
    >>> new_g = transform(g)
    >>> print(new_g.edges())
    (tensor([0]), tensor([2]))
    """
    def __init__(self, k):
        self.k = k

    def __call__(self, g):
        num_hops = self.k
        return functional.khop_graph(g, num_hops)
class AddMetaPaths(BaseTransform):
    r"""
    Description
    -----------
    Add new relations to a graph based on the given metapaths, as described
    in `Heterogeneous Graph Attention Network
    <https://arxiv.org/abs/1903.07293>`__.  Formally, a metapath is a path

    .. math::

        \mathcal{V}_1 \xrightarrow{R_1} \mathcal{V}_2 \xrightarrow{R_2} \ldots
        \xrightarrow{R_{\ell-1}} \mathcal{V}_{\ell}

    where :math:`\mathcal{V}_i` is a node type and :math:`\xrightarrow{R_j}`
    a relation between adjacent node types.  The adjacency matrix of the new
    relation is the sequential product of the adjacency matrices along the
    metapath.

    Parameters
    ----------
    metapaths : dict[str, list]
        Maps each new relation name to its metapath, e.g.
        :attr:`{'co-author': [('person', 'author', 'paper'), ('paper', 'authored by', 'person')]}`
    keep_orig_edges : bool, optional
        If True, the original relations of the graph are kept; otherwise
        they are dropped.

    Example
    -------

    >>> import dgl
    >>> from dgl import AddMetaPaths

    >>> transform = AddMetaPaths({
    ...     'accepted': [('person', 'author', 'paper'), ('paper', 'accepted', 'venue')],
    ...     'rejected': [('person', 'author', 'paper'), ('paper', 'rejected', 'venue')]
    ... })
    >>> g = dgl.heterograph({
    ...     ('person', 'author', 'paper'): ([0, 0, 1], [1, 2, 2]),
    ...     ('paper', 'accepted', 'venue'): ([1], [0]),
    ...     ('paper', 'rejected', 'venue'): ([2], [1])
    ... })
    >>> new_g = transform(g)
    >>> print(new_g.edges(etype=('person', 'accepted', 'venue')))
    (tensor([0]), tensor([0]))
    >>> print(new_g.edges(etype=('person', 'rejected', 'venue')))
    (tensor([0, 1]), tensor([1, 1]))
    """
    def __init__(self, metapaths, keep_orig_edges=True):
        self.metapaths = metapaths
        self.keep_orig_edges = keep_orig_edges

    def __call__(self, g):
        data_dict = {}
        for rel_name, metapath in self.metapaths.items():
            reachable = functional.metapath_reachable_graph(g, metapath)
            src_type = metapath[0][0]
            dst_type = metapath[-1][-1]
            data_dict[(src_type, rel_name, dst_type)] = reachable.edges()
        if self.keep_orig_edges:
            for rel in g.canonical_etypes:
                data_dict[rel] = g.edges(etype=rel)
            return update_graph_structure(g, data_dict, copy_edata=True)
        return update_graph_structure(g, data_dict, copy_edata=False)
class Compose(BaseTransform):
    r"""
    Description
    -----------
    Chain multiple transforms into a single callable, applied in order.

    Parameters
    ----------
    transforms : list of Callable
        The transform objects to apply in sequence.  Each should inherit
        :class:`~dgl.BaseTransform` and implement
        :func:`~dgl.BaseTransform.__call__`.

    Example
    -------

    >>> import dgl
    >>> from dgl import transform as T

    >>> g = dgl.graph(([0, 0], [1, 1]))
    >>> transform = T.Compose([T.ToSimple(), T.AddReverse()])
    >>> new_g = transform(g)
    >>> print(new_g.edges())
    (tensor([0, 1]), tensor([1, 0]))
    """
    def __init__(self, transforms):
        self.transforms = transforms

    def __call__(self, g):
        for step in self.transforms:
            g = step(g)
        return g

    def __repr__(self):
        entries = ['  ' + str(step) for step in self.transforms]
        return self.__class__.__name__ + '([\n' + ',\n'.join(entries) + '\n])'
| 29.933333 | 98 | 0.567434 |
7955aa361efefff1c23a81745469e3c48065ad8f | 634 | py | Python | examples/batch_restoreAr.py | jinji-hanbin/python-sdk | db11d919e0db433d6f12507a58a149191d7ccd35 | [
"MIT"
] | 494 | 2015-01-08T09:56:30.000Z | 2022-03-28T03:15:22.000Z | examples/batch_restoreAr.py | jinji-hanbin/python-sdk | db11d919e0db433d6f12507a58a149191d7ccd35 | [
"MIT"
] | 189 | 2015-01-26T06:46:19.000Z | 2022-03-28T04:06:16.000Z | examples/batch_restoreAr.py | jinji-hanbin/python-sdk | db11d919e0db433d6f12507a58a149191d7ccd35 | [
"MIT"
] | 306 | 2015-01-24T05:56:05.000Z | 2021-12-21T12:15:57.000Z | # -*- coding: utf-8 -*-
# flake8: noqa
"""
Batch-restore (unfreeze) archived files in Qiniu object storage.
https://developer.qiniu.com/kodo/api/1250/batch
"""
from qiniu import build_batch_restoreAr, Auth, BucketManager
# Access key and secret key of the Qiniu account
access_key = '<access_key>'
secret_key = '<secret_key>'
q = Auth(access_key, secret_key)
bucket = BucketManager(q)
# Target storage bucket (placeholder name; replace before running)
bucket_name = "空间名"
# Dict keys are the files to restore; values are the restore validity
# period in days (valid range 1-7)
ops = build_batch_restoreAr(bucket_name,
                            {"test00.png": 1,
                             "test01.jpeg": 2,
                             "test02.mp4": 3
                             }
                            )
ret, info = bucket.batch(ops)
print(info)
| 19.8125 | 60 | 0.547319 |
7955aa9dac23fe21c34488ddd6f9a591636d8fb9 | 234 | py | Python | Algorithms/365.water-and-jug-problem/water-and-jug-problem.py | OctopusLian/leetcode-solutions | 40920d11c584504e805d103cdc6ef3f3774172b3 | [
"MIT"
] | 1 | 2020-12-01T18:35:24.000Z | 2020-12-01T18:35:24.000Z | Algorithms/365.water-and-jug-problem/water-and-jug-problem.py | OctopusLian/leetcode-solutions | 40920d11c584504e805d103cdc6ef3f3774172b3 | [
"MIT"
] | 18 | 2020-11-10T05:48:29.000Z | 2020-11-26T08:39:20.000Z | Algorithms/365.water-and-jug-problem/water-and-jug-problem.py | OctopusLian/leetcode-solutions | 40920d11c584504e805d103cdc6ef3f3774172b3 | [
"MIT"
] | 5 | 2020-11-09T07:43:00.000Z | 2021-12-02T14:59:37.000Z | class Solution:
def canMeasureWater(self, x: int, y: int, z: int) -> bool:
if x + y < z:
return False
if x == 0 or y == 0:
return z == 0 or x + y == z
return z % math.gcd(x, y) == 0
| 29.25 | 62 | 0.448718 |
7955aaf1802af296d7436c7f6b2a7074d06835f1 | 9,199 | py | Python | filters/tomopy_misc_phantom.py | voxie-viewer/voxie | d2b5e6760519782e9ef2e51f5322a3baa0cb1198 | [
"MIT"
] | 4 | 2016-06-03T18:41:43.000Z | 2020-04-17T20:28:58.000Z | filters/tomopy_misc_phantom.py | voxie-viewer/voxie | d2b5e6760519782e9ef2e51f5322a3baa0cb1198 | [
"MIT"
] | null | null | null | filters/tomopy_misc_phantom.py | voxie-viewer/voxie | d2b5e6760519782e9ef2e51f5322a3baa0cb1198 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# This file has been modified to remove some dependencies and functionality
# #########################################################################
# Copyright (c) 2015-2019, UChicago Argonne, LLC. All rights reserved. #
# #
# Copyright 2015-2019. UChicago Argonne, LLC. This software was produced #
# under U.S. Government contract DE-AC02-06CH11357 for Argonne National #
# Laboratory (ANL), which is operated by UChicago Argonne, LLC for the #
# U.S. Department of Energy. The U.S. Government has rights to use, #
# reproduce, and distribute this software. NEITHER THE GOVERNMENT NOR #
# UChicago Argonne, LLC MAKES ANY WARRANTY, EXPRESS OR IMPLIED, OR #
# ASSUMES ANY LIABILITY FOR THE USE OF THIS SOFTWARE. If software is #
# modified to produce derivative works, such modified software should #
# be clearly marked, so as not to confuse it with the version available #
# from ANL. #
# #
# Additionally, redistribution and use in source and binary forms, with #
# or without modification, are permitted provided that the following #
# conditions are met: #
# #
# * Redistributions of source code must retain the above copyright #
# notice, this list of conditions and the following disclaimer. #
# #
# * Redistributions in binary form must reproduce the above copyright #
# notice, this list of conditions and the following disclaimer in #
# the documentation and/or other materials provided with the #
# distribution. #
# #
# * Neither the name of UChicago Argonne, LLC, Argonne National #
# Laboratory, ANL, the U.S. Government, nor the names of its #
# contributors may be used to endorse or promote products derived #
# from this software without specific prior written permission. #
# #
# THIS SOFTWARE IS PROVIDED BY UChicago Argonne, LLC AND CONTRIBUTORS #
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT #
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS #
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL UChicago #
# Argonne, LLC OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, #
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, #
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; #
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER #
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT #
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN #
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE #
# POSSIBILITY OF SUCH DAMAGE. #
# #########################################################################
"""
Module for generating synthetic phantoms.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import numpy as np
__author__ = "Doga Gursoy"
__copyright__ = "Copyright (c) 2015, UChicago Argonne, LLC."
__docformat__ = 'restructuredtext en'
__all__ = ['shepp3d',
'phantom']
def _totuple(size, dim):
"""
Converts size to tuple.
"""
if not isinstance(size, tuple):
if dim == 2:
size = (size, size)
elif dim == 3:
size = (size, size, size)
return size
def shepp3d(size=128, dtype='float32'):
    """
    Load 3D Shepp-Logan image array.

    Parameters
    ----------
    size : int or tuple, optional
        Size of the 3D data.
    dtype : str, optional
        The desired data-type for the array.

    Returns
    -------
    ndarray
        Output 3D test image, clipped to non-negative values.
    """
    shape = _totuple(size, 3)
    params = _array_to_params(_get_shepp_array())
    cube = phantom(shape, params, dtype)
    # Overlapping negative-amplitude ellipsoids can undershoot zero.
    return cube.clip(0, np.inf)
def phantom(size, params, dtype='float32'):
    """
    Generate a cube of given size from a list of ellipsoid parameters.

    Parameters
    ----------
    size: tuple of int
        Size of the output cube.
    params: list of dict
        Parameter dictionaries, one per ellipsoid to add to the cube.
    dtype: str, optional
        Data type of the output ndarray.

    Returns
    -------
    ndarray
        3D volume with all specified ellipsoids accumulated into it.
    """
    out = np.zeros(size, dtype=dtype)
    # Compute the coordinate grids once and reuse them for every ellipsoid.
    grid = _define_coords(size)
    for ellipsoid in params:
        _ellipsoid(ellipsoid, out=out, coords=grid)
    return out
def _ellipsoid(params, shape=None, out=None, coords=None):
    """
    Generate a cube containing an ellipsoid defined by its parameters.
    If out is given, fills the given cube instead of creating a new one.

    Parameters: `params` is a dict with keys 'A' (amplitude), 'a'/'b'/'c'
    (semi-axes), 'x0'/'y0'/'z0' (center) and 'phi'/'theta'/'psi' (Euler
    angles); see _array_to_params. Exactly one of `shape`/`out` may be
    omitted. `coords` lets callers reuse precomputed coordinate grids.
    """
    # handle inputs
    if shape is None and out is None:
        raise ValueError("You need to set shape or out")
    if out is None:
        out = np.zeros(shape)
    if shape is None:
        shape = out.shape
    if len(shape) == 1:
        # NOTE(review): for a 1-tuple `shape` this builds a nested tuple
        # ((n,), (n,), (n,)) rather than (n, n, n) — looks suspicious;
        # confirm whether any 1-D caller exists before changing it.
        shape = shape, shape, shape
    elif len(shape) == 2:
        # Promote 2-D to a one-slice 3-D volume.
        shape = shape[0], shape[1], 1
    elif len(shape) > 3:
        raise ValueError("input shape must be lower or equal to 3")
    if coords is None:
        coords = _define_coords(shape)
    # rotate coords
    # Map grid points into the ellipsoid's own (rotated/shifted/scaled) frame.
    coords = _transform(coords, params)
    # recast as ndarray
    coords = np.asarray(coords)
    # Inside test: (x/a)^2 + (y/b)^2 + (z/c)^2 <= 1 in the transformed frame.
    np.square(coords, out=coords)
    ellip_mask = coords.sum(axis=0) <= 1.
    # In-place reshape of the boolean mask to the target cube's shape.
    ellip_mask.resize(shape)
    # fill ellipsoid with value
    # Accumulate (+=) so overlapping ellipsoids add their amplitudes.
    out[ ellip_mask ] += params['A']
    return out
def _rotation_matrix(p):
"""
Defines an Euler rotation matrix from angles phi, theta and psi.
"""
cphi = np.cos(np.radians(p['phi']))
sphi = np.sin(np.radians(p['phi']))
ctheta = np.cos(np.radians(p['theta']))
stheta = np.sin(np.radians(p['theta']))
cpsi = np.cos(np.radians(p['psi']))
spsi = np.sin(np.radians(p['psi']))
alpha = [[cpsi * cphi - ctheta * sphi * spsi,
cpsi * sphi + ctheta * cphi * spsi,
spsi * stheta],
[-spsi * cphi - ctheta * sphi * cpsi,
-spsi * sphi + ctheta * cphi * cpsi,
cpsi * stheta],
[stheta * sphi,
-stheta * cphi,
ctheta]]
return np.asarray(alpha)
def _define_coords(shape):
"""
Generate a tuple of coords in 3D with a given shape.
"""
mgrid = np.lib.index_tricks.nd_grid()
cshape = np.asarray(1j) * shape
x, y, z = mgrid[-1:1:cshape[0], -1:1:cshape[1], -1:1:cshape[2]]
return x, y, z
def _transform(coords, p):
    """
    Rotate, translate and rescale a 3-tuple of coordinate grids into the
    ellipsoid frame described by parameter dict `p`.
    """
    rotated = np.tensordot(_rotation_matrix(p), coords, axes=1)
    # Broadcast shape: axis 0 selects x/y/z, remaining axes are the grid.
    bshape = (3,) + (1,) * (rotated.ndim - 1)
    dt = rotated.dtype
    offset = np.array([p['x0'], p['y0'], p['z0']], dtype=dt).reshape(bshape)
    scale = np.array([p['a'], p['b'], p['c']], dtype=dt).reshape(bshape)
    # In-place ops avoid allocating two extra full-size arrays.
    rotated -= offset
    rotated /= scale
    return rotated
def _get_shepp_array():
"""
Returns the parameters for generating modified Shepp-Logan phantom.
"""
shepp_array = [
[1., .6900, .920, .810, 0., 0., 0., 90., 90., 90.],
[-.8, .6624, .874, .780, 0., -.0184, 0., 90., 90., 90.],
[-.2, .1100, .310, .220, .22, 0., 0., -108., 90., 100.],
[-.2, .1600, .410, .280, -.22, 0., 0., 108., 90., 100.],
[.1, .2100, .250, .410, 0., .35, -.15, 90., 90., 90.],
[.1, .0460, .046, .050, 0., .1, .25, 90., 90., 90.],
[.1, .0460, .046, .050, 0., -.1, .25, 90., 90., 90.],
[.1, .0460, .023, .050, -.08, -.605, 0., 90., 90., 90.],
[.1, .0230, .023, .020, 0., -.606, 0., 90., 90., 90.],
[.1, .0230, .046, .020, .06, -.605, 0., 90., 90., 90.]]
return shepp_array
def _array_to_params(array):
"""
Converts list to a dictionary.
"""
# mandatory parameters to define an ellipsoid
params_tuple = [
'A',
'a', 'b', 'c',
'x0', 'y0', 'z0',
'phi', 'theta', 'psi']
array = np.asarray(array)
out = []
for i in range(array.shape[0]):
tmp = dict()
for k, j in zip(params_tuple, list(range(array.shape[1]))):
tmp[k] = array[i, j]
out.append(tmp)
return out
| 36.796 | 75 | 0.544842 |
7955abf9168a5a82755f715ca1ac4f93ae816863 | 598 | py | Python | biometrics/utils.py | msk-access/biometrics | 9af070295c05d65d539ab3aeeb93287d10d097bc | [
"Apache-2.0"
] | 1 | 2021-01-14T16:48:37.000Z | 2021-01-14T16:48:37.000Z | biometrics/utils.py | msk-access/biometrics | 9af070295c05d65d539ab3aeeb93287d10d097bc | [
"Apache-2.0"
] | 29 | 2019-12-06T16:10:47.000Z | 2021-06-10T20:01:06.000Z | biometrics/utils.py | msk-access/biometrics | 9af070295c05d65d539ab3aeeb93287d10d097bc | [
"Apache-2.0"
] | null | null | null | import logging
def get_logger(debug=False):
    """Return the package-level 'biometrics' logger.

    Calls logging.basicConfig each time; that is a no-op after the first
    configured handler, matching the original behavior.
    """
    logging.basicConfig(format='%(levelname)s - %(asctime)-15s: %(message)s')
    logger = logging.getLogger("biometrics")
    logger.setLevel(logging.DEBUG if debug else logging.INFO)
    return logger
def standardize_sex_nomenclature(val):
    """Map a free-form sex label to 'F', 'M', or None.

    Recognizes the common spellings case-insensitively and ignores
    surrounding whitespace (e.g. 'female', 'F', 'MALE', ' m ').  This is a
    backward-compatible generalization of the original fixed lists
    ['female', 'f', 'Female', 'F'] / ['Male', 'M', 'male', 'm'].

    Parameters
    ----------
    val : str or None
        The raw value to standardize.

    Returns
    -------
    str or None
        'F', 'M', or None when the value is missing or unrecognized.
    """
    if val is None:
        return None
    normalized = str(val).strip().casefold()
    if normalized in ('female', 'f'):
        return 'F'
    if normalized in ('male', 'm'):
        return 'M'
    return None
| 18.6875 | 58 | 0.602007 |
7955ac576d4c10ad7ea6f8cfcedbc8a51ce73e68 | 2,793 | py | Python | Python web scraper.py | VarshaChanaliya/Python-web-crawler | 5f858acf53acd9ee4f76462e410b98c7838daedd | [
"Apache-2.0"
] | null | null | null | Python web scraper.py | VarshaChanaliya/Python-web-crawler | 5f858acf53acd9ee4f76462e410b98c7838daedd | [
"Apache-2.0"
] | null | null | null | Python web scraper.py | VarshaChanaliya/Python-web-crawler | 5f858acf53acd9ee4f76462e410b98c7838daedd | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# coding: utf-8
# # Importing Required libraries
# In[62]:
import requests
import bs4
import articleDateExtractor
from newspaper import Article
import pandas as pd
# # Creating generalized code for News analysis
# In[129]:
class news_analysis:
    """Scrape Google News search results for a keyword and extract article
    titles, texts, summaries and publication dates.

    The lowercase class name is kept for backward compatibility with
    existing callers.
    """

    def __init__(self, keyword):
        self.keyword = keyword
        self.url = "https://www.google.com/search?q={0}&source=lnms&tbm=nws".format(self.keyword)

    def run(self, pages_to_scrape=4):
        """Return the raw result links for the keyword from the first
        `pages_to_scrape` Google News result pages (default 4, matching
        the previously hard-coded value).
        """
        page_param = "&page="
        collected = []
        for page_number in range(1, pages_to_scrape + 1):
            scrape_url = self.url + page_param + str(page_number)
            response = requests.get(scrape_url)
            soup = bs4.BeautifulSoup(response.text, 'lxml')
            for anchor in soup.find_all('a'):
                # Robustness fix: anchors without an href no longer raise.
                href = anchor.get('href') or ''
                if href[0:4] == '/url':
                    # Strip the '/url?q=' prefix Google wraps links with.
                    collected.append(href[7:])
        return collected

    def parse_href(self, list_links):
        """Filter `list_links` down to .html article URLs, dropping any
        trailing '&...' query parameters.

        Bug fix: the original iterated the module-level global `links`
        instead of its `list_links` parameter, so the argument was
        silently ignored.
        """
        hrefs = []
        for link in list_links:
            if ".html" in link:
                hrefs.append(link.split('&')[0])
        return hrefs

    def process_articles(self, list_of_links):
        """Download and parse each article; return parallel lists of
        (titles, texts, summaries)."""
        titles = []
        texts = []
        summaries = []
        for link in list_of_links:
            article = Article(link, language='en')
            article.download()
            article.parse()
            # nlp() populates the summary field.
            article.nlp()
            titles.append(article.title)
            texts.append(article.text)
            summaries.append(article.summary)
        return titles, texts, summaries

    def get_date(self, list_of_links):
        """Return the extracted publication date (or None) for each link."""
        dates = []
        for link in list_of_links:
            dates.append(articleDateExtractor.extractArticlePublishedDate(link))
        return dates
# In[99]:
# Build the scraper for a single query and run the full pipeline.
news1 = news_analysis('Haryana Cabinet Approves Delhi-Gurugram-SNB RRTS Corridor')
links = news1.run()
valid_links = news1.parse_href(links)
# NOTE(review): this rebinds `Article`, shadowing the newspaper.Article class
# imported above; harmless here only because the class is not used afterwards.
Headline, Article, News_summary = news1.process_articles(valid_links)
dates = news1.get_date(valid_links)
# NOTE(review): 'News Aricle' looks like a typo for 'News Article'; kept
# as-is because downstream consumers may rely on the exact column name.
news_data = pd.DataFrame({'Headlines':Headline, 'News Items': News_summary, 'News Aricle': Article, 'date-time': dates})
# DataFrame.to_excel returns None; `output` mirrors the original notebook cell.
output = news_data.to_excel('News Data.xlsx')
| 31.033333 | 142 | 0.590047 |
7955ace2b87278ff222d39a9f1052af54d2479f0 | 8,543 | py | Python | insights/combiners/tests/test_netstat.py | mglantz/insights-core | 6f20bbbe03f53ee786f483b2a28d256ff1ad0fd4 | [
"Apache-2.0"
] | 121 | 2017-05-30T20:23:25.000Z | 2022-03-23T12:52:15.000Z | insights/combiners/tests/test_netstat.py | mglantz/insights-core | 6f20bbbe03f53ee786f483b2a28d256ff1ad0fd4 | [
"Apache-2.0"
] | 1,977 | 2017-05-26T14:36:03.000Z | 2022-03-31T10:38:53.000Z | insights/combiners/tests/test_netstat.py | mglantz/insights-core | 6f20bbbe03f53ee786f483b2a28d256ff1ad0fd4 | [
"Apache-2.0"
] | 244 | 2017-05-30T20:22:57.000Z | 2022-03-26T10:09:39.000Z | from ...parsers.ip import IpLinkInfo
from ...parsers.netstat import Netstat_I
from insights.util import keys_in
from ..netstat import NetworkStats
from ...tests import context_wrap
NETSTAT_I = """
Kernel Interface table
Iface MTU Met RX-OK RX-ERR RX-DRP RX-OVR TX-OK TX-ERR TX-DRP TX-OVR Flg
bond0 1500 0 845265 0 0 0 1753 0 0 0 BMmRU
bond1 1500 0 842447 0 0 0 4233 0 0 0 BMmRU
eth0 1500 0 422518 0 0 0 1703 0 0 0 BMsRU
eth1 1500 0 422747 0 0 0 50 0 0 0 BMsRU
eth2 1500 0 421192 0 0 0 3674 0 0 0 BMsRU
eth3 1500 0 421255 0 0 0 559 0 0 0 BMsRU
lo 65536 0 0 0 0 0 0 0 0 0 LRU
""".strip()
IP_S_LINK = """
1: enp0s3: <BROADCAST,MULTICAST,UP,LOWER_UP> mtu 1500 qdisc pfifo_fast state UP mode DEFAULT qlen 1000
link/ether 08:00:27:4a:c5:ef brd ff:ff:ff:ff:ff:ff
RX: bytes packets errors dropped overrun mcast
1113685 2244 0 0 0 0
TX: bytes packets errors dropped carrier collsns
550754 1407 0 0 0 0
2: lo: <LOOPBACK,UP,LOWER_UP> mtu 65536 qdisc noqueue state UNKNOWN mode DEFAULT
link/loopback 00:00:00:00:00:00 brd 00:00:00:00:00:00
RX: bytes packets errors dropped overrun mcast
884 98 0 0 0 0
TX: bytes packets errors dropped carrier collsns
884 100 0 0 0 0
3: enp0s8: <BROADCAST,MULTICAST,UP,LOWER_UP> mtu 1500 qdisc pfifo_fast state UP mode DEFAULT qlen 1000
link/ether 08:00:27:db:86:9e brd ff:ff:ff:ff:ff:ff
RX: bytes packets errors dropped overrun mcast
0 6 0 0 0 0
TX: bytes packets errors dropped carrier collsns
0 4 0 0 0 0
4: enp0s9: <BROADCAST,UP,MULTICAST> mtu 1500 qdisc pfifo_fast state UP mode DEFAULT qlen 1000
link/ether 08:00:27:a6:bd:65 brd ff:ff:ff:ff:ff:ff
RX: bytes packets errors dropped overrun mcast
0 8 0 0 0 0
TX: bytes packets errors dropped carrier collsns
0 12 0 0 0 0
""".strip()
IP_S_LINK_2 = """
1: lo: <LOOPBACK,UP,LOWER_UP> mtu 65536 qdisc noqueue state UNKNOWN mode DEFAULT group default qlen 1000
link/loopback 00:00:00:00:00:00 brd 00:00:00:00:00:00
RX: bytes packets errors dropped overrun mcast
1736 20 0 0 0 0
TX: bytes packets errors dropped carrier collsns
1736 20 0 0 0 0
2: enp0s31f6: <NO-CARRIER,BROADCAST,MULTICAST,UP> mtu 1500 qdisc fq_codel state DOWN mode DEFAULT group default qlen 1000
link/ether c8:5b:76:3f:14:d5 brd ff:ff:ff:ff:ff:ff
RX: bytes packets errors dropped overrun mcast
0 0 0 0 0 0
TX: bytes packets errors dropped carrier collsns
0 0 0 0 0 0
3: wlp4s0: <BROADCAST,MULTICAST,UP,LOWER_UP> mtu 1500 qdisc mq state UP mode DORMANT group default qlen 1000
link/ether e4:a4:71:ae:70:b3 brd ff:ff:ff:ff:ff:ff
RX: bytes packets errors dropped overrun mcast
96421231 90178 0 0 0 0
TX: bytes packets errors dropped carrier collsns
7341914 51363 0 0 0 0
4: virbr0: <NO-CARRIER,BROADCAST,MULTICAST,UP> mtu 1500 qdisc noqueue state DOWN mode DEFAULT group default qlen 1000
link/ether 52:54:00:97:c0:bf brd ff:ff:ff:ff:ff:ff
RX: bytes packets errors dropped overrun mcast
0 0 0 0 0 0
TX: bytes packets errors dropped carrier collsns
0 0 0 0 0 0
5: virbr0-nic: <BROADCAST,MULTICAST> mtu 1500 qdisc fq_codel master virbr0 state DOWN mode DEFAULT group default qlen 1000
link/ether 52:54:00:97:c0:bf brd ff:ff:ff:ff:ff:ff
RX: bytes packets errors dropped overrun mcast
0 0 0 0 0 0
TX: bytes packets errors dropped carrier collsns
0 0 0 0 0 0
6: tun0: <POINTOPOINT,MULTICAST,NOARP,UP,LOWER_UP> mtu 1360 qdisc fq_codel state UNKNOWN mode DEFAULT group default qlen 100
link/none
RX: bytes packets errors dropped overrun mcast
14991388 21827 0 0 0 0
TX: bytes packets errors dropped carrier collsns
1448430 22760 0 0 0 0
""".strip()
NETSTAT_I_2 = """
Kernel Interface table
Iface MTU RX-OK RX-ERR RX-DRP RX-OVR TX-OK TX-ERR TX-DRP TX-OVR Flg
enp0s31f 1500 0 0 0 0 0 0 0 0 BMU
lo 65536 20 0 0 0 20 0 0 0 LRU
tun0 1360 21898 0 0 0 22839 0 0 0 MOPRU
virbr0 1500 0 0 0 0 0 0 0 0 BMU
wlp4s0 1500 90309 0 0 0 51481 0 0 0 BMRU
""".strip()
# Empty ``netstat -i`` fixture (not referenced by the tests visible in this
# chunk — presumably kept for symmetry with IP_S_LINK_NO).
NETSTAT_I_NO = """
""".strip()
# Empty ``ip -s link`` fixture; drives the ``len(result) == 0`` branch in
# test_combined below.
IP_S_LINK_NO = """
""".strip()
def test_ip_data_Link():
    """IpLinkInfo parses ``ip -s link`` output into active interfaces with
    per-interface mac/flags/type/mtu/packet counters and a numeric index."""
    link_info = IpLinkInfo(context_wrap(IP_S_LINK))
    if_list = link_info.active
    assert len(if_list) == 4
    assert keys_in(["lo", "enp0s3", "enp0s8", "enp0s9"], if_list)
    assert sorted(link_info.active) == sorted(['lo', 'enp0s3', 'enp0s8', 'enp0s9'])
    # Loopback: counters come from the RX/TX packet columns of the fixture.
    lo = link_info["lo"]
    assert lo["mac"] == "00:00:00:00:00:00"
    assert lo["flags"] == ["LOOPBACK", "UP", "LOWER_UP"]
    assert lo["type"] == "loopback"
    assert lo["mtu"] == 65536
    assert lo["rx_packets"] == 98
    assert lo["tx_packets"] == 100
    assert lo["index"] == 2
    # Ethernet NIC from the same fixture.
    enp0s3 = link_info["enp0s3"]
    assert enp0s3["mac"] == "08:00:27:4a:c5:ef"
    assert enp0s3["flags"] == ["BROADCAST", "MULTICAST", "UP", "LOWER_UP"]
    assert enp0s3["type"] == "ether"
    assert enp0s3["mtu"] == 1500
    assert enp0s3["rx_packets"] == 2244
    assert enp0s3["tx_packets"] == 1407
    assert enp0s3["index"] == 1
def test_get_netstat_i():
    """NetworkStats built from ``netstat -i`` groups the stats per interface,
    with every column kept as a string."""
    netstat = Netstat_I(context_wrap(NETSTAT_I))
    nstat = NetworkStats(netstat, None)
    result = nstat.group_by_iface
    assert len(result) == 7
    assert result["bond0"] == {
        "MTU": "1500", "Met": "0", "RX-OK": "845265", "RX-ERR": "0",
        "RX-DRP": "0", "RX-OVR": "0", "TX-OK": "1753", "TX-ERR": "0",
        "TX-DRP": "0", "TX-OVR": "0", "Flg": "BMmRU"
    }
    assert result["eth0"] == {
        "MTU": "1500", "Met": "0", "RX-OK": "422518", "RX-ERR": "0",
        "RX-DRP": "0", "RX-OVR": "0", "TX-OK": "1703", "TX-ERR": "0",
        "TX-DRP": "0", "TX-OVR": "0", "Flg": "BMsRU"
    }
def test_combined():
    """NetworkStats accepts either parser as its source: ``netstat -i``
    yields string-valued columns, ``ip -s link`` yields int-valued ones;
    an empty source yields an empty grouping."""
    # netstat -i source: string-valued stats, 7 interfaces.
    context = context_wrap(NETSTAT_I)
    nstat = Netstat_I(context)
    networkstats = NetworkStats(nstat, None)
    assert networkstats.data[0]["Iface"] == "bond0"
    result = networkstats.group_by_iface
    assert len(result) == 7
    assert result["bond0"] == {
        "MTU": "1500", "Met": "0", "RX-OK": "845265", "RX-ERR": "0",
        "RX-DRP": "0", "RX-OVR": "0", "TX-OK": "1753", "TX-ERR": "0",
        "TX-DRP": "0", "TX-OVR": "0", "Flg": "BMmRU"
    }
    # ip -s link source: int-valued stats, only active interfaces.
    context = context_wrap(IP_S_LINK)
    linkinfo = IpLinkInfo(context)
    networkstats = NetworkStats(None, linkinfo)
    result = networkstats.group_by_iface
    # the order of this structure is a cpython implementation detail
    # assert networkstats.data[0]["Iface"] == "lo"
    assert len(result) == 4
    assert result["lo"] == {
        'RX-OK': 98, 'TX-OK': 100, 'MTU': 65536, 'RX-ERR': 0,
        'TX-DRP': 0, 'TX-ERR': 0, 'RX-DRP': 0, 'RX-OVR': 0, 'Flg': 'LRU'
    }
    assert result["enp0s8"] == {
        'RX-OK': 6, 'TX-DRP': 0, 'TX-OK': 4, 'MTU': 1500,
        'RX-ERR': 0, 'TX-ERR': 0, 'RX-DRP': 0, 'RX-OVR': 0, 'Flg': 'BMRU'
    }
    context = context_wrap(NETSTAT_I_2)
    nstat = Netstat_I(context)
    networkstats = NetworkStats(nstat, None)
    result = networkstats.group_by_iface
    assert len(result) == 5
    # Empty ip -s link output -> empty grouping.
    context = context_wrap(IP_S_LINK_NO)
    linkinfo = IpLinkInfo(context)
    networkstats = NetworkStats(None, linkinfo)
    result = networkstats.group_by_iface
    assert len(result) == 0
    context = context_wrap(IP_S_LINK_2)
    linkinfo = IpLinkInfo(context)
    networkstats = NetworkStats(None, linkinfo)
    result = networkstats.group_by_iface
    assert len(result) == 6
| 44.264249 | 124 | 0.563034 |
7955ad5fa5bdd4cf675b2424ebc582fc08b94b70 | 1,490 | py | Python | dashboard/dashboard/common/bot_configurations.py | Martijnve23/catapult | 5c63b19d221af6a12889e8727acc85d93892cab7 | [
"BSD-3-Clause"
] | 1,894 | 2015-04-17T18:29:53.000Z | 2022-03-28T22:41:06.000Z | dashboard/dashboard/common/bot_configurations.py | Martijnve23/catapult | 5c63b19d221af6a12889e8727acc85d93892cab7 | [
"BSD-3-Clause"
] | 4,640 | 2015-07-08T16:19:08.000Z | 2019-12-02T15:01:27.000Z | dashboard/dashboard/common/bot_configurations.py | Martijnve23/catapult | 5c63b19d221af6a12889e8727acc85d93892cab7 | [
"BSD-3-Clause"
] | 698 | 2015-06-02T19:18:35.000Z | 2022-03-29T16:57:15.000Z | # Copyright 2018 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
import string
from google.appengine.ext import ndb
from dashboard.common import namespaced_stored_object
BOT_CONFIGURATIONS_KEY = 'bot_configurations'
def Get(name):
  """Return the stored configuration dict for *name*.

  Follows one level of aliasing: when the entry carries an 'alias' key,
  the aliased (canonical) configuration is returned instead.

  Raises ValueError when no configuration with that name exists.
  """
  all_configs = namespaced_stored_object.Get(BOT_CONFIGURATIONS_KEY) or {}
  config = all_configs.get(name)
  if config is None:
    raise ValueError('Bot configuration not found: "%s"' % (name,))
  if 'alias' in config:
    return all_configs[config['alias']]
  return config
@ndb.tasklet
def GetAliasesAsync(bot):
  """Tasklet yielding the set of all names that alias the same bot as *bot*.

  Always contains *bot* itself.  Resolves one alias hop to the canonical
  name, then collects every configuration whose 'alias' points at it.
  """
  aliases = {bot}
  configurations = yield namespaced_stored_object.GetAsync(
      BOT_CONFIGURATIONS_KEY)
  if not configurations or bot not in configurations:
    raise ndb.Return(aliases)
  # Follow one alias hop so grouping happens on the canonical bot name.
  if 'alias' in configurations[bot]:
    bot = configurations[bot]['alias']
  aliases.add(bot)
  for name, configuration in configurations.items():
    if configuration.get('alias') == bot:
      aliases.add(name)
  # ndb tasklets deliver their result by raising ndb.Return.
  raise ndb.Return(aliases)
def List():
  """Return the canonical (non-alias) configuration names, sorted
  case-insensitively.

  Fix: ``string.lower`` was removed in Python 3; an explicit
  ``name.lower()`` key behaves identically on Python 2 strings while
  also working on Python 3.
  """
  bot_configurations = namespaced_stored_object.Get(BOT_CONFIGURATIONS_KEY)
  canonical_names = [
      name for name, value in bot_configurations.items() if 'alias' not in value
  ]
  return sorted(canonical_names, key=lambda name: name.lower())
| 29.8 | 80 | 0.76443 |
7955ad737aace06e1bdc3f5847f7f8c319bb15ae | 5,123 | py | Python | door.py | Ish-Co/Alpha_beta-Windows- | 104157ea7e7636f935423063c1a217cbd29f9191 | [
"MIT"
] | null | null | null | door.py | Ish-Co/Alpha_beta-Windows- | 104157ea7e7636f935423063c1a217cbd29f9191 | [
"MIT"
] | null | null | null | door.py | Ish-Co/Alpha_beta-Windows- | 104157ea7e7636f935423063c1a217cbd29f9191 | [
"MIT"
] | null | null | null | import tkinter as tk
# Creating an instance of the class
# (Tk root window plus the white 1000x800 canvas every drawing helper targets.)
root = tk.Tk ()
door = tk.Canvas (root , width=1000 , height=800 , bg='white')
door.pack ()
def create_doorframe(self, x, y, c, door_width=30, door_offset=1.5, frame_width=4.75, opens='rt', facing='n'):
    """Draw a door frame — two trim corners, the door leaf and its swing arc —
    on canvas *self*.

    :param self: canvas-like object exposing create_line / create_arc.
    :param x: x of the right trim corner's anchor point (canvas units).
    :param y: y of the anchor point.
    :param c: global scale factor applied to every hard-coded dimension.
    :param door_width: width of the door leaf (scaled by *c*).
    :param door_offset: leaf offset below *y* (scaled by *c*).
    :param frame_width: frame thickness, only used when ``facing='s'``.
    :param opens: 'rt' for a right-opening leaf (options A/C); anything
        else draws the left-opening variant (options B/D).
    :param facing: 'n' or 's' are implemented; other values only print
        a notice (fixed typo: 'constraction' -> 'construction').
    """
    # OPTION A, B, C OR D
    if (facing == 'n') or (facing == 's'):
        # Corner geometry of the right trim piece.
        y1 = y + (4.75 * c)
        x1 = x + (1.65 * c)
        y2 = y1 + (0.66 * c)
        x2 = x1 - (2.30 * c)
        y3 = y1 + (0.37 * c)
        y4 = y3 - (5.49 * c)
        x3 = x2 + (2.30 * c)
        y5 = y4 - (0.29 * c)
        # Right trim corner:  [
        self.create_line(x, y, x, y1, x1, y1, x1, y2, x2, y3, x2, y4, x3, y5, x3, y, x, y)
        # Shift left by the door opening and mirror the x offsets.
        x -= (door_width + 1.30) * c
        x1 = x - (1.65 * c)
        x2 = x1 + (2.30 * c)
        x3 = x2 - (2.30 * c)
        # Left trim corner:  ]
        self.create_line(x, y, x, y1, x1, y1, x1, y2, x2, y3, x2, y4, x3, y5, x3, y, x, y)
        x += (door_width + 1.30) * c
        x4 = x - (0.65 * c)
        x5 = x4 - (door_width * c)
        # OPTION A OR B (facing north)
        if (facing == 'n'):
            y6 = y + (door_offset * c)
            # Door leaf between the two corners:  ]---------------[
            self.create_line(x4, y6, x5, y6)
            x7 = x4 - (door_width * c)
            x8 = x4 + (door_width * c)
            y7 = y - (door_width * c)
            y8 = y + (door_width * c)
            if (opens == 'rt'):
                # OPTION A: swing arc, opening to the right.
                self.create_arc(x7, y7, x8, y8, start=90, extent=90)
            else:
                # OPTION B: swing arc, opening to the left.
                x7 -= (door_width * c)
                self.create_arc(x4, y7, x7, y8, start=0, extent=90)
        # OPTION C OR D (facing south)
        else:
            y6 = y + ((door_offset + 1.75) * c)
            # Door leaf between the two corners:  ]---------------[
            self.create_line(x4, y6, x5, y6)
            y += (frame_width * c)
            y7 = y - (door_width * c)
            y8 = y + (door_width * c)
            x7 = x4 - (door_width * c)
            if (opens == 'rt'):
                # OPTION C
                x7 -= (door_width * c)
                self.create_arc(x7, y7, x4, y8, start=270, extent=90)
            else:
                # OPTION D
                self.create_arc(x7, y7, x4, y8, start=180, extent=90)
    else:
        print("function is under construction!")
def create_window(self, x, y, c, width_a=31, width_b=28, offset=2.5, direction='n'):
    """Draw a two-pane window as a pair of rectangles on canvas *self*.

    For 'n'/'s' walls the panes run horizontally from (x, y); any other
    direction swaps the axes so the panes run vertically.
    """
    half = (offset * c) / 2
    if direction in ('n', 's'):
        self.create_rectangle(x, y + (offset * c), x + (width_a * c), y + half)
        self.create_rectangle(x + (width_a * c), y, x + ((width_a + width_b) * c), y + half)
    else:
        self.create_rectangle(y + (offset * c), x, y + half, x + (width_a * c))
        self.create_rectangle(y, x + (width_a * c), y + half, x + ((width_a + width_b) * c))
def create_wall(self, x, y, c, length, width=4.75, orientation='horz'):
    """Draw a solid green wall rectangle on canvas *self*.

    Returns the coordinate of the wall's far end (x-end for horizontal
    walls, y-end otherwise) so walls can be chained.
    """
    span = length * c
    thickness = width * c
    if orientation == 'horz':
        self.create_rectangle(x, y, x + span, y + thickness, fill='#000fff000', outline='#000fff000')
        return x + span
    self.create_rectangle(y, x, y + thickness, x + span, fill='#000fff000', outline='#000fff000')
    return y + span
c = 2  # global scale factor passed to every drawing helper
### Master Room
# Walls are chained: each call returns its far coordinate, which seeds the
# placement of the opposite wall below.
x1 = create_wall(door, 10,10,c, 165, orientation = 'horz')
#print("x : ", x)
y1 = create_wall(door, 10,10,c, 161.5, orientation = 'vert')
#print("x : ", x)
#print("y : ", y)
x2 = create_wall(door, 10,y1,c, 165, orientation = 'horz')
#print("x : ", x1)
y2 = create_wall(door, 10,x1,c, 161.5, orientation = 'vert')
#print("x : ", x1)
#print("y : ", y1)
### Guess Room
# Experimental calls kept for reference (note: create_wall takes
# 'orientation', not 'direction' — the third call would need fixing).
#create_wall(door,400,400,c,50)
#create_window(door, 650, 400,5, direction='n')
#create_wall(door,400,946,5,50, direction='e')
# create_doorframe (door , 600 , 400 , 12 , door_width=15 , opens='rt' , facing='n') # TEST CALL FUNCTION TO DRAW OPTION A
# create_doorframe (door , 600 , 400 , 12 , door_width=15 , opens='lt' , facing='n') # TEST CALL FUNCTION TO DRAW OPTION B
# create_doorframe (door , 600 , 400 , 12 , door_width=15 , opens='rt' , facing='s') # TEST CALL FUNCTION TO DRAW OPTION C
# create_doorframe (door , 600 , 400 , 12 , door_width=15 , opens='lt' , facing='s') # TEST CALL FUNCTION TO DRAW OPTION D
# Enter the Tk event loop and display the drawing.
root.mainloop()
| 43.786325 | 123 | 0.465352 |
7955ae8ce5a28cf8ab024b26dcf3059bdd7db2a3 | 744 | py | Python | FishCDailyQuestion/ex001-010/Python3_004/004_06.py | YorkFish/git_study | 6e023244daaa22e12b24e632e76a13e5066f2947 | [
"MIT"
] | null | null | null | FishCDailyQuestion/ex001-010/Python3_004/004_06.py | YorkFish/git_study | 6e023244daaa22e12b24e632e76a13e5066f2947 | [
"MIT"
] | null | null | null | FishCDailyQuestion/ex001-010/Python3_004/004_06.py | YorkFish/git_study | 6e023244daaa22e12b24e632e76a13e5066f2947 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# coding:utf-8
# Day-of-year calculator.  The fixed-width input format also constrains the
# year (e.g. year 20000 would not fit).
date = input("Please enter the date(e.g. 20190101): ")
year_input = int(date[:4])    # first four digits: the year
month_input = int(date[4:6])  # digits 5-6: the month
day_input = int(date[6:])     # digits 7-8: the day of month
md_input = int(date[4:])      # last four digits: month+day, e.g. 0301
# Days per month of a common year; the leading 0 lets month_input index the
# list directly (no month_input - 1 needed).
days_lst = [0, 31, 28, 31, 30, 31,
            30, 31, 31, 30, 31, 30, 31]
days_sum = 0  # running day-of-year total
if (year_input%4 == 0 and year_input%100 != 0) or year_input%400 == 0:
    # The extra leap day only shifts dates *after* Feb 29 (md 0229); Feb 29
    # itself is already counted by day_input, so the previous `> 228` test
    # double-counted it (it reported Feb 29 as day 61 instead of 60).
    if md_input > 229:
        days_sum = 1
    else:
        days_sum = 0
for i in days_lst[:month_input]:  # sum the full months before this one
    days_sum += i
days_sum += day_input
print("\nCalculation results:", days_sum)
| 26.571429 | 70 | 0.600806 |
7955aeca0abd865d852dd7ea5f82ce21c2ae4e77 | 694 | py | Python | examples/load_conf/load_conf_demo.py | Jie-Yuan/MeUtils | 2bb191b0d35b809af037c0f65b37570b8828bea3 | [
"Apache-2.0"
] | 3 | 2020-12-03T07:30:02.000Z | 2021-02-07T13:37:33.000Z | examples/load_conf/load_conf_demo.py | Jie-Yuan/MeUtils | 2bb191b0d35b809af037c0f65b37570b8828bea3 | [
"Apache-2.0"
] | null | null | null | examples/load_conf/load_conf_demo.py | Jie-Yuan/MeUtils | 2bb191b0d35b809af037c0f65b37570b8828bea3 | [
"Apache-2.0"
] | 1 | 2021-02-07T13:37:38.000Z | 2021-02-07T13:37:38.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Project : MeUtils.
# @File : load_conf_demo
# @Time : 2021/1/27 8:38 下午
# @Author : yuanjie
# @Email : yuanjie@xiaomi.com
# @Software : PyCharm
# @Description :
from meutils.pipe import *
class Config(BaseConfig):
    """Demo configuration schema for the meutils loaders."""
    # NOTE(review): BaseConfig and List come from the ``meutils.pipe`` star
    # import; fields look pydantic-style declarative — confirm before relying
    # on validation semantics.
    name: str = ''
    age: int = 666
    betterme: List[str]=[]
# Load the config stored under this ZooKeeper path and pretty-print it.
c = Config.parse_zk('/mipush/myconf')
print(Besttable.draw_dict(c.dict()))
# Alternative loaders kept for reference:
# print(Config.parse_yaml('./myconf.yaml').dict())
#
# print(Config.parse_zk('/mipush/bot').betterme)
# print(Config.parse_zk('/mipush/bot').__getattribute__('betterme'))
# os.environ['name'] = '123456789'
# print(Config.parse_env().dict())
| 21.030303 | 68 | 0.619597 |
7955b0463912e0ffada4322f71f346846327047d | 1,079 | py | Python | algorithms/MSM.py | feftio/random-numbers | a84be0957f84e3d70c7a45145cb451974abc9828 | [
"MIT"
] | null | null | null | algorithms/MSM.py | feftio/random-numbers | a84be0957f84e3d70c7a45145cb451974abc9828 | [
"MIT"
] | null | null | null | algorithms/MSM.py | feftio/random-numbers | a84be0957f84e3d70c7a45145cb451974abc9828 | [
"MIT"
] | null | null | null | from utils import digits_up, digits_down
def _MSM(z, n):
    """Generate *n* numbers with a middle-square-style method seeded by *z*.

    Each step squares z and keeps its middle digits; the emitted value is
    the first element of ``digits_down(z)``.
    """
    sequence = []
    for _ in range(n):
        # assumes digits_up lifts a fractional z (-1 < z < 1) to an integer
        # and returns its digit count — TODO confirm against utils.digits_up
        (z, counter) = digits_up(z) if (
            z > -1 and z < 1) else (z, len(str(z)))
        # keep the middle digits of z**2: drop counter//2 digits from the
        # low end, mask off the high end with the modulus
        z = int(z ** 2 %
                (10 ** (counter + counter // 2)) / (10 ** (counter // 2)))
        sequence.append(digits_down(z)[0])
    return sequence
# z = int(z % (10 ** 6) / (10 ** 2)) # 4 counter = 7
# z = int(z % (10 ** 7) / (10 ** 2)) # 5 counter = 9
# z = int(z % (10 ** 9) / (10 ** 3)) # 6 counter = 11
# z = int(z % (10 ** 10) / (10 ** 3)) # 7 counter = 13
# z = int(z % (10 ** 12) / (10 ** 4)) # 8 counter = 15
def MSM(cli, name):
    """Interactive wrapper around _MSM: announce the chosen algorithm,
    prompt for the seed (z0) and count (n), and render the generated
    sequence as a table.  Prompts/labels are in Russian by design."""
    cli.out(f'Вы выбрали [magenta]{name}[/magenta].')
    z0 = cli.float('Введите начальное значение (z0): ')
    n = cli.int('Введите количество генерируемых чисел (n): ')
    cli.table(['Число'], _MSM(z0, n),
              autoheader='z(i)', autoformat='z({})')
# Quick manual smoke run of the generator.
if __name__ == "__main__":
    z0 = 0.8933  # initial seed value
    n = 3  # how many numbers to generate
    print(_MSM(z0, n))
| 32.69697 | 79 | 0.493976 |
7955b2412241d7c1ebeb98e0dd424fb0693c2548 | 7,629 | py | Python | astroquery/vizier/tests/test_vizier.py | amsuhane/astroquery | 9c6ad92c9c52fc82816958e50354465c3ba73618 | [
"BSD-3-Clause"
] | null | null | null | astroquery/vizier/tests/test_vizier.py | amsuhane/astroquery | 9c6ad92c9c52fc82816958e50354465c3ba73618 | [
"BSD-3-Clause"
] | null | null | null | astroquery/vizier/tests/test_vizier.py | amsuhane/astroquery | 9c6ad92c9c52fc82816958e50354465c3ba73618 | [
"BSD-3-Clause"
] | null | null | null | # Licensed under a 3-clause BSD style license - see LICENSE.rst
import os
import requests
from numpy import testing as npt
import pytest
from astropy.table import Table
import astropy.units as u
import six
from six.moves import urllib_parse as urlparse
from ... import vizier
from ...utils import commons
from ...utils.testing_tools import MockResponse
if six.PY3:
    # Rebind the builtin ``str`` to six's native string type so the py2/py3
    # string handling below behaves uniformly (deliberately shadows a builtin).
    str, = six.string_types
# Maps a query's '-source' value to the canned VO-table fixture served back
# by post_mockreturn.
VO_DATA = {'HIP,NOMAD,UCAC': "viz.xml",
           'NOMAD,UCAC': "viz.xml",
           'B/iram/pdbi': "afgl2591_iram.xml",
           'J/ApJ/706/83': "kang2010.xml"}
def data_path(filename):
    """Return the path of *filename* inside this module's ``data`` directory."""
    return os.path.join(os.path.dirname(__file__), 'data', filename)
@pytest.fixture
def patch_post(request):
    """Monkeypatch ``requests.Session.request`` to serve canned VO tables.

    Works across pytest versions: ``getfixturevalue`` replaced the older
    ``getfuncargvalue`` in pytest 3.
    """
    try:
        mp = request.getfixturevalue("monkeypatch")
    except AttributeError:  # pytest < 3
        mp = request.getfuncargvalue("monkeypatch")
    mp.setattr(requests.Session, 'request', post_mockreturn)
    return mp
def post_mockreturn(self, method, url, data=None, timeout=10, files=None,
                    params=None, headers=None, **kwargs):
    """Stand-in for ``requests.Session.request`` that replays fixture files.

    The POST payload is newline-separated ``key=value`` text; only the
    first pair of each line is kept, and the '-source' key selects which
    canned VO table (VO_DATA) to return inside a MockResponse.
    """
    if method != 'POST':
        raise ValueError("A 'post request' was made with method != POST")
    datad = dict([urlparse.parse_qsl(d)[0] for d in data.split('\n')])
    filename = data_path(VO_DATA[datad['-source']])
    # Close the fixture file promptly instead of leaking the handle.
    with open(filename, "rb") as f:
        content = f.read()
    return MockResponse(content, **kwargs)
def parse_objname(obj):
    """Resolve a hard-coded object name to its ICRS coordinate.

    Raises KeyError for anything other than 'AFGL 2591' — these tests only
    need that one target.
    """
    d = {'AFGL 2591': commons.ICRSCoordGenerator(307.35388 * u.deg,
                                                 40.18858 * u.deg)}
    return d[obj]
@pytest.fixture
def patch_coords(request):
    """Monkeypatch ``commons.parse_coordinates`` with parse_objname so name
    lookups never touch the network (pytest<3 fallback as in patch_post)."""
    try:
        mp = request.getfixturevalue("monkeypatch")
    except AttributeError:  # pytest < 3
        mp = request.getfuncargvalue("monkeypatch")
    mp.setattr(commons, 'parse_coordinates', parse_objname)
    return mp
@pytest.mark.parametrize(('dim', 'expected_out'),
                         [(5 * u.deg, ('d', 5)),
                          (5 * u.arcmin, ('m', 5)),
                          (5 * u.arcsec, ('s', 5)),
                          (0.314 * u.rad, ('d', 18)),
                          ('5d5m5.5s', ('d', 5.0846))
                          ])
def test_parse_angle(dim, expected_out):
    # _parse_angle normalizes any angle-like input (Quantity or string) to a
    # (unit-code, value) pair; values only need to match to 2 significant
    # figures.
    actual_out = vizier.core._parse_angle(dim)
    actual_unit, actual_value = actual_out
    expected_unit, expected_value = expected_out
    assert actual_unit == expected_unit
    npt.assert_approx_equal(actual_value, expected_value, significant=2)
def test_parse_angle_err():
    # A non-angular unit must be rejected.
    with pytest.raises(Exception):
        vizier.core._parse_angle(5 * u.kg)
@pytest.mark.parametrize(('filepath'),
                         list(set(VO_DATA.values())))
def test_parse_result_verbose(filepath, capsys):
    """_parse_result must stay silent (no stdout) for every fixture table."""
    with open(data_path(filepath), 'rb') as f:
        table_contents = f.read()
    response = MockResponse(table_contents)
    vizier.core.Vizier._parse_result(response)
    out, err = capsys.readouterr()
    assert out == ''
@pytest.mark.parametrize(('filepath', 'objlen'),
                         [('viz.xml', 231),
                          ('afgl2591_iram.xml', 1),
                          ('kang2010.xml', 1),
                          ]
                         )  # TODO: 1->50 because it is just 1 table
def test_parse_result(filepath, objlen):
    """Each fixture parses into a TableList with the expected table count."""
    # Read via a context manager (as test_parse_result_verbose already does)
    # so the fixture file handle is closed instead of leaked.
    with open(data_path(filepath), 'rb') as f:
        table_contents = f.read()
    response = MockResponse(table_contents)
    result = vizier.core.Vizier._parse_result(response)
    assert isinstance(result, commons.TableList)
    assert len(result) == objlen
    assert isinstance(result[result.keys()[0]], Table)
def test_query_region_async(patch_post):
    # Async region query against the canned VO response yields a response.
    target = commons.ICRSCoordGenerator(ra=299.590, dec=35.201,
                                        unit=(u.deg, u.deg))
    response = vizier.core.Vizier.query_region_async(
        target, radius=5 * u.deg, catalog=["HIP", "NOMAD", "UCAC"])
    assert response is not None
def test_query_region(patch_post):
    # Synchronous variant parses the response into a TableList.
    target = commons.ICRSCoordGenerator(ra=299.590, dec=35.201,
                                        unit=(u.deg, u.deg))
    result = vizier.core.Vizier.query_region(target,
                                             radius=5 * u.deg,
                                             catalog=["HIP", "NOMAD", "UCAC"])
    assert isinstance(result, commons.TableList)
def test_query_regions(patch_post):
    """
    This ONLY tests that calling the function works -
    the data currently used for the test is *NOT* appropriate
    for the multi-object query. There is no test for parsing
    that return (yet - but see test_multicoord in remote_data)
    """
    targets = commons.ICRSCoordGenerator(ra=[299.590, 299.90],
                                         dec=[35.201, 35.201],
                                         unit=(u.deg, u.deg))
    vizier.core.Vizier.query_region(targets,
                                    radius=5 * u.deg,
                                    catalog=["HIP", "NOMAD", "UCAC"])
def test_query_object_async(patch_post):
    # Object-name queries go through the same mocked transport.
    response = vizier.core.Vizier.query_object_async(
        "HD 226868", catalog=["NOMAD", "UCAC"])
    assert response is not None
def test_query_object(patch_post):
    result = vizier.core.Vizier.query_object(
        "HD 226868", catalog=["NOMAD", "UCAC"])
    assert isinstance(result, commons.TableList)
def test_query_another_object(patch_post, patch_coords):
    # Name resolution is short-circuited by the patch_coords fixture.
    result = vizier.core.Vizier.query_region("AFGL 2591", radius='0d5m',
                                             catalog="B/iram/pdbi")
    assert isinstance(result, commons.TableList)
def test_get_catalogs_async(patch_post):
    response = vizier.core.Vizier.get_catalogs_async('J/ApJ/706/83')
    assert response is not None
def test_get_catalogs(patch_post):
    result = vizier.core.Vizier.get_catalogs('J/ApJ/706/83')
    assert isinstance(result, commons.TableList)
class TestVizierKeywordClass:
    """Keyword-list handling of vizier.core.VizierKeyword."""
    def test_init(self):
        # Known keywords produce a populated keyword dictionary.
        v = vizier.core.VizierKeyword(keywords=['cobe', 'xmm'])
        assert v.keyword_dict is not None
    def test_keywords(self, recwarn):
        # An unknown keyword ('xxx') is dropped with a UserWarning naming it.
        vizier.core.VizierKeyword(keywords=['xxx', 'coBe'])
        w = recwarn.pop(UserWarning)
        # warning must be emitted
        assert (str(w.message) == 'xxx : No such keyword')
class TestVizierClass:
    """Constructor defaults and keyword/column/filter state of Vizier."""
    def test_init(self):
        # Defaults: no keywords, all columns ("*"), no filters.
        v = vizier.core.Vizier()
        assert v.keywords is None
        assert v.columns == ["*"]
        assert v.column_filters == {}
    def test_keywords(self):
        # Keywords render as '-kw.<Category>=<value>' lines; unknown entries
        # ('xy') are dropped; `del` resets the property to None.
        v = vizier.core.Vizier(keywords=['optical', 'chandra', 'ans'])
        assert str(v.keywords) == ('-kw.Mission=ANS\n-kw.Mission='
                                   'Chandra\n-kw.Wavelength=optical')
        v = vizier.core.Vizier(keywords=['xy', 'optical'])
        assert str(v.keywords) == '-kw.Wavelength=optical'
        v.keywords = ['optical', 'cobe']
        assert str(v.keywords) == '-kw.Mission=COBE\n-kw.Wavelength=optical'
        del v.keywords
        assert v.keywords is None
    def test_columns(self):
        v = vizier.core.Vizier(columns=['Vmag', 'B-V', '_RAJ2000', '_DEJ2000'])
        assert len(v.columns) == 4
    def test_columns_unicode(self):
        v = vizier.core.Vizier(columns=[u'Vmag', u'B-V',
                                        u'_RAJ2000', u'_DEJ2000'])
        assert len(v.columns) == 4
    def test_column_filters(self):
        v = vizier.core.Vizier(column_filters={'Vmag': '>10'})
        assert len(v.column_filters) == 1
    def test_column_filters_unicode(self):
        v = vizier.core.Vizier(column_filters={u'Vmag': u'>10'})
        assert len(v.column_filters) == 1
| 34.835616 | 79 | 0.604011 |
7955b29c486276b284571d2cf5890037aea2429c | 9,825 | py | Python | src/oci/load_balancer/models/create_certificate_details.py | Manny27nyc/oci-python-sdk | de60b04e07a99826254f7255e992f41772902df7 | [
"Apache-2.0",
"BSD-3-Clause"
] | 249 | 2017-09-11T22:06:05.000Z | 2022-03-04T17:09:29.000Z | src/oci/load_balancer/models/create_certificate_details.py | Manny27nyc/oci-python-sdk | de60b04e07a99826254f7255e992f41772902df7 | [
"Apache-2.0",
"BSD-3-Clause"
] | 228 | 2017-09-11T23:07:26.000Z | 2022-03-23T10:58:50.000Z | src/oci/load_balancer/models/create_certificate_details.py | Manny27nyc/oci-python-sdk | de60b04e07a99826254f7255e992f41772902df7 | [
"Apache-2.0",
"BSD-3-Clause"
] | 224 | 2017-09-27T07:32:43.000Z | 2022-03-25T16:55:42.000Z | # coding: utf-8
# Copyright (c) 2016, 2021, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel # noqa: F401
from oci.decorators import init_model_state_from_kwargs
@init_model_state_from_kwargs
class CreateCertificateDetails(object):
"""
The configuration details for adding a certificate bundle to a listener.
For more information on SSL certficate configuration, see
`Managing SSL Certificates`__.
**Warning:** Oracle recommends that you avoid using any confidential information when you supply string values using the API.
__ https://docs.cloud.oracle.com/Content/Balance/Tasks/managingcertificates.htm
"""
def __init__(self, **kwargs):
"""
Initializes a new CreateCertificateDetails object with values from keyword arguments.
The following keyword arguments are supported (corresponding to the getters/setters of this class):
:param passphrase:
The value to assign to the passphrase property of this CreateCertificateDetails.
:type passphrase: str
:param private_key:
The value to assign to the private_key property of this CreateCertificateDetails.
:type private_key: str
:param public_certificate:
The value to assign to the public_certificate property of this CreateCertificateDetails.
:type public_certificate: str
:param ca_certificate:
The value to assign to the ca_certificate property of this CreateCertificateDetails.
:type ca_certificate: str
:param certificate_name:
The value to assign to the certificate_name property of this CreateCertificateDetails.
:type certificate_name: str
"""
self.swagger_types = {
'passphrase': 'str',
'private_key': 'str',
'public_certificate': 'str',
'ca_certificate': 'str',
'certificate_name': 'str'
}
self.attribute_map = {
'passphrase': 'passphrase',
'private_key': 'privateKey',
'public_certificate': 'publicCertificate',
'ca_certificate': 'caCertificate',
'certificate_name': 'certificateName'
}
self._passphrase = None
self._private_key = None
self._public_certificate = None
self._ca_certificate = None
self._certificate_name = None
@property
def passphrase(self):
"""
Gets the passphrase of this CreateCertificateDetails.
A passphrase for encrypted private keys. This is needed only if you created your certificate with a passphrase.
:return: The passphrase of this CreateCertificateDetails.
:rtype: str
"""
return self._passphrase
@passphrase.setter
def passphrase(self, passphrase):
"""
Sets the passphrase of this CreateCertificateDetails.
A passphrase for encrypted private keys. This is needed only if you created your certificate with a passphrase.
:param passphrase: The passphrase of this CreateCertificateDetails.
:type: str
"""
self._passphrase = passphrase
@property
def private_key(self):
"""
Gets the private_key of this CreateCertificateDetails.
The SSL private key for your certificate, in PEM format.
Example:
-----BEGIN RSA PRIVATE KEY-----
jO1O1v2ftXMsawM90tnXwc6xhOAT1gDBC9S8DKeca..JZNUgYYwNS0dP2UK
tmyN+XqVcAKw4HqVmChXy5b5msu8eIq3uc2NqNVtR..2ksSLukP8pxXcHyb
+sEwvM4uf8qbnHAqwnOnP9+KV9vds6BaH1eRA4CHz..n+NVZlzBsTxTlS16
/Umr7wJzVrMqK5sDiSu4WuaaBdqMGfL5hLsTjcBFD..Da2iyQmSKuVD4lIZ
...
-----END RSA PRIVATE KEY-----
:return: The private_key of this CreateCertificateDetails.
:rtype: str
"""
return self._private_key
@private_key.setter
def private_key(self, private_key):
"""
Sets the private_key of this CreateCertificateDetails.
The SSL private key for your certificate, in PEM format.
Example:
-----BEGIN RSA PRIVATE KEY-----
jO1O1v2ftXMsawM90tnXwc6xhOAT1gDBC9S8DKeca..JZNUgYYwNS0dP2UK
tmyN+XqVcAKw4HqVmChXy5b5msu8eIq3uc2NqNVtR..2ksSLukP8pxXcHyb
+sEwvM4uf8qbnHAqwnOnP9+KV9vds6BaH1eRA4CHz..n+NVZlzBsTxTlS16
/Umr7wJzVrMqK5sDiSu4WuaaBdqMGfL5hLsTjcBFD..Da2iyQmSKuVD4lIZ
...
-----END RSA PRIVATE KEY-----
:param private_key: The private_key of this CreateCertificateDetails.
:type: str
"""
self._private_key = private_key
@property
def public_certificate(self):
"""
Gets the public_certificate of this CreateCertificateDetails.
The public certificate, in PEM format, that you received from your SSL certificate provider.
Example:
-----BEGIN CERTIFICATE-----
MIIC2jCCAkMCAg38MA0GCSqGSIb3DQEBBQUAMIGbM..QswCQYDVQQGEwJKU
A1UECBMFVG9reW8xEDAOBgNVBAcTB0NodW8ta3UxE..TAPBgNVBAoTCEZyY
MRgwFgYDVQQLEw9XZWJDZXJ0IFN1cHBvcnQxGDAWB..gNVBAMTD0ZyYW5rN
YiBDQTEjMCEGCSqGSIb3DQEJARYUc3VwcG9ydEBmc..mFuazRkZC5jb20wH
...
-----END CERTIFICATE-----
:return: The public_certificate of this CreateCertificateDetails.
:rtype: str
"""
return self._public_certificate
@public_certificate.setter
def public_certificate(self, public_certificate):
"""
Sets the public_certificate of this CreateCertificateDetails.
The public certificate, in PEM format, that you received from your SSL certificate provider.
Example:
-----BEGIN CERTIFICATE-----
MIIC2jCCAkMCAg38MA0GCSqGSIb3DQEBBQUAMIGbM..QswCQYDVQQGEwJKU
A1UECBMFVG9reW8xEDAOBgNVBAcTB0NodW8ta3UxE..TAPBgNVBAoTCEZyY
MRgwFgYDVQQLEw9XZWJDZXJ0IFN1cHBvcnQxGDAWB..gNVBAMTD0ZyYW5rN
YiBDQTEjMCEGCSqGSIb3DQEJARYUc3VwcG9ydEBmc..mFuazRkZC5jb20wH
...
-----END CERTIFICATE-----
:param public_certificate: The public_certificate of this CreateCertificateDetails.
:type: str
"""
self._public_certificate = public_certificate
@property
def ca_certificate(self):
"""
Gets the ca_certificate of this CreateCertificateDetails.
The Certificate Authority certificate, or any interim certificate, that you received from your SSL certificate provider.
Example:
-----BEGIN CERTIFICATE-----
MIIEczCCA1ugAwIBAgIBADANBgkqhkiG9w0BAQQFAD..AkGA1UEBhMCR0Ix
EzARBgNVBAgTClNvbWUtU3RhdGUxFDASBgNVBAoTC0..0EgTHRkMTcwNQYD
VQQLEy5DbGFzcyAxIFB1YmxpYyBQcmltYXJ5IENlcn..XRpb24gQXV0aG9y
aXR5MRQwEgYDVQQDEwtCZXN0IENBIEx0ZDAeFw0wMD..TUwMTZaFw0wMTAy
...
-----END CERTIFICATE-----
:return: The ca_certificate of this CreateCertificateDetails.
:rtype: str
"""
return self._ca_certificate
@ca_certificate.setter
def ca_certificate(self, ca_certificate):
"""
Sets the ca_certificate of this CreateCertificateDetails.
The Certificate Authority certificate, or any interim certificate, that you received from your SSL certificate provider.
Example:
-----BEGIN CERTIFICATE-----
MIIEczCCA1ugAwIBAgIBADANBgkqhkiG9w0BAQQFAD..AkGA1UEBhMCR0Ix
EzARBgNVBAgTClNvbWUtU3RhdGUxFDASBgNVBAoTC0..0EgTHRkMTcwNQYD
VQQLEy5DbGFzcyAxIFB1YmxpYyBQcmltYXJ5IENlcn..XRpb24gQXV0aG9y
aXR5MRQwEgYDVQQDEwtCZXN0IENBIEx0ZDAeFw0wMD..TUwMTZaFw0wMTAy
...
-----END CERTIFICATE-----
:param ca_certificate: The ca_certificate of this CreateCertificateDetails.
:type: str
"""
self._ca_certificate = ca_certificate
@property
def certificate_name(self):
"""
**[Required]** Gets the certificate_name of this CreateCertificateDetails.
A friendly name for the certificate bundle. It must be unique and it cannot be changed.
Valid certificate bundle names include only alphanumeric characters, dashes, and underscores.
Certificate bundle names cannot contain spaces. Avoid entering confidential information.
Example: `example_certificate_bundle`
:return: The certificate_name of this CreateCertificateDetails.
:rtype: str
"""
return self._certificate_name
@certificate_name.setter
def certificate_name(self, certificate_name):
"""
Sets the certificate_name of this CreateCertificateDetails.
A friendly name for the certificate bundle. It must be unique and it cannot be changed.
Valid certificate bundle names include only alphanumeric characters, dashes, and underscores.
Certificate bundle names cannot contain spaces. Avoid entering confidential information.
Example: `example_certificate_bundle`
:param certificate_name: The certificate_name of this CreateCertificateDetails.
:type: str
"""
self._certificate_name = certificate_name
    def __repr__(self):
        # Delegate to the SDK-wide helper so every model renders uniformly.
        return formatted_flat_dict(self)
def __eq__(self, other):
if other is None:
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
| 36.524164 | 245 | 0.677252 |
7955b3a849ccfe8aeb39a308f43430eb21202f00 | 6,434 | py | Python | mindspore/nn/probability/bijector/softplus.py | GuoSuiming/mindspore | 48afc4cfa53d970c0b20eedfb46e039db2a133d5 | [
"Apache-2.0"
] | 4 | 2021-01-26T09:14:01.000Z | 2021-01-26T09:17:24.000Z | mindspore/nn/probability/bijector/softplus.py | forwhat461/mindspore | 59a277756eb4faad9ac9afcc7fd526e8277d4994 | [
"Apache-2.0"
] | null | null | null | mindspore/nn/probability/bijector/softplus.py | forwhat461/mindspore | 59a277756eb4faad9ac9afcc7fd526e8277d4994 | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Softplus Bijector"""
import numpy as np
from mindspore.ops import operations as P
from mindspore.nn.layer.activation import LogSigmoid
from ..distribution._utils.custom_ops import exp_generic, log_generic
from .bijector import Bijector
class Softplus(Bijector):
    r"""
    Softplus Bijector.
    This Bijector performs the operation:
    .. math::
        Y = \frac{\log(1 + e ^ {kX})}{k}
    where k is the sharpness factor.
    Args:
        sharpness (float, list, numpy.ndarray, Tensor): The scale factor. Default: 1.0.
        name (str): The name of the Bijector. Default: 'Softplus'.
    Supported Platforms:
        ``Ascend`` ``GPU``
    Note:
        The dtype of `sharpness` must be float.
    Raises:
        TypeError: When the dtype of the sharpness is not float.
    Examples:
        >>> import mindspore
        >>> import mindspore.nn as nn
        >>> import mindspore.nn.probability.bijector as msb
        >>> from mindspore import Tensor
        >>>
        >>> # To initialize a Softplus bijector of sharpness 2.0.
        >>> softplus = msb.Softplus(2.0)
        >>> # To use a ScalarAffine bijector in a network.
        >>> value = Tensor([1, 2, 3], dtype=mindspore.float32)
        >>> ans1 = softplus.forward(value)
        >>> print(ans1.shape)
        (3,)
        >>> ans2 = softplus.inverse(value)
        >>> print(ans2.shape)
        (3,)
        >>> ans3 = softplus.forward_log_jacobian(value)
        >>> print(ans3.shape)
        (3,)
        >>> ans4 = softplus.inverse_log_jacobian(value)
        >>> print(ans4.shape)
        (3,)
    """
    def __init__(self,
                 sharpness=1.0,
                 name='Softplus'):
        """
        Constructor of Softplus Bijector.
        """
        param = dict(locals())
        param['param_dict'] = {'sharpness': sharpness}
        super(Softplus, self).__init__(name=name, dtype=None, param=param)
        self._sharpness = self._add_parameter(sharpness, 'sharpness')
        # Cache the primitive ops used by forward/inverse so they are
        # constructed once per bijector instance.
        self.exp = exp_generic
        self.log = log_generic
        self.expm1 = P.Expm1()
        self.abs = P.Abs()
        self.dtypeop = P.DType()
        self.cast = P.Cast()
        self.fill = P.Fill()
        self.greater = P.Greater()
        self.less = P.Less()
        self.log_sigmoid = LogSigmoid()
        self.logicalor = P.LogicalOr()
        self.select = P.Select()
        self.shape = P.Shape()
        self.sigmoid = P.Sigmoid()
        self.softplus = self._softplus
        self.inverse_softplus = self._inverse_softplus
        # threshold = log(eps) + 1 for float32: below this, softplus(x) ~ e^x;
        # above -threshold, softplus(x) ~ x. tiny = e^threshold is the
        # matching cutoff on the inverse side.
        self.threshold = np.log(np.finfo(np.float32).eps) + 1
        self.tiny = np.exp(self.threshold)
    def _softplus(self, x):
        # Numerically stable softplus. Inputs in either asymptotic regime
        # (x < threshold or x > -threshold) are masked to 1.0 before the
        # generic log(1 + e^x) is evaluated, so no overflow/underflow
        # occurs; the asymptotic values (e^x, resp. x) are then selected
        # back in. The select ordering matters — do not reorder.
        too_small = self.less(x, self.threshold)
        too_large = self.greater(x, -self.threshold)
        too_small_value = self.exp(x)
        too_large_value = x
        ones = self.fill(self.dtypeop(x), self.shape(x), 1.0)
        too_small_or_too_large = self.logicalor(too_small, too_large)
        x = self.select(too_small_or_too_large, ones, x)
        y = self.log(self.exp(x) + 1.0)
        return self.select(too_small, too_small_value, self.select(too_large, too_large_value, y))
    def _inverse_softplus(self, x):
        r"""
        Numerically stable inverse softplus.
        .. math::
            f(x) = \frac{\log(1 + e^{x}))}
            f^{-1}(y) = \frac{\log(e^{y} - 1)}
        For y < tiny the inverse is approximated by log(y); for
        y > -threshold it is approximated by y itself. As in ``_softplus``,
        extreme inputs are masked to 1.0 before evaluating the generic
        formula x + log|expm1(-x)|.
        """
        too_small = self.less(x, self.tiny)
        too_large = self.greater(x, -self.threshold)
        too_small_value = self.log(x)
        too_large_value = x
        ones = self.fill(self.dtypeop(x), self.shape(x), 1.0)
        too_small_or_too_large = self.logicalor(too_small, too_large)
        x = self.select(too_small_or_too_large, ones, x)
        y = x + self.log(self.abs(self.expm1(-x)))
        return self.select(too_small, too_small_value, self.select(too_large, too_large_value, y))
    @property
    def sharpness(self):
        # Sharpness factor k of the bijector.
        return self._sharpness
    def extend_repr(self):
        # Scalar-batch bijectors print their sharpness; batched ones print
        # the batch shape instead.
        if self.is_scalar_batch:
            str_info = f'sharpness = {self.sharpness}'
        else:
            str_info = f'batch_shape = {self.batch_shape}'
        return str_info
    def _forward(self, x):
        # Y = softplus(k * x) / k
        x = self._check_value_dtype(x)
        sharpness_local = self.cast_param_by_value(x, self.sharpness)
        scaled_value = sharpness_local * x
        forward_v = self.softplus(scaled_value) / sharpness_local
        return forward_v
    def _inverse(self, y):
        r"""
        .. math::
            f(x) = \frac{\log(1 + e^{kx}))}{k}
            f^{-1}(y) = \frac{\log(e^{ky} - 1)}{k}
        """
        y = self._check_value_dtype(y)
        sharpness_local = self.cast_param_by_value(y, self.sharpness)
        scaled_value = sharpness_local * y
        inverse_v = self.inverse_softplus(scaled_value) / sharpness_local
        return inverse_v
    def _forward_log_jacobian(self, x):
        r"""
        Log-determinant of the forward Jacobian.
        .. math:
            f(x) = \log(1 + e^{kx}) / k
            f'(x) = \frac{e^{kx}}{ 1 + e^{kx}}
            \log(f'(x)) = kx - \log(1 + e^{kx}) = kx - f(kx)
        which is exactly log-sigmoid of the scaled input.
        """
        x = self._check_value_dtype(x)
        sharpness_local = self.cast_param_by_value(x, self.sharpness)
        scaled_value = sharpness_local * x
        forward_log_j = self.log_sigmoid(scaled_value)
        return forward_log_j
    def _inverse_log_jacobian(self, y):
        r"""
        Log-determinant of the inverse Jacobian.
        .. math:
            f(y) = \frac{\log(e^{ky} - 1)}{k}
            f'(y) = \frac{e^{ky}}{e^{ky} - 1}
            \log(f'(y)) = ky - \log(e^{ky} - 1) = ky - f(ky)
        """
        y = self._check_value_dtype(y)
        sharpness_local = self.cast_param_by_value(y, self.sharpness)
        scaled_value = sharpness_local * y
        inverse_log_j = scaled_value - self.inverse_softplus(scaled_value)
        return inverse_log_j
| 35.15847 | 98 | 0.588436 |
7955b683c0db79910cf902d9d1494f9cc9da3a8b | 1,604 | py | Python | onnxruntime_extensions/cmake_helper.py | vvchernov/onnxruntime-extensions | cc858e831b719d31e4f34ee9adb391105b4ce26b | [
"MIT"
] | 59 | 2021-04-29T07:39:42.000Z | 2022-03-29T21:12:05.000Z | onnxruntime_extensions/cmake_helper.py | vvchernov/onnxruntime-extensions | cc858e831b719d31e4f34ee9adb391105b4ce26b | [
"MIT"
] | 45 | 2021-05-12T08:32:58.000Z | 2022-03-29T21:11:59.000Z | onnxruntime_extensions/cmake_helper.py | vvchernov/onnxruntime-extensions | cc858e831b719d31e4f34ee9adb391105b4ce26b | [
"MIT"
] | 18 | 2021-05-10T10:15:46.000Z | 2022-03-22T10:46:36.000Z | import inspect
from ._ocos import default_opset_domain
from . import _cuops
# Every CustomOp subclass exposed by the _cuops module, keyed by class name.
ALL_CUSTOM_OPS = {_name: _obj for _name, _obj in inspect.getmembers(_cuops)
                  if (inspect.isclass(_obj) and issubclass(_obj, _cuops.CustomOp))}
# Maps a custom operator name to the CMake option that enables its build.
OPMAP_TO_CMAKE_FLAGS = {'GPT2Tokenizer': 'OCOS_ENABLE_GPT2_TOKENIZER',
                        'BlingFireSentenceBreaker': 'OCOS_ENABLE_BLINGFIRE'
                        }
def gen_cmake_oplist(opconfig_file, oplist_cmake_file='_selectedoplist.cmake'):
    """Generate a CMake include file enabling the selected custom operators.

    Reads *opconfig_file* line by line; every line starting with the
    extension's opset domain is expected to be of the form
    ``<domain>;<version>;<op1>,<op2>,...``. For each listed operator the
    corresponding ``OCOS_ENABLE_*`` cache variable (looked up in
    OPMAP_TO_CMAKE_FLAGS) is switched ON in *oplist_cmake_file*.

    :param opconfig_file: path to the operator config file to read.
    :param oplist_cmake_file: path of the CMake file to write.
    :raises RuntimeError: if a config line is malformed or an operator has
        no known build flag.
    """
    ext_domain = default_opset_domain()
    with open(oplist_cmake_file, 'w') as f:
        print("# Auto-Generated File, not edited!!!", file=f)
        with open(opconfig_file, 'r') as opfile:
            for _ln in opfile:
                if _ln.startswith(ext_domain):
                    items = _ln.strip().split(';')
                    if len(items) < 3:
                        raise RuntimeError("The malformated operator config file.")
                    for _op in items[2].split(','):
                        if not _op:
                            continue  # is None or ""
                        if _op not in OPMAP_TO_CMAKE_FLAGS:
                            # BUGFIX: .format() used to bind only to the second
                            # string literal (which has no placeholder), so the
                            # '{}' was never substituted. Format the full message.
                            raise RuntimeError(
                                ("Cannot find the custom operator({})'s build flags, "
                                 "Please update the OPMAP_TO_CMAKE_FLAGS dictionary.").format(_op))
                        print("set({} ON CACHE INTERNAL \"\")".format(OPMAP_TO_CMAKE_FLAGS[_op]), file=f)
        print("# End of Building the Operator CMake variables", file=f)
    print('The cmake tool file has been generated successfully.')
| 44.555556 | 111 | 0.577307 |
7955b6a97b04efe49bd554ebddb63f75f4295c2e | 9,620 | py | Python | lldb/test/API/tools/lldb-vscode/attach/TestVSCode_attach.py | elizabethandrews/llvm | 308498236c1c4778fdcba0bfbb556adf8aa333ea | [
"Apache-2.0"
] | 305 | 2019-09-14T17:16:05.000Z | 2022-03-31T15:05:20.000Z | lldb/test/API/tools/lldb-vscode/attach/TestVSCode_attach.py | elizabethandrews/llvm | 308498236c1c4778fdcba0bfbb556adf8aa333ea | [
"Apache-2.0"
] | 11 | 2019-10-17T21:11:52.000Z | 2022-02-17T20:10:00.000Z | lldb/test/API/tools/lldb-vscode/attach/TestVSCode_attach.py | elizabethandrews/llvm | 308498236c1c4778fdcba0bfbb556adf8aa333ea | [
"Apache-2.0"
] | 24 | 2019-10-03T11:22:11.000Z | 2022-01-25T09:59:30.000Z | """
Test lldb-vscode setBreakpoints request
"""
import unittest2
import vscode
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
import lldbvscode_testcase
import os
import shutil
import subprocess
import tempfile
import threading
import time
def spawn_and_wait(program, delay):
    """Launch *program* after an optional *delay* (seconds) and block until
    it exits. Intended as a thread target so the test can attach to the
    process while it is running."""
    if delay:
        time.sleep(delay)
    child = subprocess.Popen([program],
                             stdin=subprocess.PIPE,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
    child.wait()
class TestVSCode_attach(lldbvscode_testcase.VSCodeTestCaseBase):
    """Tests the lldb-vscode "attach" request: by pid, by name, by
    waitFor, and with user-supplied command hooks."""

    mydir = TestBase.compute_mydir(__file__)
    def set_and_hit_breakpoint(self, continueToExit=True):
        """Set a breakpoint in main.c, continue to it, and optionally run
        the inferior to completion."""
        source = 'main.c'
        breakpoint1_line = line_number(source, '// breakpoint 1')
        lines = [breakpoint1_line]
        # Set breakpoint in the thread function so we can step the threads
        breakpoint_ids = self.set_source_breakpoints(source, lines)
        self.assertEqual(len(breakpoint_ids), len(lines),
                         "expect correct number of breakpoints")
        self.continue_to_breakpoints(breakpoint_ids)
        if continueToExit:
            self.continue_to_exit()
    @skipIfWindows
    @skipIfNetBSD # Hangs on NetBSD as well
    @skipIfRemote
    def test_by_pid(self):
        '''
            Tests attaching to a process by process ID.
        '''
        self.build_and_create_debug_adaptor()
        program = self.getBuildArtifact("a.out")
        self.process = subprocess.Popen([program],
                                        stdin=subprocess.PIPE,
                                        stdout=subprocess.PIPE,
                                        stderr=subprocess.PIPE)
        self.attach(pid=self.process.pid)
        self.set_and_hit_breakpoint(continueToExit=True)
    @skipIfWindows
    @skipIfNetBSD # Hangs on NetBSD as well
    @skipIfRemote
    def test_by_name(self):
        '''
            Tests attaching to a process by process name.
        '''
        self.build_and_create_debug_adaptor()
        orig_program = self.getBuildArtifact("a.out")
        # Since we are going to attach by process name, we need a unique
        # process name that has minimal chance to match a process that is
        # already running. To do this we use tempfile.mktemp() to give us a
        # full path to a location where we can copy our executable. We then
        # run this copy to ensure we don't get the error "more that one
        # process matches 'a.out'".
        program = tempfile.mktemp()
        shutil.copyfile(orig_program, program)
        shutil.copymode(orig_program, program)
        # Use a file as a synchronization point between test and inferior.
        pid_file_path = lldbutil.append_to_process_working_directory(self,
            "pid_file_%d" % (int(time.time())))
        def cleanup():
            if os.path.exists(program):
                os.unlink(program)
            self.run_platform_command("rm %s" % (pid_file_path))
        # Execute the cleanup function during test case tear down.
        self.addTearDownHook(cleanup)
        popen = self.spawnSubprocess(program, [pid_file_path])
        pid = lldbutil.wait_for_file_on_target(self, pid_file_path)
        self.attach(program=program)
        self.set_and_hit_breakpoint(continueToExit=True)
    @skipUnlessDarwin
    @skipIfDarwin
    @skipIfNetBSD # Hangs on NetBSD as well
    def test_by_name_waitFor(self):
        '''
            Tests attaching to a process by process name and waiting for the
            next instance of a process to be launched, ingoring all current
            ones.
        '''
        self.build_and_create_debug_adaptor()
        program = self.getBuildArtifact("a.out")
        self.spawn_thread = threading.Thread(target=spawn_and_wait,
                                             args=(program, 1.0,))
        self.spawn_thread.start()
        self.attach(program=program, waitFor=True)
        self.set_and_hit_breakpoint(continueToExit=True)
    @skipIfWindows
    @skipIfDarwin
    @skipIfNetBSD # Hangs on NetBSD as well
    @skipIf(archs=["arm", "aarch64"]) # Example of a flaky run http://lab.llvm.org:8011/builders/lldb-aarch64-ubuntu/builds/5527/steps/test/logs/stdio
    def test_commands(self):
        '''
            Tests the "initCommands", "preRunCommands", "stopCommands",
            "exitCommands", "terminateCommands" and "attachCommands"
            that can be passed during attach.
            "initCommands" are a list of LLDB commands that get executed
            before the targt is created.
            "preRunCommands" are a list of LLDB commands that get executed
            after the target has been created and before the launch.
            "stopCommands" are a list of LLDB commands that get executed each
            time the program stops.
            "exitCommands" are a list of LLDB commands that get executed when
            the process exits
            "attachCommands" are a list of LLDB commands that get executed and
            must have a valid process in the selected target in LLDB after
            they are done executing. This allows custom commands to create any
            kind of debug session.
            "terminateCommands" are a list of LLDB commands that get executed when
            the debugger session terminates.
        '''
        self.build_and_create_debug_adaptor()
        program = self.getBuildArtifact("a.out")
        # Here we just create a target and launch the process as a way to test
        # if we are able to use attach commands to create any kind of a target
        # and use it for debugging
        attachCommands = [
            'target create -d "%s"' % (program),
            'process launch'
        ]
        initCommands = ['target list', 'platform list']
        preRunCommands = ['image list a.out', 'image dump sections a.out']
        stopCommands = ['frame variable', 'bt']
        exitCommands = ['expr 2+3', 'expr 3+4']
        terminateCommands = ['expr 4+2']
        self.attach(program=program,
                    attachCommands=attachCommands,
                    initCommands=initCommands,
                    preRunCommands=preRunCommands,
                    stopCommands=stopCommands,
                    exitCommands=exitCommands,
                    terminateCommands=terminateCommands)
        # Get output from the console. This should contain both the
        # "initCommands" and the "preRunCommands".
        output = self.get_console()
        # Verify all "initCommands" were found in console output
        self.verify_commands('initCommands', output, initCommands)
        # Verify all "preRunCommands" were found in console output
        self.verify_commands('preRunCommands', output, preRunCommands)
        functions = ['main']
        breakpoint_ids = self.set_function_breakpoints(functions)
        # NOTE: was the deprecated alias assertEquals (removed in Python
        # 3.12); assertEqual matches the rest of this file.
        self.assertEqual(len(breakpoint_ids), len(functions),
                         "expect one breakpoint")
        self.continue_to_breakpoints(breakpoint_ids)
        output = self.get_console(timeout=1.0)
        self.verify_commands('stopCommands', output, stopCommands)
        # Continue after launch and hit the "pause()" call and stop the target.
        # Get output from the console. This should contain both the
        # "stopCommands" that were run after we stop.
        self.vscode.request_continue()
        time.sleep(0.5)
        self.vscode.request_pause()
        self.vscode.wait_for_stopped()
        output = self.get_console(timeout=1.0)
        self.verify_commands('stopCommands', output, stopCommands)
        # Continue until the program exits
        self.continue_to_exit()
        # Get output from the console. This should contain both the
        # "exitCommands" that were run after the second breakpoint was hit
        # and the "terminateCommands" due to the debugging session ending
        output = self.collect_console(duration=1.0)
        self.verify_commands('exitCommands', output, exitCommands)
        self.verify_commands('terminateCommands', output, terminateCommands)
    @skipIfWindows
    @skipIfDarwin
    @skipIfNetBSD # Hangs on NetBSD as well
    @skipIf(archs=["arm", "aarch64"]) # Example of a flaky run http://lab.llvm.org:8011/builders/lldb-aarch64-ubuntu/builds/5517/steps/test/logs/stdio
    def test_terminate_commands(self):
        '''
            Tests that the "terminateCommands", that can be passed during
            attach, are run when the debugger is disconnected.
        '''
        self.build_and_create_debug_adaptor()
        program = self.getBuildArtifact("a.out")
        # Here we just create a target and launch the process as a way to test
        # if we are able to use attach commands to create any kind of a target
        # and use it for debugging
        attachCommands = [
            'target create -d "%s"' % (program),
            'process launch'
        ]
        terminateCommands = ['expr 4+2']
        self.attach(program=program,
                    attachCommands=attachCommands,
                    terminateCommands=terminateCommands,
                    disconnectAutomatically=False)
        self.get_console()
        # Once it's disconnected the console should contain the
        # "terminateCommands"
        self.vscode.request_disconnect(terminateDebuggee=True)
        output = self.collect_console(duration=1.0)
        self.verify_commands('terminateCommands', output, terminateCommands)
| 42.566372 | 150 | 0.640333 |
7955b6e74e856f70748777a8430668115c1cd2a5 | 359 | py | Python | msdm/core/distributions/__init__.py | markkho/msdm | f2e07cdf1a16f7a0564a4822caed89a758e14bf1 | [
"MIT"
] | 15 | 2020-09-09T14:08:10.000Z | 2022-02-24T14:19:39.000Z | msdm/core/distributions/__init__.py | markkho/msdm | f2e07cdf1a16f7a0564a4822caed89a758e14bf1 | [
"MIT"
] | 28 | 2020-09-13T22:12:03.000Z | 2022-02-20T18:42:56.000Z | msdm/core/distributions/__init__.py | markkho/msdm | f2e07cdf1a16f7a0564a4822caed89a758e14bf1 | [
"MIT"
] | 3 | 2021-07-21T15:05:01.000Z | 2022-02-07T04:01:55.000Z | from msdm.core.distributions.distributions import Distribution, FiniteDistribution
from msdm.core.distributions.discretefactortable import \
DiscreteFactorTable
from msdm.core.distributions.dictdistribution import DictDistribution, DeterministicDistribution, UniformDistribution
from msdm.core.distributions.softmaxdistribution import SoftmaxDistribution
| 59.833333 | 117 | 0.888579 |
7955b7534b77ed24ea11ece5e8f83b1d9a02c41e | 136 | py | Python | 2375.py | heltonricardo/URI | 160cca22d94aa667177c9ebf2a1c9864c5e55b41 | [
"MIT"
] | 6 | 2021-04-13T00:33:43.000Z | 2022-02-10T10:23:59.000Z | 2375.py | heltonricardo/URI | 160cca22d94aa667177c9ebf2a1c9864c5e55b41 | [
"MIT"
] | null | null | null | 2375.py | heltonricardo/URI | 160cca22d94aa667177c9ebf2a1c9864c5e55b41 | [
"MIT"
] | 3 | 2021-03-23T18:42:24.000Z | 2022-02-10T10:24:07.000Z | b = int(input())
e = str(input()).split()
i = int(e[0])
j = int(e[1])
k = int(e[2])
print('S' if b <= i and b <= j and b <= k else 'N')
| 19.428571 | 51 | 0.485294 |
7955b766dc5c7df41c6e5b0f5f1789466d719190 | 1,585 | py | Python | awwwards/migrations/0004_auto_20210529_1156.py | Derrick-Nyongesa/Awwwards | 53c3e395490d31e7ac67079a838ff43561819bc9 | [
"MIT"
] | null | null | null | awwwards/migrations/0004_auto_20210529_1156.py | Derrick-Nyongesa/Awwwards | 53c3e395490d31e7ac67079a838ff43561819bc9 | [
"MIT"
] | null | null | null | awwwards/migrations/0004_auto_20210529_1156.py | Derrick-Nyongesa/Awwwards | 53c3e395490d31e7ac67079a838ff43561819bc9 | [
"MIT"
] | null | null | null | # Generated by Django 3.2.2 on 2021-05-29 08:56
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration reshaping the ``rating`` model: the old
    # free-text/aggregate columns are dropped, a ``creativity`` score is
    # added, and every score field is constrained to the 1-10 range.
    dependencies = [
        ('awwwards', '0003_profile_name'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='rating',
            name='content',
        ),
        migrations.RemoveField(
            model_name='rating',
            name='content_average',
        ),
        migrations.RemoveField(
            model_name='rating',
            name='design_average',
        ),
        migrations.RemoveField(
            model_name='rating',
            name='score',
        ),
        migrations.RemoveField(
            model_name='rating',
            name='usability_average',
        ),
        # New 1-10 creativity score, defaulting to the minimum.
        migrations.AddField(
            model_name='rating',
            name='creativity',
            field=models.IntegerField(default=1, validators=[django.core.validators.MinValueValidator(1), django.core.validators.MaxValueValidator(10)]),
        ),
        # Existing scores gain the same 1-10 validators.
        migrations.AlterField(
            model_name='rating',
            name='design',
            field=models.IntegerField(default=1, validators=[django.core.validators.MinValueValidator(1), django.core.validators.MaxValueValidator(10)]),
        ),
        migrations.AlterField(
            model_name='rating',
            name='usability',
            field=models.IntegerField(default=1, null=True, validators=[django.core.validators.MinValueValidator(1), django.core.validators.MaxValueValidator(10)]),
        ),
    ]
| 31.7 | 164 | 0.594322 |
7955b77d42246ac2e77e1341646d515cd6cee4c0 | 4,130 | py | Python | attpattern/save_att.py | jingtaozhan/bert-ranking-analysis | c17000abf4fb27ce631d403407522ff49586187e | [
"BSD-3-Clause"
] | 10 | 2020-07-19T10:25:16.000Z | 2022-03-07T06:40:22.000Z | attpattern/save_att.py | jingtaozhan/bert-ranking-analysis | c17000abf4fb27ce631d403407522ff49586187e | [
"BSD-3-Clause"
] | null | null | null | attpattern/save_att.py | jingtaozhan/bert-ranking-analysis | c17000abf4fb27ce631d403407522ff49586187e | [
"BSD-3-Clause"
] | 2 | 2021-01-22T06:06:18.000Z | 2022-03-07T06:40:21.000Z | import os
import glob
import torch
import random
import logging
import argparse
import zipfile
import numpy as np
from tqdm import tqdm, trange
from torch.utils.data import DataLoader
from transformers import (BertConfig, BertTokenizer)
from modeling import MonoBERT
from dataset import RelevantDataset, get_collate_function
logger = logging.getLogger(__name__)
logging.basicConfig(format = '%(asctime)s-%(levelname)s-%(name)s- %(message)s',
datefmt = '%d %H:%M:%S',
level = logging.INFO)
def evaluate(args, model, tokenizer):
    """Run the ranking model over the dev.small relevant (query, passage)
    pairs and dump per-layer attention maps to ``args.output_dir``.

    For each pair and each BERT layer, the attention tensor is cropped to
    the unpadded sequence length and saved as float16 to
    ``<output_dir>/layer_<i>/<qid>-<pid>.npy``.

    :param args: parsed command-line namespace (dirs, batch size, device, ...).
    :param model: MonoBERT model configured with output_attentions=True.
    :param tokenizer: BERT tokenizer used to build the dataset.
    """
    eval_output_dir = args.output_dir
    if not os.path.exists(eval_output_dir):
        os.makedirs(eval_output_dir)
    eval_dataset = RelevantDataset(tokenizer, "dev.small", args.msmarco_dir,
        args.collection_memmap_dir, args.tokenize_dir,
        args.max_query_length, args.max_seq_length)
    args.eval_batch_size = args.per_gpu_eval_batch_size * max(1, args.n_gpu)
    # Note that DistributedSampler samples randomly
    eval_dataloader = DataLoader(eval_dataset, batch_size=args.eval_batch_size,
        collate_fn=get_collate_function())
    # multi-gpu eval
    if args.n_gpu > 1:
        model = torch.nn.DataParallel(model)
    # Eval!
    logger.info("***** Running evaluation *****")
    logger.info(" Num examples = %d", len(eval_dataset))
    logger.info(" Batch size = %d", args.eval_batch_size)
    # (Removed dead locals all_scores/all_ids — they were never used.)
    for batch, qids, pids in tqdm(eval_dataloader, desc="Evaluating"):
        model.eval()
        batch = {k: v.to(args.device) for k, v in batch.items()}
        with torch.no_grad():
            # model(...) returns (scores, attentions); we only keep attentions.
            attentions = model(**batch)[1]
        for layer_id, layer_attentions in enumerate(attentions):
            attention_dir = os.path.join(eval_output_dir, "layer_{}".format(layer_id+1))
            if not os.path.exists(attention_dir):
                os.makedirs(attention_dir)
            for idx, attention in enumerate(layer_attentions):
                # Crop padding: keep only the real tokens of this example.
                length = torch.sum(batch['attention_mask'][idx]).detach().cpu().item()
                query_id, para_id = qids[idx], pids[idx]
                attention = attention[:, :length, :length].detach().cpu().numpy()
                file_path = os.path.join(attention_dir, "{}-{}.npy".format(query_id, para_id))
                # float16 halves disk usage with negligible precision loss.
                np.save(file_path, np.array(attention, dtype=np.float16))
if __name__ == "__main__":
    # Command-line entry point: parse paths/hyperparameters, load the
    # TF-trained MonoBERT checkpoint with attention output enabled, and
    # dump attention maps via evaluate().
    parser = argparse.ArgumentParser()
    ## Required parameters
    parser.add_argument("--msmarco_dir", type=str, default="./data/msmarco-passage")
    parser.add_argument("--collection_memmap_dir", type=str, default="./data/collection_memmap")
    parser.add_argument("--tokenize_dir", type=str, default="./data/tokenize")
    parser.add_argument("--output_dir", type=str, default="./data/attention")
    parser.add_argument("--max_query_length", type=int, default=64)
    parser.add_argument("--max_seq_length", type=int, default=256)
    parser.add_argument("--model_path", type=str, default="./data/BERT_Base_trained_on_MSMARCO")
    ## Other parameters
    parser.add_argument("--per_gpu_eval_batch_size", default=8, type=int,
                        help="Batch size per GPU/CPU for evaluation.")
    parser.add_argument("--no_cuda", action='store_true',
                        help="Avoid using CUDA when available")
    args = parser.parse_args()
    # Setup CUDA, GPU
    device = torch.device("cuda" if torch.cuda.is_available() and not args.no_cuda else "cpu")
    args.n_gpu = torch.cuda.device_count()
    args.device = device
    # Setup logging
    logger.warning("Device: %s, n_gpu: %s", device, args.n_gpu)
    config = BertConfig.from_pretrained(f"{args.model_path}/bert_config.json")
    # output_attentions makes the model return per-layer attention maps.
    config.output_attentions = True
    # The checkpoint is in TensorFlow format, hence from_tf=True.
    model = MonoBERT.from_pretrained(f"{args.model_path}/model.ckpt-100000",
                                     from_tf=True, config=config)
    tokenizer = BertTokenizer.from_pretrained(args.model_path)
    model.to(args.device)
    logger.info("Training/evaluation parameters %s", args)
    # Evaluation
    evaluate(args, model, tokenizer)
7955b8fd605952b0b2151d15a8c8a5e435529229 | 187 | py | Python | base/backend/help/models.py | PatrickKoss/lecture_ws_2021 | 9de2f9b9baa2762fef24ea5597b9082b6b740367 | [
"MIT"
] | null | null | null | base/backend/help/models.py | PatrickKoss/lecture_ws_2021 | 9de2f9b9baa2762fef24ea5597b9082b6b740367 | [
"MIT"
] | null | null | null | base/backend/help/models.py | PatrickKoss/lecture_ws_2021 | 9de2f9b9baa2762fef24ea5597b9082b6b740367 | [
"MIT"
] | null | null | null | """Models."""
from django.db import models
class Help(models.Model):
    """A single help tip shown to the user, keyed by a UUID."""
    # Primary key for the tip (UUID supplied by the caller, not auto-generated).
    help_id = models.UUIDField(primary_key=True)
    # Short help text displayed to the user (max 300 characters).
    tip = models.CharField(max_length=300)
| 17 | 48 | 0.679144 |
7955b927e99019ce2a36adcc0774cd310897c037 | 69,043 | py | Python | pysph/sph/swe/basic.py | nauaneed/pysph | 9cb9a859934939307c65a25cbf73e4ecc83fea4a | [
"BSD-3-Clause"
] | 293 | 2017-05-26T14:41:15.000Z | 2022-03-28T09:56:16.000Z | pysph/sph/swe/basic.py | nauaneed/pysph | 9cb9a859934939307c65a25cbf73e4ecc83fea4a | [
"BSD-3-Clause"
] | 217 | 2017-05-29T15:48:14.000Z | 2022-03-24T16:16:55.000Z | pysph/sph/swe/basic.py | nauaneed/pysph | 9cb9a859934939307c65a25cbf73e4ecc83fea4a | [
"BSD-3-Clause"
] | 126 | 2017-05-25T19:17:32.000Z | 2022-03-25T11:23:24.000Z | """
Basic Equations for solving shallow water problems
#####################
"""
from pysph.sph.equation import Equation
from pysph.sph.integrator_step import IntegratorStep
from pysph.sph.integrator import Integrator
from compyle.api import declare
from pysph.sph.wc.linalg import gj_solve, augmented_matrix
from numpy import sqrt, cos, sin, zeros, pi, exp
import numpy as np
import numpy
M_PI = pi
class CheckForParticlesToSplit(Equation):
    r"""Particles are tagged for splitting if the following condition is
    satisfied:

    .. math::
        (A_i > A_max) and (h_i < h_max) and (x_min < x_i < x_max) and (y_min <
        y_i < y_max)

    Sets ``pa_to_split`` to 1 for particles that should be split by
    :class:`ParticleSplit` and 0 otherwise.

    References
    ----------
    .. [Vacondio2010] R. Vacondio, B.D. Rodgers and P.K. Stansby, "Accurate
    particle splitting for smoothed particle hydrodynamics in shallow water
    with shock capturing", Int. J. Numer. Meth. Fluids, 69 (2012), pp.
    1377-1410
    """
    def __init__(self, dest, h_max=1e9, A_max=1e9, x_min=-1e9, x_max=1e9,
                 y_min=-1e9, y_max=1e9):
        r"""
        Parameters
        ----------
        h_max : float
            maximum smoothing length beyond which splitting is deactivated
        A_max : float
            maximum area beyond which splitting is activated
        x_min : float
            minimum distance along x-direction beyond which splitting is
            activated
        x_max : float
            maximum distance along x-direction beyond which splitting is
            deactivated
        y_min : float
            minimum distance along y-direction beyond which splitting is
            activated
        y_max : float
            maximum distance along y-direction beyond which splitting is
            deactivated
        """
        self.A_max = A_max
        self.h_max = h_max
        self.x_min = x_min
        self.x_max = x_max
        self.y_min = y_min
        self.y_max = y_max
        # No source particle array is needed; this equation only inspects dest.
        super(CheckForParticlesToSplit, self).__init__(dest, None)
    def initialize(self, d_idx, d_A, d_h, d_x, d_y, d_pa_to_split):
        # Flag a particle only when its area exceeds A_max, its smoothing
        # length is still below h_max, and it lies inside the rectangular
        # splitting region. (Transpiled by PySPH - keep the code simple.)
        if (d_A[d_idx] > self.A_max and d_h[d_idx] < self.h_max
                and (self.x_min < d_x[d_idx] < self.x_max)
                and (self.y_min < d_y[d_idx] < self.y_max)):
            d_pa_to_split[d_idx] = 1
        else:
            d_pa_to_split[d_idx] = 0
class ParticleSplit(object):
    r"""**Hexagonal particle splitting algorithm**

    Each flagged parent particle is replaced by 7 daughter particles: one at
    the parent position (reusing the parent's slot in the array) and 6 at the
    vertices of a hexagon centred on the parent, with masses and smoothing
    lengths scaled by the fixed fractions from the reference below.

    References
    ----------
    .. [Vacondio2010] R. Vacondio, B.D. Rodgers and P.K. Stansby, "Accurate
    particle splitting for smoothed particle hydrodynamics in shallow water
    with shock capturing", Int. J. Numer. Meth. Fluids, 69 (2012), pp.
    1377-1410
    """
    def __init__(self, pa_arr):
        r"""
        Parameters
        ----------
        pa_arr : pysph.base.particle_array.ParticleArray
            particle array of fluid
        """
        self.pa_arr = pa_arr
        # Ratio of mass of daughter particle located at center of hexagon to
        # that of its parents mass
        self.center_pa_mass_frac = 0.178705766141917
        # Ratio of mass of daughter particle located at vertex of hexagon to
        # that of its parents mass
        self.vertex_pa_mass_frac = 0.136882287617319
        # Ratio of smoothing length of daughter particle to that of its parents
        # smoothing length
        self.pa_h_ratio = 0.9
        # Ratio of distance between center daughter particle and vertex
        # daughter particle to that of its parents smoothing length
        self.center_and_vertex_pa_separation_frac = 0.4
        # Get index of the parent particles to split
        self.idx_pa_to_split = self._get_idx_of_particles_to_split()
        # Number of daughter particles located at the vertices of hexagon after
        # splitting
        self.num_vertex_pa_after_single_split = 6
    def do_particle_split(self, solver=None):
        # Replace every flagged parent with 1 center + 6 vertex daughters.
        # The `solver` argument is unused; it matches the callback signature
        # expected by the solver's post-step hooks.
        if not self.idx_pa_to_split.size:
            # If no particles to split then return
            return
        else:
            # Properties of parent particles to split
            h_parent = self.pa_arr.h[self.idx_pa_to_split]
            h0_parent = self.pa_arr.h0[self.idx_pa_to_split]
            m_parent = self.pa_arr.m[self.idx_pa_to_split]
            x_parent = self.pa_arr.x[self.idx_pa_to_split]
            y_parent = self.pa_arr.y[self.idx_pa_to_split]
            u_parent = self.pa_arr.u[self.idx_pa_to_split]
            v_parent = self.pa_arr.v[self.idx_pa_to_split]
            u_prev_step_parent = self.pa_arr.u_prev_step[self.idx_pa_to_split]
            v_prev_step_parent = self.pa_arr.v_prev_step[self.idx_pa_to_split]
            rho_parent = self.pa_arr.rho[self.idx_pa_to_split]
            rho0_parent = self.pa_arr.rho0[self.idx_pa_to_split]
            alpha_parent = self.pa_arr.alpha[self.idx_pa_to_split]
            # Vertex daughter particle properties update
            # np.repeat(prop, n) aligns each parent's scalar with its n
            # vertex daughters.
            n = self.num_vertex_pa_after_single_split
            h_vertex_pa = self.pa_h_ratio * np.repeat(h_parent, n)
            h0_vertex_pa = self.pa_h_ratio * np.repeat(h0_parent, n)
            u_prev_step_vertex_pa = np.repeat(u_prev_step_parent, n)
            v_prev_step_vertex_pa = np.repeat(v_prev_step_parent, n)
            m_vertex_pa = self.vertex_pa_mass_frac * np.repeat(m_parent, n)
            vertex_pa_pos = self._get_vertex_pa_positions(h_parent, u_parent,
                                                          v_parent)
            x_vertex_pa = vertex_pa_pos[0] + np.repeat(x_parent, n)
            y_vertex_pa = vertex_pa_pos[1] + np.repeat(y_parent, n)
            rho0_vertex_pa = np.repeat(rho0_parent, n)
            rho_vertex_pa = np.repeat(rho_parent, n)
            alpha_vertex_pa = np.repeat(alpha_parent, n)
            parent_idx_vertex_pa = np.repeat(self.idx_pa_to_split, n)
            # Note:
            # The center daughter particle properties are set at index of
            # parent. The properties of parent needed for further calculations
            # are not changed for now
            # Center daughter particle properties update
            for idx in self.idx_pa_to_split:
                self.pa_arr.m[idx] *= self.center_pa_mass_frac
                self.pa_arr.h[idx] *= self.pa_h_ratio
                self.pa_arr.h0[idx] *= self.pa_h_ratio
                self.pa_arr.parent_idx[idx] = int(idx)
            # Update particle array to include vertex daughter particles
            self._add_vertex_pa_prop(
                h0_vertex_pa, h_vertex_pa, m_vertex_pa, x_vertex_pa,
                y_vertex_pa, rho0_vertex_pa, rho_vertex_pa,
                u_prev_step_vertex_pa, v_prev_step_vertex_pa, alpha_vertex_pa,
                parent_idx_vertex_pa)
    def _get_idx_of_particles_to_split(self):
        # Indices of particles whose `pa_to_split` flag was set by
        # CheckForParticlesToSplit.
        idx_pa_to_split = []
        for idx, val in enumerate(self.pa_arr.pa_to_split):
            if val:
                idx_pa_to_split.append(idx)
        return np.array(idx_pa_to_split)
    def _get_vertex_pa_positions(self, h_parent, u_parent, v_parent):
        # Offsets (relative to each parent) of the 6 hexagon-vertex
        # daughters, returned as two flat arrays (x, y) of length
        # 6 * num_parents.
        # Number of particles to split
        num_of_pa_to_split = len(self.idx_pa_to_split)
        n = self.num_vertex_pa_after_single_split
        theta_vertex_pa = zeros(n)
        r = self.center_and_vertex_pa_separation_frac
        for i, theta in enumerate(range(0, 360, 60)):
            theta_vertex_pa[i] = (pi/180)*theta
        # Angle of velocity vector with horizontal
        # (zero for nearly static particles to avoid a noisy arctan2).
        angle_vel = np.where(
            (np.abs(u_parent) > 1e-3) | (np.abs(v_parent) > 1e-3),
            np.arctan2(v_parent, u_parent), 0
        )
        # Rotates the hexagon such that its horizontal axis aligns with the
        # direction of velocity vector
        angle_actual = (np.tile(theta_vertex_pa, num_of_pa_to_split)
                        + np.repeat(angle_vel, n))
        x = r * cos(angle_actual) * np.repeat(h_parent, n)
        y = r * sin(angle_actual) * np.repeat(h_parent, n)
        return x.copy(), y.copy()
    def _add_vertex_pa_prop(self, h0_vertex_pa, h_vertex_pa, m_vertex_pa,
                            x_vertex_pa, y_vertex_pa, rho0_vertex_pa,
                            rho_vertex_pa, u_prev_step_vertex_pa,
                            v_prev_step_vertex_pa, alpha_vertex_pa,
                            parent_idx_vertex_pa):
        # Append the 6*num_parents vertex daughters to the particle array;
        # properties not listed here take the array defaults.
        vertex_pa_props = {
            'm': m_vertex_pa,
            'h': h_vertex_pa,
            'h0': h0_vertex_pa,
            'x': x_vertex_pa,
            'y': y_vertex_pa,
            'u_prev_step': u_prev_step_vertex_pa,
            'v_prev_step': v_prev_step_vertex_pa,
            'rho0': rho0_vertex_pa,
            'rho': rho_vertex_pa,
            'alpha': alpha_vertex_pa,
            'parent_idx': parent_idx_vertex_pa.astype(int)
        }
        # Add vertex daughter particles to particle array
        self.pa_arr.add_particles(**vertex_pa_props)
class DaughterVelocityEval(Equation):
    r"""**Evaluation of the daughter particle velocity after splitting
    procedure**
    .. math::
        \boldsymbol{v_k} = c_v\frac{d_N}{d_k}\boldsymbol{v_N}
    where,
    .. math::
        c_v = \dfrac{A_N}{\sum_{k=1}^{M}A_k}
    References
    ----------
    .. [Vacondio2010] R. Vacondio, B.D. Rodgers and P.K. Stansby, "Accurate
    particle splitting for smoothed particle hydrodynamics in shallow water
    with shock capturing", Int. J. Numer. Meth. Fluids, 69 (2012), pp.
    1377-1410
    """
    def __init__(self, dest, sources, rhow=1000.0):
        r"""
        Parameters
        ----------
        rhow : float
            constant 3-D density of water (kg/m3)
        Notes
        -----
        This equation should be called before the equation SWEOS, as the parent
        particle area is required for calculating velocities. On calling the
        SWEOS equation, the parent properties are changed to the center
        daughter particle properties.
        """
        self.rhow = rhow
        super(DaughterVelocityEval, self).__init__(dest, sources)
    def initialize(self, d_sum_Ak, d_idx, d_m, d_rho, d_u, d_v, d_uh,
                   d_vh, d_u_parent, d_v_parent, d_uh_parent, d_vh_parent,
                   d_parent_idx):
        # Stores sum of areas of daughter particles
        d_sum_Ak[d_idx] = 0.0
        # Cache the parent's velocities on each particle.  For particles
        # that are not daughters, d_parent_idx is 0 and these cached values
        # are never used (post_loop skips them).
        d_u_parent[d_idx] = d_u[d_parent_idx[d_idx]]
        d_uh_parent[d_idx] = d_uh[d_parent_idx[d_idx]]
        d_v_parent[d_idx] = d_v[d_parent_idx[d_idx]]
        d_vh_parent[d_idx] = d_vh[d_parent_idx[d_idx]]
    def loop_all(self, d_sum_Ak, d_pa_to_split, d_parent_idx, d_idx, s_m,
                 s_rho, s_parent_idx, NBRS, N_NBRS):
        i = declare('int')
        s_idx = declare('long')
        if d_pa_to_split[d_idx]:
            for i in range(N_NBRS):
                s_idx = NBRS[i]
                if s_parent_idx[s_idx] == d_parent_idx[d_idx]:
                    # Sums the area of daughter particles who have same parent
                    # idx
                    d_sum_Ak[d_idx] += s_m[s_idx] / s_rho[s_idx]
    def post_loop(self, d_idx, d_parent_idx, d_A, d_sum_Ak, d_dw, d_rho, d_u,
                  d_uh, d_vh, d_v, d_u_parent, d_v_parent, d_uh_parent,
                  d_vh_parent, t):
        # True only for daughter particles
        if d_parent_idx[d_idx]:
            # Ratio of parent area (before split) to sum of areas of its
            # daughters (after split)
            cv = d_A[d_parent_idx[d_idx]] / d_sum_Ak[d_parent_idx[d_idx]]
            # The denominator (d_rho[d_idx]/self.rhow) represents the water
            # depth of daughter particle. d_dw[d_idx] cannot be used as
            # equation of state is called after this equation (Refer Notes in
            # the constructor)
            dw_ratio = d_dw[d_parent_idx[d_idx]] / (d_rho[d_idx]/self.rhow)
            d_u[d_idx] = cv * dw_ratio * d_u_parent[d_idx]
            d_uh[d_idx] = cv * dw_ratio * d_uh_parent[d_idx]
            d_v[d_idx] = cv * dw_ratio * d_v_parent[d_idx]
            d_vh[d_idx] = cv * dw_ratio * d_vh_parent[d_idx]
            # Clear the tag so the daughter velocity is scaled only once.
            d_parent_idx[d_idx] = 0
class FindMergeable(Equation):
    r"""**Particle merging algorithm**
    Particles are tagged for merging if the following condition is
    satisfied:
    .. math::
        (A_i < A_min) and (x_min < x_i < x_max) and (y_min < y_i < y_max)
    References
    ----------
    .. [Vacondio2013] R. Vacondio et al., "Shallow water SPH for flooding with
    dynamic particle coalescing and splitting", Advances in Water Resources,
    58 (2013), pp. 10-23
    """
    def __init__(self, dest, sources, A_min, x_min=-1e9, x_max=1e9, y_min=-1e9,
                 y_max=1e9):
        r"""
        Parameters
        ----------
        A_min : float
            minimum area below which merging is activated
        x_min : float
            minimum distance along x-direction beyond which merging is
            activated
        x_max : float
            maximum distance along x-direction beyond which merging is
            deactivated
        y_min : float
            minimum distance along y-direction beyond which merging is
            activated
        y_max : float
            maximum distance along y-direction beyond which merging is
            deactivated
        Notes
        -----
        The merging algorithm merges two particles 'a' and 'b' if the following
        conditions are satisfied:
        #. Both particles have area less than A_min
        #. Both particles lies within :math:`x_min < x_i < x_max` and
           :math:`y_min < y_i < y_max`
        #. if 'a' is the closest neighbor of 'b' and vice versa
        The merging algorithm is run every timestep
        """
        self.A_min = A_min
        self.x_min = x_min
        self.x_max = x_max
        self.y_min = y_min
        self.y_max = y_max
        super(FindMergeable, self).__init__(dest, sources)
    def loop_all(self, d_idx, d_merge, d_closest_idx, d_x, d_y, d_h, d_A,
                 d_is_merged_pa, s_x, s_y, s_A, NBRS, N_NBRS):
        # Finds the closest neighbor of a particle and stores the index of that
        # neighbor in d_closest_idx[d_idx]
        i, closest = declare('int', 2)
        s_idx = declare('unsigned int')
        # Reset the per-step merge tags before the search.
        d_merge[d_idx] = 0
        d_is_merged_pa[d_idx] = 0
        xi = d_x[d_idx]
        yi = d_y[d_idx]
        # Search radius: neighbors farther than 10*h are never candidates.
        rmin = d_h[d_idx] * 10.0
        closest = -1
        # Only particles below the area threshold and inside the merging
        # window take part in the search.
        if (d_A[d_idx] < self.A_min and ((self.x_min < d_x[d_idx] < self.x_max)
                                         and (self.y_min < d_y[d_idx] < self.y_max))):
            for i in range(N_NBRS):
                s_idx = NBRS[i]
                if s_idx == d_idx:
                    continue
                xij = xi - s_x[s_idx]
                yij = yi - s_y[s_idx]
                rij = sqrt(xij*xij + yij*yij)
                if rij < rmin:
                    closest = s_idx
                    rmin = rij
        d_closest_idx[d_idx] = closest
    def post_loop(self, d_idx, d_m, d_u, d_v, d_h, d_uh, d_vh, d_closest_idx,
                  d_is_merged_pa, d_merge, d_x, d_y, SPH_KERNEL):
        idx = declare('int')
        xma = declare('matrix(3)')
        xmb = declare('matrix(3)')
        idx = d_closest_idx[d_idx]
        if idx > -1:
            # If particle 'a' is closest neighbor of 'b' and vice versa
            if d_idx == d_closest_idx[idx]:
                # Process each mutual pair only once (from the lower index).
                if d_idx < idx:
                    # The newly merged particle properties are set at index of
                    # particle 'a'
                    m_merged = d_m[d_idx] + d_m[idx]
                    # Mass-weighted (center of mass) position of the pair.
                    x_merged = ((d_m[d_idx]*d_x[d_idx] + d_m[idx]*d_x[idx])
                                / m_merged)
                    y_merged = ((d_m[d_idx]*d_y[d_idx] + d_m[idx]*d_y[idx])
                                / m_merged)
                    # Offsets of the merged position from each parent; only
                    # the x and y components are used (2-D equations).
                    xma[0] = x_merged - d_x[d_idx]
                    xma[1] = y_merged - d_y[d_idx]
                    xmb[0] = x_merged - d_x[idx]
                    xmb[1] = y_merged - d_y[idx]
                    rma = sqrt(xma[0]*xma[0] + xma[1]*xma[1])
                    rmb = sqrt(xmb[0]*xmb[0] + xmb[1]*xmb[1])
                    # Momentum-conserving (mass-weighted) velocities.
                    d_u[d_idx] = ((d_m[d_idx]*d_u[d_idx] + d_m[idx]*d_u[idx])
                                  / m_merged)
                    d_uh[d_idx] = (d_m[d_idx]*d_uh[d_idx]
                                   + d_m[idx]*d_uh[idx]) / m_merged
                    d_v[d_idx] = ((d_m[d_idx]*d_v[d_idx] + d_m[idx]*d_v[idx])
                                  / m_merged)
                    d_vh[d_idx] = (d_m[d_idx]*d_vh[d_idx]
                                   + d_m[idx]*d_vh[idx]) / m_merged
                    # Smoothing length of the merged particle from the
                    # kernel-weighted contributions of both parents.
                    const1 = d_m[d_idx] * SPH_KERNEL.kernel(xma, rma,
                                                            d_h[d_idx])
                    const2 = d_m[idx] * SPH_KERNEL.kernel(xmb, rmb, d_h[idx])
                    d_h[d_idx] = sqrt((7*M_PI/10.) * (m_merged/(const1+const2)))
                    d_m[d_idx] = m_merged
                    # Tags the newly formed particle after merging
                    d_is_merged_pa[d_idx] = 1
                else:
                    # Tags particle 'b' for removal after merging
                    d_merge[d_idx] = 1
    def reduce(self, dst, t, dt):
        # The indices of particle 'b' are removed from particle array after
        # merging is done
        indices = declare('object')
        indices = numpy.where(dst.merge > 0)[0]
        if len(indices) > 0:
            dst.remove_particles(indices)
class InitialDensityEvalAfterMerge(Equation):
    r"""**Initial density of the newly formed particle after merging**
    .. math ::
        \rho_M = \sum_{j}^{}m_jW_{M,j}
    References
    ----------
    .. [Vacondio2013] R. Vacondio et al., "Shallow water SPH for flooding with
    dynamic particle coalescing and splitting", Advances in Water Resources,
    58 (2013), pp. 10-23
    """
    def loop_all(self, d_rho, d_idx, d_is_merged_pa, d_x, d_y, s_h, s_m, s_x,
                 d_merge, d_closest_idx, s_y, SPH_KERNEL, NBRS, N_NBRS):
        i = declare('int')
        s_idx = declare('long')
        xij = declare('matrix(3)')
        # Evaluates the initial density of the newly formed particle after
        # merging
        if d_is_merged_pa[d_idx] == 1:
            d_rho[d_idx] = 0.0
            rij = 0.0
            rho_sum = 0.0
            # Summation density over the neighbors, evaluated with the
            # neighbor's smoothing length (scatter formulation).
            for i in range(N_NBRS):
                s_idx = NBRS[i]
                xij[0] = d_x[d_idx] - s_x[s_idx]
                xij[1] = d_y[d_idx] - s_y[s_idx]
                rij = sqrt(xij[0]*xij[0] + xij[1]*xij[1])
                rho_sum += s_m[s_idx] * SPH_KERNEL.kernel(xij, rij, s_h[s_idx])
            d_rho[d_idx] += rho_sum
class EulerStep(IntegratorStep):
    """Fast but inaccurate integrator. Use this for testing"""
    def initialize(self, d_u, d_v, d_u_prev_step, d_v_prev_step, d_idx):
        # Save the velocities of the previous step; other equations
        # (e.g. the density predictors) read these.
        d_u_prev_step[d_idx] = d_u[d_idx]
        d_v_prev_step[d_idx] = d_v[d_idx]
    def stage1(self, d_idx, d_u, d_v, d_au, d_av, d_x, d_y, dt):
        # Velocities are advanced first and the positions use the *updated*
        # velocities (semi-implicit / symplectic Euler).
        d_u[d_idx] += dt * d_au[d_idx]
        d_v[d_idx] += dt * d_av[d_idx]
        d_x[d_idx] += dt * d_u[d_idx]
        d_y[d_idx] += dt * d_v[d_idx]
class SWEStep(IntegratorStep):
    """Leap frog time integration scheme"""
    def initialize(self, t, d_u, d_v, d_uh, d_vh, d_u_prev_step, d_v_prev_step,
                   d_idx):
        # Stores the velocities at previous time step
        d_u_prev_step[d_idx] = d_u[d_idx]
        d_v_prev_step[d_idx] = d_v[d_idx]
    def stage1(self, d_uh, d_vh, d_idx, d_au, d_av, dt):
        # Velocities at half time step
        d_uh[d_idx] += dt * d_au[d_idx]
        d_vh[d_idx] += dt * d_av[d_idx]
    def stage2(self, d_u, d_v, d_uh, d_vh, d_idx, d_au, d_av, d_x, d_y, dt):
        # Drift: positions advance with the half-step velocities.
        d_x[d_idx] += dt * d_uh[d_idx]
        d_y[d_idx] += dt * d_vh[d_idx]
        # Full-step velocities recovered from the half-step values.
        d_u[d_idx] = d_uh[d_idx] + dt/2.*d_au[d_idx]
        d_v[d_idx] = d_vh[d_idx] + dt/2.*d_av[d_idx]
class SWEIntegrator(Integrator):
    """Integrator for shallow water problems"""
    def one_timestep(self, t, dt):
        # Accelerations are evaluated once per step, before the stages.
        self.compute_accelerations()
        self.initialize()
        # Predict
        self.stage1()
        # Call any post-stage functions.
        self.do_post_stage(0.5*dt, 1)
        # Correct
        self.stage2()
        # Call any post-stage functions.
        self.do_post_stage(dt, 2)
class GatherDensityEvalNextIteration(Equation):
    r"""**Gather formulation for evaluating the density of a particle**
    .. math::
        \rho_i = \sum_{j}{m_jW(\textbf{x}_i - \textbf{x}_j, h_i)}
    References
    ----------
    .. [Hernquist and Katz, 1988] L. Hernquist and N. Katz, "TREESPH: A
    unification of SPH with the hierarcgical tree method", The Astrophysical
    Journal Supplement Series, 70 (1989), pp 419-446.
    """
    def initialize(self, d_rho, d_idx, d_rho_prev_iter):
        # Stores density of particle i of the previous iteration
        d_rho_prev_iter[d_idx] = d_rho[d_idx]
        d_rho[d_idx] = 0.0
    def loop(self, d_rho, d_idx, s_m, s_idx, WI):
        # WI: kernel evaluated with the destination's smoothing length
        # (gather formulation).
        d_rho[d_idx] += s_m[s_idx] * WI
class ScatterDensityEvalNextIteration(Equation):
    r"""**Scatter formulation for evaluating the density of a particle**
    .. math::
        \rho_i = \sum_{J}{m_JW(\textbf{x}_i - \textbf{x}_j, h_j)}
    References
    ----------
    .. [Hernquist and Katz, 1988] L. Hernquist and N. Katz, "TREESPH: A
    unification of SPH with the hierarcgical tree method", The Astrophysical
    Journal Supplement Series, 70 (1989), pp 419-446.
    """
    def initialize(self, t, d_rho, d_idx, d_rho_prev_iter):
        # Stores density of particle i of the previous iteration
        d_rho_prev_iter[d_idx] = d_rho[d_idx]
        d_rho[d_idx] = 0.0
    def loop(self, d_rho, d_idx, s_m, s_idx, WJ):
        # WJ: kernel evaluated with the source's smoothing length
        # (scatter formulation).
        d_rho[d_idx] += s_m[s_idx] * WJ
class NonDimensionalDensityResidual(Equation):
    r"""**Non-dimensional density residual**
    .. math::
        \psi^{k+1} = \dfrac{|\rho_i^{k+1} - \rho_i^k|}{\rho_i^k}
    References
    ----------
    .. [Vacondio2010] R. Vacondio, B.D. Rodgers and P.K. Stansby, "Accurate
    particle splitting for smoothed particle hydrodynamics in shallow water
    with shock capturing", Int. J. Numer. Meth. Fluids, 69 (2012), pp.
    1377-1410
    """
    def __init__(self, dest, sources=None):
        super(NonDimensionalDensityResidual, self).__init__(dest, sources)
    def post_loop(self, d_psi, d_rho, d_rho_prev_iter, d_idx):
        # Non-dimensional residual
        d_psi[d_idx] = abs(d_rho[d_idx] - d_rho_prev_iter[d_idx]) \
            / d_rho_prev_iter[d_idx]
class CheckConvergenceDensityResidual(Equation):
    r"""The iterative process is stopped once the following condition is met
    .. math::
        \psi^{k+1} < \epsilon_{\psi}
    where,
        \epsilon_{\psi} = 1e-3
    References
    ----------
    .. [Vacondio2010] R. Vacondio, B.D. Rodgers and P.K. Stansby, "Accurate
    particle splitting for smoothed particle hydrodynamics in shallow water
    with shock capturing", Int. J. Numer. Meth. Fluids, 69 (2012), pp.
    1377-1410
    Notes
    -----
    If particle splitting is activated, better to use this convergence
    criteria. It can be used even if particle splitting is not activated.
    """
    def __init__(self, dest, sources=None):
        super(CheckConvergenceDensityResidual, self).__init__(dest, sources)
        # Convergence flag read back by the solver via converged().
        self.eqn_has_converged = 0
    def initialize(self):
        self.eqn_has_converged = 0
    def reduce(self, dst, t, dt):
        # Converged when the largest non-dimensional residual over all
        # particles drops below the tolerance.
        epsilon = max(dst.psi)
        if epsilon <= 1e-3:
            self.eqn_has_converged = 1
    def converged(self):
        return self.eqn_has_converged
class CorrectionFactorVariableSmoothingLength(Equation):
    r"""**Correction factor in internal force due to variable smoothing
    length**
    .. math::
        \alpha_i = -\sum_j m_j r_{ij}\frac{dW_i}{dr_{ij}}
    References
    ----------
    .. [Rodriguez and Bonet, 2005] M. Rodriguez and J. Bonet, "A corrected
    smooth particle hydrodynamics formulation of the shallow-water equations",
    Computers and Structures, 83 (2005), pp. 1396-1410
    """
    def initialize(self, d_idx, d_alpha):
        d_alpha[d_idx] = 0.0
    def loop(self, d_alpha, d_idx, DWIJ, XIJ, s_idx, s_m):
        # -m_j * (x_ij . grad W_ij), the 2-D form of the correction sum.
        d_alpha[d_idx] += -s_m[s_idx] * (DWIJ[0]*XIJ[0] + DWIJ[1]*XIJ[1])
class RemoveParticlesWithZeroAlpha(Equation):
    r"""Removes particles if correction factor (alpha) in internal force due to
    variable smoothing length is zero
    """
    def __init__(self, dest):
        super(RemoveParticlesWithZeroAlpha, self).__init__(dest, None)
    def post_loop(self, d_alpha, d_pa_alpha_zero, d_idx):
        # Tag particles whose correction factor vanished (alpha appears in
        # denominators elsewhere, so such particles must be removed).
        if d_alpha[d_idx] == 0:
            d_pa_alpha_zero[d_idx] = 1
    def reduce(self, dst, t, dt):
        indices = declare('object')
        indices = numpy.where(dst.pa_alpha_zero > 0)[0]
        if len(indices) > 0:
            dst.remove_particles(indices)
class SummationDensity(Equation):
    r"""**Summation Density**
    .. math::
        \rho_i = \sum_{j}{m_jW(\textbf{x}_i - \textbf{x}_j, h_i)}
    """
    def initialize(self, d_summation_rho, d_idx):
        d_summation_rho[d_idx] = 0.0
    def loop(self, d_summation_rho, d_idx, s_m, s_idx, WI):
        # Accumulated into a separate property so the iterative density
        # (d_rho) is left untouched.
        d_summation_rho[d_idx] += s_m[s_idx] * WI
class InitialGuessDensityVacondio(Equation):
    r"""**Initial guess density to start the iterative evaluation of density
    for time step n+1**
    .. math::
        \rho_{i(0)}^{n+1} = \rho_i^n + dt\dfrac{d\rho_i}{dt}\\
        h_{i(0)}^{n+1} = h_i^n + -\dfrac{h_i^n}{\rho_i^n}\dfrac{dt}{dm}
        \dfrac{d\rho_i}{dt}
    where,
    .. math::
        \frac{d\rho_i}{dt} = \rho_i^n\sum_j\dfrac{m_j}{\rho_j}
        (\textbf{v}_i-\textbf{v}_j).\nabla W_i
    References
    ----------
    .. [VacondioSWE-SPHysics, 2013] R. Vacondio et al., SWE-SPHysics source
    code, File: SWE_SPHYsics/SWE-SPHysics_2D_v1.0.00/source/SPHYSICS_SWE_2D/
    ac_dw_var_hj_2D.f
    Note:
    If particle splitting is activated, better to use this method. It can be
    used even if particle splitting is not activated.
    """
    def __init__(self, dest, sources, dim=2):
        r"""
        Parameters
        ----------
        dim : int
            number of space dimensions (Default: 2)
        """
        self.dim = dim
        super(InitialGuessDensityVacondio, self).__init__(dest, sources)
    def initialize(self, d_arho, d_idx):
        d_arho[d_idx] = 0
    def loop(self, d_arho, d_rho, d_idx, s_m, s_rho, s_idx, d_u_prev_step,
             d_v_prev_step, s_u_prev_step, s_v_prev_step, DWI):
        # Continuity equation evaluated with the velocities of the previous
        # step (the current-step velocities are not yet available here).
        tmp1 = (d_u_prev_step[d_idx]-s_u_prev_step[s_idx]) * DWI[0]
        tmp2 = (d_v_prev_step[d_idx]-s_v_prev_step[s_idx]) * DWI[1]
        d_arho[d_idx] += d_rho[d_idx] * ((s_m[s_idx]/s_rho[s_idx])*(tmp1+tmp2))
    def post_loop(self, d_rho, d_h, dt, d_arho, d_idx):
        # Forward-Euler predictor for the density, with the smoothing
        # length adjusted consistently (h ~ rho^(-1/dim)).
        d_rho[d_idx] += dt * d_arho[d_idx]
        d_h[d_idx] += -(dt/self.dim)*d_h[d_idx]*(d_arho[d_idx]/d_rho[d_idx])
class InitialGuessDensity(Equation):
    r"""**Initial guess density to start the iterative evaluation of density
    for time step n+1 based on properties of time step n**
    .. math::
        \rho_{I, n+1}^{(0)} = \rho_{I,n}e^{\lambda_n}
    where,
        \lambda = \dfrac{\rho_Id_m}{\alpha_I}\sum_{J}^{}m_J
        (\textbf{v}_J - \textbf{v}_I).\nabla W_I(\textbf{x}_I
        - \textbf{x}_J, h_I)
    References
    ----------
    .. [Rodriguez and Bonet, 2005] M. Rodriguez and J. Bonet, "A corrected
    smooth particle hydrodynamics formulation of the shallow-water equations",
    Computers and Structures, 83 (2005), pp. 1396-1410
    """
    def __init__(self, dest, sources, dim=2):
        r"""
        Parameters
        ----------
        dim : int
            number of space dimensions (Default: 2)
        """
        self.dim = dim
        super(InitialGuessDensity, self).__init__(dest, sources)
    def initialize(self, d_exp_lambda, d_idx):
        d_exp_lambda[d_idx] = 0.0
    def loop(self, d_exp_lambda, d_u_prev_step, d_v_prev_step, d_alpha, d_idx,
             s_m, s_u_prev_step, s_v_prev_step, s_idx, DWI, dt, t):
        # Accumulate the exponent lambda pairwise; d_alpha is the variable-h
        # correction factor computed beforehand.
        a1 = (d_u_prev_step[d_idx]-s_u_prev_step[s_idx]) * DWI[0]
        a2 = (d_v_prev_step[d_idx]-s_v_prev_step[s_idx]) * DWI[1]
        const = (self.dim*dt) / d_alpha[d_idx]
        d_exp_lambda[d_idx] += const * (s_m[s_idx]*(a1+a2))
    def post_loop(self, t, d_rho, d_exp_lambda, d_idx):
        # rho^(0)_{n+1} = rho_n * exp(lambda)
        d_rho[d_idx] = d_rho[d_idx] * exp(d_exp_lambda[d_idx])
class UpdateSmoothingLength(Equation):
    r"""**Update smoothing length based on density**
    .. math::
        h_I^{(k)} = h_I^{0}\biggl(\dfrac{\rho_I^0}{\rho_I^{(k)}}
        \biggl)^\frac{1}{d_m}
    References
    ----------
    .. [Rodriguez and Bonet, 2005] M. Rodriguez and J. Bonet, "A corrected
    smooth particle hydrodynamics formulation of the shallow-water equations",
    Computers and Structures, 83 (2005), pp. 1396-1410
    """
    def __init__(self, dest, dim=2):
        r"""
        Parameters
        ----------
        dim : int
            number of space dimensions (Default: 2)
        """
        self.dim = dim
        super(UpdateSmoothingLength, self).__init__(dest, None)
    def post_loop(self, d_h, d_h0, d_rho0, d_rho, d_idx):
        # h scales with rho^(-1/dim) relative to the reference state.
        d_h[d_idx] = d_h0[d_idx] * (d_rho0[d_idx]/d_rho[d_idx])**(1./self.dim)
class DensityResidual(Equation):
    r"""**Residual of density**
    .. math::
        R(\rho^{(k)}) = \rho_I^{(k)} - \sum_{J}^{}m_J
        W_I(\textbf{x}_I - \textbf{x}_J, h_I^{(k)})
    References
    ----------
    .. [Rodriguez and Bonet, 2005] M. Rodriguez and J. Bonet, "A corrected
    smooth particle hydrodynamics formulation of the shallow-water equations",
    Computers and Structures, 83 (2005), pp. 1396-1410
    """
    def __init__(self, dest, sources=None):
        super(DensityResidual, self).__init__(dest, sources)
    def post_loop(self, d_rho, d_idx, d_rho_residual, d_summation_rho, t):
        # d_summation_rho must have been filled by SummationDensity first.
        d_rho_residual[d_idx] = d_rho[d_idx] - d_summation_rho[d_idx]
class DensityNewtonRaphsonIteration(Equation):
    r"""**Newton-Raphson approximate solution for the density equation at
    iteration k+1**
    .. math::
        \rho^{(k+1)} = \rho_I^{(k)}\biggl[1 - \dfrac{R_I^{(k)}d_m}{(
        R_I^{(k)} d_m + \alpha_I^k)}\biggr]
    References
    ----------
    .. [Rodriguez and Bonet, 2005] M. Rodriguez and J. Bonet, "A corrected
    smooth particle hydrodynamics formulation of the shallow-water equations",
    Computers and Structures, 83 (2005), pp. 1396-1410
    """
    def __init__(self, dest, sources=None, dim=2):
        r"""
        Parameters
        ----------
        dim : int
            number of space dimensions (Default: 2)
        """
        self.dim = dim
        super(DensityNewtonRaphsonIteration, self).__init__(dest, sources)
    def initialize(self, d_rho, d_rho_prev_iter, d_idx):
        # Keep the previous iterate so the convergence check can compare.
        d_rho_prev_iter[d_idx] = d_rho[d_idx]
    def post_loop(self, d_rho, d_idx, d_alpha, d_rho_residual):
        a1 = d_rho_residual[d_idx] * self.dim
        a2 = a1 + d_alpha[d_idx]
        const = 1 - (a1/a2)
        d_rho[d_idx] = d_rho[d_idx] * const
class CheckConvergence(Equation):
    r"""Stops the Newton-Raphson iterative procedure if the following
    convergence criteria is satisfied:
    .. math::
        \dfrac{|R_I^{(k+1)}|}{\rho_I^{(k)}} \leq \epsilon
    where,
        \epsilon = 1e-15
    References
    ----------
    .. [Rodriguez and Bonet, 2005] M. Rodriguez and J. Bonet, "A corrected
    smooth particle hydrodynamics formulation of the shallow-water equations",
    Computers and Structures, 83 (2005), pp. 1396-1410
    Notes
    -----
    Use this convergence criteria when using the Newton-Raphson iterative
    procedure.
    """
    def __init__(self, dest, sources=None):
        super(CheckConvergence, self).__init__(dest, sources)
        # Convergence flag read back by the solver via converged().
        self.eqn_has_converged = 0
    def initialize(self):
        self.eqn_has_converged = 0
    def post_loop(self, d_positive_rho_residual, d_rho_residual,
                  d_rho_prev_iter, d_idx, t):
        d_positive_rho_residual[d_idx] = abs(d_rho_residual[d_idx])
    def reduce(self, dst, t, dt):
        # Largest normalized residual over all particles decides convergence.
        max_epsilon = max(dst.positive_rho_residual / dst.rho_prev_iter)
        if max_epsilon <= 1e-15:
            self.eqn_has_converged = 1
    def converged(self):
        return self.eqn_has_converged
class SWEOS(Equation):
    r"""**Update fluid properties based on density**
    References
    ----------
    .. [Rodriguez and Bonet, 2005] M. Rodriguez and J. Bonet, "A corrected
    smooth particle hydrodynamics formulation of the shallow-water equations",
    Computers and Structures, 83 (2005), pp. 1396-1410
    """
    def __init__(self, dest, sources=None, g=9.81, rhow=1000.0):
        r"""
        Parameters
        ----------
        g : float
            acceleration due to gravity
        rhow : float
            constant 3-D density of water
        """
        self.rhow = rhow
        self.g = g
        # Precomputed factor for the pressure relation p = 0.5*(g/rhow)*rho^2.
        self.fac = 0.5 * (g/rhow)
        super(SWEOS, self).__init__(dest, sources)
    def post_loop(self, d_rho, d_cs, d_u, d_v, d_idx, d_p, d_dw, d_dt_cfl,
                  d_m, d_A, d_alpha):
        # Pressure
        d_p[d_idx] = self.fac * (d_rho[d_idx])**2
        # Wave speed
        d_cs[d_idx] = sqrt(self.g * d_rho[d_idx]/self.rhow)
        # Area
        d_A[d_idx] = d_m[d_idx] / d_rho[d_idx]
        # Depth of water
        d_dw[d_idx] = d_rho[d_idx] / self.rhow
        # dt = CFL * (h_min / max(dt_cfl))
        d_dt_cfl[d_idx] = d_cs[d_idx] + (d_u[d_idx]**2 + d_v[d_idx]**2)**0.5
def mu_calc(hi=1.0, hj=1.0, velij_dot_rij=1.0, rij2=1.0):
    r"""Term present in the artificial viscosity formulation (Monaghan)
    .. math::
        \mu_{ij} = \dfrac{\bar h_{ij}\textbf{v}_{ij}.\textbf{x}_{ij}}
        {|\textbf{x}_{ij}|^2 + \zeta^2}
    where :math:`\bar h_{ij}` is the average smoothing length of the pair
    and :math:`\zeta^2 = 0.01 h_i^2` regularizes the denominator for
    nearly coincident particles.
    References
    ----------
    .. [Monaghan2005] J. Monaghan, "Smoothed particle hydrodynamics",
    Reports on Progress in Physics, 68 (2005), pp. 1703-1759.
    """
    avg_h = 0.5 * (hi + hj)
    zeta2 = 0.01 * hi * hi
    return (avg_h * velij_dot_rij) / (rij2 + zeta2)
def artificial_visc(alpha=1.0, rij2=1.0, hi=1.0, hj=1.0, rhoi=1.0, rhoj=1.0,
                    csi=1.0, csj=1.0, muij=1.0):
    r"""**Artificial viscosity based stabilization term (Monaghan)**
    Activated when :math:`\textbf{v}_{ij}.\textbf{x}_{ij} < 0`
    Given by
    .. math::
        \Pi_{ij} = \dfrac{-a\bar c_{ij}\mu_{ij}+b\bar c_{ij}\mu_{ij}^2}{\rho_{ij}}
    Only the linear (:math:`a`) term is implemented here; ``rij2``, ``hi``
    and ``hj`` are accepted for signature compatibility with
    :func:`viscosity_LF` but are not used.
    References
    ----------
    .. [Monaghan2005] J. Monaghan, "Smoothed particle hydrodynamics",
    Reports on Progress in Physics, 68 (2005), pp. 1703-1759.
    """
    mean_cs = 0.5 * (csi + csj)
    mean_rho = 0.5 * (rhoi + rhoj)
    return -(alpha * mean_cs * muij) / mean_rho
def viscosity_LF(alpha=1.0, rij2=1.0, hi=1.0, hj=1.0, rhoi=1.0, rhoj=1.0,
                 csi=1.0, csj=1.0, muij=1.0):
    r"""**Lax-Friedrichs flux based stabilization term (Ata and Soulaimani)**
    .. math::
        \Pi_{ij} = \dfrac{\bar c_{ij}\textbf{v}_{ij}.\textbf{x}_{ij}}
        {\bar\rho_{ij}\sqrt{|x_{ij}|^2 + \zeta^2}}
    References
    ----------
    .. [Ata and Soulaimani, 2004] R. Ata and A. Soulaimani, "A stabilized SPH
    method for inviscid shallow water", Int. J. Numer. Meth. Fluids, 47 (2005),
    pp. 139-159.
    Notes
    -----
    The advantage of this term is that it automatically sets the required level
    of numerical viscosity based on the Lax-Friedrichs flux. This is the
    default stabilization method.  The ``alpha`` parameter is unused and kept
    only for signature compatibility with :func:`artificial_visc`.
    """
    mean_cs = 0.5 * (csi + csj)
    mean_rho = 0.5 * (rhoi + rhoj)
    zeta2 = 0.01 * hi * hi
    mean_h = 0.5 * (hi + hj)
    # muij = mean_h * (v.x) / (rij2 + zeta2); undo the mean_h/(rij2+zeta2)
    # factor to recover (v.x)/sqrt(rij2 + zeta2).
    lf_term = (muij * (rij2 + zeta2)**0.5) / mean_h
    return -(mean_cs * lf_term) / mean_rho
class ParticleAcceleration(Equation):
    r"""**Acceleration of a particle**
    .. math::
        \textbf{a}_i = -\frac{g+\textbf{v}_i.\textbf{k}_i\textbf{v}_i
        -\textbf{t}_i.\nabla H_i}{1+\nabla H_i.\nabla H_i}
        \nabla H_i - \textbf{t}_i - \textbf{S}_{fi}
    where,
    .. math::
        \textbf{t}_i &= \sum_j m_j\ \biggl[\biggl(\frac{p_j}{
        \alpha_j \rho_j^2}+0.5\Pi_{ij}\biggr)\nabla W_j(\textbf{x}_i, h_j) -
        \biggl(\frac{p_i}{\alpha_i \rho_i^2}+0.5\Pi_{ij}\biggr)\nabla
        W_i(\textbf{x}_j, h_i)\biggr]
    .. math::
        \textbf{S}_f = \textbf{v}\dfrac{gn^2|\textbf{v}|}{d^{\frac{4}{3}}}
    with,
    .. math::
        \alpha_i = -\sum_j m_j r_{ij}\frac{dW_i}{dr_{ij}}
    .. math::
        n_i = \sum_jn_j^b\overline W_i(x_i - x_j^b, h^b)V_j
    References
    ----------
    .. [Vacondio2010] R. Vacondio, B.D. Rodgers and P.K. Stansby, "Accurate
    particle splitting for smoothed particle hydrodynamics in shallow water
    with shock capturing", Int. J. Numer. Meth. Fluids, 69 (2012), pp.
    1377-1410
    Notes
    -----
    The acceleration term given in [Vacondio2010] has incorrect sign.
    """
    def __init__(self, dest, sources, dim=2, u_only=False, v_only=False,
                 alpha=0.0, visc_option=2, rhow=1000.0):
        r"""
        Parameters
        ----------
        dim : int
            number of space dimensions (Default: 2)
        u_only : bool
            motion of fluid column in x-direction only evaluated
            (Default: False)
        v_only : bool
            motion of fluid column in y-direction only evaluated
            (Default: False)
        alpha : float
            coefficient to control amount of artificial viscosity (Monaghan)
            (Default: 0.0)
        visc_option : int
            artifical viscosity (1) or Lax-Friedrichs flux (2) based
            stabilization term (Default: 2)
        rhow : float
            constant 3-D density of water
        """
        self.g = 9.81
        self.rhow = rhow
        # Precomputed pressure factor g/(2*rhow) used in the tu/tv sums.
        self.ct = self.g / (2*self.rhow)
        self.dim = dim
        self.u_only = u_only
        self.v_only = v_only
        self.alpha = alpha
        if visc_option == 1:
            self.viscous_func = artificial_visc
        else:
            self.viscous_func = viscosity_LF
        super(ParticleAcceleration, self).__init__(dest, sources)
    def initialize(self, d_idx, d_tu, d_tv):
        # Internal-force accumulators (per unit mass), reset every step.
        d_tu[d_idx] = 0.0
        d_tv[d_idx] = 0.0
    def loop(self, d_x, d_y, s_x, s_y, d_rho, d_idx, s_m, s_idx, s_rho, d_m,
             DWI, DWJ, d_au, d_av, s_alpha, d_alpha, s_p, d_p, d_tu, s_dw,
             d_dw, t, s_is_wall_boun_pa, s_tu, d_tv, s_tv, d_h, s_h, d_u, s_u,
             d_v, s_v, d_cs, s_cs):
        # True if neighbor is wall boundary particle
        if s_is_wall_boun_pa[s_idx] == 1:
            # Setting artificial viscosity to zero when a particle interacts
            # with wall boundary particles
            pi_visc = 0.0
            # Setting water depth of wall boundary particles same as particle
            # interacting with it (For sufficient pressure to prevent wall
            # penetration)
            s_dw[s_idx] = d_dw[d_idx]
        else:
            uij = d_u[d_idx] - s_u[s_idx]
            vij = d_v[d_idx] - s_v[s_idx]
            xij = d_x[d_idx] - s_x[s_idx]
            yij = d_y[d_idx] - s_y[s_idx]
            rij2 = xij**2 + yij**2
            uij_dot_xij = uij * xij
            vij_dot_yij = vij * yij
            velij_dot_rij = uij_dot_xij + vij_dot_yij
            muij = mu_calc(d_h[d_idx], s_h[s_idx], velij_dot_rij, rij2)
            # Stabilization applies only to approaching pairs.
            if velij_dot_rij < 0:
                # Stabilization term
                pi_visc = self.viscous_func(self.alpha, rij2, d_h[d_idx],
                                            s_h[s_idx], d_rho[d_idx],
                                            s_rho[s_idx], d_cs[d_idx],
                                            s_cs[s_idx], muij)
            else:
                pi_visc = 0
        # Pressure terms written via water depth dw (p ~ 0.5*g*rhow*dw^2),
        # with the variable-h correction factors alpha.
        tmp1 = (s_dw[s_idx]*self.rhow*self.dim) / s_alpha[s_idx]
        tmp2 = (d_dw[d_idx]*self.rhow*self.dim) / d_alpha[d_idx]
        # Internal force per unit mass
        d_tu[d_idx] += s_m[s_idx] * ((self.ct*tmp1 + 0.5*pi_visc)*DWJ[0] +
                                     (self.ct*tmp2 + 0.5*pi_visc)*DWI[0])
        d_tv[d_idx] += s_m[s_idx] * ((self.ct*tmp1 + 0.5*pi_visc)*DWJ[1] +
                                     (self.ct*tmp2 + 0.5*pi_visc)*DWI[1])
    def _get_helpers_(self):
        # Free functions that the transpiled loop() body calls.
        return [mu_calc, artificial_visc, viscosity_LF]
    def post_loop(self, d_idx, d_u, d_v, d_tu, d_tv, d_au, d_av, d_Sfx, d_Sfy,
                  d_bx, d_by, d_bxx, d_bxy, d_byy):
        # v . K . v with K the bed curvature tensor (bxx, bxy, byy).
        vikivi = d_u[d_idx]*d_u[d_idx]*d_bxx[d_idx] \
            + 2*d_u[d_idx]*d_v[d_idx]*d_bxy[d_idx] \
            + d_v[d_idx]*d_v[d_idx]*d_byy[d_idx]
        tidotgradbi = d_tu[d_idx]*d_bx[d_idx] + d_tv[d_idx]*d_by[d_idx]
        gradbidotgradbi = d_bx[d_idx]**2 + d_by[d_idx]**2
        temp3 = self.g + vikivi - tidotgradbi
        temp4 = 1 + gradbidotgradbi
        if not self.v_only:
            # Acceleration along x-direction
            d_au[d_idx] = -(temp3/temp4)*d_bx[d_idx] - d_tu[d_idx] \
                - d_Sfx[d_idx]
        if not self.u_only:
            # Acceleration along y-direction
            d_av[d_idx] = -(temp3/temp4)*d_by[d_idx] - d_tv[d_idx] \
                - d_Sfy[d_idx]
class FluidBottomElevation(Equation):
    r"""**Bottom elevation of fluid**
    .. math::
        b_i = \sum_jb_j^b\overline{W_i}(\textbf{x}_i - \textbf{x}_j^b, h^b)V_j
    References
    ----------
    .. [Vacondio2010] R. Vacondio, B.D. Rodgers and P.K. Stansby, "Accurate
    particle splitting for smoothed particle hydrodynamics in shallow water
    with shock capturing", Int. J. Numer. Meth. Fluids, 69 (2012), pp.
    1377-1410
    """
    def initialize(self, d_b, d_idx):
        d_b[d_idx] = 0.0
    def loop_all(self, d_shep_corr, d_x, d_y, d_idx, s_x, s_y, s_V, s_idx, s_h,
                 SPH_KERNEL, NBRS, N_NBRS):
        # Shepard filter
        i = declare('int')
        xij = declare('matrix(3)')
        rij = 0.0
        corr_sum = 0.0
        # Normalization sum over bed neighbors: sum_j V_j W_ij.
        for i in range(N_NBRS):
            s_idx = NBRS[i]
            xij[0] = d_x[d_idx] - s_x[s_idx]
            xij[1] = d_y[d_idx] - s_y[s_idx]
            rij = sqrt(xij[0]*xij[0] + xij[1]*xij[1])
            corr_sum += s_V[s_idx] * SPH_KERNEL.kernel(xij, rij, s_h[s_idx])
        d_shep_corr[d_idx] = corr_sum
    def loop(self, d_b, d_idx, s_b, s_idx, WJ, s_V, RIJ):
        d_b[d_idx] += s_b[s_idx] * WJ * s_V[s_idx]
    def post_loop(self, d_b, d_shep_corr, d_idx):
        # Apply the Shepard normalization; skip when the correction sum is
        # effectively zero (no bed neighbors) to avoid division by zero.
        if d_shep_corr[d_idx] > 1e-14:
            d_b[d_idx] /= d_shep_corr[d_idx]
class FluidBottomGradient(Equation):
    r"""**Bottom gradient of fluid**
    .. math::
        \nabla b_i &=& \sum_j\nabla b_j^b W_i(\textbf{x}_i - \textbf{x}_j^b,
        h^b)V_j
    Notes:
    It is obtained from a simple SPH interpolation from the gradient of bed
    particles
    """
    def initialize(self, d_idx, d_bx, d_by):
        d_bx[d_idx] = 0.0
        d_by[d_idx] = 0.0
    def loop(self, d_idx, d_bx, d_by, WJ, s_idx, s_bx, s_by, s_V):
        # Bottom gradient of fluid
        d_bx[d_idx] += s_bx[s_idx] * WJ * s_V[s_idx]
        d_by[d_idx] += s_by[s_idx] * WJ * s_V[s_idx]
class FluidBottomCurvature(Equation):
    r"""Bottom curvature of fluid**
    .. math::
        \nabla^2 b_i = \sum_j\nabla^2 b_j^b W_i(\textbf{x}_i - \textbf{x}_j^b,
        h^b)V_j
    Notes:
    It is obtained from a simple SPH interpolation from the curvature of bed
    particles
    """
    # NOTE(review): d_bx and d_by appear in the signature but are not reset
    # here — presumably FluidBottomGradient handles them; confirm the two
    # equations are always used together.
    def initialize(self, d_idx, d_bx, d_by, d_bxx, d_bxy, d_byy):
        d_bxx[d_idx] = 0.0
        d_bxy[d_idx] = 0.0
        d_byy[d_idx] = 0.0
    def loop(self, d_idx, d_bxx, d_bxy, d_byy, WJ, s_idx, s_bxx, s_bxy, s_byy,
             s_V):
        # Bottom curvature of fluid
        d_bxx[d_idx] += s_bxx[s_idx] * WJ * s_V[s_idx]
        d_bxy[d_idx] += s_bxy[s_idx] * WJ * s_V[s_idx]
        d_byy[d_idx] += s_byy[s_idx] * WJ * s_V[s_idx]
class BedGradient(Equation):
    r"""**Gradient of bed**
    .. math::
        \nabla b_i = \sum_jb_j^b\tilde{\nabla}W_i(\textbf{x}_i -
        \textbf{x}_j^b, h^b)V_j
    References
    ----------
    .. [Vacondio2010] R. Vacondio, B.D. Rodgers and P.K. Stansby, "Accurate
    particle splitting for smoothed particle hydrodynamics in shallow water
    with shock capturing", Int. J. Numer. Meth. Fluids, 69 (2012), pp.
    1377-1410
    """
    def initialize(self, d_bx, d_by, d_idx):
        d_bx[d_idx] = 0.0
        d_by[d_idx] = 0.0
    def loop(self, d_bx, d_by, d_idx, s_b, s_idx, DWJ, s_V, RIJ):
        # Skip (near-)coincident pairs where the kernel gradient is singular.
        if RIJ > 1e-6:
            # Gradient of bed
            d_bx[d_idx] += s_b[s_idx] * DWJ[0] * s_V[s_idx]
            d_by[d_idx] += s_b[s_idx] * DWJ[1] * s_V[s_idx]
class BedCurvature(Equation):
    r"""**Curvature of bed**
    .. math::
        \biggl(\dfrac{\partial^2b}{\partial x^\alpha \partial x^\beta}
        \biggr)_i = \sum_{j}^{}\biggl(4\dfrac{x_{ij}^\alpha x_{ij}^\beta}
        {r_{ij}^2}-\delta^{\alpha\beta}\biggr)\dfrac{b_i - b_j^b}{
        \textbf{r}_{ij}.\textbf{r}_{ij} + \eta^2}\textbf{r}_{ij}.\tilde{\nabla}
        W_i(\textbf{x}_i - \textbf{x}_j^b, h^b)V_j
    References
    ----------
    .. [Vacondio2010] R. Vacondio, B.D. Rodgers and P.K. Stansby, "Accurate
    particle splitting for smoothed particle hydrodynamics in shallow water
    with shock capturing", Int. J. Numer. Meth. Fluids, 69 (2012), pp.
    1377-1410
    """
    def initialize(self, d_bxx, d_bxy, d_byy, d_idx):
        d_bxx[d_idx] = 0.0
        d_bxy[d_idx] = 0.0
        d_byy[d_idx] = 0.0
    def loop(self, d_bxx, d_bxy, d_byy, d_b, d_idx, s_h, s_b, s_idx, XIJ, RIJ,
             DWJ, s_V):
        # Skip (near-)coincident pairs where the terms are singular.
        if RIJ > 1e-6:
            # eta regularizes the denominator for small separations.
            eta = 0.01 * s_h[s_idx]
            temp1 = (d_b[d_idx]-s_b[s_idx]) / (RIJ**2+eta**2)
            # x_ij . grad W_ij
            temp2 = XIJ[0]*DWJ[0] + XIJ[1]*DWJ[1]
            temp_bxx = ((4*XIJ[0]**2/RIJ**2)-1) * temp1
            temp_bxy = (4*XIJ[0]*XIJ[1]/RIJ**2) * temp1
            temp_byy = ((4*XIJ[1]**2/RIJ**2)-1) * temp1
            # Curvature of bed
            d_bxx[d_idx] += temp_bxx * temp2 * s_V[s_idx]
            d_bxy[d_idx] += temp_bxy * temp2 * s_V[s_idx]
            d_byy[d_idx] += temp_byy * temp2 * s_V[s_idx]
class BedFrictionSourceEval(Equation):
    r"""**Friction source term**
    .. math::
        \textbf{S}_f = \textbf{v}\dfrac{gn^2|\textbf{v}|}{d^{\frac{4}{3}}}
    where,
    .. math::
        n_i = \sum_jn_j^b\overline W_i(x_i - x_j^b, h^b)V_j
    """
    def __init__(self, dest, sources):
        # Acceleration due to gravity (m/s^2).
        self.g = 9.8
        super(BedFrictionSourceEval, self).__init__(dest, sources)
    def initialize(self, d_n, d_idx):
        d_n[d_idx] = 0.0
    def loop(self, d_n, d_idx, s_n, s_idx, WJ, s_V, RIJ):
        if RIJ > 1e-6:
            # Manning coefficient
            d_n[d_idx] += s_n[s_idx] * WJ * s_V[s_idx]
    def post_loop(self, d_idx, d_Sfx, d_Sfy, d_u, d_v, d_n, d_dw):
        # Manning friction: S_f = v * g n^2 |v| / d^(4/3).
        vmag = sqrt(d_u[d_idx]**2 + d_v[d_idx]**2)
        temp = (self.g*d_n[d_idx]**2*vmag) / d_dw[d_idx]**(4.0/3.0)
        # Friction source term
        d_Sfx[d_idx] = d_u[d_idx] * temp
        d_Sfy[d_idx] = d_v[d_idx] * temp
class BoundaryInnerReimannStateEval(Equation):
    r"""Evaluates the inner Riemann state of velocity and depth
    .. math::
        \textbf{v}_i^o = \sum_j\dfrac{m_j^f}{\rho_j^f}\textbf{v}_j^f\bar
        W_i(\textbf{x}_i^o - \textbf{x}_j^f, h_o)\\
        {d}_i^o = \sum_j\dfrac{m_j^f}{\rho_j^f}d_j^f\bar W_i(\textbf{x}_i^o -
        \textbf{x}_j^f, h_o)
    References
    ----------
    .. [Vacondio2012] R. Vacondio et al., "SPH modeling of shallow flow with
    open boundaries for practical flood simulation", J. Hydraul. Eng., 2012,
    138(6), pp. 530-541.
    """
    def initialize(self, d_u_inner_reimann, d_v_inner_reimann,
                   d_dw_inner_reimann, d_idx):
        d_u_inner_reimann[d_idx] = 0.0
        d_v_inner_reimann[d_idx] = 0.0
        d_dw_inner_reimann[d_idx] = 0.0
    def loop_all(self, d_shep_corr, d_x, d_y, d_idx, s_x, s_y, s_m, s_rho,
                 s_idx, d_h, SPH_KERNEL, NBRS, N_NBRS):
        # Shepard filter
        i = declare('int')
        xij = declare('matrix(3)')
        rij = 0.0
        corr_sum = 0.0
        # Normalization sum over fluid neighbors: sum_j (m_j/rho_j) W_ij,
        # evaluated with the open-boundary particle's smoothing length.
        for i in range(N_NBRS):
            s_idx = NBRS[i]
            xij[0] = d_x[d_idx] - s_x[s_idx]
            xij[1] = d_y[d_idx] - s_y[s_idx]
            rij = sqrt(xij[0]*xij[0] + xij[1]*xij[1])
            corr_sum += ((s_m[s_idx]/s_rho[s_idx])
                         * SPH_KERNEL.kernel(xij, rij, d_h[d_idx]))
        d_shep_corr[d_idx] = corr_sum
    def loop(self, d_u_inner_reimann, d_v_inner_reimann, d_dw_inner_reimann,
             d_idx, WI, s_m, s_u, s_v, s_rho, s_dw, s_idx):
        tmp = WI * (s_m[s_idx]/s_rho[s_idx])
        # Riemann invariants at open boundaries
        d_u_inner_reimann[d_idx] += s_u[s_idx] * tmp
        d_v_inner_reimann[d_idx] += s_v[s_idx] * tmp
        d_dw_inner_reimann[d_idx] += s_dw[s_idx] * tmp
    def post_loop(self, d_u_inner_reimann, d_v_inner_reimann,
                  d_dw_inner_reimann, d_shep_corr, d_idx):
        # Apply the Shepard normalization; skip when there are effectively
        # no fluid neighbors to avoid division by zero.
        if d_shep_corr[d_idx] > 1e-14:
            d_u_inner_reimann[d_idx] /= d_shep_corr[d_idx]
            d_v_inner_reimann[d_idx] /= d_shep_corr[d_idx]
            d_dw_inner_reimann[d_idx] /= d_shep_corr[d_idx]
class SubCriticalInFlow(Equation):
    r"""**Subcritical inflow condition**

    ..math ::

        d_B = \biggl[\frac{1}{2\sqrt{g}}(v_{B,n}-v_{I,n}) + \sqrt{d_I}\biggr]^2

    References
    ----------
    .. [Vacondio2012] R. Vacondio et al., "SPH modeling of shallow flow with
    open boundaries for practical flood simulation", J. Hydraul. Eng., 2012,
    138(6), pp. 530-541.

    Notes
    -----
    The velocity is imposed at the open boundary.
    """
    def __init__(self, dest, dim=2, rhow=1000.0, g=9.8):
        r"""
        Parameters
        ----------
        dim : int
            number of space dimensions (Default: 2)
        rhow : float
            constant 3-D density of water
        g : float
            acceleration due to gravity (Default: 9.8)
        """
        self.g = g
        self.dim = dim
        self.rhow = rhow
        super(SubCriticalInFlow, self).__init__(dest, None)

    def post_loop(self, d_dw, d_dw_inner_reimann, d_u, d_u_inner_reimann,
                  d_rho, d_alpha, d_cs, d_idx):
        const = 1. / (2.*sqrt(self.g))
        # Properties of open boundary particles
        # Depth from the Riemann invariant (the velocity is imposed).
        d_dw[d_idx] = (const*(d_u_inner_reimann[d_idx] - d_u[d_idx])
                       + sqrt(d_dw_inner_reimann[d_idx]))**2
        # Keep the derived quantities consistent with the new depth.
        d_rho[d_idx] = d_dw[d_idx] * self.rhow
        d_alpha[d_idx] = self.dim * d_rho[d_idx]
        d_cs[d_idx] = sqrt(self.g * d_dw[d_idx])
class SubCriticalOutFlow(Equation):
    r"""**Subcritical outflow condition**

    ..math ::

        v_{B,n} = v_{I,n} + 2\sqrt{g}(\sqrt{d_I} - \sqrt{d_B}), v_{B,t} =
        v_{I,t}

    References
    ----------
    .. [Vacondio2012] R. Vacondio et al., "SPH modeling of shallow flow with
    open boundaries for practical flood simulation", J. Hydraul. Eng., 2012,
    138(6), pp. 530-541.

    Notes:
    -----
    The constant water depth is imposed at the open boundary.
    """
    def __init__(self, dest, dim=2, rhow=1000.0, g=9.8):
        r"""
        Parameters
        ----------
        dim : int
            number of space dimensions (Default: 2)
        rhow : float
            constant 3-D density of water
        g : float
            acceleration due to gravity (Default: 9.8)
        """
        self.g = g
        self.dim = dim
        self.rhow = rhow
        super(SubCriticalOutFlow, self).__init__(dest, None)

    def post_loop(self, d_dw, d_dw_inner_reimann, d_u, d_u_inner_reimann,
                  d_rho, d_cs, d_alpha, d_v, d_v_inner_reimann, d_idx):
        const = 2. * sqrt(self.g)
        # Velocities of open boundary particles
        # Normal velocity from the Riemann invariant (depth is imposed);
        # tangential velocity copied from the inner state.
        d_u[d_idx] = (d_u_inner_reimann[d_idx]
                      + const*(sqrt(d_dw_inner_reimann[d_idx])
                               - sqrt(d_dw[d_idx])))
        d_v[d_idx] = d_v_inner_reimann[d_idx]
class SubCriticalTimeVaryingOutFlow(Equation):
    r"""**Subcritical outflow condition**

    ..math ::

        v_{B,n} = v_{I,n} + 2\sqrt{g}(\sqrt{d_I} - \sqrt{d_B}), v_{B,t} =
        v_{I,t}

    References
    ----------
    .. [Vacondio2012] R. Vacondio et al., "SPH modeling of shallow flow with
    open boundaries for practical flood simulation", J. Hydraul. Eng., 2012,
    138(6), pp. 530-541.

    Notes:
    -----
    The time varying water depth is imposed at the open boundary.
    """
    def __init__(self, dest, dim=2, rhow=1000.0, g=9.8):
        r"""
        Parameters
        ----------
        dim : int
            number of space dimensions (Default: 2)
        rhow : float
            constant 3-D density of water
        g : float
            acceleration due to gravity (Default: 9.8)
        """
        self.g = g
        self.dim = dim
        self.rhow = rhow
        super(SubCriticalTimeVaryingOutFlow, self).__init__(dest, None)

    def post_loop(self, d_dw, d_dw_inner_reimann, d_u, d_u_inner_reimann,
                  d_rho, d_cs, d_alpha, d_v, d_v_inner_reimann, d_idx,
                  d_dw_at_t):
        # Properties of open boundary particles
        # Time varying water depth imposed
        d_dw[d_idx] = d_dw_at_t[d_idx]
        d_rho[d_idx] = d_dw[d_idx] * self.rhow
        d_cs[d_idx] = sqrt(d_dw[d_idx] * self.g)
        d_alpha[d_idx] = d_rho[d_idx] * self.dim
        # Normal velocity from the Riemann invariant; tangential velocity
        # copied from the inner state.
        const = 2. * sqrt(self.g)
        d_u[d_idx] = (d_u_inner_reimann[d_idx]
                      + const*(sqrt(d_dw_inner_reimann[d_idx])
                               - sqrt(d_dw[d_idx])))
        d_v[d_idx] = d_v_inner_reimann[d_idx]
class SuperCriticalOutFlow(Equation):
    r"""**Supercritical outflow condition**

    .. math::

        v_{B,n} = v_{I,n}, v_{B,t} = v_{I,t}, d_B = d_I

    References
    ----------
    .. [Vacondio2012] R. Vacondio et al., "SPH modeling of shallow flow with
    open boundaries for practical flood simulation", J. Hydraul. Eng., 2012,
    138(6), pp. 530-541.

    Notes:
    -----
    For supercritical outflow condition, the velocity and water depth at the
    open boundary equals the respective inner Riemann state values. For
    supercritical inflow condition, both the velocity and water depth at the
    open boundary have to be imposed.
    """
    def __init__(self, dest, dim=2, rhow=1000.0, g=9.8):
        r"""
        Parameters
        ----------
        dim : int
            number of space dimensions (Default: 2)
        rhow : float
            constant 3-D density of water
        g : float
            acceleration due to gravity (Default: 9.8)
        """
        self.g = g
        self.dim = dim
        self.rhow = rhow
        super(SuperCriticalOutFlow, self).__init__(dest, None)

    def post_loop(self, d_dw, d_rho, d_dw_inner_reimann, d_u_inner_reimann,
                  d_u, d_v, d_v_inner_reimann, d_alpha, d_cs, d_idx):
        # Properties of open boundary particles
        # Copy the inner Riemann state straight onto the boundary particle.
        d_u[d_idx] = d_u_inner_reimann[d_idx]
        d_v[d_idx] = d_v_inner_reimann[d_idx]
        d_dw[d_idx] = d_dw_inner_reimann[d_idx]
        # Keep the derived quantities consistent with the copied depth.
        d_rho[d_idx] = d_dw[d_idx] * self.rhow
        d_alpha[d_idx] = self.dim * d_rho[d_idx]
        d_cs[d_idx] = sqrt(self.g * d_dw[d_idx])
class GradientCorrectionPreStep(Equation):
    """Accumulate, per destination particle, the moment matrix that
    :class:`GradientCorrection` later inverts to renormalise kernel
    gradients (Bonet & Lok style correction).
    """
    def __init__(self, dest, sources, dim=2):
        r"""
        Parameters
        ----------
        dim : int
            number of space dimensions (Default: 2)
        """
        self.dim = dim
        super(GradientCorrectionPreStep, self).__init__(dest, sources)
    def initialize(self, d_idx, d_m_mat):
        # Zero the per-particle 3x3 moment matrix (row-major, stride 3).
        i = declare('int')
        for i in range(9):
            d_m_mat[9*d_idx + i] = 0.0
    def loop_all(self, d_idx, d_m_mat, s_V, d_x, d_y, d_z, d_h, s_x,
                 s_y, s_z, s_h, SPH_KERNEL, NBRS, N_NBRS):
        x = d_x[d_idx]
        y = d_y[d_idx]
        z = d_z[d_idx]
        h = d_h[d_idx]
        # NOTE(review): the loop index ``k`` used below is not in this
        # declare list (only i, j, s_idx, n) -- confirm the transpiler
        # infers its type.
        i, j, s_idx, n = declare('int', 4)
        xij = declare('matrix(3)')
        dwij = declare('matrix(3)')
        n = self.dim
        for k in range(N_NBRS):
            s_idx = NBRS[k]
            xij[0] = x - s_x[s_idx]
            xij[1] = y - s_y[s_idx]
            xij[2] = z - s_z[s_idx]
            # Symmetrised smoothing length for the pair.
            hij = (h + s_h[s_idx]) * 0.5
            r = sqrt(xij[0]*xij[0] + xij[1]*xij[1] + xij[2]*xij[2])
            SPH_KERNEL.gradient(xij, r, hij, dwij)
            # Magnitude of the kernel gradient.
            dw = sqrt(dwij[0]*dwij[0] + dwij[1]*dwij[1]
                      + dwij[2]*dwij[2])
            V = s_V[s_idx]
            if r >= 1.0e-12:
                # Accumulate (|grad W| / r) * V * x_i * x_j into the n x n
                # leading block of the (stride-3) stored matrix.
                for i in range(n):
                    xi = xij[i]
                    for j in range(n):
                        xj = xij[j]
                        d_m_mat[9*d_idx + 3*i + j] += (dw*V*xi*xj) / r
class GradientCorrection(Equation):
    r"""**Kernel Gradient Correction**
    .. math::
            \nabla \tilde{W}_{ab} = L_{a}\nabla W_{ab}
    .. math::
            L_{a} = \left(\sum \frac{m_{b}}{\rho_{b}}\nabla W_{ab}
            \mathbf{\times}x_{ab} \right)^{-1}
    References
    ----------
    .. [Bonet and Lok, 1999] J. Bonet and T.-S.L. Lok, "Variational and
    Momentum Preservation Aspects of Smoothed Particle Hydrodynamic
    Formulations", Comput. Methods Appl. Mech. Engrg., 180 (1999), pp. 97-115
    """
    def _get_helpers_(self):
        # Injected C-level helpers used for the Gauss-Jordan solve below.
        return [gj_solve, augmented_matrix]
    def __init__(self, dest, sources, dim=2, tol=0.5):
        r"""
        Parameters
        ----------
        dim : int
            number of space dimensions (Default: 2)
        tol : float
            tolerance for gradient correction (Default: 0.5)
        """
        self.dim = dim
        self.tol = tol
        super(GradientCorrection, self).__init__(dest, sources)
    def loop(self, d_idx, d_m_mat, DWJ, s_h, s_idx):
        i, j, n = declare('int', 3)
        n = self.dim
        temp = declare('matrix(9)')
        aug_m = declare('matrix(12)')
        res = declare('matrix(3)')
        eps = 1.0e-04 * s_h[s_idx]
        # Copy the n x n leading block of the stride-3 stored moment matrix
        # into a contiguous stride-n matrix for the solver.
        for i in range(n):
            for j in range(n):
                temp[n*i + j] = d_m_mat[9*d_idx + 3*i + j]
        # Solve temp * res = DWJ for the corrected gradient.
        augmented_matrix(temp, DWJ, n, 1, n, aug_m)
        gj_solve(aug_m, n, 1, res)
        # Apply the correction only if the relative change stays within
        # ``tol``; guards against a near-singular moment matrix.
        change = 0.0
        for i in range(n):
            change += abs(DWJ[i]-res[i]) / (abs(DWJ[i])+eps)
        if change <= self.tol:
            for i in range(n):
                DWJ[i] = res[i]
class RemoveOutofDomainParticles(Equation):
    r"""Removes particles if the following condition is satisfied:
    .. math::
        (x_i < x_min) or (x_i > x_max) or (y_i < y_min) or (y_i > y_max)
    """
    def __init__(self, dest, x_min=-1e9, x_max=1e9,
                 y_min=-1e9, y_max=1e9):
        r"""
        Parameters
        ----------
        x_min : float
            minimum distance along x-direction below which particles are
            removed
        x_max : float
            maximum distance along x-direction above which particles are
            removed
        y_min : float
            minimum distance along y-direction below which particles are
            removed
        y_max : float
            maximum distance along y-direction above which particles are
            removed
        """
        self.x_min = x_min
        self.x_max = x_max
        self.y_min = y_min
        self.y_max = y_max
        super(RemoveOutofDomainParticles, self).__init__(dest, None)
    def initialize(self, d_pa_out_of_domain, d_x, d_y, d_idx):
        # Flag (1) any particle lying outside the rectangular domain.
        if (
            (d_x[d_idx] < self.x_min or d_x[d_idx] > self.x_max)
            or (d_y[d_idx] < self.y_min or d_y[d_idx] > self.y_max)
        ):
            d_pa_out_of_domain[d_idx] = 1
        else:
            d_pa_out_of_domain[d_idx] = 0
    def reduce(self, dst, t, dt):
        # Runs once per destination array (plain Python, not transpiled
        # per-particle); drops all flagged particles in one call.
        indices = declare('object')
        indices = numpy.where(dst.pa_out_of_domain > 0)[0]
        # Removes the out of domain particles
        if len(indices) > 0:
            dst.remove_particles(indices)
class RemoveCloseParticlesAtOpenBoundary(Equation):
    r"""Removes the newly created open boundary particle if the distance
    between this particle and any of its neighbor is less than min_dist_ob
    The following cases creates new open boundary particles
    * Particles which are moved back to the inlet after exiting the inlet.
    * Particles which have moved from another domain into the open boundary and
    have been converted to open boundary particles.
    References
    ----------
    .. [VacondioSWE-SPHysics, 2013] R. Vacondio et al., SWE-SPHysics source
    code, File: SWE_SPHYsics/SWE-SPHysics_2D_v1.0.00/source/SPHYSICS_SWE_2D/
    check_limits_2D.f
    """
    def __init__(self, dest, sources, min_dist_ob=0.0):
        """
        Parameters
        ----------
        min_dist_ob : float
            minimum distance of a newly created open boundary particle and its
            neighbor below which the particle is removed
        """
        self.min_dist_ob = min_dist_ob
        super(RemoveCloseParticlesAtOpenBoundary, self).__init__(dest, sources)
    def loop_all(self, d_idx, d_ob_pa_to_tag, d_ob_pa_to_remove, d_x, d_y, s_x,
                 s_y, NBRS, N_NBRS):
        i = declare('int')
        s_idx = declare('unsigned int')
        # ob_pa_to_tag is 1 for newly created open boundary particles
        if d_ob_pa_to_tag[d_idx]:
            xi = d_x[d_idx]
            yi = d_y[d_idx]
            for i in range(N_NBRS):
                s_idx = NBRS[i]
                # Skip the particle itself when it appears in its own
                # neighbour list.
                if s_idx == d_idx:
                    continue
                xij = xi - s_x[s_idx]
                yij = yi - s_y[s_idx]
                rij = sqrt(xij*xij + yij*yij)
                if rij < self.min_dist_ob:
                    d_ob_pa_to_remove[d_idx] = 1
    def reduce(self, dst, t, dt):
        # Plain-Python reduction: drop flagged particles, then clear the
        # "newly created" tags so the check runs only once per particle.
        indices = declare('object')
        indices = numpy.where(dst.ob_pa_to_remove > 0)[0]
        if len(indices) > 0:
            dst.remove_particles(indices)
        dst.ob_pa_to_tag = numpy.zeros_like(dst.ob_pa_to_tag)
class RemoveFluidParticlesWithNoNeighbors(Equation):
    r"""Removes fluid particles if there exists no neighboring particles within
    its kernel radius (2*smoothing length)
    """
    def loop_all(self, d_idx, d_ob_pa_to_tag, d_fluid_pa_to_remove, d_x, d_y,
                 s_x, s_y, d_h, NBRS, N_NBRS):
        i, n_nbrs_outside_ker = declare('int', 2)
        s_idx = declare('unsigned int')
        xi = d_x[d_idx]
        yi = d_y[d_idx]
        # Number of neighbors outside the particles kernel radius
        n_nbrs_outside_ker = 0
        for i in range(N_NBRS):
            s_idx = NBRS[i]
            if s_idx == d_idx:
                continue
            xij = xi - s_x[s_idx]
            yij = yi - s_y[s_idx]
            rij = sqrt(xij*xij + yij*yij)
            if rij > 2*d_h[d_idx]:
                n_nbrs_outside_ker += 1
        # If all neighbors outside its kernel then tag particle for removal
        # NOTE(review): the ``N_NBRS-1`` comparison assumes the particle
        # itself always appears in its own neighbour list -- confirm against
        # the neighbour-search configuration.
        if n_nbrs_outside_ker == N_NBRS-1:
            d_fluid_pa_to_remove[d_idx] = 1
        else:
            d_fluid_pa_to_remove[d_idx] = 0
    def reduce(self, dst, t, dt):
        # Plain-Python reduction: drop all particles flagged above.
        indices = declare('object')
        indices = numpy.where(dst.fluid_pa_to_remove > 0)[0]
        if len(indices) > 0:
            dst.remove_particles(indices)
class SWEInletOutletStep(IntegratorStep):
    r"""Stepper for both inlet and outlet particles for the cases dealing with
    shallow water flows
    """
    def initialize(self):
        pass
    def stage1(self, d_idx, d_x, d_y, d_uh, d_vh, d_u, d_v, dt):
        # First half-step: store the stage-start velocities in (uh, vh) and
        # advect positions by dt/2 with the current velocity.
        dtb2 = 0.5*dt
        d_uh[d_idx] = d_u[d_idx]
        d_vh[d_idx] = d_v[d_idx]
        d_x[d_idx] += dtb2 * d_u[d_idx]
        d_y[d_idx] += dtb2 * d_v[d_idx]
    def stage2(self, d_idx, d_x, d_y, d_u, d_v, dt):
        # Second half-step: advect positions by the remaining dt/2.
        dtb2 = 0.5*dt
        d_x[d_idx] += dtb2 * d_u[d_idx]
        d_y[d_idx] += dtb2 * d_v[d_idx]
class SWEInlet(object):
    """This inlet is used for shallow water flows. It has particles
    stacked along a particular axis (defaults to 'x'). These particles can
    move along any direction and as they flow out of the domain they are copied
    into the destination particle array at each timestep.
    Inlet particles are stacked by subtracting the spacing amount from the
    existing inlet array. These are copied when the inlet is created. The
    particles that cross the inlet domain are copied over to the destination
    particle array and moved back to the other side of the inlet.
    The particles from the source particle array which have moved to the inlet
    domain are removed from the source and added to the inlet particle array.
    The motion of the particles can be along any direction required. One
    can set the 'u' velocity to have a parabolic profile in the 'y' direction
    if so desired.
    """
    def __init__(self, inlet_pa, dest_pa, source_pa, spacing, n=5, axis='x',
                 xmin=-1.0, xmax=1.0, ymin=-1.0, ymax=1.0, callback=None):
        """Constructor.
        Note that the inlet must be defined such that the spacing times the
        number of stacks of particles is equal to the length of the domain in
        the stacked direction. For example, if particles are stacked along
        the 'x' axis and n=5 with spacing 0.1, then xmax - xmin should be 0.5.
        Parameters
        ----------
        inlet_pa: ParticleArray
            Particle array for the inlet particles.
        dest_pa: ParticleArray
            Particle array for the destination into which inlet flows.
        source_pa : ParticleArray
            Particle array from which the particles flow in.
        spacing: float
            Spacing of particles in the inlet domain.
        n: int
            Total number of copies of the initial particles.
        axis: str
            Axis along which to stack particles, one of 'x', 'y'.
        xmin, xmax, ymin, ymax : float
            Domain of the outlet.
        callback : callable, optional
            Called as ``callback(inlet_pa, pa_add)`` just before particles
            taken from the source array are added to the inlet.
        """
        self.inlet_pa = inlet_pa
        self.dest_pa = dest_pa
        self.spacing = spacing
        self.source_pa = source_pa
        self.callback = callback
        assert axis in ('x', 'y')
        self.axis = axis
        self.n = n
        self.xmin, self.xmax = xmin, xmax
        self.ymin, self.ymax = ymin, ymax
        self._create_inlet_particles()
    def _create_inlet_particles(self):
        # Stack n-1 additional copies of the initial particles behind the
        # originals along ``axis`` (shifted by i*spacing per copy); all
        # other properties are duplicated unchanged.
        props = self.inlet_pa.get_property_arrays()
        inlet_props = {}
        for prop, array in props.items():
            new_array = np.array([], dtype=array.dtype)
            for i in range(1, self.n):
                if prop == self.axis:
                    new_array = np.append(new_array, array - i*self.spacing)
                else:
                    new_array = np.append(new_array, array)
            inlet_props[prop] = new_array
        self.inlet_pa.add_particles(**inlet_props)
    def update(self, t, dt, stage):
        """This is called by the solver after each timestep and is passed
        the solver instance.
        """
        # NOTE: the order of operations below matters -- particles must be
        # copied to the destination *before* being wrapped back and tagged.
        pa_add = {}
        inlet_pa = self.inlet_pa
        xmin, xmax, ymin, ymax = self.xmin, self.xmax, self.ymin, self.ymax
        lx, ly = xmax - xmin, ymax - ymin
        x, y = inlet_pa.x, inlet_pa.y
        # NOTE(review): only exits through xmax/ymax are handled here --
        # presumably the flow direction is +x/+y; confirm.
        xcond = (x > xmax)
        ycond = (y > ymax)
        # All the indices of particles which have left.
        all_idx = np.where(xcond | ycond)[0]
        # The indices which need to be wrapped around.
        x_idx = np.where(xcond)[0]
        y_idx = np.where(ycond)[0]
        # Adding particles to the destination array.
        props = inlet_pa.get_property_arrays()
        for prop, array in props.items():
            pa_add[prop] = np.array(array[all_idx])
        self.dest_pa.add_particles(**pa_add)
        # Moving the moved particles back to the array beginning.
        inlet_pa.x[x_idx] -= np.sign(inlet_pa.x[x_idx] - xmax)*lx
        inlet_pa.y[y_idx] -= np.sign(inlet_pa.y[y_idx] - ymax)*ly
        # Tags the particles which have been moved back to inlet. These tagged
        # particles are then used for checking minimum spacing condition
        # with other open boundary particles.
        inlet_pa.ob_pa_to_tag[all_idx] = 1
        source_pa = self.source_pa
        x, y = source_pa.x, source_pa.y
        # Source particles that have entered the inlet domain.
        idx = np.where((x <= xmax) & (x >= xmin) & (y <= ymax) & (y >=
                       ymin))[0]
        # Adding particles to the destination array.
        pa_add = {}
        props = source_pa.get_property_arrays()
        for prop, array in props.items():
            pa_add[prop] = np.array(array[idx])
        # Tags the particles which have been added to the destination array
        # from the source array. These tagged particles are then used for
        # checking minimum spacing condition with other open boundary
        # particles.
        pa_add['ob_pa_to_tag'] = np.ones_like(pa_add['ob_pa_to_tag'])
        if self.callback is not None:
            self.callback(inlet_pa, pa_add)
        inlet_pa.add_particles(**pa_add)
        source_pa.remove_particles(idx)
        # Removing the particles that moved out of inlet
        x, y = inlet_pa.x, inlet_pa.y
        idx = np.where((x > xmax) | (x < xmin) | (y > ymax) | (y < ymin))[0]
        inlet_pa.remove_particles(idx)
| 33.861206 | 80 | 0.580073 |
7955b928a2082d8ee0021ea2e6d497f93fb53586 | 815 | py | Python | python/django/opendirectory_gui/manage.py | malvarezcastillo/opendirectory-indexer | 58409b00dc5527e60d74bee013631b38cc637023 | [
"MIT"
] | 15 | 2018-01-16T01:11:52.000Z | 2021-04-20T02:09:52.000Z | python/django/opendirectory_gui/manage.py | malvarezcastillo/opendirectory-indexer | 58409b00dc5527e60d74bee013631b38cc637023 | [
"MIT"
] | null | null | null | python/django/opendirectory_gui/manage.py | malvarezcastillo/opendirectory-indexer | 58409b00dc5527e60d74bee013631b38cc637023 | [
"MIT"
] | 6 | 2018-01-16T04:57:02.000Z | 2018-03-01T01:10:04.000Z | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
    # Standard Django management entry point: point Django at this
    # project's settings module before dispatching the CLI arguments.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "opendirectory_gui.settings")
    try:
        from django.core.management import execute_from_command_line
    except ImportError:
        # The above import may fail for some other reason. Ensure that the
        # issue is really that Django is missing to avoid masking other
        # exceptions on Python 2.
        try:
            import django
        except ImportError:
            raise ImportError(
                "Couldn't import Django. Are you sure it's installed and "
                "available on your PYTHONPATH environment variable? Did you "
                "forget to activate a virtual environment?"
            )
        raise
    execute_from_command_line(sys.argv)
| 35.434783 | 81 | 0.646626 |
7955b97ef3abe54abcaf9cf1f5dd5c8e37b0523e | 1,610 | py | Python | setup.py | Bimi05/bstats | a1c6ea51a306de25ab1df868bb099389d6188f34 | [
"MIT"
] | 6 | 2022-01-09T17:30:09.000Z | 2022-03-11T13:10:37.000Z | setup.py | Bimi05/bstats | a1c6ea51a306de25ab1df868bb099389d6188f34 | [
"MIT"
] | 4 | 2022-01-16T16:00:03.000Z | 2022-02-04T16:05:02.000Z | setup.py | Bimi05/bstats | a1c6ea51a306de25ab1df868bb099389d6188f34 | [
"MIT"
] | 2 | 2022-01-16T15:14:22.000Z | 2022-03-13T10:52:45.000Z | import re
from setuptools import setup
with open("bstats/__init__.py") as file:
version = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", file.read(), re.MULTILINE).group(1)
with open("README.md") as file:
readme = file.read()
if version.endswith(("a", "b", "rc")):
version = version[:-1]
try:
import subprocess
process = subprocess.Popen(["git", "rev-parse", "--short", "HEAD"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
_out, _err = process.communicate()
if _out:
version += "+" + _out.decode("UTF-8").strip()
except Exception:
pass
setup(
name="bstats",
version=version,
author="Bimi05",
license="MIT",
url="https://github.com/Bimi05/bstats",
project_urls={
"Issues": "https://github.com/Bimi05/bstats/issues"
},
packages=["bstats"],
description="A fundamental wrapper for the Brawl Stars API covering all endpoints and including many features!",
long_description=readme,
long_description_content_type="text/markdown",
python_requires=">=3.8",
install_requires=["aiohttp>=3.7.0,<3.9", "cachetools>=4.1.0", "requests"],
include_package_data=True,
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"License :: OSI Approved :: MIT License",
"Topic :: Games/Entertainment :: Real Time Strategy",
"Natural Language :: English,"
]
)
| 31.568627 | 123 | 0.612422 |
7955b9b341f2c9fe8e69a2544b899527b17c46f1 | 4,528 | py | Python | sdk/python/pulumi_azure_native/recoveryservices/get_replication_protected_item.py | sebtelko/pulumi-azure-native | 711ec021b5c73da05611c56c8a35adb0ce3244e4 | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure_native/recoveryservices/get_replication_protected_item.py | sebtelko/pulumi-azure-native | 711ec021b5c73da05611c56c8a35adb0ce3244e4 | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure_native/recoveryservices/get_replication_protected_item.py | sebtelko/pulumi-azure-native | 711ec021b5c73da05611c56c8a35adb0ce3244e4 | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
__all__ = [
'GetReplicationProtectedItemResult',
'AwaitableGetReplicationProtectedItemResult',
'get_replication_protected_item',
]
@pulumi.output_type
class GetReplicationProtectedItemResult:
    """
    Replication protected item.
    """
    # NOTE: this file is generated by the Pulumi SDK Generator (see the
    # file header) -- regenerate rather than editing by hand.
    def __init__(__self__, id=None, location=None, name=None, properties=None, type=None):
        # Defensive runtime type checks mirror the provider schema.
        if id and not isinstance(id, str):
            raise TypeError("Expected argument 'id' to be a str")
        pulumi.set(__self__, "id", id)
        if location and not isinstance(location, str):
            raise TypeError("Expected argument 'location' to be a str")
        pulumi.set(__self__, "location", location)
        if name and not isinstance(name, str):
            raise TypeError("Expected argument 'name' to be a str")
        pulumi.set(__self__, "name", name)
        if properties and not isinstance(properties, dict):
            raise TypeError("Expected argument 'properties' to be a dict")
        pulumi.set(__self__, "properties", properties)
        if type and not isinstance(type, str):
            raise TypeError("Expected argument 'type' to be a str")
        pulumi.set(__self__, "type", type)
    @property
    @pulumi.getter
    def id(self) -> str:
        """
        Resource Id
        """
        return pulumi.get(self, "id")
    @property
    @pulumi.getter
    def location(self) -> Optional[str]:
        """
        Resource Location
        """
        return pulumi.get(self, "location")
    @property
    @pulumi.getter
    def name(self) -> str:
        """
        Resource Name
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter
    def properties(self) -> 'outputs.ReplicationProtectedItemPropertiesResponse':
        """
        The custom data.
        """
        return pulumi.get(self, "properties")
    @property
    @pulumi.getter
    def type(self) -> str:
        """
        Resource Type
        """
        return pulumi.get(self, "type")
class AwaitableGetReplicationProtectedItemResult(GetReplicationProtectedItemResult):
    # Generated adapter: makes the (already-resolved) result awaitable so
    # callers may use it with or without ``await``.
    # pylint: disable=using-constant-test
    def __await__(self):
        if False:
            yield self
        return GetReplicationProtectedItemResult(
            id=self.id,
            location=self.location,
            name=self.name,
            properties=self.properties,
            type=self.type)
def get_replication_protected_item(fabric_name: Optional[str] = None,
                                   protection_container_name: Optional[str] = None,
                                   replicated_protected_item_name: Optional[str] = None,
                                   resource_group_name: Optional[str] = None,
                                   resource_name: Optional[str] = None,
                                   opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetReplicationProtectedItemResult:
    """
    Replication protected item.
    API Version: 2018-07-10.
    :param str fabric_name: Fabric unique name.
    :param str protection_container_name: Protection container name.
    :param str replicated_protected_item_name: Replication protected item name.
    :param str resource_group_name: The name of the resource group where the recovery services vault is present.
    :param str resource_name: The name of the recovery services vault.
    """
    # Generated invoke wrapper: marshal the arguments and call the provider.
    __args__ = dict()
    __args__['fabricName'] = fabric_name
    __args__['protectionContainerName'] = protection_container_name
    __args__['replicatedProtectedItemName'] = replicated_protected_item_name
    __args__['resourceGroupName'] = resource_group_name
    __args__['resourceName'] = resource_name
    if opts is None:
        opts = pulumi.InvokeOptions()
    if opts.version is None:
        opts.version = _utilities.get_version()
    __ret__ = pulumi.runtime.invoke('azure-native:recoveryservices:getReplicationProtectedItem', __args__, opts=opts, typ=GetReplicationProtectedItemResult).value
    return AwaitableGetReplicationProtectedItemResult(
        id=__ret__.id,
        location=__ret__.location,
        name=__ret__.name,
        properties=__ret__.properties,
        type=__ret__.type)
| 35.100775 | 162 | 0.647968 |
7955bb4c4712ebd839bc5363295fa6ec7928f058 | 3,111 | py | Python | tests/test_thehappyfoodie.py | chrisbubernak/recipe-scrapers | 1617a542f6e728552886aebd29f14e97feeb8f5c | [
"MIT"
] | 1 | 2021-02-07T17:48:09.000Z | 2021-02-07T17:48:09.000Z | tests/test_thehappyfoodie.py | chrisbubernak/recipe-scrapers | 1617a542f6e728552886aebd29f14e97feeb8f5c | [
"MIT"
] | null | null | null | tests/test_thehappyfoodie.py | chrisbubernak/recipe-scrapers | 1617a542f6e728552886aebd29f14e97feeb8f5c | [
"MIT"
] | null | null | null | from tests import ScraperTest
from recipe_scrapers.thehappyfoodie import TheHappyFoodie
class TestTheHappyFoodie(ScraperTest):
    # Scraper fixture tests; ScraperTest drives the scraper under test and
    # presumably exposes a parsed instance as ``self.harvester_class``
    # (the methods below call it with instance methods) -- see ScraperTest.
    scraper_class = TheHappyFoodie
    def test_host(self):
        self.assertEqual("thehappyfoodie.co.uk", self.harvester_class.host())
    def test_title(self):
        self.assertEqual(
            self.harvester_class.title(),
            "Poulet rôti au vin rouge (Roast Red Wine Chicken)",
        )
    def test_total_time(self):
        # Minutes.
        self.assertEqual(90, self.harvester_class.total_time())
    def test_yields(self):
        self.assertEqual("6 serving(s)", self.harvester_class.yields())
    def test_ingredients(self):
        # Order-insensitive comparison of the ingredient list.
        self.assertCountEqual(
            [
                "150ml red wine",
                "100g tomato paste",
                "3 sprigs of thyme, leaves picked",
                "3 sprigs of marjoram, leaves picked, or ½ teaspoon dried",
                "100ml red wine vinegar",
                "1 whole chicken cut into 8 pieces (approx. 1.5kg)",
                "Salt and ground black pepper",
                "500g baby potatoes, washed",
                "3 onions, peeled and cut into quarters",
                "6 carrots, peeled and cut into quarters lengthways",
            ],
            self.harvester_class.ingredients(),
        )
    def test_instructions(self):
        return self.assertEqual(
            "Mix together the red wine, tomato paste, herbs and red wine vinegar. Season the chicken pieces with plenty of salt and pepper then place in a bag with the marinade. Shake the bag to make sure each piece is well coated. Place in the fridge for at least 30 minutes.\nIn the meantime, place the potatoes in a pan of cold water, put the lid on top and bring up to the boil. Boil for 1–2 minutes, then drain in a colander. Place the onions, carrots and potatoes in a large baking dish or tray (big enough to fit the chicken and the vegetables) and pour over 125ml of water. Preheat the oven to 200°C. Remove the chicken from the fridge and arrange the pieces, skin side up, in a layer on top of the vegetables in the dish. Pour the rest of the marinade over the chicken. Cover with a sheet of baking paper or foil and roast in the preheated oven for 30 minutes. Remove the baking paper or foil and baste the chicken with the cooking liquid. Roast, uncovered, for another 15 minutes or until the skin is crisp. Serve immediately.\nLes petits conseils – tips: Buying a whole chicken always works out more affordable. If you aren’t up for dissecting it yourself, ask your butcher to cut it into pieces for you. Otherwise if there’s no knife-wielding butcher about you can always cheat and go for chicken thighs. If you’re unsure whether the chicken is cooked through, pierce with a sharp knife and the juices from the chicken should come out clear.\nFaire en avance – get ahead: The veg and chicken can be prepared up to a day in advance, then simply pop it all in the baking tray and cook as indicated in the recipe.\nMarinating time: 30 minutes – overnight",
            self.harvester_class.instructions(),
        )
| 66.191489 | 1,665 | 0.694953 |
7955bbcf7716dc36d7c3de6508f5e61441f887d5 | 28 | py | Python | tapiriik/services/MapMyFitness/__init__.py | neilboyd/tapiriik | 268453aa7fee081bd188c90f9872a1786d85e20d | [
"Apache-2.0"
] | 5 | 2020-12-27T15:37:08.000Z | 2021-10-05T14:54:32.000Z | tapiriik/services/MapMyFitness/__init__.py | neilboyd/tapiriik | 268453aa7fee081bd188c90f9872a1786d85e20d | [
"Apache-2.0"
] | 46 | 2020-11-16T06:04:19.000Z | 2021-02-07T16:21:02.000Z | tapiriik/services/MapMyFitness/__init__.py | neilboyd/tapiriik | 268453aa7fee081bd188c90f9872a1786d85e20d | [
"Apache-2.0"
] | null | null | null | from .mapmyfitness import *
| 14 | 27 | 0.785714 |
7955bc2ad3dd980eed29ec2f3c52eeb474052651 | 194 | py | Python | test/test-assign/pdf-correct/student/tests/q8.py | drjbarker/otter-grader | 9e89e1675b09cf7889995b5f1bc8e1648bf6c309 | [
"BSD-3-Clause"
] | null | null | null | test/test-assign/pdf-correct/student/tests/q8.py | drjbarker/otter-grader | 9e89e1675b09cf7889995b5f1bc8e1648bf6c309 | [
"BSD-3-Clause"
] | null | null | null | test/test-assign/pdf-correct/student/tests/q8.py | drjbarker/otter-grader | 9e89e1675b09cf7889995b5f1bc8e1648bf6c309 | [
"BSD-3-Clause"
] | null | null | null | test = {'name': 'q8', 'points': 1, 'suites': [{'cases': [{'code': '>>> len(z) == 10\nTrue', 'hidden': False, 'locked': False}], 'scored': True, 'setup': '', 'teardown': '', 'type': 'doctest'}]}
| 97 | 193 | 0.510309 |
7955bd7b459a2aa675db2175f00457fbca812148 | 1,686 | py | Python | DNA_to_GenBank.py | SynBioHub/Snapgene-submit-plugin | 82cfaa7e7b3dfb59d9a120a6668fbad1c0799afd | [
"BSD-2-Clause"
] | 1 | 2021-05-30T17:35:17.000Z | 2021-05-30T17:35:17.000Z | DNA_to_GenBank.py | SynBioHub/Snapgene-submit-plugin | 82cfaa7e7b3dfb59d9a120a6668fbad1c0799afd | [
"BSD-2-Clause"
] | 1 | 2021-11-08T04:36:11.000Z | 2021-11-08T04:36:11.000Z | DNA_to_GenBank.py | SynBioHub/Plugin-Submit-Snapgene | 82cfaa7e7b3dfb59d9a120a6668fbad1c0799afd | [
"BSD-2-Clause"
] | 1 | 2020-01-27T23:37:37.000Z | 2020-01-27T23:37:37.000Z | # -*- coding: utf-8 -*-
"""
Created on Tue Oct 22 18:46:41 2019
@author: JVM
"""
import requests, tempfile, os
#converter api help http://synbiodex.github.io/SBOL-Validator/?javascript#introduction
def DNA_to_GenBank(filename, partname):
    """Convert a SnapGene ``.dna`` file to an SBOL2 document via GenBank.

    The ``.dna`` file at the URL ``filename`` is downloaded, uploaded to the
    song.ece.utah.edu conversion service, fetched back as GenBank and then
    run through the online SBOL validator/converter.

    Parameters
    ----------
    filename : str
        URL of the SnapGene ``.dna`` file to convert.
    partname : str
        Unused; kept for backward compatibility with existing callers.

    Returns
    -------
    str
        The SBOL2 document produced by the validator.

    Raises
    ------
    requests.HTTPError
        If any of the intermediate HTTP requests fails.
    """
    newfile_url = "http://song.ece.utah.edu/examples/pages/acceptNewFile.php"
    # A named temp file is required because the download URL of the
    # converted file is derived from the uploaded file's basename.
    # ``with`` guarantees the temp file is deleted even on error (the
    # original leaked it if a request raised).
    with tempfile.NamedTemporaryFile(suffix=".dna") as temp:
        get_url = "http://song.ece.utah.edu/dnafiles/" + os.path.basename(temp.name)[:-4]
        download = requests.get(filename)
        download.raise_for_status()
        temp.write(download.content)
        temp.flush()
        temp.seek(0)
        # Upload the .dna file to the conversion service.
        res = requests.post(newfile_url, files={'fileToUpload': temp},
                            headers={"Accept": "text/plain"})
        res.raise_for_status()
    # Fetch the converted GenBank text.
    s = requests.get(f"{get_url}.gb")
    s.raise_for_status()
    genebank = s.text
    # Convert GenBank -> SBOL2 with the online validator.
    # (converter api help http://synbiodex.github.io/SBOL-Validator)
    request = {'options': {'language': 'SBOL2',
                           'test_equality': False,
                           'check_uri_compliance': False,
                           'check_completeness': False,
                           'check_best_practices': False,
                           'fail_on_first_error': False,
                           'provide_detailed_stack_trace': False,
                           'uri_prefix': 'trial',
                           'version': '',
                           'insert_type': False
                           },
               'return_file': True,
               'main_file': genebank
               }
    resp = requests.post("https://validator.sbolstandard.org/validate/", json=request)
    resp.raise_for_status()
    content = resp.json()
    return content["result"]
| 31.222222 | 86 | 0.531435 |
7955beed9dfdfb5a03ee6df6fe7eb287f0a13546 | 1,437 | py | Python | task10/CyberSausage.py | p0l0satik/pyprak | 3f1f11bb622476e225db09e3dcc9d9ee6397b654 | [
"Apache-2.0"
] | null | null | null | task10/CyberSausage.py | p0l0satik/pyprak | 3f1f11bb622476e225db09e3dcc9d9ee6397b654 | [
"Apache-2.0"
] | null | null | null | task10/CyberSausage.py | p0l0satik/pyprak | 3f1f11bb622476e225db09e3dcc9d9ee6397b654 | [
"Apache-2.0"
] | null | null | null | class sausage():
## OK NOW I AM HUNGRY!!!
def __init__(self, mince = "pork!", volume = 1):
self.mince = mince
self.size = eval(str(volume)) * 12
if len(mince) > 12:
self.mince_str = mince[:12]
else:
self.mince_str = mince * (12 // len(mince)) + mince[:12 % len(mince)]
def __str__(self):
blocks = int(self.size) // 12
left = int(self.size) % 12
up = "/------------\\" * blocks
down = "\\------------/" * blocks
s = "|" + self.mince_str + "|"
body = s * blocks
if left:
body += s[:left + 1] + "|"
up += "/------------"[:left +1] + "|"
down += "\\------------"[:left + 1] + "|"
if int(self.size) == 0:
up = "/|"
down = "\\|"
body = "||"
return "\n".join((up, body, body, body, down))
def __truediv__(self, num):
return sausage(self.mince, (self.size / num) / 12)
def __mul__(self, num):
return sausage(self.mince, (self.size * num) / 12)
def __add__(self, other):
return sausage(self.mince, (self.size + other.size) / 12)
def __sub__(self, other):
size = (self.size - other.size) / 12
if size < 0:
size = 0
return sausage(self.mince, size)
def __bool__(self):
return bool(self.size)
__rmul__ = __mul__
| 30.574468 | 81 | 0.450244 |
7955c0cc21a6dd8327891be12e664fa77fae9b02 | 1,375 | py | Python | azure-servicefabric/azure/servicefabric/models/partition_quorum_loss_progress.py | v-Ajnava/azure-sdk-for-python | a1f6f80eb5869c5b710e8bfb66146546697e2a6f | [
"MIT"
] | null | null | null | azure-servicefabric/azure/servicefabric/models/partition_quorum_loss_progress.py | v-Ajnava/azure-sdk-for-python | a1f6f80eb5869c5b710e8bfb66146546697e2a6f | [
"MIT"
] | null | null | null | azure-servicefabric/azure/servicefabric/models/partition_quorum_loss_progress.py | v-Ajnava/azure-sdk-for-python | a1f6f80eb5869c5b710e8bfb66146546697e2a6f | [
"MIT"
] | null | null | null | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class PartitionQuorumLossProgress(Model):
    """Information about a partition quorum loss user-induced operation.

    Autogenerated msrest model; _attribute_map drives (de)serialization.

    :param state: Possible values include: 'Invalid', 'Running',
     'RollingBack', 'Completed', 'Faulted', 'Cancelled', 'ForceCancelled'
    :type state: str or ~azure.servicefabric.models.enum
    :param invoke_quorum_loss_result:
    :type invoke_quorum_loss_result:
     ~azure.servicefabric.models.InvokeQuorumLossResult
    """

    # Maps Python attribute names to wire-format keys and msrest types.
    _attribute_map = {
        'state': {'key': 'State', 'type': 'str'},
        'invoke_quorum_loss_result': {'key': 'InvokeQuorumLossResult', 'type': 'InvokeQuorumLossResult'},
    }

    def __init__(self, state=None, invoke_quorum_loss_result=None):
        super(PartitionQuorumLossProgress, self).__init__()
        self.state = state
        self.invoke_quorum_loss_result = invoke_quorum_loss_result
| 39.285714 | 105 | 0.647273 |
7955c1f0ca4249295d649fb0ad6a3b43a60f7692 | 3,774 | py | Python | docs/source/exts/youtube.py | vivamoto/usp-hpc | 6fbb6ff7beef5761019388bb4545a1a750525bb5 | [
"MIT"
] | null | null | null | docs/source/exts/youtube.py | vivamoto/usp-hpc | 6fbb6ff7beef5761019388bb4545a1a750525bb5 | [
"MIT"
] | null | null | null | docs/source/exts/youtube.py | vivamoto/usp-hpc | 6fbb6ff7beef5761019388bb4545a1a750525bb5 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import division
import re
from docutils import nodes
from docutils.parsers.rst import directives, Directive
CONTROL_HEIGHT = 30
def get_size(d, key):
    """Parse d[key] as a size specification.

    Accepts "640", "50%" or "480px" and returns an (int, unit) pair with
    the unit defaulting to "px".  Returns None when *key* is absent and
    raises ValueError on a malformed value.
    """
    if key not in d:
        return None
    # Raw string: "\d" in a plain literal is an invalid escape sequence
    # (DeprecationWarning on modern Pythons).
    m = re.match(r"(\d+)(|%|px)$", d[key])
    if not m:
        raise ValueError("invalid size %r" % d[key])
    return int(m.group(1)), m.group(2) or "px"
def css(d):
    """Serialise a style dict into a deterministic inline-CSS string."""
    declarations = ["%s: %s" % pair for pair in d.items()]
    return "; ".join(sorted(declarations))
class youtube(nodes.General, nodes.Element):
    """Doctree node representing an embedded YouTube player."""
def visit_youtube_node(self, node):
    """Emit the HTML <div><iframe> markup for a youtube node.

    Two layouts: a responsive one (percentage width, no height) using
    padding-based aspect-ratio tricks, and a fixed-pixel one where any
    missing dimension is derived from the aspect ratio.
    """
    aspect = node["aspect"]
    width = node["width"]
    height = node["height"]

    # Default to the classic 16:9 video aspect ratio.
    if aspect is None:
        aspect = 16, 9

    div_style = {}
    if (height is None) and (width is not None) and (width[1] == "%"):
        # Responsive layout: the wrapper div carries the aspect ratio via
        # percentage padding (plus room for the player controls), and the
        # iframe is absolutely positioned to fill it.
        div_style = {
            "padding-top": "%dpx" % CONTROL_HEIGHT,
            "padding-bottom": "%f%%" % (width[0] * aspect[1] / aspect[0]),
            "width": "%d%s" % width,
            "position": "relative",
        }
        style = {
            "position": "absolute",
            "top": "0",
            "left": "0",
            "width": "100%",
            "height": "100%",
            "border": "0",
        }
        attrs = {
            "src": "https://www.youtube.com/embed/%s" % node["id"],
            "style": css(style),
        }
    else:
        # Fixed layout: derive missing width/height from the aspect ratio
        # (560px wide by default), and pad the height for the control bar.
        if width is None:
            if height is None:
                width = 560, "px"
            else:
                width = height[0] * aspect[0] / aspect[1], "px"
        if height is None:
            height = width[0] * aspect[1] / aspect[0], "px"
        style = {
            "width": "%d%s" % width,
            "height": "%d%s" % (height[0] + CONTROL_HEIGHT, height[1]),
            "border": "0",
        }
        attrs = {
            "src": "https://www.youtube.com/embed/%s" % node["id"],
            "style": css(style),
        }
    attrs["allowfullscreen"] = "true"
    div_attrs = {
        "CLASS": "youtube_wrapper",
        "style": css(div_style),
    }
    self.body.append(self.starttag(node, "div", **div_attrs))
    self.body.append(self.starttag(node, "iframe", **attrs))
    self.body.append("</iframe></div>")
def depart_youtube_node(self, node):
    # Nothing to do: visit_youtube_node already appends the closing markup.
    pass
def visit_youtube_node_latex(self,node):
    # LaTeX output cannot embed a player; render a framed, clickable
    # youtu.be URL instead.
    self.body.append(r'\begin{quote}\begin{center}\fbox{\url{https://youtu.be/%s}}\end{center}\end{quote}'%node['id'])
class YouTube(Directive):
    """reST directive ``.. youtube:: <video-id>``.

    Options: ``:width:``/``:height:`` (e.g. "50%", "480px", "640") and
    ``:aspect:`` (e.g. "16:9").  Produces a single ``youtube`` doctree
    node carrying the parsed geometry.
    """
    has_content = True
    required_arguments = 1
    optional_arguments = 0
    final_argument_whitespace = False
    option_spec = {
        "width": directives.unchanged,
        "height": directives.unchanged,
        "aspect": directives.unchanged,
    }

    def run(self):
        if "aspect" in self.options:
            aspect = self.options.get("aspect")
            # Raw string fixes the invalid "\d" escape in the original literal.
            m = re.match(r"(\d+):(\d+)", aspect)
            if m is None:
                raise ValueError("invalid aspect ratio %r" % aspect)
            aspect = tuple(int(x) for x in m.groups())
        else:
            aspect = None
        width = get_size(self.options, "width")
        height = get_size(self.options, "height")
        return [youtube(id=self.arguments[0], aspect=aspect, width=width, height=height)]
def unsupported_visit_youtube(self, node):
    """Warn and skip the node on builders that cannot embed video."""
    self.builder.warn('youtube: unsupported output format (node skipped)')
    raise nodes.SkipNode
# Maps each Sphinx builder format to its (visit, depart) handler pair for
# the youtube node; unsupported formats warn and skip.
_NODE_VISITORS = {
    'html': (visit_youtube_node, depart_youtube_node),
    'latex': (visit_youtube_node_latex, depart_youtube_node),
    'man': (unsupported_visit_youtube, None),
    'texinfo': (unsupported_visit_youtube, None),
    'text': (unsupported_visit_youtube, None)
}


def setup(app):
    """Sphinx extension entry point: register the node and the directive."""
    app.add_node(youtube, **_NODE_VISITORS)
    app.add_directive("youtube", YouTube)
| 29.255814 | 118 | 0.557764 |
7955c2a6f0bb173830a98a034f868152b56cd9e6 | 855 | py | Python | setup.py | atlantpayments/paymentpage-sdk-python | 0881ef5e9bb42206b8360a7d7e43c10cfe0ed953 | [
"MIT"
] | null | null | null | setup.py | atlantpayments/paymentpage-sdk-python | 0881ef5e9bb42206b8360a7d7e43c10cfe0ed953 | [
"MIT"
] | null | null | null | setup.py | atlantpayments/paymentpage-sdk-python | 0881ef5e9bb42206b8360a7d7e43c10cfe0ed953 | [
"MIT"
] | null | null | null | import pathlib
from setuptools import setup, find_packages
# Resolve paths relative to this setup.py so builds work from any CWD.
HERE = pathlib.Path(__file__).parent

# Long description is rendered on PyPI from the README.  Read it explicitly
# as UTF-8: the original bare read_text() used the locale's preferred
# encoding and could fail on non-UTF-8 build hosts.
README = (HERE / "README.md").read_text(encoding="utf-8")

setup(
    name='atlantpayments-sdk',
    version='1.0.0',
    url='https://github.com/atlantpayments/paymentpage-sdk-python',
    license='MIT',
    long_description=README,
    long_description_content_type="text/markdown",
    # Ship every package except the test tree.
    packages=find_packages(exclude=['tests', 'tests.*']),
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],
)
| 32.884615 | 71 | 0.639766 |
7955c3d17717cf9b887073ce7af0fb970dc64fc6 | 3,694 | py | Python | src/relstorage/adapters/sql/tests/test_query.py | lungj/relstorage | e18394b0197f6b70708037f36defbd3fe3ee5137 | [
"ZPL-2.1"
] | null | null | null | src/relstorage/adapters/sql/tests/test_query.py | lungj/relstorage | e18394b0197f6b70708037f36defbd3fe3ee5137 | [
"ZPL-2.1"
] | null | null | null | src/relstorage/adapters/sql/tests/test_query.py | lungj/relstorage | e18394b0197f6b70708037f36defbd3fe3ee5137 | [
"ZPL-2.1"
] | null | null | null | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2019 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from relstorage.tests import TestCase
from ..query import Query as _BaseQuery
from ..query import CompiledQuery
class MockDialect(object):
    """Test double for a SQL dialect: bind() ignores the context and
    hands back the dialect itself."""

    def bind(self, context):  # pylint:disable=unused-argument
        return self
class Query(_BaseQuery):
    """Test double: short-circuits compilation by returning itself."""

    def compiled(self):
        return self
class TestQuery(TestCase):
    """Tests for Query's lazy __name__ discovery through the descriptor
    protocol (used when queries live as class attributes)."""

    def test_name_discovery(self):
        # If a __name__ isn't assigned when a query is a
        # class property and used as a non-data-descriptor,
        # it finds it.
        class C(object):
            dialect = MockDialect()
            q1 = Query()
            q2 = Query()
            q_over = Query()

        class D(C):
            q3 = Query()
            q_over = Query()

        inst = D()
        # Undo the effects of Python 3.6's __set_name__.
        D.q1.__name__ = None
        D.q2.__name__ = None
        D.q3.__name__ = None
        C.q_over.__name__ = None
        D.q_over.__name__ = None

        # get them to trigger them to search their name
        getattr(inst, 'q1')
        getattr(inst, 'q2')
        getattr(inst, 'q3')
        getattr(inst, 'q_over')

        self.assertEqual(C.q1.__name__, 'q1')
        self.assertEqual(C.q2.__name__, 'q2')
        self.assertEqual(D.q3.__name__, 'q3')
        # The shadowed query on C was never accessed, so it stays unnamed;
        # only the override on D discovered its name.
        self.assertIsNone(C.q_over.__name__)
        self.assertEqual(D.q_over.__name__, 'q_over')
class TestCompiledQuery(TestCase):
    """Tests for CompiledQuery's per-cursor prepared-statement caching."""

    def test_stmt_cache_on_bad_cursor(self):
        # Each prepare() appends a fresh sentinel, so the list length tells
        # us how many times the statement was actually (re)prepared.
        unique_execute_stmt = []

        class MockStatement(object):
            class dialect(object):
                class compiler(object):
                    def __init__(self, _):
                        "Does nothing"

                    def compile(self):
                        return 'stmt', ()

                    def can_prepare(self):
                        # We have to prepare if we want to try the cache
                        return True

                    def prepare(self):
                        o = object()
                        unique_execute_stmt.append(o)
                        return "prepare", o, lambda params: params

        executed = []

        class Cursor(object):
            # __weakref__ only: the cursor can be weakly referenced (as the
            # cache requires) but has no instance dict.
            __slots__ = ('__weakref__',)

            @property
            def connection(self):
                return self

            def execute(self, stmt):
                executed.append(stmt)

        cursor = Cursor()
        query = CompiledQuery(MockStatement())
        query.execute(cursor)
        # First execution prepares once and runs the prepared statement.
        self.assertLength(unique_execute_stmt, 1)
        self.assertLength(executed, 2)
        self.assertEqual(executed, [
            "prepare",
            unique_execute_stmt[0],
        ])

        query.execute(cursor)
        # Second execution reuses the cached preparation: no new prepare.
        self.assertLength(unique_execute_stmt, 1)
        self.assertLength(executed, 3)
        self.assertEqual(executed, [
            "prepare",
            unique_execute_stmt[0],
            unique_execute_stmt[0],
        ])
| 28.415385 | 78 | 0.554413 |
7955c52cb0e35426c5a2a6989c117b82d8000e4e | 531 | py | Python | gwpycore/gw_gui/gw_gui_misc_controls.py | gruntwurk/gwpycore | 24f353d7026f9f682da741cddd4714ac3cd807b2 | [
"BSD-3-Clause"
] | null | null | null | gwpycore/gw_gui/gw_gui_misc_controls.py | gruntwurk/gwpycore | 24f353d7026f9f682da741cddd4714ac3cd807b2 | [
"BSD-3-Clause"
] | null | null | null | gwpycore/gw_gui/gw_gui_misc_controls.py | gruntwurk/gwpycore | 24f353d7026f9f682da741cddd4714ac3cd807b2 | [
"BSD-3-Clause"
] | 2 | 2020-10-06T00:13:18.000Z | 2021-07-16T23:22:10.000Z | from PyQt5.QtWidgets import QAbstractButton, QButtonGroup
def find_button_in_group_by_text(button_group: QButtonGroup, text) -> QAbstractButton:
    """Return the first button in *button_group* whose label equals *text*,
    or None when no button matches."""
    matches = (candidate for candidate in button_group.buttons()
               if candidate.text() == text)
    return next(matches, None)
def clear_button_group(button_group: QButtonGroup):
    """Uncheck every button in *button_group*, then restore exclusive mode.

    Exclusivity is lifted first because an exclusive group will not allow
    its checked button to be unchecked.
    """
    button_group.setExclusive(False)
    for member in button_group.buttons():
        member.setChecked(False)
    button_group.setExclusive(True)
__all__ = ("find_button_in_group_by_text","clear_button_group")
| 31.235294 | 86 | 0.751412 |
7955c5f3a01cadd1abadb354695d9dbf88980dc0 | 761 | py | Python | checkov/terraform/checks/resource/azure/AzureSearchPublicNetworkAccessDisabled.py | jamesholland-uk/checkov | d73fd4bd7096d48ab3434a92a177bcc55605460a | [
"Apache-2.0"
] | 4,013 | 2019-12-09T13:16:54.000Z | 2022-03-31T14:31:01.000Z | checkov/terraform/checks/resource/azure/AzureSearchPublicNetworkAccessDisabled.py | jamesholland-uk/checkov | d73fd4bd7096d48ab3434a92a177bcc55605460a | [
"Apache-2.0"
] | 1,258 | 2019-12-17T09:55:51.000Z | 2022-03-31T19:17:17.000Z | checkov/terraform/checks/resource/azure/AzureSearchPublicNetworkAccessDisabled.py | jamesholland-uk/checkov | d73fd4bd7096d48ab3434a92a177bcc55605460a | [
"Apache-2.0"
] | 638 | 2019-12-19T08:57:38.000Z | 2022-03-30T21:38:37.000Z | from checkov.common.models.enums import CheckCategories
from checkov.terraform.checks.resource.base_resource_value_check import BaseResourceValueCheck
class AzureSearchPublicNetworkAccessDisabled(BaseResourceValueCheck):
    """Checkov policy CKV_AZURE_124: azurerm_search_service resources must
    have public network access turned off."""

    def __init__(self):
        super().__init__(
            name="Ensure that Azure Cognitive Search disables public network access",
            id="CKV_AZURE_124",
            categories=[CheckCategories.NETWORKING],
            supported_resources=['azurerm_search_service'],
        )

    def get_inspected_key(self):
        # Terraform attribute examined by the base value check.
        return 'public_network_access_enabled'

    def get_expected_value(self):
        # The check passes only when public access is explicitly disabled.
        return False


check = AzureSearchPublicNetworkAccessDisabled()
| 36.238095 | 106 | 0.773982 |
7955c613ec931772e609db70cd48f9968b123e93 | 2,484 | py | Python | test_libs/pyspec/eth2spec/test/helpers/phase1/shard_block.py | NethermindEth/eth2.0-specs | f1bf0bf85b0208f784aa52c31d5adda744090798 | [
"CC0-1.0"
] | null | null | null | test_libs/pyspec/eth2spec/test/helpers/phase1/shard_block.py | NethermindEth/eth2.0-specs | f1bf0bf85b0208f784aa52c31d5adda744090798 | [
"CC0-1.0"
] | null | null | null | test_libs/pyspec/eth2spec/test/helpers/phase1/shard_block.py | NethermindEth/eth2.0-specs | f1bf0bf85b0208f784aa52c31d5adda744090798 | [
"CC0-1.0"
] | 1 | 2020-07-05T22:12:40.000Z | 2020-07-05T22:12:40.000Z | from copy import deepcopy
from eth2spec.test.helpers.keys import privkeys
from eth2spec.utils.bls import (
bls_sign,
only_with_bls,
)
from eth2spec.utils.ssz.ssz_impl import (
signing_root,
)
from .attestations import (
sign_shard_attestation,
)
@only_with_bls()
def sign_shard_block(spec, beacon_state, shard_state, block, proposer_index=None):
    """Set block.signature to the proposer's BLS signature over the block.

    When *proposer_index* is None it is looked up from the spec for this
    shard and slot.  The @only_with_bls decorator makes this a no-op when
    BLS signing is disabled in the test environment.
    """
    if proposer_index is None:
        proposer_index = spec.get_shard_proposer_index(beacon_state, shard_state.shard, block.slot)

    # Test keypairs are indexed by validator index.
    privkey = privkeys[proposer_index]

    block.signature = bls_sign(
        message_hash=signing_root(block),
        privkey=privkey,
        domain=spec.get_domain(
            beacon_state,
            spec.DOMAIN_SHARD_PROPOSER,
            spec.compute_epoch_of_shard_slot(block.slot),
        )
    )
def build_empty_shard_block(spec,
                            beacon_state,
                            shard_state,
                            slot,
                            signed=False,
                            full_attestation=False):
    """Build a contentless ShardBlock on top of the current shard state.

    *full_attestation* makes the whole shard committee attest to the block;
    *signed* additionally signs it with the proposer's key.  The block's
    slot defaults to the shard state's current slot when *slot* is None.
    """
    if slot is None:
        slot = shard_state.slot

    # Derive the beacon block root from the latest beacon header, filling
    # in its state root first if it is still the empty placeholder.
    previous_beacon_header = deepcopy(beacon_state.latest_block_header)
    if previous_beacon_header.state_root == spec.Bytes32():
        previous_beacon_header.state_root = beacon_state.hash_tree_root()
    beacon_block_root = spec.signing_root(previous_beacon_header)

    # Same placeholder handling for the parent shard block header.
    previous_block_header = deepcopy(shard_state.latest_block_header)
    if previous_block_header.state_root == spec.Bytes32():
        previous_block_header.state_root = shard_state.hash_tree_root()
    parent_root = signing_root(previous_block_header)

    block = spec.ShardBlock(
        shard=shard_state.shard,
        slot=slot,
        beacon_block_root=beacon_block_root,
        parent_root=parent_root,
        block_size_sum=shard_state.block_size_sum + spec.SHARD_HEADER_SIZE,
    )

    if full_attestation:
        # Mark every committee member as participating; pad the bitfield
        # out to the fixed maximum size with False.
        shard_committee = spec.get_shard_committee(beacon_state, shard_state.shard, block.slot)
        block.aggregation_bits = list(
            (True,) * len(shard_committee) +
            (False,) * (spec.MAX_PERIOD_COMMITTEE_SIZE * 2 - len(shard_committee))
        )
    else:
        shard_committee = []

    block.attestations = sign_shard_attestation(
        spec,
        beacon_state,
        shard_state,
        block,
        participants=shard_committee,
    )

    if signed:
        sign_shard_block(spec, beacon_state, shard_state, block)

    return block
| 29.927711 | 99 | 0.667069 |
7955c6c3ca0ec375153bddf47c91139d0f55e5b1 | 2,168 | py | Python | src/bin/shipyard_client/shipyard_client/cli/help/output.py | nishant9620/airship-shipyard | 08957916056f08ce99acd1837511c106ce74dd2f | [
"Apache-2.0"
] | null | null | null | src/bin/shipyard_client/shipyard_client/cli/help/output.py | nishant9620/airship-shipyard | 08957916056f08ce99acd1837511c106ce74dd2f | [
"Apache-2.0"
] | null | null | null | src/bin/shipyard_client/shipyard_client/cli/help/output.py | nishant9620/airship-shipyard | 08957916056f08ce99acd1837511c106ce74dd2f | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 AT&T Intellectual Property. All other rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
def default():
return '''THE SHIPYARD COMMAND
The base shipyard command supports options that determine cross-CLI behaviors.
FORMAT
shipyard [--context-marker=<uuid>] [--os_{various}=<value>]
[--debug/--no-debug] [--output-format] <subcommands>
Please Note: --os_auth_url is required for every command except shipyard help
<topic>.
TOPICS
For information of the following topics, run shipyard help <topic>
actions
configdocs'''
def actions():
return '''ACTIONS
The workflow actions that may be invoked using Shipyard
deploy_site: Triggers the initial deployment of a site using the latest
committed configuration documents.
update_site: Triggers the update to a deployment of a site, using the latest
committed configuration documents.
update_software: Starts an update that only exercises the software portion of
the committed configuration documents.
redeploy_server: Using parameters to indicate which server(s), triggers a
redeployment of servers to the last committed design and
secrets.
relabel_nodes: Using parameters to indicate which server(s), updates the
labels for those servers.
'''
def configdocs():
return '''CONFIGDOCS
The Shipyard Buffer Documents
Supported Commands:
shipyard commit configdocs
shipyard create configdocs
shipyard get configdocs'''
def logs():
return '''LOGS
Allows users to query and view logs using Shipyard
Supported Commands:
shipyard logs step'''
| 30.535211 | 78 | 0.73155 |
7955c7af39857a228ad241e8f11d642b6aeacb33 | 1,216 | py | Python | mathgenerator/funcs/algebra/linear_equations.py | Sankari-K/mathgenerator | 712c74fbe34fe594c4c0f7e3b3057b01d85112ba | [
"MIT"
] | 40 | 2020-11-17T19:45:20.000Z | 2022-03-22T18:16:43.000Z | mathgenerator/funcs/algebra/linear_equations.py | Sankari-K/mathgenerator | 712c74fbe34fe594c4c0f7e3b3057b01d85112ba | [
"MIT"
] | 209 | 2020-10-14T15:32:08.000Z | 2020-11-03T19:08:19.000Z | mathgenerator/funcs/algebra/linear_equations.py | Sankari-K/mathgenerator | 712c74fbe34fe594c4c0f7e3b3057b01d85112ba | [
"MIT"
] | 179 | 2020-10-14T15:36:55.000Z | 2020-10-29T19:26:16.000Z | from .__init__ import *
def gen_func(n=2, varRange=20, coeffRange=20, format='string'):
    """Generate a random system of *n* linear equations and its solution.

    Returns (problem, solution) strings; at most 10 variables are
    supported (x, y, z, a..g).  With format='latex' a placeholder string
    is returned instead, matching the framework convention.
    """
    if n > 10:
        print("[!] n cannot be greater than 10")
        return None, None

    symbols = ['x', 'y', 'z', 'a', 'b', 'c', 'd', 'e', 'f', 'g'][:n]
    # Pick the target value for each variable first (RNG order matters
    # for reproducibility with seeded tests).
    values = [random.randint(-varRange, varRange) for _ in range(n)]
    solution = ", ".join("{} = {}".format(sym, val)
                         for sym, val in zip(symbols, values))

    equations = []
    for _ in range(n):
        coeffs = [random.randint(-coeffRange, coeffRange) for _ in range(n)]
        rhs = sum(c * v for c, v in zip(coeffs, values))
        # Zero-coefficient terms are dropped from the rendered equation.
        terms = ["{}{}".format(c, s) for c, s in zip(coeffs, symbols) if c != 0]
        equations.append(" + ".join(terms) + " = " + str(rhs))
    problem = ", ".join(equations)

    if format == 'latex':
        return "Latex unavailable"
    return problem, solution
# Register this generator with the framework under id 26; the strings
# document the tweakable keyword defaults shown to users.
linear_equations = Generator("Linear Equations", 26, gen_func,
                             ["n=2", "varRange=20", "coeffRange=20"])
| 29.658537 | 75 | 0.512336 |
7955c88ed93810155ec180970da859a7eae5b2e4 | 9,438 | py | Python | common/params.py | ConservativeEconomist/openpilot | ff1598f88a6f71dddf4e8c51f69f237b29c938d2 | [
"MIT"
] | 1 | 2018-04-22T07:00:49.000Z | 2018-04-22T07:00:49.000Z | common/params.py | cshr/openpilot | 9a9ff839a9b70cb2601d7696af743f5652395389 | [
"MIT"
] | null | null | null | common/params.py | cshr/openpilot | 9a9ff839a9b70cb2601d7696af743f5652395389 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
"""ROS has a parameter server, we have files.
The parameter store is a persistent key value store, implemented as a directory with a writer lock.
On Android, we store params under params_dir = /data/params. The writer lock is a file
"<params_dir>/.lock" taken using flock(), and data is stored in a directory symlinked to by
"<params_dir>/d".
Each key, value pair is stored as a file with named <key> with contents <value>, located in
<params_dir>/d/<key>
Readers of a single key can just open("<params_dir>/d/<key>") and read the file contents.
Readers who want a consistent snapshot of multiple keys should take the lock.
Writers should take the lock before modifying anything. Writers should also leave the DB in a
consistent state after a crash. The implementation below does this by copying all params to a temp
directory <params_dir>/<tmp>, then atomically symlinking <params_dir>/<d> to <params_dir>/<tmp>
before deleting the old <params_dir>/<d> directory.
Writers that only modify a single key can simply take the lock, then swap the corresponding value
file in place without messing with <params_dir>/d.
"""
import time
import os
import errno
import sys
import shutil
import fcntl
import tempfile
from enum import Enum
def mkdirs_exists_ok(path):
    """Create *path* and any missing parents, `mkdir -p` style.

    An already-existing directory is fine; any other failure propagates.
    """
    try:
        os.makedirs(path)
    except OSError:
        if os.path.isdir(path):
            return
        raise
class TxType(Enum):
    """Lifecycle class of a param key: kept forever, or wiped when the
    manager process or the car (ignition) starts."""
    PERSISTANT = 1             # survives reboots (historical misspelling)
    CLEAR_ON_MANAGER_START = 2 # wiped when the manager daemon starts
    CLEAR_ON_CAR_START = 3     # wiped at the start of each drive
class UnknownKeyName(Exception):
    """Raised when a param key is not declared in the `keys` registry."""
    pass
# Registry of every supported param key and its persistence lifecycle.
# Only keys listed here may be read or written through Params.
keys = {
# written: manager
# read: loggerd, uploaderd, baseui
  "DongleId": TxType.PERSISTANT,
  "AccessToken": TxType.PERSISTANT,
  "Version": TxType.PERSISTANT,
  "GitCommit": TxType.PERSISTANT,
  "GitBranch": TxType.PERSISTANT,
  "GitRemote": TxType.PERSISTANT,
# written: baseui
# read: ui, controls
  "IsMetric": TxType.PERSISTANT,
  "IsRearViewMirror": TxType.PERSISTANT,
  "IsFcwEnabled": TxType.PERSISTANT,
  "HasAcceptedTerms": TxType.PERSISTANT,
  "IsUploadVideoOverCellularEnabled": TxType.PERSISTANT,
# written: visiond
# read: visiond, controlsd
  "CalibrationParams": TxType.PERSISTANT,
# written: visiond
# read: visiond, ui
  "CloudCalibration": TxType.PERSISTANT,
# written: controlsd
# read: radard
  "CarParams": TxType.CLEAR_ON_CAR_START,

  "Passive": TxType.PERSISTANT,
  "DoUninstall": TxType.CLEAR_ON_MANAGER_START,
  "ShouldDoUpdate": TxType.CLEAR_ON_MANAGER_START,
  "IsUpdateAvailable": TxType.PERSISTANT,

  "RecordFront": TxType.PERSISTANT,
}
def fsync_dir(path):
    """Flush directory metadata (new entries, renames) to stable storage."""
    dir_fd = os.open(path, os.O_RDONLY)
    try:
        os.fsync(dir_fd)
    finally:
        # Always close the descriptor, even if fsync fails.
        os.close(dir_fd)
class FileLock(object):
    """Exclusive advisory lock over a file, backed by flock()."""

    def __init__(self, path, create):
        self._path = path
        self._create = create
        self._fd = None  # open descriptor while the lock is held

    def acquire(self):
        """Open (optionally creating) the lock file and block until locked."""
        open_flags = os.O_CREAT if self._create else 0
        self._fd = os.open(self._path, open_flags)
        fcntl.flock(self._fd, fcntl.LOCK_EX)

    def release(self):
        """Drop the lock; releasing an unheld lock is a no-op."""
        if self._fd is None:
            return
        os.close(self._fd)
        self._fd = None
class DBAccessor(object):
    """Shared plumbing for DBReader/DBWriter: key/value pairs are files
    under <path>/d/<key>, guarded by a flock on <path>/.lock."""

    def __init__(self, path):
        self._path = path
        self._vals = None  # key -> bytes snapshot; populated by __enter__

    def keys(self):
        self._check_entered()
        return self._vals.keys()

    def get(self, key):
        """Return the value for *key*, or None when absent."""
        self._check_entered()
        try:
            return self._vals[key]
        except KeyError:
            return None

    def _get_lock(self, create):
        lock = FileLock(os.path.join(self._path, ".lock"), create)
        lock.acquire()
        return lock

    def _read_values_locked(self):
        """Callers should hold a lock while calling this method."""
        vals = {}
        try:
            data_path = self._data_path()
            keys = os.listdir(data_path)
            for key in keys:
                with open(os.path.join(data_path, key), "rb") as f:
                    vals[key] = f.read()
        except (OSError, IOError) as e:
            # Either the DB hasn't been created yet, or somebody wrote a bug and left the DB in an
            # inconsistent state. Either way, return empty.
            if e.errno == errno.ENOENT:
                return {}
            # NOTE(review): any other errno falls through here and returns the
            # partially-read dict, silently swallowing the error — confirm
            # whether that is intended or should re-raise.
        return vals

    def _data_path(self):
        # "d" is a symlink to the current data directory (see DBWriter).
        return os.path.join(self._path, "d")

    def _check_entered(self):
        if self._vals is None:
            raise Exception("Must call __enter__ before using DB")
class DBReader(DBAccessor):
    """Read-only access: takes the lock briefly to snapshot all values.

    A missing database (no lock file yet) reads as empty.
    """

    def __enter__(self):
        try:
            lock = self._get_lock(False)
        except OSError as e:
            # Do not create lock if it does not exist.
            if e.errno == errno.ENOENT:
                self._vals = {}
                return self
            # Bug fix: previously any other OSError fell through this
            # handler and execution reached `lock.release()` below with
            # `lock` unbound, raising a confusing NameError. Propagate the
            # original error instead.
            raise

        try:
            # Read everything.
            self._vals = self._read_values_locked()
            return self
        finally:
            lock.release()

    def __exit__(self, type, value, traceback): pass
class DBWriter(DBAccessor):
    """Read-write transaction: holds the lock for its whole lifetime,
    mutates an in-memory copy, and atomically publishes it on exit by
    writing a fresh directory and swapping the "d" symlink."""

    def __init__(self, path):
        super(DBWriter, self).__init__(path)
        self._lock = None
        self._prev_umask = None  # saved so __exit__ can restore it

    def put(self, key, value):
        self._vals[key] = value

    def delete(self, key):
        # Deleting a missing key is a no-op.
        self._vals.pop(key, None)

    def __enter__(self):
        mkdirs_exists_ok(self._path)

        # Make sure we can write and that permissions are correct.
        self._prev_umask = os.umask(0)

        try:
            os.chmod(self._path, 0o777)
            self._lock = self._get_lock(True)
            self._vals = self._read_values_locked()
        except:
            # Restore the umask before propagating; __exit__ won't run.
            os.umask(self._prev_umask)
            self._prev_umask = None
            raise

        return self

    def __exit__(self, type, value, traceback):
        self._check_entered()
        try:
            # data_path refers to the externally used path to the params. It is a symlink.
            # old_data_path is the path currently pointed to by data_path.
            # tempdir_path is a path where the new params will go, which the new data path will point to.
            # new_data_path is a temporary symlink that will atomically overwrite data_path.
            #
            # The current situation is:
            #   data_path -> old_data_path
            # We're going to write params data to tempdir_path
            #   tempdir_path -> params data
            # Then point new_data_path to tempdir_path
            #   new_data_path -> tempdir_path
            # Then atomically overwrite data_path with new_data_path
            #   data_path -> tempdir_path
            old_data_path = None
            new_data_path = None
            tempdir_path = tempfile.mkdtemp(prefix=".tmp", dir=self._path)

            try:
                # Write back all keys.
                os.chmod(tempdir_path, 0o777)
                for k, v in self._vals.items():
                    with open(os.path.join(tempdir_path, k), "wb") as f:
                        f.write(v)
                        f.flush()
                        os.fsync(f.fileno())
                fsync_dir(tempdir_path)

                data_path = self._data_path()
                try:
                    old_data_path = os.path.join(self._path, os.readlink(data_path))
                except (OSError, IOError):
                    # NOTE(mgraczyk): If other DB implementations have bugs, this could cause
                    # copies to be left behind, but we still want to overwrite.
                    pass

                new_data_path = "{}.link".format(tempdir_path)
                os.symlink(os.path.basename(tempdir_path), new_data_path)
                # rename over a symlink is atomic on POSIX — this is the
                # commit point of the transaction.
                os.rename(new_data_path, data_path)
                fsync_dir(self._path)
            finally:
                # If the rename worked, we can delete the old data. Otherwise delete the new one.
                success = new_data_path is not None and os.path.exists(data_path) and (
                    os.readlink(data_path) == os.path.basename(tempdir_path))

                if success:
                    if old_data_path is not None:
                        shutil.rmtree(old_data_path)
                else:
                    shutil.rmtree(tempdir_path)

                # Regardless of what happened above, there should be no link at new_data_path.
                if new_data_path is not None and os.path.islink(new_data_path):
                    os.remove(new_data_path)
        finally:
            os.umask(self._prev_umask)
            self._prev_umask = None

            # Always release the lock.
            self._lock.release()
            self._lock = None
class JSDB(object):
    """Transaction factory over the file-backed store at a given path."""

    def __init__(self, fn):
        self._fn = fn

    def begin(self, write=False):
        """Start a transaction: a DBWriter when write=True, else a DBReader."""
        return DBWriter(self._fn) if write else DBReader(self._fn)
class Params(object):
    """High-level, key-validated interface over the persistent param store.

    All keys must be declared in the module-level `keys` registry.
    (This file is Python 2: note the print statement in put().)
    """

    def __init__(self, db='/data/params'):
        self.env = JSDB(db)

    def _clear_keys_with_type(self, tx_type):
        # Delete every registered key whose declared lifecycle matches.
        with self.env.begin(write=True) as txn:
            for key in keys:
                if keys[key] == tx_type:
                    txn.delete(key)

    def manager_start(self):
        """Wipe keys that must not survive a manager restart."""
        self._clear_keys_with_type(TxType.CLEAR_ON_MANAGER_START)

    def car_start(self):
        """Wipe keys that must not survive across drives."""
        self._clear_keys_with_type(TxType.CLEAR_ON_CAR_START)

    def delete(self, key):
        with self.env.begin(write=True) as txn:
            txn.delete(key)

    def get(self, key, block=False):
        """Return the raw value for *key*; with block=True, poll until set."""
        if key not in keys:
            raise UnknownKeyName(key)

        while 1:
            # Each poll is its own short read transaction.
            with self.env.begin() as txn:
                ret = txn.get(key)
            if not block or ret is not None:
                break
            # is polling really the best we can do?
            time.sleep(0.05)
        return ret

    def put(self, key, dat):
        if key not in keys:
            raise UnknownKeyName(key)

        with self.env.begin(write=True) as txn:
            txn.put(key, dat)
        print "set", key
if __name__ == "__main__":
    params = Params()
    if len(sys.argv) > 2:
        # Write mode: params.py <key> <value>
        params.put(sys.argv[1], sys.argv[2])
    else:
        # Dump mode: print every registered key; values that are not
        # printable ASCII are shown hex-encoded (Python 2 str.encode).
        for k in keys:
            pp = params.get(k)
            if pp is None:
                print k, "is None"
            elif all(ord(c) < 128 and ord(c) >= 32 for c in pp):
                print k, pp
            else:
                print k, pp.encode("hex")
# Test multiprocess:
# seq 0 100000 | xargs -P20 -I{} python common/params.py DongleId {} && sleep 0.05
# while python common/params.py DongleId; do sleep 0.05; done
| 28.173134 | 99 | 0.66317 |
7955c8c361c84e81ab5bba216ecad46a25aca2e7 | 382 | py | Python | src/Hyperactive/hyperactive/optimizers/monte_carlo/__init__.py | skn123/LDWPSO-CNN | 7f05eb1defee2e968e5b3bed53f2b444b2b48fdb | [
"MIT"
] | 6 | 2020-01-24T16:15:34.000Z | 2022-03-21T13:53:32.000Z | src/Hyperactive/hyperactive/optimizers/monte_carlo/__init__.py | skn123/LDWPSO-CNN | 7f05eb1defee2e968e5b3bed53f2b444b2b48fdb | [
"MIT"
] | 1 | 2020-06-15T04:19:32.000Z | 2020-06-15T04:19:32.000Z | src/Hyperactive/hyperactive/optimizers/monte_carlo/__init__.py | skn123/LDWPSO-CNN | 7f05eb1defee2e968e5b3bed53f2b444b2b48fdb | [
"MIT"
] | 3 | 2021-03-29T17:11:27.000Z | 2021-05-17T13:33:10.000Z | # Author: Simon Blanke
# Email: simon.blanke@yahoo.com
# License: MIT License
from .simulated_annealing import SimulatedAnnealingOptimizer
from .stochastic_tunneling import StochasticTunnelingOptimizer
from .parallel_tempering import ParallelTemperingOptimizer
__all__ = [
"SimulatedAnnealingOptimizer",
"StochasticTunnelingOptimizer",
"ParallelTemperingOptimizer",
]
| 27.285714 | 62 | 0.82199 |
7955c8df596a984aac3529a22d1a5a5d801bf767 | 774 | py | Python | day-06/part-2/chloe.py | lypnol/adventofcode-2017 | 03ced3df3eb80e5c7965c4120e3932919067cb15 | [
"MIT"
] | 16 | 2017-12-02T11:56:25.000Z | 2018-02-10T15:09:23.000Z | day-06/part-2/chloe.py | lypnol/adventofcode-2017 | 03ced3df3eb80e5c7965c4120e3932919067cb15 | [
"MIT"
] | 19 | 2017-12-01T07:54:22.000Z | 2017-12-19T17:41:02.000Z | day-06/part-2/chloe.py | lypnol/adventofcode-2017 | 03ced3df3eb80e5c7965c4120e3932919067cb15 | [
"MIT"
] | 4 | 2017-12-04T23:58:12.000Z | 2018-02-01T08:53:16.000Z | from submission import Submission
class ChloeSubmission(Submission):
def run(self, s):
input = list(map(int,s.split('\t')))
liste_deja_vu = [input]
deja_vu = False
while not deja_vu:
input = self.allocation(input)
deja_vu = input in liste_deja_vu
liste_deja_vu.append(input)
indices = [i for i, x in enumerate(liste_deja_vu) if x == input]
difference = abs(indices[1] - indices[0])
return difference
def allocation(self, etat_depart):
etat_final = etat_depart[:]
n = len(etat_final)
pointeur = etat_final.index(max(etat_final))
a_distribuer = max(etat_final)
etat_final[pointeur] = 0
while a_distribuer > 0:
pointeur += 1
if pointeur == n:
pointeur = 0
etat_final[pointeur] += 1
a_distribuer -= 1
return etat_final | 24.967742 | 66 | 0.698966 |
7955c8e350c17fa5001c7a212d5c91c7c283ff5c | 631 | py | Python | manage.py | AtriSaxena/HospitalsAPI | 1336bafceb197628c776b860bd16ab6064a3849b | [
"Apache-2.0"
] | 2 | 2020-05-20T10:15:49.000Z | 2021-05-06T10:19:02.000Z | manage.py | AtriSaxena/HospitalsAPI | 1336bafceb197628c776b860bd16ab6064a3849b | [
"Apache-2.0"
] | 5 | 2021-03-19T03:24:15.000Z | 2021-09-22T19:01:59.000Z | manage.py | AtriSaxena/HospitalsAPI | 1336bafceb197628c776b860bd16ab6064a3849b | [
"Apache-2.0"
] | 1 | 2021-05-06T10:19:03.000Z | 2021-05-06T10:19:03.000Z | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
    """Entry point for Django's command-line administrative tasks."""
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'HospitalAPI.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    else:
        # Only reached when the import succeeded.
        execute_from_command_line(sys.argv)
if __name__ == '__main__':
    main()
| 28.681818 | 75 | 0.684628 |
7955c9c5d7ab1074f2bd67fc771ab059d0d76f2d | 509 | bzl | Python | glfw/repository.bzl | dldt/third_party.bzl | 8c5431c3c48ad4c8761f0daee004d94d1f165eb2 | [
"MIT"
] | 1 | 2021-12-27T05:40:05.000Z | 2021-12-27T05:40:05.000Z | glfw/repository.bzl | tarcila/third_party.bzl | 8c5431c3c48ad4c8761f0daee004d94d1f165eb2 | [
"MIT"
] | null | null | null | glfw/repository.bzl | tarcila/third_party.bzl | 8c5431c3c48ad4c8761f0daee004d94d1f165eb2 | [
"MIT"
] | 1 | 2021-07-22T07:16:50.000Z | 2021-07-22T07:16:50.000Z | load("@bazel_tools//tools/build_defs/repo:utils.bzl", "maybe")
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
def glfw_repository():
    """Declares the @glfw external repository (GLFW 3.3.6 source archive).

    Wrapped in maybe() so the declaration is a no-op when the workspace
    already defines a repository named "glfw".
    """
    maybe(
        http_archive,
        name = "glfw",
        urls = [
            "https://github.com/glfw/glfw/releases/download/3.3.6/glfw-3.3.6.zip",
        ],
        # Integrity pin for the archive above; update together with the URL.
        sha256 = "45537305d44c0a9f3612d4ec4a48414547cf854bff3ed613078f7ec648a12781",
        strip_prefix = "glfw-3.3.6/",
        # GLFW ships no BUILD file; this local file supplies the targets.
        build_file = "@third_party//glfw:package.BUILD",
    )
7955caf0722c623102ee8ce749e23bb9de7fff64 | 1,434 | py | Python | apps/odoo/lib/odoo-10.0.post20170615-py2.7.egg/odoo/addons/mrp/models/stock_picking.py | gtfarng/Odoo_migrade | 9cc28fae4c379e407645248a29d22139925eafe7 | [
"Apache-2.0"
] | 1 | 2019-12-19T01:53:13.000Z | 2019-12-19T01:53:13.000Z | apps/odoo/lib/odoo-10.0.post20170615-py2.7.egg/odoo/addons/mrp/models/stock_picking.py | gtfarng/Odoo_migrade | 9cc28fae4c379e407645248a29d22139925eafe7 | [
"Apache-2.0"
] | null | null | null | apps/odoo/lib/odoo-10.0.post20170615-py2.7.egg/odoo/addons/mrp/models/stock_picking.py | gtfarng/Odoo_migrade | 9cc28fae4c379e407645248a29d22139925eafe7 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import fields, models
class StockPickingType(models.Model):
    """Extends stock.picking.type with manufacturing-order (MO) counters."""
    _inherit = 'stock.picking.type'
    code = fields.Selection(selection_add=[('mrp_operation', 'Manufacturing Operation')])
    # Counters surfaced on the picking-type kanban; all computed below.
    count_mo_todo = fields.Integer(compute='_get_mo_count')
    count_mo_waiting = fields.Integer(compute='_get_mo_count')
    count_mo_late = fields.Integer(compute='_get_mo_count')
    def _get_mo_count(self):
        """Compute the todo/waiting/late MO counts per picking type.

        NOTE(review): the early return leaves the three counter fields
        unassigned on records whose code != 'mrp_operation' — confirm the
        framework tolerates compute methods that skip some records.
        """
        mrp_picking_types = self.filtered(lambda picking: picking.code == 'mrp_operation')
        if not mrp_picking_types:
            return
        # One search domain per counter field; each is further restricted
        # below to non-finished MOs belonging to the picking types at hand.
        domains = {
            'count_mo_waiting': [('availability', '=', 'waiting')],
            'count_mo_todo': [('state', 'in', ('confirmed', 'planned', 'progress'))],
            'count_mo_late': [('date_planned_start', '<', fields.Date.today()), ('state', '=', 'confirmed')],
        }
        for field in domains:
            data = self.env['mrp.production'].read_group(domains[field] +
                [('state', 'not in', ('done', 'cancel')), ('picking_type_id', 'in', self.ids)],
                ['picking_type_id'], ['picking_type_id'])
            # Map picking_type id -> number of matching manufacturing orders.
            count = dict(map(lambda x: (x['picking_type_id'] and x['picking_type_id'][0], x['picking_type_id_count']), data))
            for record in mrp_picking_types:
                record[field] = count.get(record.id, 0)
7955cb66cf06559e0cb667a24c0079765d2762a4 | 214 | py | Python | iwrok_test/wsgi.py | Mohamed-Kaizen/IWork-test | 1212a06907fed7a6fc216744768f74ad9642c420 | [
"MIT"
] | null | null | null | iwrok_test/wsgi.py | Mohamed-Kaizen/IWork-test | 1212a06907fed7a6fc216744768f74ad9642c420 | [
"MIT"
] | 4 | 2021-04-08T20:14:49.000Z | 2021-09-22T19:45:35.000Z | iwrok_test/wsgi.py | Mohamed-Kaizen/IWork-test | 1212a06907fed7a6fc216744768f74ad9642c420 | [
"MIT"
] | 1 | 2020-12-15T20:50:58.000Z | 2020-12-15T20:50:58.000Z | """WSGI config for IWrok Test project."""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "iwrok_test.settings")
application = get_wsgi_application()
| 21.4 | 70 | 0.799065 |
7955cc9b6f5e1918e8f16632b4a74c78b59edd89 | 12,319 | py | Python | docs/conf.py | renovate-bot/python-automl | 5c1a9a383680bc5b89f514aa9a3aef47af9feac2 | [
"Apache-2.0"
] | 68 | 2020-01-31T18:13:23.000Z | 2022-03-28T14:57:12.000Z | docs/conf.py | renovate-bot/python-automl | 5c1a9a383680bc5b89f514aa9a3aef47af9feac2 | [
"Apache-2.0"
] | 184 | 2020-01-31T17:34:00.000Z | 2022-03-30T22:42:11.000Z | docs/conf.py | renovate-bot/python-automl | 5c1a9a383680bc5b89f514aa9a3aef47af9feac2 | [
"Apache-2.0"
] | 29 | 2020-01-31T19:32:55.000Z | 2022-01-29T08:07:34.000Z | # -*- coding: utf-8 -*-
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# google-cloud-automl documentation build configuration file
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shlex
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath(".."))
# For plugins that can not read conf.py.
# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85
sys.path.insert(0, os.path.abspath("."))
# NOTE(review): __version__ is empty here, so `release`/`version` below are
# empty strings — presumably populated by the docs build tooling; confirm.
__version__ = ""
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
needs_sphinx = "1.5.5"
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    "sphinx.ext.autodoc",
    "sphinx.ext.autosummary",
    "sphinx.ext.intersphinx",
    "sphinx.ext.coverage",
    "sphinx.ext.doctest",
    "sphinx.ext.napoleon",
    "sphinx.ext.todo",
    "sphinx.ext.viewcode",
    "recommonmark",
]
# autodoc/autosummary flags
autoclass_content = "both"
autodoc_default_options = {"members": True}
autosummary_generate = True
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = [".rst", ".md"]
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The root toctree document.
root_doc = "index"
# General information about the project.
project = "google-cloud-automl"
copyright = "2019, Google"
author = "Google APIs"
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The full version, including alpha/beta/rc tags.
release = __version__
# The short X.Y version.
version = ".".join(release.split(".")[0:2])
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = [
    "_build",
    "**/.nox/**/*",
    "samples/AUTHORING_GUIDE.md",
    "samples/CONTRIBUTING.md",
    "samples/snippets/README.rst",
]
# The reST default role (used for this markup: `text`) to use for all
# documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = "alabaster"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {
    "description": "Google Cloud Client Libraries for google-cloud-automl",
    "github_user": "googleapis",
    "github_repo": "python-automl",
    "github_banner": True,
    "font_family": "'Roboto', Georgia, sans",
    "head_font_family": "'Roboto', Georgia, serif",
    "code_font_family": "'Roboto Mono', 'Consolas', monospace",
}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
# html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
# html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
# html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
# html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = "google-cloud-automl-doc"
# -- Options for warnings ------------------------------------------------------
suppress_warnings = [
    # Temporarily suppress this to avoid "more than one target found for
    # cross-reference" warning, which are intractable for us to avoid while in
    # a mono-repo.
    # See https://github.com/sphinx-doc/sphinx/blob
    # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843
    "ref.python"
]
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #'papersize': 'letterpaper',
    # The font size ('10pt', '11pt' or '12pt').
    #'pointsize': '10pt',
    # Additional stuff for the LaTeX preamble.
    #'preamble': '',
    # Latex figure (float) alignment
    #'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
    (
        root_doc,
        "google-cloud-automl.tex",
        "google-cloud-automl Documentation",
        author,
        "manual",
    )
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    (root_doc, "google-cloud-automl", "google-cloud-automl Documentation", [author], 1,)
]
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
    (
        root_doc,
        "google-cloud-automl",
        "google-cloud-automl Documentation",
        author,
        "google-cloud-automl",
        "google-cloud-automl Library",
        "APIs",
    )
]
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
# If false, no module index is generated.
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
# texinfo_no_detailmenu = False
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {
    "python": ("https://python.readthedocs.org/en/latest/", None),
    "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None),
    "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,),
    "grpc": ("https://grpc.github.io/grpc/python/", None),
    "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None),
    "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None),
}
# Napoleon settings
napoleon_google_docstring = True
napoleon_numpy_docstring = True
napoleon_include_private_with_doc = False
napoleon_include_special_with_doc = True
napoleon_use_admonition_for_examples = False
napoleon_use_admonition_for_notes = False
napoleon_use_admonition_for_references = False
napoleon_use_ivar = False
napoleon_use_param = True
napoleon_use_rtype = True
| 32.763298 | 88 | 0.706794 |
7955cd296081f315675ca8d4583d3561cde187ea | 1,063 | py | Python | openstack_dashboard/dashboards/project/database_backups/urls.py | shhui/horizon | fd8cf6e31c07b147289bfb86c90133599eb2906e | [
"Apache-2.0"
] | null | null | null | openstack_dashboard/dashboards/project/database_backups/urls.py | shhui/horizon | fd8cf6e31c07b147289bfb86c90133599eb2906e | [
"Apache-2.0"
] | null | null | null | openstack_dashboard/dashboards/project/database_backups/urls.py | shhui/horizon | fd8cf6e31c07b147289bfb86c90133599eb2906e | [
"Apache-2.0"
] | 4 | 2015-05-05T08:17:28.000Z | 2020-02-05T10:47:06.000Z | # vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2013 Rackspace Hosting
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.conf.urls import patterns # noqa
from django.conf.urls import url # noqa
from openstack_dashboard.dashboards.project.database_backups import views
# URL routes for the database-backups pages. patterns() (pre-Django-1.10 API)
# takes a common view prefix first; it is empty here because view classes are
# referenced directly.
urlpatterns = patterns(
    '',
    url(r'^$', views.IndexView.as_view(), name='index'),
    url(r'^create$', views.BackupView.as_view(), name='create'),
    # Detail page keyed by the backup's identifier captured as `backup_id`.
    url(r'^(?P<backup_id>[^/]+)/$', views.DetailView.as_view(),
        name='detail'),
)
| 36.655172 | 78 | 0.709313 |
7955cdf50084f604f7550226cfe19cb4dda4e949 | 14,440 | py | Python | share/migrations/0039_subject_taxonomy_a.py | CenterForOpenScience/SHARE | c7715af2881f6fa23197d4e7c381d90169a90ed1 | [
"Apache-2.0"
] | 87 | 2015-01-06T18:24:45.000Z | 2021-08-08T07:59:40.000Z | share/migrations/0039_subject_taxonomy_a.py | fortress-biotech/SHARE | 9c5a05dd831447949fa6253afec5225ff8ab5d4f | [
"Apache-2.0"
] | 442 | 2015-01-01T19:16:01.000Z | 2022-03-30T21:10:26.000Z | share/migrations/0039_subject_taxonomy_a.py | fortress-biotech/SHARE | 9c5a05dd831447949fa6253afec5225ff8ab5d4f | [
"Apache-2.0"
] | 67 | 2015-03-10T16:32:58.000Z | 2021-11-12T16:33:41.000Z | # -*- coding: utf-8 -*-
# Generated by Django 1.11.1 on 2017-06-02 12:55
from __future__ import unicode_literals
import db.deletion
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
import share.models.fields
class Migration(migrations.Migration):
    """Add versioning and taxonomy support for Subject.

    Creates SubjectVersion and SubjectTaxonomy, wires Subject (and its
    through-tables) to them, and re-creates the Postgres trigger functions
    that write a version row on every insert/update.
    """

    dependencies = [
        ('share', '0038_trust_system_user'),
    ]
    operations = [
        # New version table mirroring Subject's fields plus change metadata.
        migrations.CreateModel(
            name='SubjectVersion',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('action', models.TextField(max_length=10)),
                ('name', models.TextField()),
                ('is_deleted', models.BooleanField(default=False)),
                ('uri', share.models.fields.ShareURLField(blank=True, null=True)),
                ('date_created', models.DateTimeField(auto_now_add=True, help_text='The date of ingress to SHARE.')),
                ('date_modified', models.DateTimeField(auto_now=True, db_index=True, help_text='The date this record was modified by SHARE.')),
            ],
            options={
                'ordering': ('-date_modified',),
                'abstract': False,
                'db_table': None,
                'base_manager_name': 'objects',
            },
        ),
        # One taxonomy per source; subjects are grouped under it.
        migrations.CreateModel(
            name='SubjectTaxonomy',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('source', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='share.Source')),
                ('is_deleted', models.BooleanField(default=False)),
                ('date_created', models.DateTimeField(auto_now_add=True)),
                ('date_modified', models.DateTimeField(auto_now=True)),
            ],
            options={'verbose_name_plural': 'Subject Taxonomies'},
        ),
        migrations.AddField(
            model_name='subject',
            name='central_synonym',
            field=models.ForeignKey(blank=True, null=True, on_delete=db.deletion.DatabaseOnDelete(clause='CASCADE'), related_name='custom_synonyms', to='share.Subject'),
        ),
        migrations.AddField(
            model_name='subject',
            name='date_created',
            field=models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now, help_text='The date of ingress to SHARE.'),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='subject',
            name='date_modified',
            field=models.DateTimeField(auto_now=True, db_index=True, help_text='The date this record was modified by SHARE.'),
        ),
        migrations.AddField(
            model_name='subject',
            name='extra',
            field=models.OneToOneField(null=True, on_delete=db.deletion.DatabaseOnDelete(clause='CASCADE'), to='share.ExtraData'),
        ),
        migrations.AddField(
            model_name='subject',
            name='extra_version',
            field=models.OneToOneField(db_index=False, editable=False, null=True, on_delete=db.deletion.DatabaseOnDelete(clause='CASCADE'), related_name='+', to='share.ExtraDataVersion'),
        ),
        migrations.AddField(
            model_name='subject',
            name='is_deleted',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='subject',
            name='same_as',
            field=models.ForeignKey(null=True, on_delete=db.deletion.DatabaseOnDelete(clause='CASCADE'), related_name='+', to='share.Subject'),
        ),
        migrations.AddField(
            model_name='subject',
            name='sources',
            field=share.models.fields.TypedManyToManyField(editable=False, related_name='source_subject', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='subject',
            name='uri',
            field=share.models.fields.ShareURLField(blank=True, null=True, unique=True),
        ),
        migrations.AddField(
            model_name='throughsubjects',
            name='is_deleted',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='throughsubjectsversion',
            name='is_deleted',
            field=models.BooleanField(default=False),
        ),
        migrations.AlterField(
            model_name='subject',
            name='id',
            field=models.AutoField(primary_key=True, serialize=False),
        ),
        migrations.AlterField(
            model_name='subject',
            name='name',
            field=models.TextField(),
        ),
        migrations.AlterField(
            model_name='subject',
            name='parent',
            field=models.ForeignKey(blank=True, null=True, on_delete=db.deletion.DatabaseOnDelete(clause='CASCADE'), related_name='children', to='share.Subject'),
        ),
        migrations.AlterField(
            model_name='throughsubjects',
            name='subject',
            field=models.ForeignKey(on_delete=db.deletion.DatabaseOnDelete(clause='CASCADE'), related_name='work_relations', to='share.Subject'),
        ),
        migrations.AlterField(
            model_name='throughsubjectsversion',
            name='subject',
            field=models.ForeignKey(db_index=False, on_delete=db.deletion.DatabaseOnDelete(clause='CASCADE'), related_name='+', to='share.Subject'),
        ),
        migrations.AddField(
            model_name='subjectversion',
            name='central_synonym',
            field=models.ForeignKey(blank=True, db_index=False, null=True, on_delete=db.deletion.DatabaseOnDelete(clause='CASCADE'), related_name='+', to='share.Subject'),
        ),
        migrations.AddField(
            model_name='subjectversion',
            name='central_synonym_version',
            field=models.ForeignKey(blank=True, db_index=False, editable=False, null=True, on_delete=db.deletion.DatabaseOnDelete(clause='CASCADE'), related_name='+', to='share.SubjectVersion'),
        ),
        migrations.AddField(
            model_name='subjectversion',
            name='change',
            field=models.OneToOneField(editable=False, on_delete=db.deletion.DatabaseOnDelete(clause='CASCADE'), related_name='affected_subjectversion', to='share.Change'),
        ),
        migrations.AddField(
            model_name='subjectversion',
            name='extra',
            field=models.ForeignKey(null=True, on_delete=db.deletion.DatabaseOnDelete(clause='CASCADE'), to='share.ExtraData'),
        ),
        migrations.AddField(
            model_name='subjectversion',
            name='extra_version',
            field=models.ForeignKey(db_index=False, editable=False, null=True, on_delete=db.deletion.DatabaseOnDelete(clause='CASCADE'), related_name='+', to='share.ExtraDataVersion'),
        ),
        migrations.AddField(
            model_name='subjectversion',
            name='parent',
            field=models.ForeignKey(blank=True, db_index=False, null=True, on_delete=db.deletion.DatabaseOnDelete(clause='CASCADE'), related_name='+', to='share.Subject'),
        ),
        migrations.AddField(
            model_name='subjectversion',
            name='parent_version',
            field=models.ForeignKey(blank=True, db_index=False, editable=False, null=True, on_delete=db.deletion.DatabaseOnDelete(clause='CASCADE'), related_name='+', to='share.SubjectVersion'),
        ),
        migrations.AddField(
            model_name='subjectversion',
            name='persistent_id',
            field=models.ForeignKey(db_column='persistent_id', on_delete=db.deletion.DatabaseOnDelete(clause='CASCADE'), related_name='versions', to='share.Subject'),
        ),
        migrations.AddField(
            model_name='subjectversion',
            name='same_as',
            field=models.ForeignKey(null=True, on_delete=db.deletion.DatabaseOnDelete(clause='CASCADE'), related_name='+', to='share.Subject'),
        ),
        migrations.AddField(
            model_name='subjectversion',
            name='same_as_version',
            field=models.ForeignKey(db_index=False, editable=False, null=True, on_delete=db.deletion.DatabaseOnDelete(clause='CASCADE'), related_name='+', to='share.SubjectVersion'),
        ),
        migrations.AddField(
            model_name='subjectversion',
            name='taxonomy',
            field=models.ForeignKey(db_index=False, editable=False, on_delete=django.db.models.deletion.CASCADE, related_name='+', to='share.SubjectTaxonomy'),
        ),
        migrations.AddField(
            model_name='abstractcreativework',
            name='subject_versions',
            field=share.models.fields.TypedManyToManyField(editable=False, related_name='_abstractcreativework_subject_versions_+', through='share.ThroughSubjects', to='share.SubjectVersion'),
        ),
        migrations.AddField(
            model_name='abstractcreativeworkversion',
            name='subject_versions',
            field=share.models.fields.TypedManyToManyField(editable=False, related_name='_abstractcreativeworkversion_subject_versions_+', through='share.ThroughSubjects', to='share.SubjectVersion'),
        ),
        migrations.AddField(
            model_name='subject',
            name='central_synonym_version',
            field=models.ForeignKey(blank=True, db_index=False, editable=False, null=True, on_delete=db.deletion.DatabaseOnDelete(clause='CASCADE'), related_name='+', to='share.SubjectVersion'),
        ),
        migrations.AddField(
            model_name='subject',
            name='parent_version',
            field=models.ForeignKey(blank=True, db_index=False, editable=False, null=True, on_delete=db.deletion.DatabaseOnDelete(clause='CASCADE'), related_name='+', to='share.SubjectVersion'),
        ),
        migrations.AddField(
            model_name='subject',
            name='same_as_version',
            field=models.ForeignKey(db_index=False, editable=False, null=True, on_delete=db.deletion.DatabaseOnDelete(clause='CASCADE'), related_name='+', to='share.SubjectVersion'),
        ),
        # Temporarily null fields
        migrations.AddField(
            model_name='subject',
            name='change',
            field=models.OneToOneField(null=True, editable=False, on_delete=db.deletion.DatabaseOnDelete(clause='CASCADE'), related_name='affected_subject', to='share.Change'),
        ),
        migrations.AddField(
            model_name='subject',
            name='taxonomy',
            field=models.ForeignKey(null=True, editable=False, on_delete=django.db.models.deletion.CASCADE, to='share.SubjectTaxonomy'),
        ),
        migrations.AddField(
            model_name='subject',
            name='version',
            field=models.OneToOneField(null=True, editable=False, on_delete=db.deletion.DatabaseOnDelete(clause='CASCADE'), related_name='share_subject_version', to='share.SubjectVersion'),
        ),
        migrations.AddField(
            model_name='throughsubjects',
            name='subject_version',
            field=models.ForeignKey(db_index=False, null=True, editable=False, on_delete=db.deletion.DatabaseOnDelete(clause='CASCADE'), related_name='+', to='share.SubjectVersion'),
        ),
        migrations.AddField(
            model_name='throughsubjectsversion',
            name='subject_version',
            field=models.ForeignKey(db_index=False, null=True, editable=False, on_delete=db.deletion.DatabaseOnDelete(clause='CASCADE'), related_name='+', to='share.SubjectVersion'),
        ),
        # Update triggers
        # The trigger functions below snapshot each Subject/ThroughSubjects
        # row into its *version table on insert/update, then stash the new
        # version id back on the row.
        migrations.RunSQL(
            sql='CREATE OR REPLACE FUNCTION before_share_subject_change() RETURNS trigger AS $$\n                DECLARE\n                    vid INTEGER;\n                BEGIN\n                    INSERT INTO share_subjectversion(persistent_id, action, central_synonym_id, central_synonym_version_id, change_id, date_created, date_modified, extra_id, extra_version_id, is_deleted, name, parent_id, parent_version_id, same_as_id, same_as_version_id, taxonomy_id, uri) VALUES (NEW.id, TG_OP, NEW.central_synonym_id, NEW.central_synonym_version_id, NEW.change_id, NEW.date_created, NEW.date_modified, NEW.extra_id, NEW.extra_version_id, NEW.is_deleted, NEW.name, NEW.parent_id, NEW.parent_version_id, NEW.same_as_id, NEW.same_as_version_id, NEW.taxonomy_id, NEW.uri) RETURNING (id) INTO vid;\n                    NEW.version_id = vid;\n                    RETURN NEW;\n                END;\n                $$ LANGUAGE plpgsql;',
            reverse_sql='DROP FUNCTION before_share_subject_change();',
        ),
        migrations.RunSQL(
            sql='DROP TRIGGER IF EXISTS share_subject_change ON share_subject;\n\n                CREATE TRIGGER share_subject_change\n                BEFORE INSERT OR UPDATE ON share_subject\n                FOR EACH ROW\n                EXECUTE PROCEDURE before_share_subject_change();',
            reverse_sql='DROP TRIGGER share_subject_change',
        ),
        migrations.RunSQL(
            sql='CREATE OR REPLACE FUNCTION before_share_throughsubjects_change() RETURNS trigger AS $$\n                DECLARE\n                    vid INTEGER;\n                BEGIN\n                    INSERT INTO share_throughsubjectsversion(persistent_id, action, change_id, creative_work_id, creative_work_version_id, date_created, date_modified, extra_id, extra_version_id, is_deleted, same_as_id, same_as_version_id, subject_id, subject_version_id) VALUES (NEW.id, TG_OP, NEW.change_id, NEW.creative_work_id, NEW.creative_work_version_id, NEW.date_created, NEW.date_modified, NEW.extra_id, NEW.extra_version_id, NEW.is_deleted, NEW.same_as_id, NEW.same_as_version_id, NEW.subject_id, NEW.subject_version_id) RETURNING (id) INTO vid;\n                    NEW.version_id = vid;\n                    RETURN NEW;\n                END;\n                $$ LANGUAGE plpgsql;',
            reverse_sql='DROP FUNCTION before_share_throughsubjects_change();',
        ),
        migrations.RunSQL(
            sql='DROP TRIGGER IF EXISTS share_throughsubjects_change ON share_throughsubjects;\n\n                CREATE TRIGGER share_throughsubjects_change\n                BEFORE INSERT OR UPDATE ON share_throughsubjects\n                FOR EACH ROW\n                EXECUTE PROCEDURE before_share_throughsubjects_change();',
            reverse_sql='DROP TRIGGER share_throughsubjects_change',
        ),
    ]
| 56.40625 | 864 | 0.645568 |
7955ce0c06c1f09df2e7f818c78472e06608f9f0 | 630 | py | Python | string/1540_can_convert_string_in_k_moves/1540_can_convert_string_in_k_moves.py | zdyxry/LeetCode | 33371285d0f3302158230f46e8b1b63b9f4639c4 | [
"Xnet",
"X11"
] | 6 | 2019-09-16T01:50:44.000Z | 2020-09-17T08:52:25.000Z | string/1540_can_convert_string_in_k_moves/1540_can_convert_string_in_k_moves.py | zdyxry/LeetCode | 33371285d0f3302158230f46e8b1b63b9f4639c4 | [
"Xnet",
"X11"
] | null | null | null | string/1540_can_convert_string_in_k_moves/1540_can_convert_string_in_k_moves.py | zdyxry/LeetCode | 33371285d0f3302158230f46e8b1b63b9f4639c4 | [
"Xnet",
"X11"
] | 4 | 2020-02-07T12:43:16.000Z | 2021-04-11T06:38:55.000Z | class Solution:
def canConvertString(self, s: str, t: str, k: int) -> bool:
if len(s) != len(t):
return False
counts =[0] * 26
for si, ti in zip(s, t):
difference = ord(ti) - ord(si)
if difference < 0:
difference += 26
counts[difference] += 1
print(counts)
for i,count in enumerate(counts[1:], 1):
maxConvert = i + 26 * (counts[i] -1)
if maxConvert > k:
return False
return True
s = "input"
t = "ouput"
k = 9
res = Solution().canConvertString(s, t, k)
print(res) | 25.2 | 63 | 0.48254 |
7955cecba0458d8b37ad7084dbdd21d397e9e423 | 22,322 | py | Python | tests/components/light/test_hue.py | andrewdolphin/home-assistant | 15b48ca2aa739d9b5c11e7337addd2437f7ef34f | [
"Apache-2.0"
] | null | null | null | tests/components/light/test_hue.py | andrewdolphin/home-assistant | 15b48ca2aa739d9b5c11e7337addd2437f7ef34f | [
"Apache-2.0"
] | null | null | null | tests/components/light/test_hue.py | andrewdolphin/home-assistant | 15b48ca2aa739d9b5c11e7337addd2437f7ef34f | [
"Apache-2.0"
] | null | null | null | """Philips Hue lights platform tests."""
import logging
import unittest
import unittest.mock as mock
from unittest.mock import call, MagicMock, patch
from homeassistant.components import hue
import homeassistant.components.light.hue as hue_light
from tests.common import get_test_home_assistant, MockDependency
_LOGGER = logging.getLogger(__name__)
class TestSetup(unittest.TestCase):
    """Test the Hue light platform setup and polling helpers.

    Note: uses ``assertEqual`` throughout; the ``assertEquals`` alias is
    deprecated and was removed in Python 3.12.
    """

    def setUp(self):  # pylint: disable=invalid-name
        """Set up things to be run when tests are started."""
        self.hass = get_test_home_assistant()
        self.skip_teardown_stop = False

    def tearDown(self):
        """Stop everything that was started."""
        if not self.skip_teardown_stop:
            self.hass.stop()

    def setup_mocks_for_update_lights(self):
        """Set up all mocks for update_lights tests."""
        self.mock_bridge = MagicMock()
        self.mock_bridge.allow_hue_groups = False
        self.mock_api = MagicMock()
        self.mock_bridge.get_api.return_value = self.mock_api
        self.mock_bridge_type = MagicMock()
        self.mock_lights = []
        self.mock_groups = []
        self.mock_add_devices = MagicMock()

    def setup_mocks_for_process_lights(self):
        """Set up all mocks for process_lights tests."""
        self.mock_bridge = self.create_mock_bridge('host')
        self.mock_api = MagicMock()
        # By default the bridge API reports no lights.
        self.mock_api.get.return_value = {}
        self.mock_bridge.get_api.return_value = self.mock_api
        self.mock_bridge_type = MagicMock()

    def setup_mocks_for_process_groups(self):
        """Set up all mocks for process_groups tests."""
        self.mock_bridge = self.create_mock_bridge('host')
        self.mock_bridge.get_group.return_value = {
            'name': 'Group 0', 'state': {'any_on': True}}
        self.mock_api = MagicMock()
        # By default the bridge API reports no groups.
        self.mock_api.get.return_value = {}
        self.mock_bridge.get_api.return_value = self.mock_api
        self.mock_bridge_type = MagicMock()

    def create_mock_bridge(self, host, allow_hue_groups=True):
        """Return a mock HueBridge with reasonable defaults."""
        mock_bridge = MagicMock()
        mock_bridge.host = host
        mock_bridge.allow_hue_groups = allow_hue_groups
        mock_bridge.lights = {}
        mock_bridge.lightgroups = {}
        return mock_bridge

    def create_mock_lights(self, lights):
        """Return a dict suitable for mocking api.get('lights')."""
        mock_bridge_lights = lights
        for light_id, info in mock_bridge_lights.items():
            # Every light needs at least an on/off state.
            if 'state' not in info:
                info['state'] = {'on': False}
        return mock_bridge_lights

    def test_setup_platform_no_discovery_info(self):
        """Test setup_platform without discovery info."""
        self.hass.data[hue.DOMAIN] = {}
        mock_add_devices = MagicMock()

        hue_light.setup_platform(self.hass, {}, mock_add_devices)

        mock_add_devices.assert_not_called()

    def test_setup_platform_no_bridge_id(self):
        """Test setup_platform without a bridge."""
        self.hass.data[hue.DOMAIN] = {}
        mock_add_devices = MagicMock()

        hue_light.setup_platform(self.hass, {}, mock_add_devices, {})

        mock_add_devices.assert_not_called()

    def test_setup_platform_one_bridge(self):
        """Test setup_platform with one bridge."""
        mock_bridge = MagicMock()
        self.hass.data[hue.DOMAIN] = {'10.0.0.1': mock_bridge}
        mock_add_devices = MagicMock()

        with patch('homeassistant.components.light.hue.' +
                   'unthrottled_update_lights') as mock_update_lights:
            hue_light.setup_platform(
                self.hass, {}, mock_add_devices,
                {'bridge_id': '10.0.0.1'})
            mock_update_lights.assert_called_once_with(
                self.hass, mock_bridge, mock_add_devices)

    def test_setup_platform_multiple_bridges(self):
        """Test setup_platform with multiple bridges."""
        mock_bridge = MagicMock()
        mock_bridge2 = MagicMock()
        self.hass.data[hue.DOMAIN] = {
            '10.0.0.1': mock_bridge,
            '192.168.0.10': mock_bridge2,
        }
        mock_add_devices = MagicMock()

        with patch('homeassistant.components.light.hue.' +
                   'unthrottled_update_lights') as mock_update_lights:
            hue_light.setup_platform(
                self.hass, {}, mock_add_devices,
                {'bridge_id': '10.0.0.1'})
            hue_light.setup_platform(
                self.hass, {}, mock_add_devices,
                {'bridge_id': '192.168.0.10'})

            mock_update_lights.assert_has_calls([
                call(self.hass, mock_bridge, mock_add_devices),
                call(self.hass, mock_bridge2, mock_add_devices),
            ])

    @MockDependency('phue')
    def test_update_lights_with_no_lights(self, mock_phue):
        """Test the update_lights function when no lights are found."""
        self.setup_mocks_for_update_lights()

        with patch('homeassistant.components.light.hue.get_bridge_type',
                   return_value=self.mock_bridge_type):
            with patch('homeassistant.components.light.hue.process_lights',
                       return_value=[]) as mock_process_lights:
                with patch('homeassistant.components.light.hue.process_groups',
                           return_value=self.mock_groups) \
                        as mock_process_groups:
                    hue_light.unthrottled_update_lights(
                        self.hass, self.mock_bridge, self.mock_add_devices)

                    mock_process_lights.assert_called_once_with(
                        self.hass, self.mock_api, self.mock_bridge,
                        self.mock_bridge_type, mock.ANY)
                    # Groups are disabled on this bridge mock.
                    mock_process_groups.assert_not_called()
                    self.mock_add_devices.assert_not_called()

    @MockDependency('phue')
    def test_update_lights_with_some_lights(self, mock_phue):
        """Test the update_lights function with some lights."""
        self.setup_mocks_for_update_lights()
        self.mock_lights = ['some', 'light']

        with patch('homeassistant.components.light.hue.get_bridge_type',
                   return_value=self.mock_bridge_type):
            with patch('homeassistant.components.light.hue.process_lights',
                       return_value=self.mock_lights) as mock_process_lights:
                with patch('homeassistant.components.light.hue.process_groups',
                           return_value=self.mock_groups) \
                        as mock_process_groups:
                    hue_light.unthrottled_update_lights(
                        self.hass, self.mock_bridge, self.mock_add_devices)

                    mock_process_lights.assert_called_once_with(
                        self.hass, self.mock_api, self.mock_bridge,
                        self.mock_bridge_type, mock.ANY)
                    mock_process_groups.assert_not_called()
                    self.mock_add_devices.assert_called_once_with(
                        self.mock_lights)

    @MockDependency('phue')
    def test_update_lights_no_groups(self, mock_phue):
        """Test the update_lights function when no groups are found."""
        self.setup_mocks_for_update_lights()
        # Enable groups on this bridge; only lights will be returned.
        self.mock_bridge.allow_hue_groups = True
        self.mock_lights = ['some', 'light']

        with patch('homeassistant.components.light.hue.get_bridge_type',
                   return_value=self.mock_bridge_type):
            with patch('homeassistant.components.light.hue.process_lights',
                       return_value=self.mock_lights) as mock_process_lights:
                with patch('homeassistant.components.light.hue.process_groups',
                           return_value=self.mock_groups) \
                        as mock_process_groups:
                    hue_light.unthrottled_update_lights(
                        self.hass, self.mock_bridge, self.mock_add_devices)

                    mock_process_lights.assert_called_once_with(
                        self.hass, self.mock_api, self.mock_bridge,
                        self.mock_bridge_type, mock.ANY)
                    mock_process_groups.assert_called_once_with(
                        self.hass, self.mock_api, self.mock_bridge,
                        self.mock_bridge_type, mock.ANY)
                    self.mock_add_devices.assert_called_once_with(
                        self.mock_lights)

    @MockDependency('phue')
    def test_update_lights_with_lights_and_groups(self, mock_phue):
        """Test the update_lights function with both lights and groups."""
        self.setup_mocks_for_update_lights()
        self.mock_bridge.allow_hue_groups = True
        self.mock_lights = ['some', 'light']
        self.mock_groups = ['and', 'groups']

        with patch('homeassistant.components.light.hue.get_bridge_type',
                   return_value=self.mock_bridge_type):
            with patch('homeassistant.components.light.hue.process_lights',
                       return_value=self.mock_lights) as mock_process_lights:
                with patch('homeassistant.components.light.hue.process_groups',
                           return_value=self.mock_groups) \
                        as mock_process_groups:
                    hue_light.unthrottled_update_lights(
                        self.hass, self.mock_bridge, self.mock_add_devices)

                    mock_process_lights.assert_called_once_with(
                        self.hass, self.mock_api, self.mock_bridge,
                        self.mock_bridge_type, mock.ANY)
                    mock_process_groups.assert_called_once_with(
                        self.hass, self.mock_api, self.mock_bridge,
                        self.mock_bridge_type, mock.ANY)
                    # Note: only the lights list is passed to add_devices
                    # here because process_groups is patched.
                    self.mock_add_devices.assert_called_once_with(
                        self.mock_lights)

    @MockDependency('phue')
    def test_update_lights_with_two_bridges(self, mock_phue):
        """Test the update_lights function with two bridges."""
        self.setup_mocks_for_update_lights()

        mock_bridge_one = self.create_mock_bridge('one', False)
        mock_bridge_one_lights = self.create_mock_lights(
            {1: {'name': 'b1l1'}, 2: {'name': 'b1l2'}})

        mock_bridge_two = self.create_mock_bridge('two', False)
        mock_bridge_two_lights = self.create_mock_lights(
            {1: {'name': 'b2l1'}, 3: {'name': 'b2l3'}})

        with patch('homeassistant.components.light.hue.get_bridge_type',
                   return_value=self.mock_bridge_type):
            with patch('homeassistant.components.light.hue.HueLight.'
                       'schedule_update_ha_state'):
                mock_api = MagicMock()
                mock_api.get.return_value = mock_bridge_one_lights
                with patch.object(mock_bridge_one, 'get_api',
                                  return_value=mock_api):
                    hue_light.unthrottled_update_lights(
                        self.hass, mock_bridge_one, self.mock_add_devices)

                mock_api = MagicMock()
                mock_api.get.return_value = mock_bridge_two_lights
                with patch.object(mock_bridge_two, 'get_api',
                                  return_value=mock_api):
                    hue_light.unthrottled_update_lights(
                        self.hass, mock_bridge_two, self.mock_add_devices)

        # Each bridge must track only its own light ids.
        self.assertEqual(sorted(mock_bridge_one.lights.keys()), [1, 2])
        self.assertEqual(sorted(mock_bridge_two.lights.keys()), [1, 3])

        self.assertEqual(len(self.mock_add_devices.mock_calls), 2)

        # first call
        name, args, kwargs = self.mock_add_devices.mock_calls[0]
        self.assertEqual(len(args), 1)
        self.assertEqual(len(kwargs), 0)

        # one argument, a list of lights in bridge one; each of them is an
        # object of type HueLight so we can't straight up compare them
        lights = args[0]
        self.assertEqual(
            lights[0].unique_id,
            '{}.b1l1.Light.1'.format(hue_light.HueLight))
        self.assertEqual(
            lights[1].unique_id,
            '{}.b1l2.Light.2'.format(hue_light.HueLight))

        # second call works the same
        name, args, kwargs = self.mock_add_devices.mock_calls[1]
        self.assertEqual(len(args), 1)
        self.assertEqual(len(kwargs), 0)

        lights = args[0]
        self.assertEqual(
            lights[0].unique_id,
            '{}.b2l1.Light.1'.format(hue_light.HueLight))
        self.assertEqual(
            lights[1].unique_id,
            '{}.b2l3.Light.3'.format(hue_light.HueLight))

    def test_process_lights_api_error(self):
        """Test the process_lights function when the bridge errors out."""
        self.setup_mocks_for_process_lights()
        self.mock_api.get.return_value = None

        ret = hue_light.process_lights(
            self.hass, self.mock_api, self.mock_bridge, self.mock_bridge_type,
            None)

        self.assertEqual([], ret)
        self.assertEqual(self.mock_bridge.lights, {})

    def test_process_lights_no_lights(self):
        """Test the process_lights function when bridge returns no lights."""
        self.setup_mocks_for_process_lights()

        ret = hue_light.process_lights(
            self.hass, self.mock_api, self.mock_bridge, self.mock_bridge_type,
            None)

        self.assertEqual([], ret)
        self.assertEqual(self.mock_bridge.lights, {})

    @patch('homeassistant.components.light.hue.HueLight')
    def test_process_lights_some_lights(self, mock_hue_light):
        """Test the process_lights function with multiple groups."""
        self.setup_mocks_for_process_lights()
        self.mock_api.get.return_value = {
            1: {'state': 'on'}, 2: {'state': 'off'}}

        ret = hue_light.process_lights(
            self.hass, self.mock_api, self.mock_bridge, self.mock_bridge_type,
            None)

        self.assertEqual(len(ret), 2)
        mock_hue_light.assert_has_calls([
            call(
                1, {'state': 'on'}, self.mock_bridge, mock.ANY,
                self.mock_bridge_type, self.mock_bridge.allow_unreachable,
                self.mock_bridge.allow_in_emulated_hue),
            call(
                2, {'state': 'off'}, self.mock_bridge, mock.ANY,
                self.mock_bridge_type, self.mock_bridge.allow_unreachable,
                self.mock_bridge.allow_in_emulated_hue),
        ])
        self.assertEqual(len(self.mock_bridge.lights), 2)

    @patch('homeassistant.components.light.hue.HueLight')
    def test_process_lights_new_light(self, mock_hue_light):
        """
        Test the process_lights function with new groups.

        Test what happens when we already have a light and a new one shows up.
        """
        self.setup_mocks_for_process_lights()
        self.mock_api.get.return_value = {
            1: {'state': 'on'}, 2: {'state': 'off'}}
        # Light 1 is already known; only light 2 should be created.
        self.mock_bridge.lights = {1: MagicMock()}

        ret = hue_light.process_lights(
            self.hass, self.mock_api, self.mock_bridge, self.mock_bridge_type,
            None)

        self.assertEqual(len(ret), 1)
        mock_hue_light.assert_has_calls([
            call(
                2, {'state': 'off'}, self.mock_bridge, mock.ANY,
                self.mock_bridge_type, self.mock_bridge.allow_unreachable,
                self.mock_bridge.allow_in_emulated_hue),
        ])
        self.assertEqual(len(self.mock_bridge.lights), 2)
        # The pre-existing light gets a state refresh instead.
        self.mock_bridge.lights[1]\
            .schedule_update_ha_state.assert_called_once_with()

    def test_process_groups_api_error(self):
        """Test the process_groups function when the bridge errors out."""
        self.setup_mocks_for_process_groups()
        self.mock_api.get.return_value = None

        ret = hue_light.process_groups(
            self.hass, self.mock_api, self.mock_bridge, self.mock_bridge_type,
            None)

        self.assertEqual([], ret)
        self.assertEqual(self.mock_bridge.lightgroups, {})

    def test_process_groups_no_state(self):
        """Test the process_groups function when bridge returns no status."""
        self.setup_mocks_for_process_groups()
        self.mock_bridge.get_group.return_value = {'name': 'Group 0'}

        ret = hue_light.process_groups(
            self.hass, self.mock_api, self.mock_bridge, self.mock_bridge_type,
            None)

        self.assertEqual([], ret)
        self.assertEqual(self.mock_bridge.lightgroups, {})

    @patch('homeassistant.components.light.hue.HueLight')
    def test_process_groups_some_groups(self, mock_hue_light):
        """Test the process_groups function with multiple groups."""
        self.setup_mocks_for_process_groups()
        self.mock_api.get.return_value = {
            1: {'state': 'on'}, 2: {'state': 'off'}}

        ret = hue_light.process_groups(
            self.hass, self.mock_api, self.mock_bridge, self.mock_bridge_type,
            None)

        self.assertEqual(len(ret), 2)
        mock_hue_light.assert_has_calls([
            call(
                1, {'state': 'on'}, self.mock_bridge, mock.ANY,
                self.mock_bridge_type, self.mock_bridge.allow_unreachable,
                self.mock_bridge.allow_in_emulated_hue, True),
            call(
                2, {'state': 'off'}, self.mock_bridge, mock.ANY,
                self.mock_bridge_type, self.mock_bridge.allow_unreachable,
                self.mock_bridge.allow_in_emulated_hue, True),
        ])
        self.assertEqual(len(self.mock_bridge.lightgroups), 2)

    @patch('homeassistant.components.light.hue.HueLight')
    def test_process_groups_new_group(self, mock_hue_light):
        """
        Test the process_groups function with new groups.

        Test what happens when we already have a light and a new one shows up.
        """
        self.setup_mocks_for_process_groups()
        self.mock_api.get.return_value = {
            1: {'state': 'on'}, 2: {'state': 'off'}}
        # Group 1 is already known; only group 2 should be created.
        self.mock_bridge.lightgroups = {1: MagicMock()}

        ret = hue_light.process_groups(
            self.hass, self.mock_api, self.mock_bridge, self.mock_bridge_type,
            None)

        self.assertEqual(len(ret), 1)
        mock_hue_light.assert_has_calls([
            call(
                2, {'state': 'off'}, self.mock_bridge, mock.ANY,
                self.mock_bridge_type, self.mock_bridge.allow_unreachable,
                self.mock_bridge.allow_in_emulated_hue, True),
        ])
        self.assertEqual(len(self.mock_bridge.lightgroups), 2)
        # The pre-existing group gets a state refresh instead.
        self.mock_bridge.lightgroups[1]\
            .schedule_update_ha_state.assert_called_once_with()
class TestHueLight(unittest.TestCase):
    """Test the HueLight class, in particular its unique_id generation.

    Note: uses ``assertEqual``; the ``assertEquals`` alias is deprecated
    and was removed in Python 3.12.
    """

    def setUp(self):  # pylint: disable=invalid-name
        """Set up things to be run when tests are started."""
        self.hass = get_test_home_assistant()
        self.skip_teardown_stop = False

        self.light_id = 42
        self.mock_info = MagicMock()
        self.mock_bridge = MagicMock()
        self.mock_update_lights = MagicMock()
        self.mock_bridge_type = MagicMock()
        self.mock_allow_unreachable = MagicMock()
        self.mock_allow_in_emulated_hue = MagicMock()
        # Default to testing plain lights, not groups.  (The original code
        # first assigned a MagicMock here and immediately shadowed it.)
        self.mock_is_group = False

    def tearDown(self):
        """Stop everything that was started."""
        if not self.skip_teardown_stop:
            self.hass.stop()

    def buildLight(
            self, light_id=None, info=None, update_lights=None, is_group=None):
        """Helper to build a HueLight object with minimal fuss.

        Any argument left as None falls back to the mock set up in setUp.
        """
        return hue_light.HueLight(
            light_id if light_id is not None else self.light_id,
            info if info is not None else self.mock_info,
            self.mock_bridge,
            (update_lights
             if update_lights is not None
             else self.mock_update_lights),
            self.mock_bridge_type,
            self.mock_allow_unreachable, self.mock_allow_in_emulated_hue,
            is_group if is_group is not None else self.mock_is_group)

    def test_unique_id_for_light(self):
        """Test the unique_id method with lights."""
        class_name = "<class 'homeassistant.components.light.hue.HueLight'>"

        light = self.buildLight(info={'uniqueid': 'foobar'})
        self.assertEqual(
            class_name+'.foobar',
            light.unique_id)

        light = self.buildLight(info={})
        self.assertEqual(
            class_name+'.Unnamed Device.Light.42',
            light.unique_id)

        light = self.buildLight(info={'name': 'my-name'})
        self.assertEqual(
            class_name+'.my-name.Light.42',
            light.unique_id)

        light = self.buildLight(info={'type': 'my-type'})
        self.assertEqual(
            class_name+'.Unnamed Device.my-type.42',
            light.unique_id)

        light = self.buildLight(info={'name': 'a name', 'type': 'my-type'})
        self.assertEqual(
            class_name+'.a name.my-type.42',
            light.unique_id)

    def test_unique_id_for_group(self):
        """Test the unique_id method with groups."""
        class_name = "<class 'homeassistant.components.light.hue.HueLight'>"

        light = self.buildLight(info={'uniqueid': 'foobar'}, is_group=True)
        self.assertEqual(
            class_name+'.foobar',
            light.unique_id)

        light = self.buildLight(info={}, is_group=True)
        self.assertEqual(
            class_name+'.Unnamed Device.Group.42',
            light.unique_id)

        light = self.buildLight(info={'name': 'my-name'}, is_group=True)
        self.assertEqual(
            class_name+'.my-name.Group.42',
            light.unique_id)

        light = self.buildLight(info={'type': 'my-type'}, is_group=True)
        self.assertEqual(
            class_name+'.Unnamed Device.my-type.42',
            light.unique_id)

        light = self.buildLight(
            info={'name': 'a name', 'type': 'my-type'},
            is_group=True)
        self.assertEqual(
            class_name+'.a name.my-type.42',
            light.unique_id)
| 41.184502 | 79 | 0.618986 |
7955cf6935e7cf2ff415457cd8f6ee120151552e | 258 | py | Python | cats/cats_simus/writefield.py | brunellacarlomagno/CATS | 2fb85366dbca81736d75a982156674523632414f | [
"MIT"
] | null | null | null | cats/cats_simus/writefield.py | brunellacarlomagno/CATS | 2fb85366dbca81736d75a982156674523632414f | [
"MIT"
] | null | null | null | cats/cats_simus/writefield.py | brunellacarlomagno/CATS | 2fb85366dbca81736d75a982156674523632414f | [
"MIT"
] | null | null | null | from astropy.io import fits
def writefield(path, filename, field):
    """Write a complex field to two FITS files (real and imaginary parts).

    The real part goes to ``<path><filename>_r.fits`` and the imaginary
    part to ``<path><filename>_i.fits``; existing files are overwritten.
    Note: ``path`` is string-concatenated, so it must already end with a
    path separator for the files to land in the intended directory.
    """
    fits.writeto(path + filename + '_r.fits', field.real, header=None, overwrite=True)
    fits.writeto(path + filename + '_i.fits', field.imag, header=None, overwrite=True)
| 21.5 | 86 | 0.70155 |
7955d1406303eddfcaece6455945e01dcc8fa058 | 3,131 | py | Python | recipes/uriparser/all/conanfile.py | xqp/conan-center-index | bf66f3c448aa42e76c9ba5f2d4380c4ff2fd8fd5 | [
"MIT"
] | null | null | null | recipes/uriparser/all/conanfile.py | xqp/conan-center-index | bf66f3c448aa42e76c9ba5f2d4380c4ff2fd8fd5 | [
"MIT"
] | 4 | 2020-03-04T20:19:54.000Z | 2020-03-10T13:56:28.000Z | recipes/uriparser/all/conanfile.py | xqp/conan-center-index | bf66f3c448aa42e76c9ba5f2d4380c4ff2fd8fd5 | [
"MIT"
] | null | null | null | from conans import CMake, ConanFile, tools
import os
class UriparserConan(ConanFile):
    """Conan recipe for the uriparser C library (RFC 3986 URI parsing)."""
    name = "uriparser"
    description = "Strictly RFC 3986 compliant URI parsing and handling library written in C89"
    topics = ("conan", "uriparser", "URI", "parser")
    url = "https://github.com/conan-io/conan-center-index"
    homepage = "https://uriparser.github.io/"
    exports_sources = "CMakeLists.txt"
    generators = "cmake"
    license = "BSD-3-Clause"
    settings = "os", "arch", "compiler", "build_type"
    options = {
        "shared": [True, False],
        "fPIC": [True, False],
        # Build the narrow-character (char) API.
        "with_char": [True, False],
        # Build the wide-character (wchar_t) API.
        "with_wchar": [True, False],
    }
    default_options = {
        "shared": False,
        "fPIC": True,
        "with_char": True,
        "with_wchar": True,
    }

    _source_subfolder = "source_subfolder"
    _build_subfolder = "build_subfolder"

    def config_options(self):
        # fPIC is meaningless on Windows.
        if self.settings.os == "Windows":
            del self.options.fPIC

    def configure(self):
        if self.options.shared:
            del self.options.fPIC
        # Pure C library: C++ standard / runtime settings are irrelevant.
        del self.settings.compiler.cppstd
        del self.settings.compiler.libcxx

    def source(self):
        tools.get(**self.conan_data["sources"][self.version])
        extracted_folder = self.name + "-" + self.version
        os.rename(extracted_folder, self._source_subfolder)

    def _configure_cmake(self):
        """Configure CMake with docs/tests/tools disabled and the
        char/wchar APIs toggled by the recipe options."""
        cmake = CMake(self)
        cmake.definitions["URIPARSER_BUILD_DOCS"] = False
        cmake.definitions["URIPARSER_BUILD_TESTS"] = False
        cmake.definitions["URIPARSER_BUILD_TOOLS"] = False
        cmake.definitions["URIPARSER_BUILD_CHAR"] = self.options.with_char
        cmake.definitions["URIPARSER_BUILD_WCHAR"] = self.options.with_wchar
        if self.settings.compiler == "Visual Studio":
            # Pass the MSVC runtime flag (e.g. /MD) through to the project.
            cmake.definitions["URIPARSER_MSVC_RUNTIME"] = "/{}".format(self.settings.compiler.runtime)

        cmake.configure(build_folder=self._build_subfolder)
        return cmake

    def _patch_sources(self):
        # Static builds must not mark symbols dllimport on Windows.
        # NOTE(review): this helper is never called from build(); presumably
        # URI_STATIC_BUILD (see package_info) makes it redundant — confirm.
        if not self.options.shared:
            tools.replace_in_file(os.path.join(self._source_subfolder, "include", "uriparser", "UriBase.h"),
                                  "__declspec(dllimport)",
                                  "")

    def build(self):
        cmake = self._configure_cmake()
        cmake.build()

    def package(self):
        self.copy("COPYING", src=self._source_subfolder, dst="licenses")
        cmake = self._configure_cmake()
        cmake.install()
        # Drop build-system leftovers from the package.
        tools.rmdir(os.path.join(self.package_folder, "lib", "pkgconfig"))
        tools.rmdir(os.path.join(self.package_folder, "lib", "cmake"))

    def package_info(self):
        self.cpp_info.libs = ["uriparser"]
        self.cpp_info.includedirs = [os.path.join(self.package_folder, "include")]
        # Consumers need matching compile definitions for static /
        # char-less / wchar-less builds.
        defines = []
        if not self.options.shared:
            defines.append("URI_STATIC_BUILD")
        if not self.options.with_char:
            defines.append("URI_NO_ANSI")
        if not self.options.with_wchar:
            defines.append("URI_NO_UNICODE")
        self.cpp_info.defines = defines
| 35.988506 | 108 | 0.625998 |
7955d1e65e1a6c5e1aa6445fc6ceee79867072f5 | 1,090 | py | Python | metanime/bilibili.py | ClumsyLee/metanime | 3534b3c7a980b7a879497d9b1de7040b16b5c439 | [
"MIT"
] | 2 | 2019-02-07T07:24:11.000Z | 2019-04-22T17:21:25.000Z | metanime/bilibili.py | ClumsyLee/metanime | 3534b3c7a980b7a879497d9b1de7040b16b5c439 | [
"MIT"
] | null | null | null | metanime/bilibili.py | ClumsyLee/metanime | 3534b3c7a980b7a879497d9b1de7040b16b5c439 | [
"MIT"
] | null | null | null | from .site import main, Site
class Bilibili(Site):
    """Site adapter for bilibili.com."""

    BASE_URL = 'https://www.bilibili.com'
    API_BASE_URL = 'https://bangumi.bilibili.com/view/web_api'
    NAMES = {
        'en': 'Bilibili',
        'ja-jp': 'ビリビリ',
        'zh-cn': '哔哩哔哩',
    }
    MIN_RATING = 2
    MAX_RATING = 10
    SEARCH_LOCALES = ['ja-jp', 'zh-cn']

    def info_url(self, id):
        """Return the public media page URL for the given media id."""
        return f'{self.BASE_URL}/bangumi/media/md{id}'

    def _get_rating(self, id):
        """Return (score, vote count) for the given media id."""
        payload = self._get_json(f'{self.API_BASE_URL}/season',
                                 params={'media_id': id})
        score_info = payload['result']['rating']
        return float(score_info['score']), int(score_info['count'])

    def _search(self, name):
        """Return the media id of the first bangumi search hit for *name*."""
        hits = self._get_json(
            'https://api.bilibili.com/x/web-interface/search/type',
            params={'search_type': 'media_bangumi', 'keyword': name})
        return hits['data']['result'][0]['media_id']
# Manual smoke check: run this module directly to exercise the adapter
# against a known show (delegates to site.main).
if __name__ == '__main__':
    main(Bilibili(), {'ja-jp': '少女☆歌劇 レヴュースタァライト'})
| 25.952381 | 67 | 0.546789 |
7955d23105e442936f0eccf144d4e90bf7e39b63 | 8,214 | gyp | Python | Github-Arduino/libraries/ideawu_RTC/multimedia/webrtc/common_audio/common_audio.gyp | famley-richards/Documents-KTibow | b5d2be03ea2f6687cd9d854d9f43ef839a37e275 | [
"MIT"
] | null | null | null | Github-Arduino/libraries/ideawu_RTC/multimedia/webrtc/common_audio/common_audio.gyp | famley-richards/Documents-KTibow | b5d2be03ea2f6687cd9d854d9f43ef839a37e275 | [
"MIT"
] | null | null | null | Github-Arduino/libraries/ideawu_RTC/multimedia/webrtc/common_audio/common_audio.gyp | famley-richards/Documents-KTibow | b5d2be03ea2f6687cd9d854d9f43ef839a37e275 | [
"MIT"
] | null | null | null | # Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
{
'includes': [
'../build/common.gypi',
],
'targets': [
{
'target_name': 'common_audio',
'type': 'static_library',
'dependencies': [
'<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
],
'include_dirs': [
'resampler/include',
'signal_processing/include',
],
'direct_dependent_settings': {
'include_dirs': [
'resampler/include',
'signal_processing/include',
'vad/include',
],
},
'sources': [
'audio_util.cc',
'include/audio_util.h',
'resampler/include/push_resampler.h',
'resampler/include/resampler.h',
'resampler/push_resampler.cc',
'resampler/push_sinc_resampler.cc',
'resampler/push_sinc_resampler.h',
'resampler/resampler.cc',
'resampler/sinc_resampler.cc',
'resampler/sinc_resampler.h',
'signal_processing/include/real_fft.h',
'signal_processing/include/signal_processing_library.h',
'signal_processing/include/spl_inl.h',
'signal_processing/auto_corr_to_refl_coef.c',
'signal_processing/auto_correlation.c',
'signal_processing/complex_fft.c',
'signal_processing/complex_fft_tables.h',
'signal_processing/complex_bit_reverse.c',
'signal_processing/copy_set_operations.c',
'signal_processing/cross_correlation.c',
'signal_processing/division_operations.c',
'signal_processing/dot_product_with_scale.c',
'signal_processing/downsample_fast.c',
'signal_processing/energy.c',
'signal_processing/filter_ar.c',
'signal_processing/filter_ar_fast_q12.c',
'signal_processing/filter_ma_fast_q12.c',
'signal_processing/get_hanning_window.c',
'signal_processing/get_scaling_square.c',
'signal_processing/ilbc_specific_functions.c',
'signal_processing/levinson_durbin.c',
'signal_processing/lpc_to_refl_coef.c',
'signal_processing/min_max_operations.c',
'signal_processing/randomization_functions.c',
'signal_processing/refl_coef_to_lpc.c',
'signal_processing/real_fft.c',
'signal_processing/resample.c',
'signal_processing/resample_48khz.c',
'signal_processing/resample_by_2.c',
'signal_processing/resample_by_2_internal.c',
'signal_processing/resample_by_2_internal.h',
'signal_processing/resample_fractional.c',
'signal_processing/spl_init.c',
'signal_processing/spl_sqrt.c',
'signal_processing/spl_sqrt_floor.c',
'signal_processing/spl_version.c',
'signal_processing/splitting_filter.c',
'signal_processing/sqrt_of_one_minus_x_squared.c',
'signal_processing/vector_scaling_operations.c',
'vad/include/webrtc_vad.h',
'vad/webrtc_vad.c',
'vad/vad_core.c',
'vad/vad_core.h',
'vad/vad_filterbank.c',
'vad/vad_filterbank.h',
'vad/vad_gmm.c',
'vad/vad_gmm.h',
'vad/vad_sp.c',
'vad/vad_sp.h',
],
'conditions': [
['target_arch=="ia32" or target_arch=="x64"', {
'dependencies': ['common_audio_sse2',],
}],
['target_arch=="arm"', {
'sources': [
'signal_processing/complex_bit_reverse_arm.S',
'signal_processing/spl_sqrt_floor_arm.S',
],
'sources!': [
'signal_processing/complex_bit_reverse.c',
'signal_processing/spl_sqrt_floor.c',
],
'conditions': [
['armv7==1', {
'dependencies': ['common_audio_neon',],
'sources': [
'signal_processing/filter_ar_fast_q12_armv7.S',
],
'sources!': [
'signal_processing/filter_ar_fast_q12.c',
],
}],
], # conditions
}],
['target_arch=="mipsel"', {
'sources': [
'signal_processing/complex_bit_reverse_mips.c',
'signal_processing/complex_fft_mips.c',
'signal_processing/downsample_fast_mips.c',
'signal_processing/filter_ar_fast_q12_mips.c',
'signal_processing/min_max_operations_mips.c',
'signal_processing/resample_by_2_mips.c',
],
'sources!': [
'signal_processing/complex_bit_reverse.c',
'signal_processing/complex_fft.c',
'signal_processing/filter_ar_fast_q12.c',
],
}],
], # conditions
# Ignore warning on shift operator promotion.
'msvs_disabled_warnings': [ 4334, ],
},
], # targets
'conditions': [
['target_arch=="ia32" or target_arch=="x64"', {
'targets': [
{
'target_name': 'common_audio_sse2',
'type': 'static_library',
'sources': [
'resampler/sinc_resampler_sse.cc',
],
'cflags': ['-msse2',],
'xcode_settings': {
'OTHER_CFLAGS': ['-msse2',],
},
},
], # targets
}],
['target_arch=="arm" and armv7==1', {
'targets': [
{
'target_name': 'common_audio_neon',
'type': 'static_library',
'includes': ['../build/arm_neon.gypi',],
'sources': [
'resampler/sinc_resampler_neon.cc',
'signal_processing/cross_correlation_neon.S',
'signal_processing/downsample_fast_neon.S',
'signal_processing/min_max_operations_neon.S',
'signal_processing/vector_scaling_operations_neon.S',
],
},
], # targets
}],
['include_tests==1', {
'targets' : [
{
'target_name': 'common_audio_unittests',
'type': '<(gtest_target_type)',
'dependencies': [
'common_audio',
'<(webrtc_root)/test/test.gyp:test_support_main',
'<(DEPTH)/testing/gmock.gyp:gmock',
'<(DEPTH)/testing/gtest.gyp:gtest',
],
'sources': [
'audio_util_unittest.cc',
'resampler/resampler_unittest.cc',
'resampler/push_resampler_unittest.cc',
'resampler/push_sinc_resampler_unittest.cc',
'resampler/sinc_resampler_unittest.cc',
'resampler/sinusoidal_linear_chirp_source.cc',
'resampler/sinusoidal_linear_chirp_source.h',
'signal_processing/real_fft_unittest.cc',
'signal_processing/signal_processing_unittest.cc',
'vad/vad_core_unittest.cc',
'vad/vad_filterbank_unittest.cc',
'vad/vad_gmm_unittest.cc',
'vad/vad_sp_unittest.cc',
'vad/vad_unittest.cc',
'vad/vad_unittest.h',
],
'conditions': [
# TODO(henrike): remove build_with_chromium==1 when the bots are
# using Chromium's buildbots.
['build_with_chromium==1 and OS=="android" and gtest_target_type=="shared_library"', {
'dependencies': [
'<(DEPTH)/testing/android/native_test.gyp:native_test_native_code',
],
}],
],
},
], # targets
'conditions': [
# TODO(henrike): remove build_with_chromium==1 when the bots are using
# Chromium's buildbots.
['build_with_chromium==1 and OS=="android" and gtest_target_type=="shared_library"', {
'targets': [
{
'target_name': 'common_audio_unittests_apk_target',
'type': 'none',
'dependencies': [
'<(apk_tests_path):common_audio_unittests_apk',
],
},
],
}],
]
}],
], # conditions
}
| 36.345133 | 98 | 0.583029 |
7955d3baeb79a5ea00c72f1a5537bba6c35b8e20 | 8,036 | py | Python | src/sqlfluff/rules/L010.py | quannh-uet/sqlfluff | 982aef19ab537e256ddc7e6fb2ac095e30959e4b | [
"MIT"
] | null | null | null | src/sqlfluff/rules/L010.py | quannh-uet/sqlfluff | 982aef19ab537e256ddc7e6fb2ac095e30959e4b | [
"MIT"
] | 1 | 2021-09-15T18:20:39.000Z | 2021-09-15T18:20:39.000Z | src/sqlfluff/rules/L010.py | tunetheweb/sqlfluff | 9504fc28af3142f3e4915b0adf5e31746df06e43 | [
"MIT"
] | null | null | null | """Implementation of Rule L010."""
import regex
from typing import Tuple, List
from sqlfluff.core.rules.base import BaseRule, LintResult, LintFix, RuleContext
from sqlfluff.core.rules.config_info import get_config_info
from sqlfluff.core.rules.doc_decorators import (
document_fix_compatible,
document_configuration,
)
@document_fix_compatible
@document_configuration
class Rule_L010(BaseRule):
"""Inconsistent capitalisation of keywords.
| **Anti-pattern**
| In this example, 'select 'is in lower-case whereas 'FROM' is in upper-case.
.. code-block:: sql
select
a
FROM foo
| **Best practice**
| Make all keywords either in upper-case or in lower-case
.. code-block:: sql
SELECT
a
FROM foo
-- Also good
select
a
from foo
"""
# Binary operators behave like keywords too.
_target_elems: List[Tuple[str, str]] = [
("type", "keyword"),
("type", "binary_operator"),
]
config_keywords = ["capitalisation_policy"]
# Human readable target elem for description
_description_elem = "Keywords"
def _eval(self, context: RuleContext) -> LintResult:
"""Inconsistent capitalisation of keywords.
We use the `memory` feature here to keep track of cases known to be
INconsistent with what we've seen so far as well as the top choice
for what the possible case is.
"""
# Skip if not an element of the specified type/name
if not self.matches_target_tuples(context.segment, self._target_elems):
return LintResult(memory=context.memory)
# Get the capitalisation policy configuration.
try:
cap_policy = self.cap_policy
cap_policy_opts = self.cap_policy_opts
except AttributeError:
# First-time only, read the settings from configuration. This is
# very slow.
cap_policy, cap_policy_opts = self._init_capitalisation_policy()
memory = context.memory
refuted_cases = memory.get("refuted_cases", set())
# Which cases are definitely inconsistent with the segment?
if context.segment.raw[0] != context.segment.raw[0].upper():
refuted_cases.update(["upper", "capitalise", "pascal"])
if context.segment.raw != context.segment.raw.lower():
refuted_cases.update(["lower"])
else:
refuted_cases.update(["lower"])
if context.segment.raw != context.segment.raw.upper():
refuted_cases.update(["upper"])
if context.segment.raw != context.segment.raw.capitalize():
refuted_cases.update(["capitalise"])
if not context.segment.raw.isalnum():
refuted_cases.update(["pascal"])
# Update the memory
memory["refuted_cases"] = refuted_cases
self.logger.debug(
f"Refuted cases after segment '{context.segment.raw}': {refuted_cases}"
)
# Skip if no inconsistencies, otherwise compute a concrete policy
# to convert to.
if cap_policy == "consistent":
possible_cases = [c for c in cap_policy_opts if c not in refuted_cases]
self.logger.debug(
f"Possible cases after segment '{context.segment.raw}': {possible_cases}"
)
if possible_cases:
# Save the latest possible case and skip
memory["latest_possible_case"] = possible_cases[0]
self.logger.debug(
f"Consistent capitalization, returning with memory: {memory}"
)
return LintResult(memory=memory)
else:
concrete_policy = memory.get("latest_possible_case", "upper")
self.logger.debug(
f"Getting concrete policy '{concrete_policy}' from memory"
)
else:
if cap_policy not in refuted_cases:
# Skip
self.logger.debug(
f"Consistent capitalization {cap_policy}, returning with "
f"memory: {memory}"
)
return LintResult(memory=memory)
else:
concrete_policy = cap_policy
self.logger.debug(
f"Setting concrete policy '{concrete_policy}' from cap_policy"
)
# Set the fixed to same as initial in case any of below don't match
fixed_raw = context.segment.raw
# We need to change the segment to match the concrete policy
if concrete_policy in ["upper", "lower", "capitalise"]:
if concrete_policy == "upper":
fixed_raw = fixed_raw.upper()
elif concrete_policy == "lower":
fixed_raw = fixed_raw.lower()
elif concrete_policy == "capitalise":
fixed_raw = fixed_raw.capitalize()
elif concrete_policy == "pascal":
# For Pascal we set the first letter in each "word" to uppercase
# We do not lowercase other letters to allow for PascalCase style
# words. This does mean we allow all UPPERCASE and also don't
# correct Pascalcase to PascalCase, but there's only so much we can
# do. We do correct underscore_words to Underscore_Words.
fixed_raw = regex.sub(
"([^a-zA-Z0-9]+|^)([a-zA-Z0-9])([a-zA-Z0-9]*)",
lambda match: match.group(1) + match.group(2).upper() + match.group(3),
context.segment.raw,
)
if fixed_raw == context.segment.raw:
# No need to fix
self.logger.debug(
f"Capitalisation of segment '{context.segment.raw}' already OK with policy "
f"'{concrete_policy}', returning with memory {memory}"
)
return LintResult(memory=memory)
else:
# build description based on the policy in use
consistency = "consistently " if cap_policy == "consistent" else ""
if concrete_policy in ["upper", "lower"]:
policy = f"{concrete_policy} case."
elif concrete_policy == "capitalise":
policy = "capitalised."
elif concrete_policy == "pascal":
policy = "pascal case."
# Return the fixed segment
self.logger.debug(
f"INCONSISTENT Capitalisation of segment '{context.segment.raw}', fixing to "
f"'{fixed_raw}' and returning with memory {memory}"
)
return LintResult(
anchor=context.segment,
fixes=[self._get_fix(context.segment, fixed_raw)],
memory=memory,
description=f"{self._description_elem} must be {consistency}{policy}",
)
def _get_fix(self, segment, fixed_raw):
"""Given a segment found to have a fix, returns a LintFix for it.
May be overridden by subclasses, which is useful when the parse tree
structure varies from this simple base case.
"""
return LintFix("edit", segment, segment.edit(fixed_raw))
def _init_capitalisation_policy(self):
"""Called first time rule is evaluated to fetch & cache the policy."""
cap_policy_name = next(
k for k in self.config_keywords if k.endswith("capitalisation_policy")
)
self.cap_policy = getattr(self, cap_policy_name)
self.cap_policy_opts = [
opt
for opt in get_config_info()[cap_policy_name]["validation"]
if opt != "consistent"
]
self.logger.debug(
f"Selected '{cap_policy_name}': '{self.cap_policy}' from options "
f"{self.cap_policy_opts}"
)
cap_policy = self.cap_policy
cap_policy_opts = self.cap_policy_opts
return cap_policy, cap_policy_opts
| 38.449761 | 93 | 0.589348 |
7955d4bc78df0cc5152ee0c23449733511ce74b6 | 4,661 | py | Python | launcher/autorun.py | aliyun/oss-ftp | 1670b67b6ce726314ca0081841567934435128d4 | [
"MIT"
] | 69 | 2015-12-28T07:02:51.000Z | 2022-03-31T13:36:42.000Z | launcher/autorun.py | aliyun/oss-ftp | 1670b67b6ce726314ca0081841567934435128d4 | [
"MIT"
] | 23 | 2016-03-04T10:43:24.000Z | 2021-03-17T09:58:19.000Z | launcher/autorun.py | aliyun/oss-ftp | 1670b67b6ce726314ca0081841567934435128d4 | [
"MIT"
] | 24 | 2016-02-29T11:45:47.000Z | 2021-12-24T08:41:37.000Z | #!/usr/bin/env python
"""A simple crossplatform autostart helper"""
from __future__ import with_statement
import os
import sys
import launcher_log
current_path = os.path.dirname(os.path.abspath(__file__))
root_path = os.path.abspath( os.path.join(current_path, os.pardir))
if sys.platform == 'win32':
is_py2 = (sys.version_info[0] == 2)
if is_py2:
import _winreg
else:
import winreg as _winreg
_registry = _winreg.ConnectRegistry(None, _winreg.HKEY_CURRENT_USER)
def get_runonce():
return _winreg.OpenKey(_registry,
r"Software\Microsoft\Windows\CurrentVersion\Run", 0,
_winreg.KEY_ALL_ACCESS)
def add(name, application):
"""add a new autostart entry"""
key = get_runonce()
_winreg.SetValueEx(key, name, 0, _winreg.REG_SZ, application)
_winreg.CloseKey(key)
def exists(name):
"""check if an autostart entry exists"""
key = get_runonce()
exists = True
try:
_winreg.QueryValueEx(key, name)
except : #WindowsError
exists = False
_winreg.CloseKey(key)
return exists
def remove(name):
if not exists(name):
return
"""delete an autostart entry"""
key = get_runonce()
_winreg.DeleteValue(key, name)
_winreg.CloseKey(key)
run_cmd = ''
ossftp_exe = os.path.join(root_path, 'ossftp.exe')
if (os.path.isfile(ossftp_exe)):
run_cmd = os.path.abspath(ossftp_exe)
else:
run_cmd = "\"" + os.path.abspath( os.path.join(root_path, "python27", "win32", "pythonw.exe")) + "\" \"" +\
os.path.abspath( os.path.join(root_path, "launcher", "start.py")) + "\""
elif sys.platform == 'linux' or sys.platform == 'linux2':
_xdg_config_home = os.environ.get("XDG_CONFIG_HOME", "~/.config")
_xdg_user_autostart = os.path.join(os.path.expanduser(_xdg_config_home),
"autostart")
def getfilename(name):
"""get the filename of an autostart (.desktop) file"""
return os.path.join(_xdg_user_autostart, name + ".desktop")
def add(name, application):
if not os.path.isdir(os.path.expanduser(_xdg_config_home)):
launcher_log.warn("autorun linux config path not found:%s", os.path.expanduser(_xdg_config_home))
return
if not os.path.isdir(_xdg_user_autostart):
os.mkdir(_xdg_user_autostart)
"""add a new autostart entry"""
desktop_entry = "[Desktop Entry]\n"\
"Name=%s\n"\
"Exec=%s\n"\
"Type=Application\n"\
"Terminal=false\n" % (name, application)
with open(getfilename(name), "w") as f:
f.write(desktop_entry)
def exists(name):
"""check if an autostart entry exists"""
return os.path.exists(getfilename(name))
def remove(name):
"""delete an autostart entry"""
if(exists(name)):
os.unlink(getfilename(name))
run_cmd = os.path.abspath( os.path.join(root_path, "start.sh"))
elif sys.platform == 'darwin':
plist_template = """<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Label</key>
<string>com.xxnet.launcher</string>
<key>LimitLoadToSessionType</key>
<string>Aqua</string>
<key>ProgramArguments</key>
<array>
<string>%s</string>
</array>
<key>RunAtLoad</key>
<true/>
<key>StandardErrorPath</key>
<string>/dev/null</string>
<key>StandardOutPath</key>
<string>/dev/null</string>
</dict>
</plist>"""
run_cmd = os.path.abspath( os.path.join(root_path, "start.sh"))
from os.path import expanduser
home = expanduser("~")
plist_file_path = os.path.join(home, "Library/LaunchAgents/com.aliyun.ossftp.launcher.plist")
def add(name, cmd):
file_content = plist_template % cmd
launcher_log.info("create file:%s", plist_file_path)
with open(plist_file_path, "w") as f:
f.write(file_content)
def remove(name):
if(os.path.isfile(plist_file_path)):
os.unlink(plist_file_path)
launcher_log.info("remove file:%s", plist_file_path)
else:
def add(name, cmd):
pass
def remove(name):
pass
def enable():
add("ossftp", run_cmd)
def disable():
remove("ossftp")
def test():
assert not exists("test_xxx")
try:
add("test_xxx", "test")
assert exists("test_xxx")
finally:
remove("test_xxx")
assert not exists("test_xxx")
if __name__ == "__main__":
test()
| 29.687898 | 115 | 0.620253 |
7955d53070d2ec297ad76b40bb2c5c2828870132 | 710 | py | Python | covsirphy/util/file.py | rebeccadavidsson/covid19-sir | ca7a408c5fcf87e4857edd14a9276cae0b6737cf | [
"Apache-2.0"
] | null | null | null | covsirphy/util/file.py | rebeccadavidsson/covid19-sir | ca7a408c5fcf87e4857edd14a9276cae0b6737cf | [
"Apache-2.0"
] | 100 | 2021-02-06T17:50:15.000Z | 2022-03-31T20:30:49.000Z | covsirphy/util/file.py | ardhani31/Covid19-SIRV | 813bc66f668a3d2945dc97474ea1149bbc6e40c2 | [
"Apache-2.0"
] | 1 | 2021-09-18T12:36:11.000Z | 2021-09-18T12:36:11.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from pathlib import Path
import pandas as pd
def save_dataframe(df, filename, index=True):
"""
Save dataframe as a CSV file.
Args:
df (pd.DataFrame): the dataframe
filename (str or None): CSV filename
index (bool): if True, include index column.
Note:
If @filename is None or OSError was raised, the dataframe will not be saved.
"""
if not isinstance(df, pd.DataFrame):
raise TypeError(
f"@df should be a pandas.DataFrame, but {type(df)} was applied.")
try:
filepath = Path(filename)
df.to_csv(filepath, index=index)
except (TypeError, OSError):
pass
| 25.357143 | 84 | 0.616901 |
7955d5f394e100d05ff7152909d3344689516032 | 1,311 | py | Python | dis_snek/utils/attr_utils.py | Toricane/Dis-Snek | e70710ead89ffb259b5c00ef7dfd896fbd0f777f | [
"MIT"
] | null | null | null | dis_snek/utils/attr_utils.py | Toricane/Dis-Snek | e70710ead89ffb259b5c00ef7dfd896fbd0f777f | [
"MIT"
] | null | null | null | dis_snek/utils/attr_utils.py | Toricane/Dis-Snek | e70710ead89ffb259b5c00ef7dfd896fbd0f777f | [
"MIT"
] | null | null | null | import logging
from functools import partial
from typing import Any, Dict
import attr
from dis_snek.const import logger_name, MISSING
log = logging.getLogger(logger_name)
class_defaults = dict(
eq=False,
order=False,
hash=False,
slots=True,
kw_only=True,
on_setattr=[attr.setters.convert, attr.setters.validate],
)
field_defaults = dict(repr=False)
define = partial(attr.define, **class_defaults) # type: ignore
field = partial(attr.field, **field_defaults)
def copy_converter(value):
if isinstance(value, (list, set)):
return value.copy()
return value
def docs(doc_string: str) -> Dict[str, str]:
"""Makes it easier to quickly type attr documentation"""
return {"docs": doc_string}
# def converter(attribute):
# def decorator(func):
# attribute.converter = func
# return staticmethod(func)
#
# return decorator
def str_validator(self, attribute: attr.Attribute, value: Any):
if not isinstance(value, str):
if value is MISSING:
return
setattr(self, attribute.name, str(value))
log.warning(
f"Value of {attribute.name} has been automatically converted to a string. Please use strings in future.\n"
"Note: Discord will always return value as a string"
)
| 24.277778 | 118 | 0.67582 |
7955d69163b928e9358407768e3cf1287c6f0343 | 2,962 | py | Python | src/dev/serial/Uart.py | He-Liu-ooo/Computer-Architecture-THUEE-2022-spring- | 9d36aaacbc7eea357608524113bec97bae2ea229 | [
"BSD-3-Clause"
] | 4 | 2020-12-11T18:48:36.000Z | 2021-11-08T20:11:51.000Z | src/dev/serial/Uart.py | He-Liu-ooo/Computer-Architecture-THUEE-2022-spring- | 9d36aaacbc7eea357608524113bec97bae2ea229 | [
"BSD-3-Clause"
] | 3 | 2021-03-26T20:33:59.000Z | 2022-01-24T22:54:03.000Z | src/dev/serial/Uart.py | He-Liu-ooo/Computer-Architecture-THUEE-2022-spring- | 9d36aaacbc7eea357608524113bec97bae2ea229 | [
"BSD-3-Clause"
] | 3 | 2021-03-27T16:36:19.000Z | 2022-03-28T18:32:57.000Z | # Copyright (c) 2018 ARM Limited
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Copyright (c) 2005-2007 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from m5.params import *
from m5.proxy import *
from m5.objects.Device import BasicPioDevice
from m5.objects.Serial import SerialDevice
class Uart(BasicPioDevice):
type = 'Uart'
abstract = True
cxx_header = "dev/serial/uart.hh"
platform = Param.Platform(Parent.any, "Platform this device is part of.")
device = Param.SerialDevice(Parent.any, "The terminal")
class SimpleUart(Uart):
type = 'SimpleUart'
cxx_header = "dev/serial/simple.hh"
byte_order = Param.ByteOrder("little", "Device byte order")
pio_size = Param.Addr(0x4, "Size of address range")
end_on_eot = Param.Bool(False, "End the simulation when a EOT is "\
"received on the UART")
class Uart8250(Uart):
type = 'Uart8250'
cxx_header = "dev/serial/uart8250.hh"
| 47.015873 | 77 | 0.765699 |
7955d6d9ce65e73a77686af068525ec9fdfa32eb | 662 | py | Python | library/migrations/0006_auto_20181121_1718.py | kairon-v/tppe-ws | 626d298b99c5ac0f6e9b8f3b1bbfa8ed238db16c | [
"MIT"
] | null | null | null | library/migrations/0006_auto_20181121_1718.py | kairon-v/tppe-ws | 626d298b99c5ac0f6e9b8f3b1bbfa8ed238db16c | [
"MIT"
] | null | null | null | library/migrations/0006_auto_20181121_1718.py | kairon-v/tppe-ws | 626d298b99c5ac0f6e9b8f3b1bbfa8ed238db16c | [
"MIT"
] | null | null | null | # Generated by Django 2.1.3 on 2018-11-21 17:18
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('library', '0005_bookloan'),
]
operations = [
migrations.AlterField(
model_name='bookloan',
name='book',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='library.Book'),
),
migrations.AlterField(
model_name='bookloan',
name='user',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='library.User'),
),
]
| 26.48 | 100 | 0.614804 |
7955d79c3ca949ba63040b06f82d1d7e28a265b1 | 3,436 | py | Python | Sthima/settings.py | Diorgeles/Sthima | d83b545d9a65a8f792fc8fe39a12ee1a4e5b85c4 | [
"CC0-1.0"
] | null | null | null | Sthima/settings.py | Diorgeles/Sthima | d83b545d9a65a8f792fc8fe39a12ee1a4e5b85c4 | [
"CC0-1.0"
] | null | null | null | Sthima/settings.py | Diorgeles/Sthima | d83b545d9a65a8f792fc8fe39a12ee1a4e5b85c4 | [
"CC0-1.0"
] | null | null | null | """
Django settings for Sthima project.
Generated by 'django-admin startproject' using Django 1.11.1.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
import dj_database_url
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
PROJECT_ROOT = os.path.dirname(os.path.abspath(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '^dd65_r*pcz$vsfpeeybiti2%ooyodx8c@1f5(1t(h)65ex6l1'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'todo_list',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'Sthima.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'Sthima.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'pt-br'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
db_from_env = dj_database_url.config(conn_max_age=500)
DATABASES['default'].update(db_from_env)
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
ALLOWED_HOSTS = ['*']
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'staticfiles') | 26.229008 | 91 | 0.705471 |
7955d869bfcae25def3963a5c4f5140f961beee9 | 38,899 | py | Python | nova/api/openstack/__init__.py | bopopescu/nova-token | ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2 | [
"Apache-2.0"
] | null | null | null | nova/api/openstack/__init__.py | bopopescu/nova-token | ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2 | [
"Apache-2.0"
] | null | null | null | nova/api/openstack/__init__.py | bopopescu/nova-token | ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2 | [
"Apache-2.0"
] | 2 | 2017-07-20T17:31:34.000Z | 2020-07-24T02:42:19.000Z | begin_unit
comment|'# Copyright 2010 United States Government as represented by the'
nl|'\n'
comment|'# Administrator of the National Aeronautics and Space Administration.'
nl|'\n'
comment|'# All Rights Reserved.'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# Licensed under the Apache License, Version 2.0 (the "License"); you may'
nl|'\n'
comment|'# not use this file except in compliance with the License. You may obtain'
nl|'\n'
comment|'# a copy of the License at'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# http://www.apache.org/licenses/LICENSE-2.0'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# Unless required by applicable law or agreed to in writing, software'
nl|'\n'
comment|'# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT'
nl|'\n'
comment|'# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the'
nl|'\n'
comment|'# License for the specific language governing permissions and limitations'
nl|'\n'
comment|'# under the License.'
nl|'\n'
nl|'\n'
string|'"""\nWSGI middleware for OpenStack API controllers.\n"""'
newline|'\n'
nl|'\n'
name|'from'
name|'oslo_log'
name|'import'
name|'log'
name|'as'
name|'logging'
newline|'\n'
name|'import'
name|'routes'
newline|'\n'
name|'import'
name|'six'
newline|'\n'
name|'import'
name|'stevedore'
newline|'\n'
name|'import'
name|'webob'
op|'.'
name|'dec'
newline|'\n'
name|'import'
name|'webob'
op|'.'
name|'exc'
newline|'\n'
nl|'\n'
name|'from'
name|'nova'
op|'.'
name|'api'
op|'.'
name|'openstack'
name|'import'
name|'extensions'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'api'
op|'.'
name|'openstack'
name|'import'
name|'wsgi'
newline|'\n'
name|'import'
name|'nova'
op|'.'
name|'conf'
newline|'\n'
name|'from'
name|'nova'
name|'import'
name|'exception'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'i18n'
name|'import'
name|'_'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'i18n'
name|'import'
name|'_LC'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'i18n'
name|'import'
name|'_LE'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'i18n'
name|'import'
name|'_LI'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'i18n'
name|'import'
name|'_LW'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'i18n'
name|'import'
name|'translate'
newline|'\n'
name|'from'
name|'nova'
name|'import'
name|'notifications'
newline|'\n'
name|'from'
name|'nova'
name|'import'
name|'utils'
newline|'\n'
name|'from'
name|'nova'
name|'import'
name|'wsgi'
name|'as'
name|'base_wsgi'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|variable|LOG
name|'LOG'
op|'='
name|'logging'
op|'.'
name|'getLogger'
op|'('
name|'__name__'
op|')'
newline|'\n'
DECL|variable|CONF
name|'CONF'
op|'='
name|'nova'
op|'.'
name|'conf'
op|'.'
name|'CONF'
newline|'\n'
nl|'\n'
comment|'# List of v21 API extensions which are considered to form'
nl|'\n'
comment|'# the core API and so must be present'
nl|'\n'
comment|'# TODO(cyeoh): Expand this list as the core APIs are ported to v21'
nl|'\n'
DECL|variable|API_V21_CORE_EXTENSIONS
name|'API_V21_CORE_EXTENSIONS'
op|'='
name|'set'
op|'('
op|'['
string|"'os-consoles'"
op|','
nl|'\n'
string|"'extensions'"
op|','
nl|'\n'
string|"'os-flavor-extra-specs'"
op|','
nl|'\n'
string|"'os-flavor-manage'"
op|','
nl|'\n'
string|"'flavors'"
op|','
nl|'\n'
string|"'ips'"
op|','
nl|'\n'
string|"'os-keypairs'"
op|','
nl|'\n'
string|"'os-flavor-access'"
op|','
nl|'\n'
string|"'server-metadata'"
op|','
nl|'\n'
string|"'servers'"
op|','
nl|'\n'
string|"'versions'"
op|']'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|FaultWrapper
name|'class'
name|'FaultWrapper'
op|'('
name|'base_wsgi'
op|'.'
name|'Middleware'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Calls down the middleware stack, making exceptions into faults."""'
newline|'\n'
nl|'\n'
DECL|variable|_status_to_type
name|'_status_to_type'
op|'='
op|'{'
op|'}'
newline|'\n'
nl|'\n'
op|'@'
name|'staticmethod'
newline|'\n'
DECL|member|status_to_type
name|'def'
name|'status_to_type'
op|'('
name|'status'
op|')'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'not'
name|'FaultWrapper'
op|'.'
name|'_status_to_type'
op|':'
newline|'\n'
indent|' '
name|'for'
name|'clazz'
name|'in'
name|'utils'
op|'.'
name|'walk_class_hierarchy'
op|'('
name|'webob'
op|'.'
name|'exc'
op|'.'
name|'HTTPError'
op|')'
op|':'
newline|'\n'
indent|' '
name|'FaultWrapper'
op|'.'
name|'_status_to_type'
op|'['
name|'clazz'
op|'.'
name|'code'
op|']'
op|'='
name|'clazz'
newline|'\n'
dedent|''
dedent|''
name|'return'
name|'FaultWrapper'
op|'.'
name|'_status_to_type'
op|'.'
name|'get'
op|'('
nl|'\n'
name|'status'
op|','
name|'webob'
op|'.'
name|'exc'
op|'.'
name|'HTTPInternalServerError'
op|')'
op|'('
op|')'
newline|'\n'
nl|'\n'
DECL|member|_error
dedent|''
name|'def'
name|'_error'
op|'('
name|'self'
op|','
name|'inner'
op|','
name|'req'
op|')'
op|':'
newline|'\n'
indent|' '
name|'LOG'
op|'.'
name|'exception'
op|'('
name|'_LE'
op|'('
string|'"Caught error: %s"'
op|')'
op|','
name|'six'
op|'.'
name|'text_type'
op|'('
name|'inner'
op|')'
op|')'
newline|'\n'
nl|'\n'
name|'safe'
op|'='
name|'getattr'
op|'('
name|'inner'
op|','
string|"'safe'"
op|','
name|'False'
op|')'
newline|'\n'
name|'headers'
op|'='
name|'getattr'
op|'('
name|'inner'
op|','
string|"'headers'"
op|','
name|'None'
op|')'
newline|'\n'
name|'status'
op|'='
name|'getattr'
op|'('
name|'inner'
op|','
string|"'code'"
op|','
number|'500'
op|')'
newline|'\n'
name|'if'
name|'status'
name|'is'
name|'None'
op|':'
newline|'\n'
indent|' '
name|'status'
op|'='
number|'500'
newline|'\n'
nl|'\n'
dedent|''
name|'msg_dict'
op|'='
name|'dict'
op|'('
name|'url'
op|'='
name|'req'
op|'.'
name|'url'
op|','
name|'status'
op|'='
name|'status'
op|')'
newline|'\n'
name|'LOG'
op|'.'
name|'info'
op|'('
name|'_LI'
op|'('
string|'"%(url)s returned with HTTP %(status)d"'
op|')'
op|','
name|'msg_dict'
op|')'
newline|'\n'
name|'outer'
op|'='
name|'self'
op|'.'
name|'status_to_type'
op|'('
name|'status'
op|')'
newline|'\n'
name|'if'
name|'headers'
op|':'
newline|'\n'
indent|' '
name|'outer'
op|'.'
name|'headers'
op|'='
name|'headers'
newline|'\n'
comment|'# NOTE(johannes): We leave the explanation empty here on'
nl|'\n'
comment|'# purpose. It could possibly have sensitive information'
nl|'\n'
comment|'# that should not be returned back to the user. See'
nl|'\n'
comment|'# bugs 868360 and 874472'
nl|'\n'
comment|'# NOTE(eglynn): However, it would be over-conservative and'
nl|'\n'
comment|'# inconsistent with the EC2 API to hide every exception,'
nl|'\n'
comment|'# including those that are safe to expose, see bug 1021373'
nl|'\n'
dedent|''
name|'if'
name|'safe'
op|':'
newline|'\n'
indent|' '
name|'user_locale'
op|'='
name|'req'
op|'.'
name|'best_match_language'
op|'('
op|')'
newline|'\n'
name|'inner_msg'
op|'='
name|'translate'
op|'('
name|'inner'
op|'.'
name|'message'
op|','
name|'user_locale'
op|')'
newline|'\n'
name|'outer'
op|'.'
name|'explanation'
op|'='
string|"'%s: %s'"
op|'%'
op|'('
name|'inner'
op|'.'
name|'__class__'
op|'.'
name|'__name__'
op|','
nl|'\n'
name|'inner_msg'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'notifications'
op|'.'
name|'send_api_fault'
op|'('
name|'req'
op|'.'
name|'url'
op|','
name|'status'
op|','
name|'inner'
op|')'
newline|'\n'
name|'return'
name|'wsgi'
op|'.'
name|'Fault'
op|'('
name|'outer'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'webob'
op|'.'
name|'dec'
op|'.'
name|'wsgify'
op|'('
name|'RequestClass'
op|'='
name|'wsgi'
op|'.'
name|'Request'
op|')'
newline|'\n'
DECL|member|__call__
name|'def'
name|'__call__'
op|'('
name|'self'
op|','
name|'req'
op|')'
op|':'
newline|'\n'
indent|' '
name|'try'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'req'
op|'.'
name|'get_response'
op|'('
name|'self'
op|'.'
name|'application'
op|')'
newline|'\n'
dedent|''
name|'except'
name|'Exception'
name|'as'
name|'ex'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'self'
op|'.'
name|'_error'
op|'('
name|'ex'
op|','
name|'req'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|LegacyV2CompatibleWrapper
dedent|''
dedent|''
dedent|''
name|'class'
name|'LegacyV2CompatibleWrapper'
op|'('
name|'base_wsgi'
op|'.'
name|'Middleware'
op|')'
op|':'
newline|'\n'
nl|'\n'
DECL|member|_filter_request_headers
indent|' '
name|'def'
name|'_filter_request_headers'
op|'('
name|'self'
op|','
name|'req'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""For keeping same behavior with v2 API, ignores microversions\n HTTP header X-OpenStack-Nova-API-Version in the request.\n """'
newline|'\n'
nl|'\n'
name|'if'
name|'wsgi'
op|'.'
name|'API_VERSION_REQUEST_HEADER'
name|'in'
name|'req'
op|'.'
name|'headers'
op|':'
newline|'\n'
indent|' '
name|'del'
name|'req'
op|'.'
name|'headers'
op|'['
name|'wsgi'
op|'.'
name|'API_VERSION_REQUEST_HEADER'
op|']'
newline|'\n'
dedent|''
name|'return'
name|'req'
newline|'\n'
nl|'\n'
DECL|member|_filter_response_headers
dedent|''
name|'def'
name|'_filter_response_headers'
op|'('
name|'self'
op|','
name|'response'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""For keeping same behavior with v2 API, filter out microversions\n HTTP header and microversions field in header \'Vary\'.\n """'
newline|'\n'
nl|'\n'
name|'if'
name|'wsgi'
op|'.'
name|'API_VERSION_REQUEST_HEADER'
name|'in'
name|'response'
op|'.'
name|'headers'
op|':'
newline|'\n'
indent|' '
name|'del'
name|'response'
op|'.'
name|'headers'
op|'['
name|'wsgi'
op|'.'
name|'API_VERSION_REQUEST_HEADER'
op|']'
newline|'\n'
nl|'\n'
dedent|''
name|'if'
string|"'Vary'"
name|'in'
name|'response'
op|'.'
name|'headers'
op|':'
newline|'\n'
indent|' '
name|'vary_headers'
op|'='
name|'response'
op|'.'
name|'headers'
op|'['
string|"'Vary'"
op|']'
op|'.'
name|'split'
op|'('
string|"','"
op|')'
newline|'\n'
name|'filtered_vary'
op|'='
op|'['
op|']'
newline|'\n'
name|'for'
name|'vary'
name|'in'
name|'vary_headers'
op|':'
newline|'\n'
indent|' '
name|'vary'
op|'='
name|'vary'
op|'.'
name|'strip'
op|'('
op|')'
newline|'\n'
name|'if'
name|'vary'
op|'=='
name|'wsgi'
op|'.'
name|'API_VERSION_REQUEST_HEADER'
op|':'
newline|'\n'
indent|' '
name|'continue'
newline|'\n'
dedent|''
name|'filtered_vary'
op|'.'
name|'append'
op|'('
name|'vary'
op|')'
newline|'\n'
dedent|''
name|'if'
name|'filtered_vary'
op|':'
newline|'\n'
indent|' '
name|'response'
op|'.'
name|'headers'
op|'['
string|"'Vary'"
op|']'
op|'='
string|"','"
op|'.'
name|'join'
op|'('
name|'filtered_vary'
op|')'
newline|'\n'
dedent|''
name|'else'
op|':'
newline|'\n'
indent|' '
name|'del'
name|'response'
op|'.'
name|'headers'
op|'['
string|"'Vary'"
op|']'
newline|'\n'
dedent|''
dedent|''
name|'return'
name|'response'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'webob'
op|'.'
name|'dec'
op|'.'
name|'wsgify'
op|'('
name|'RequestClass'
op|'='
name|'wsgi'
op|'.'
name|'Request'
op|')'
newline|'\n'
DECL|member|__call__
name|'def'
name|'__call__'
op|'('
name|'self'
op|','
name|'req'
op|')'
op|':'
newline|'\n'
indent|' '
name|'req'
op|'.'
name|'set_legacy_v2'
op|'('
op|')'
newline|'\n'
name|'req'
op|'='
name|'self'
op|'.'
name|'_filter_request_headers'
op|'('
name|'req'
op|')'
newline|'\n'
name|'response'
op|'='
name|'req'
op|'.'
name|'get_response'
op|'('
name|'self'
op|'.'
name|'application'
op|')'
newline|'\n'
name|'return'
name|'self'
op|'.'
name|'_filter_response_headers'
op|'('
name|'response'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|APIMapper
dedent|''
dedent|''
name|'class'
name|'APIMapper'
op|'('
name|'routes'
op|'.'
name|'Mapper'
op|')'
op|':'
newline|'\n'
DECL|member|routematch
indent|' '
name|'def'
name|'routematch'
op|'('
name|'self'
op|','
name|'url'
op|'='
name|'None'
op|','
name|'environ'
op|'='
name|'None'
op|')'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'url'
op|'=='
string|'""'
op|':'
newline|'\n'
indent|' '
name|'result'
op|'='
name|'self'
op|'.'
name|'_match'
op|'('
string|'""'
op|','
name|'environ'
op|')'
newline|'\n'
name|'return'
name|'result'
op|'['
number|'0'
op|']'
op|','
name|'result'
op|'['
number|'1'
op|']'
newline|'\n'
dedent|''
name|'return'
name|'routes'
op|'.'
name|'Mapper'
op|'.'
name|'routematch'
op|'('
name|'self'
op|','
name|'url'
op|','
name|'environ'
op|')'
newline|'\n'
nl|'\n'
DECL|member|connect
dedent|''
name|'def'
name|'connect'
op|'('
name|'self'
op|','
op|'*'
name|'args'
op|','
op|'**'
name|'kargs'
op|')'
op|':'
newline|'\n'
comment|'# NOTE(vish): Default the format part of a route to only accept json'
nl|'\n'
comment|"# and xml so it doesn't eat all characters after a '.'"
nl|'\n'
comment|'# in the url.'
nl|'\n'
indent|' '
name|'kargs'
op|'.'
name|'setdefault'
op|'('
string|"'requirements'"
op|','
op|'{'
op|'}'
op|')'
newline|'\n'
name|'if'
name|'not'
name|'kargs'
op|'['
string|"'requirements'"
op|']'
op|'.'
name|'get'
op|'('
string|"'format'"
op|')'
op|':'
newline|'\n'
indent|' '
name|'kargs'
op|'['
string|"'requirements'"
op|']'
op|'['
string|"'format'"
op|']'
op|'='
string|"'json|xml'"
newline|'\n'
dedent|''
name|'return'
name|'routes'
op|'.'
name|'Mapper'
op|'.'
name|'connect'
op|'('
name|'self'
op|','
op|'*'
name|'args'
op|','
op|'**'
name|'kargs'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|ProjectMapper
dedent|''
dedent|''
name|'class'
name|'ProjectMapper'
op|'('
name|'APIMapper'
op|')'
op|':'
newline|'\n'
DECL|member|resource
indent|' '
name|'def'
name|'resource'
op|'('
name|'self'
op|','
name|'member_name'
op|','
name|'collection_name'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
comment|'# NOTE(sdague): project_id parameter is only valid if its hex'
nl|'\n'
comment|'# or hex + dashes (note, integers are a subset of this). This'
nl|'\n'
comment|'# is required to hand our overlaping routes issues.'
nl|'\n'
indent|' '
name|'project_id_regex'
op|'='
string|"'[0-9a-f\\-]+'"
newline|'\n'
name|'if'
name|'CONF'
op|'.'
name|'osapi_v21'
op|'.'
name|'project_id_regex'
op|':'
newline|'\n'
indent|' '
name|'project_id_regex'
op|'='
name|'CONF'
op|'.'
name|'osapi_v21'
op|'.'
name|'project_id_regex'
newline|'\n'
nl|'\n'
dedent|''
name|'project_id_token'
op|'='
string|"'{project_id:%s}'"
op|'%'
name|'project_id_regex'
newline|'\n'
name|'if'
string|"'parent_resource'"
name|'not'
name|'in'
name|'kwargs'
op|':'
newline|'\n'
indent|' '
name|'kwargs'
op|'['
string|"'path_prefix'"
op|']'
op|'='
string|"'%s/'"
op|'%'
name|'project_id_token'
newline|'\n'
dedent|''
name|'else'
op|':'
newline|'\n'
indent|' '
name|'parent_resource'
op|'='
name|'kwargs'
op|'['
string|"'parent_resource'"
op|']'
newline|'\n'
name|'p_collection'
op|'='
name|'parent_resource'
op|'['
string|"'collection_name'"
op|']'
newline|'\n'
name|'p_member'
op|'='
name|'parent_resource'
op|'['
string|"'member_name'"
op|']'
newline|'\n'
name|'kwargs'
op|'['
string|"'path_prefix'"
op|']'
op|'='
string|"'%s/%s/:%s_id'"
op|'%'
op|'('
nl|'\n'
name|'project_id_token'
op|','
nl|'\n'
name|'p_collection'
op|','
nl|'\n'
name|'p_member'
op|')'
newline|'\n'
dedent|''
name|'routes'
op|'.'
name|'Mapper'
op|'.'
name|'resource'
op|'('
nl|'\n'
name|'self'
op|','
nl|'\n'
name|'member_name'
op|','
nl|'\n'
name|'collection_name'
op|','
nl|'\n'
op|'**'
name|'kwargs'
op|')'
newline|'\n'
nl|'\n'
comment|'# while we are in transition mode, create additional routes'
nl|'\n'
comment|'# for the resource that do not include project_id.'
nl|'\n'
name|'if'
string|"'parent_resource'"
name|'not'
name|'in'
name|'kwargs'
op|':'
newline|'\n'
indent|' '
name|'del'
name|'kwargs'
op|'['
string|"'path_prefix'"
op|']'
newline|'\n'
dedent|''
name|'else'
op|':'
newline|'\n'
indent|' '
name|'parent_resource'
op|'='
name|'kwargs'
op|'['
string|"'parent_resource'"
op|']'
newline|'\n'
name|'p_collection'
op|'='
name|'parent_resource'
op|'['
string|"'collection_name'"
op|']'
newline|'\n'
name|'p_member'
op|'='
name|'parent_resource'
op|'['
string|"'member_name'"
op|']'
newline|'\n'
name|'kwargs'
op|'['
string|"'path_prefix'"
op|']'
op|'='
string|"'%s/:%s_id'"
op|'%'
op|'('
name|'p_collection'
op|','
nl|'\n'
name|'p_member'
op|')'
newline|'\n'
dedent|''
name|'routes'
op|'.'
name|'Mapper'
op|'.'
name|'resource'
op|'('
name|'self'
op|','
name|'member_name'
op|','
nl|'\n'
name|'collection_name'
op|','
nl|'\n'
op|'**'
name|'kwargs'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|PlainMapper
dedent|''
dedent|''
name|'class'
name|'PlainMapper'
op|'('
name|'APIMapper'
op|')'
op|':'
newline|'\n'
DECL|member|resource
indent|' '
name|'def'
name|'resource'
op|'('
name|'self'
op|','
name|'member_name'
op|','
name|'collection_name'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'if'
string|"'parent_resource'"
name|'in'
name|'kwargs'
op|':'
newline|'\n'
indent|' '
name|'parent_resource'
op|'='
name|'kwargs'
op|'['
string|"'parent_resource'"
op|']'
newline|'\n'
name|'p_collection'
op|'='
name|'parent_resource'
op|'['
string|"'collection_name'"
op|']'
newline|'\n'
name|'p_member'
op|'='
name|'parent_resource'
op|'['
string|"'member_name'"
op|']'
newline|'\n'
name|'kwargs'
op|'['
string|"'path_prefix'"
op|']'
op|'='
string|"'%s/:%s_id'"
op|'%'
op|'('
name|'p_collection'
op|','
name|'p_member'
op|')'
newline|'\n'
dedent|''
name|'routes'
op|'.'
name|'Mapper'
op|'.'
name|'resource'
op|'('
name|'self'
op|','
name|'member_name'
op|','
nl|'\n'
name|'collection_name'
op|','
nl|'\n'
op|'**'
name|'kwargs'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|APIRouter
dedent|''
dedent|''
name|'class'
name|'APIRouter'
op|'('
name|'base_wsgi'
op|'.'
name|'Router'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Routes requests on the OpenStack API to the appropriate controller\n and method.\n """'
newline|'\n'
DECL|variable|ExtensionManager
name|'ExtensionManager'
op|'='
name|'None'
comment|'# override in subclasses'
newline|'\n'
nl|'\n'
op|'@'
name|'classmethod'
newline|'\n'
DECL|member|factory
name|'def'
name|'factory'
op|'('
name|'cls'
op|','
name|'global_config'
op|','
op|'**'
name|'local_config'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Simple paste factory, :class:`nova.wsgi.Router` doesn\'t have one."""'
newline|'\n'
name|'return'
name|'cls'
op|'('
op|')'
newline|'\n'
nl|'\n'
DECL|member|__init__
dedent|''
name|'def'
name|'__init__'
op|'('
name|'self'
op|','
name|'ext_mgr'
op|'='
name|'None'
op|','
name|'init_only'
op|'='
name|'None'
op|')'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'ext_mgr'
name|'is'
name|'None'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'self'
op|'.'
name|'ExtensionManager'
op|':'
newline|'\n'
indent|' '
name|'ext_mgr'
op|'='
name|'self'
op|'.'
name|'ExtensionManager'
op|'('
op|')'
newline|'\n'
dedent|''
name|'else'
op|':'
newline|'\n'
indent|' '
name|'raise'
name|'Exception'
op|'('
name|'_'
op|'('
string|'"Must specify an ExtensionManager class"'
op|')'
op|')'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
name|'mapper'
op|'='
name|'ProjectMapper'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'resources'
op|'='
op|'{'
op|'}'
newline|'\n'
name|'self'
op|'.'
name|'_setup_routes'
op|'('
name|'mapper'
op|','
name|'ext_mgr'
op|','
name|'init_only'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'_setup_ext_routes'
op|'('
name|'mapper'
op|','
name|'ext_mgr'
op|','
name|'init_only'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'_setup_extensions'
op|'('
name|'ext_mgr'
op|')'
newline|'\n'
name|'super'
op|'('
name|'APIRouter'
op|','
name|'self'
op|')'
op|'.'
name|'__init__'
op|'('
name|'mapper'
op|')'
newline|'\n'
nl|'\n'
DECL|member|_setup_ext_routes
dedent|''
name|'def'
name|'_setup_ext_routes'
op|'('
name|'self'
op|','
name|'mapper'
op|','
name|'ext_mgr'
op|','
name|'init_only'
op|')'
op|':'
newline|'\n'
indent|' '
name|'for'
name|'resource'
name|'in'
name|'ext_mgr'
op|'.'
name|'get_resources'
op|'('
op|')'
op|':'
newline|'\n'
indent|' '
name|'LOG'
op|'.'
name|'debug'
op|'('
string|"'Extending resource: %s'"
op|','
nl|'\n'
name|'resource'
op|'.'
name|'collection'
op|')'
newline|'\n'
nl|'\n'
name|'if'
name|'init_only'
name|'is'
name|'not'
name|'None'
name|'and'
name|'resource'
op|'.'
name|'collection'
name|'not'
name|'in'
name|'init_only'
op|':'
newline|'\n'
indent|' '
name|'continue'
newline|'\n'
nl|'\n'
dedent|''
name|'inherits'
op|'='
name|'None'
newline|'\n'
name|'if'
name|'resource'
op|'.'
name|'inherits'
op|':'
newline|'\n'
indent|' '
name|'inherits'
op|'='
name|'self'
op|'.'
name|'resources'
op|'.'
name|'get'
op|'('
name|'resource'
op|'.'
name|'inherits'
op|')'
newline|'\n'
name|'if'
name|'not'
name|'resource'
op|'.'
name|'controller'
op|':'
newline|'\n'
indent|' '
name|'resource'
op|'.'
name|'controller'
op|'='
name|'inherits'
op|'.'
name|'controller'
newline|'\n'
dedent|''
dedent|''
name|'wsgi_resource'
op|'='
name|'wsgi'
op|'.'
name|'Resource'
op|'('
name|'resource'
op|'.'
name|'controller'
op|','
nl|'\n'
name|'inherits'
op|'='
name|'inherits'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'resources'
op|'['
name|'resource'
op|'.'
name|'collection'
op|']'
op|'='
name|'wsgi_resource'
newline|'\n'
name|'kargs'
op|'='
name|'dict'
op|'('
nl|'\n'
name|'controller'
op|'='
name|'wsgi_resource'
op|','
nl|'\n'
name|'collection'
op|'='
name|'resource'
op|'.'
name|'collection_actions'
op|','
nl|'\n'
name|'member'
op|'='
name|'resource'
op|'.'
name|'member_actions'
op|')'
newline|'\n'
nl|'\n'
name|'if'
name|'resource'
op|'.'
name|'parent'
op|':'
newline|'\n'
indent|' '
name|'kargs'
op|'['
string|"'parent_resource'"
op|']'
op|'='
name|'resource'
op|'.'
name|'parent'
newline|'\n'
nl|'\n'
dedent|''
name|'mapper'
op|'.'
name|'resource'
op|'('
name|'resource'
op|'.'
name|'collection'
op|','
name|'resource'
op|'.'
name|'collection'
op|','
op|'**'
name|'kargs'
op|')'
newline|'\n'
nl|'\n'
name|'if'
name|'resource'
op|'.'
name|'custom_routes_fn'
op|':'
newline|'\n'
indent|' '
name|'resource'
op|'.'
name|'custom_routes_fn'
op|'('
name|'mapper'
op|','
name|'wsgi_resource'
op|')'
newline|'\n'
nl|'\n'
DECL|member|_setup_extensions
dedent|''
dedent|''
dedent|''
name|'def'
name|'_setup_extensions'
op|'('
name|'self'
op|','
name|'ext_mgr'
op|')'
op|':'
newline|'\n'
indent|' '
name|'for'
name|'extension'
name|'in'
name|'ext_mgr'
op|'.'
name|'get_controller_extensions'
op|'('
op|')'
op|':'
newline|'\n'
indent|' '
name|'collection'
op|'='
name|'extension'
op|'.'
name|'collection'
newline|'\n'
name|'controller'
op|'='
name|'extension'
op|'.'
name|'controller'
newline|'\n'
nl|'\n'
name|'msg_format_dict'
op|'='
op|'{'
string|"'collection'"
op|':'
name|'collection'
op|','
nl|'\n'
string|"'ext_name'"
op|':'
name|'extension'
op|'.'
name|'extension'
op|'.'
name|'name'
op|'}'
newline|'\n'
name|'if'
name|'collection'
name|'not'
name|'in'
name|'self'
op|'.'
name|'resources'
op|':'
newline|'\n'
indent|' '
name|'LOG'
op|'.'
name|'warning'
op|'('
name|'_LW'
op|'('
string|"'Extension %(ext_name)s: Cannot extend '"
nl|'\n'
string|"'resource %(collection)s: No such resource'"
op|')'
op|','
nl|'\n'
name|'msg_format_dict'
op|')'
newline|'\n'
name|'continue'
newline|'\n'
nl|'\n'
dedent|''
name|'LOG'
op|'.'
name|'debug'
op|'('
string|"'Extension %(ext_name)s extended resource: '"
nl|'\n'
string|"'%(collection)s'"
op|','
nl|'\n'
name|'msg_format_dict'
op|')'
newline|'\n'
nl|'\n'
name|'resource'
op|'='
name|'self'
op|'.'
name|'resources'
op|'['
name|'collection'
op|']'
newline|'\n'
name|'resource'
op|'.'
name|'register_actions'
op|'('
name|'controller'
op|')'
newline|'\n'
name|'resource'
op|'.'
name|'register_extensions'
op|'('
name|'controller'
op|')'
newline|'\n'
nl|'\n'
DECL|member|_setup_routes
dedent|''
dedent|''
name|'def'
name|'_setup_routes'
op|'('
name|'self'
op|','
name|'mapper'
op|','
name|'ext_mgr'
op|','
name|'init_only'
op|')'
op|':'
newline|'\n'
indent|' '
name|'raise'
name|'NotImplementedError'
op|'('
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|APIRouterV21
dedent|''
dedent|''
name|'class'
name|'APIRouterV21'
op|'('
name|'base_wsgi'
op|'.'
name|'Router'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Routes requests on the OpenStack v2.1 API to the appropriate controller\n and method.\n """'
newline|'\n'
nl|'\n'
op|'@'
name|'classmethod'
newline|'\n'
DECL|member|factory
name|'def'
name|'factory'
op|'('
name|'cls'
op|','
name|'global_config'
op|','
op|'**'
name|'local_config'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Simple paste factory, :class:`nova.wsgi.Router` doesn\'t have one."""'
newline|'\n'
name|'return'
name|'cls'
op|'('
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'staticmethod'
newline|'\n'
DECL|member|api_extension_namespace
name|'def'
name|'api_extension_namespace'
op|'('
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
string|"'nova.api.v21.extensions'"
newline|'\n'
nl|'\n'
DECL|member|__init__
dedent|''
name|'def'
name|'__init__'
op|'('
name|'self'
op|','
name|'init_only'
op|'='
name|'None'
op|')'
op|':'
newline|'\n'
comment|'# TODO(cyeoh): bp v3-api-extension-framework. Currently load'
nl|'\n'
comment|'# all extensions but eventually should be able to exclude'
nl|'\n'
comment|'# based on a config file'
nl|'\n'
DECL|function|_check_load_extension
indent|' '
name|'def'
name|'_check_load_extension'
op|'('
name|'ext'
op|')'
op|':'
newline|'\n'
indent|' '
name|'if'
op|'('
name|'self'
op|'.'
name|'init_only'
name|'is'
name|'None'
name|'or'
name|'ext'
op|'.'
name|'obj'
op|'.'
name|'alias'
name|'in'
nl|'\n'
name|'self'
op|'.'
name|'init_only'
op|')'
name|'and'
name|'isinstance'
op|'('
name|'ext'
op|'.'
name|'obj'
op|','
nl|'\n'
name|'extensions'
op|'.'
name|'V21APIExtensionBase'
op|')'
op|':'
newline|'\n'
nl|'\n'
comment|'# Check whitelist is either empty or if not then the extension'
nl|'\n'
comment|'# is in the whitelist'
nl|'\n'
indent|' '
name|'if'
op|'('
name|'not'
name|'CONF'
op|'.'
name|'osapi_v21'
op|'.'
name|'extensions_whitelist'
name|'or'
nl|'\n'
name|'ext'
op|'.'
name|'obj'
op|'.'
name|'alias'
name|'in'
name|'CONF'
op|'.'
name|'osapi_v21'
op|'.'
name|'extensions_whitelist'
op|')'
op|':'
newline|'\n'
nl|'\n'
comment|'# Check the extension is not in the blacklist'
nl|'\n'
indent|' '
name|'blacklist'
op|'='
name|'CONF'
op|'.'
name|'osapi_v21'
op|'.'
name|'extensions_blacklist'
newline|'\n'
name|'if'
name|'ext'
op|'.'
name|'obj'
op|'.'
name|'alias'
name|'not'
name|'in'
name|'blacklist'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'self'
op|'.'
name|'_register_extension'
op|'('
name|'ext'
op|')'
newline|'\n'
dedent|''
dedent|''
dedent|''
name|'return'
name|'False'
newline|'\n'
nl|'\n'
dedent|''
name|'if'
op|'('
name|'CONF'
op|'.'
name|'osapi_v21'
op|'.'
name|'extensions_blacklist'
name|'or'
nl|'\n'
name|'CONF'
op|'.'
name|'osapi_v21'
op|'.'
name|'extensions_whitelist'
op|')'
op|':'
newline|'\n'
indent|' '
name|'LOG'
op|'.'
name|'warning'
op|'('
nl|'\n'
name|'_LW'
op|'('
string|"'In the M release you must run all of the API. '"
nl|'\n'
string|"'The concept of API extensions will be removed from '"
nl|'\n'
string|"'the codebase to ensure there is a single Compute API.'"
op|')'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'self'
op|'.'
name|'init_only'
op|'='
name|'init_only'
newline|'\n'
name|'LOG'
op|'.'
name|'debug'
op|'('
string|'"v21 API Extension Blacklist: %s"'
op|','
nl|'\n'
name|'CONF'
op|'.'
name|'osapi_v21'
op|'.'
name|'extensions_blacklist'
op|')'
newline|'\n'
name|'LOG'
op|'.'
name|'debug'
op|'('
string|'"v21 API Extension Whitelist: %s"'
op|','
nl|'\n'
name|'CONF'
op|'.'
name|'osapi_v21'
op|'.'
name|'extensions_whitelist'
op|')'
newline|'\n'
nl|'\n'
name|'in_blacklist_and_whitelist'
op|'='
name|'set'
op|'('
nl|'\n'
name|'CONF'
op|'.'
name|'osapi_v21'
op|'.'
name|'extensions_whitelist'
op|')'
op|'.'
name|'intersection'
op|'('
nl|'\n'
name|'CONF'
op|'.'
name|'osapi_v21'
op|'.'
name|'extensions_blacklist'
op|')'
newline|'\n'
name|'if'
name|'len'
op|'('
name|'in_blacklist_and_whitelist'
op|')'
op|'!='
number|'0'
op|':'
newline|'\n'
indent|' '
name|'LOG'
op|'.'
name|'warning'
op|'('
name|'_LW'
op|'('
string|'"Extensions in both blacklist and whitelist: %s"'
op|')'
op|','
nl|'\n'
name|'list'
op|'('
name|'in_blacklist_and_whitelist'
op|')'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'self'
op|'.'
name|'api_extension_manager'
op|'='
name|'stevedore'
op|'.'
name|'enabled'
op|'.'
name|'EnabledExtensionManager'
op|'('
nl|'\n'
name|'namespace'
op|'='
name|'self'
op|'.'
name|'api_extension_namespace'
op|'('
op|')'
op|','
nl|'\n'
name|'check_func'
op|'='
name|'_check_load_extension'
op|','
nl|'\n'
name|'invoke_on_load'
op|'='
name|'True'
op|','
nl|'\n'
name|'invoke_kwds'
op|'='
op|'{'
string|'"extension_info"'
op|':'
name|'self'
op|'.'
name|'loaded_extension_info'
op|'}'
op|')'
newline|'\n'
nl|'\n'
name|'mapper'
op|'='
name|'ProjectMapper'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'resources'
op|'='
op|'{'
op|'}'
newline|'\n'
nl|'\n'
comment|'# NOTE(cyeoh) Core API support is rewritten as extensions'
nl|'\n'
comment|'# but conceptually still have core'
nl|'\n'
name|'if'
name|'list'
op|'('
name|'self'
op|'.'
name|'api_extension_manager'
op|')'
op|':'
newline|'\n'
comment|'# NOTE(cyeoh): Stevedore raises an exception if there are'
nl|'\n'
comment|'# no plugins detected. I wonder if this is a bug.'
nl|'\n'
indent|' '
name|'self'
op|'.'
name|'_register_resources_check_inherits'
op|'('
name|'mapper'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'api_extension_manager'
op|'.'
name|'map'
op|'('
name|'self'
op|'.'
name|'_register_controllers'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'missing_core_extensions'
op|'='
name|'self'
op|'.'
name|'get_missing_core_extensions'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'loaded_extension_info'
op|'.'
name|'get_extensions'
op|'('
op|')'
op|'.'
name|'keys'
op|'('
op|')'
op|')'
newline|'\n'
name|'if'
name|'not'
name|'self'
op|'.'
name|'init_only'
name|'and'
name|'missing_core_extensions'
op|':'
newline|'\n'
indent|' '
name|'LOG'
op|'.'
name|'critical'
op|'('
name|'_LC'
op|'('
string|'"Missing core API extensions: %s"'
op|')'
op|','
nl|'\n'
name|'missing_core_extensions'
op|')'
newline|'\n'
name|'raise'
name|'exception'
op|'.'
name|'CoreAPIMissing'
op|'('
nl|'\n'
name|'missing_apis'
op|'='
name|'missing_core_extensions'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'LOG'
op|'.'
name|'info'
op|'('
name|'_LI'
op|'('
string|'"Loaded extensions: %s"'
op|')'
op|','
nl|'\n'
name|'sorted'
op|'('
name|'self'
op|'.'
name|'loaded_extension_info'
op|'.'
name|'get_extensions'
op|'('
op|')'
op|'.'
name|'keys'
op|'('
op|')'
op|')'
op|')'
newline|'\n'
name|'super'
op|'('
name|'APIRouterV21'
op|','
name|'self'
op|')'
op|'.'
name|'__init__'
op|'('
name|'mapper'
op|')'
newline|'\n'
nl|'\n'
DECL|member|_register_resources_list
dedent|''
name|'def'
name|'_register_resources_list'
op|'('
name|'self'
op|','
name|'ext_list'
op|','
name|'mapper'
op|')'
op|':'
newline|'\n'
indent|' '
name|'for'
name|'ext'
name|'in'
name|'ext_list'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'_register_resources'
op|'('
name|'ext'
op|','
name|'mapper'
op|')'
newline|'\n'
nl|'\n'
DECL|member|_register_resources_check_inherits
dedent|''
dedent|''
name|'def'
name|'_register_resources_check_inherits'
op|'('
name|'self'
op|','
name|'mapper'
op|')'
op|':'
newline|'\n'
indent|' '
name|'ext_has_inherits'
op|'='
op|'['
op|']'
newline|'\n'
name|'ext_no_inherits'
op|'='
op|'['
op|']'
newline|'\n'
nl|'\n'
name|'for'
name|'ext'
name|'in'
name|'self'
op|'.'
name|'api_extension_manager'
op|':'
newline|'\n'
indent|' '
name|'for'
name|'resource'
name|'in'
name|'ext'
op|'.'
name|'obj'
op|'.'
name|'get_resources'
op|'('
op|')'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'resource'
op|'.'
name|'inherits'
op|':'
newline|'\n'
indent|' '
name|'ext_has_inherits'
op|'.'
name|'append'
op|'('
name|'ext'
op|')'
newline|'\n'
name|'break'
newline|'\n'
dedent|''
dedent|''
name|'else'
op|':'
newline|'\n'
indent|' '
name|'ext_no_inherits'
op|'.'
name|'append'
op|'('
name|'ext'
op|')'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
name|'self'
op|'.'
name|'_register_resources_list'
op|'('
name|'ext_no_inherits'
op|','
name|'mapper'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'_register_resources_list'
op|'('
name|'ext_has_inherits'
op|','
name|'mapper'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'staticmethod'
newline|'\n'
DECL|member|get_missing_core_extensions
name|'def'
name|'get_missing_core_extensions'
op|'('
name|'extensions_loaded'
op|')'
op|':'
newline|'\n'
indent|' '
name|'extensions_loaded'
op|'='
name|'set'
op|'('
name|'extensions_loaded'
op|')'
newline|'\n'
name|'missing_extensions'
op|'='
name|'API_V21_CORE_EXTENSIONS'
op|'-'
name|'extensions_loaded'
newline|'\n'
name|'return'
name|'list'
op|'('
name|'missing_extensions'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'property'
newline|'\n'
DECL|member|loaded_extension_info
name|'def'
name|'loaded_extension_info'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'raise'
name|'NotImplementedError'
op|'('
op|')'
newline|'\n'
nl|'\n'
DECL|member|_register_extension
dedent|''
name|'def'
name|'_register_extension'
op|'('
name|'self'
op|','
name|'ext'
op|')'
op|':'
newline|'\n'
indent|' '
name|'raise'
name|'NotImplementedError'
op|'('
op|')'
newline|'\n'
nl|'\n'
DECL|member|_register_resources
dedent|''
name|'def'
name|'_register_resources'
op|'('
name|'self'
op|','
name|'ext'
op|','
name|'mapper'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Register resources defined by the extensions\n\n Extensions define what resources they want to add through a\n get_resources function\n """'
newline|'\n'
nl|'\n'
name|'handler'
op|'='
name|'ext'
op|'.'
name|'obj'
newline|'\n'
name|'LOG'
op|'.'
name|'debug'
op|'('
string|'"Running _register_resources on %s"'
op|','
name|'ext'
op|'.'
name|'obj'
op|')'
newline|'\n'
nl|'\n'
name|'for'
name|'resource'
name|'in'
name|'handler'
op|'.'
name|'get_resources'
op|'('
op|')'
op|':'
newline|'\n'
indent|' '
name|'LOG'
op|'.'
name|'debug'
op|'('
string|"'Extended resource: %s'"
op|','
name|'resource'
op|'.'
name|'collection'
op|')'
newline|'\n'
nl|'\n'
name|'inherits'
op|'='
name|'None'
newline|'\n'
name|'if'
name|'resource'
op|'.'
name|'inherits'
op|':'
newline|'\n'
indent|' '
name|'inherits'
op|'='
name|'self'
op|'.'
name|'resources'
op|'.'
name|'get'
op|'('
name|'resource'
op|'.'
name|'inherits'
op|')'
newline|'\n'
name|'if'
name|'not'
name|'resource'
op|'.'
name|'controller'
op|':'
newline|'\n'
indent|' '
name|'resource'
op|'.'
name|'controller'
op|'='
name|'inherits'
op|'.'
name|'controller'
newline|'\n'
dedent|''
dedent|''
name|'wsgi_resource'
op|'='
name|'wsgi'
op|'.'
name|'ResourceV21'
op|'('
name|'resource'
op|'.'
name|'controller'
op|','
nl|'\n'
name|'inherits'
op|'='
name|'inherits'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'resources'
op|'['
name|'resource'
op|'.'
name|'collection'
op|']'
op|'='
name|'wsgi_resource'
newline|'\n'
name|'kargs'
op|'='
name|'dict'
op|'('
nl|'\n'
name|'controller'
op|'='
name|'wsgi_resource'
op|','
nl|'\n'
name|'collection'
op|'='
name|'resource'
op|'.'
name|'collection_actions'
op|','
nl|'\n'
name|'member'
op|'='
name|'resource'
op|'.'
name|'member_actions'
op|')'
newline|'\n'
nl|'\n'
name|'if'
name|'resource'
op|'.'
name|'parent'
op|':'
newline|'\n'
indent|' '
name|'kargs'
op|'['
string|"'parent_resource'"
op|']'
op|'='
name|'resource'
op|'.'
name|'parent'
newline|'\n'
nl|'\n'
comment|'# non core-API plugins use the collection name as the'
nl|'\n'
comment|'# member name, but the core-API plugins use the'
nl|'\n'
comment|'# singular/plural convention for member/collection names'
nl|'\n'
dedent|''
name|'if'
name|'resource'
op|'.'
name|'member_name'
op|':'
newline|'\n'
indent|' '
name|'member_name'
op|'='
name|'resource'
op|'.'
name|'member_name'
newline|'\n'
dedent|''
name|'else'
op|':'
newline|'\n'
indent|' '
name|'member_name'
op|'='
name|'resource'
op|'.'
name|'collection'
newline|'\n'
dedent|''
name|'mapper'
op|'.'
name|'resource'
op|'('
name|'member_name'
op|','
name|'resource'
op|'.'
name|'collection'
op|','
nl|'\n'
op|'**'
name|'kargs'
op|')'
newline|'\n'
nl|'\n'
name|'if'
name|'resource'
op|'.'
name|'custom_routes_fn'
op|':'
newline|'\n'
indent|' '
name|'resource'
op|'.'
name|'custom_routes_fn'
op|'('
name|'mapper'
op|','
name|'wsgi_resource'
op|')'
newline|'\n'
nl|'\n'
DECL|member|_register_controllers
dedent|''
dedent|''
dedent|''
name|'def'
name|'_register_controllers'
op|'('
name|'self'
op|','
name|'ext'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Register controllers defined by the extensions\n\n Extensions define what resources they want to add through\n a get_controller_extensions function\n """'
newline|'\n'
nl|'\n'
name|'handler'
op|'='
name|'ext'
op|'.'
name|'obj'
newline|'\n'
name|'LOG'
op|'.'
name|'debug'
op|'('
string|'"Running _register_controllers on %s"'
op|','
name|'ext'
op|'.'
name|'obj'
op|')'
newline|'\n'
nl|'\n'
name|'for'
name|'extension'
name|'in'
name|'handler'
op|'.'
name|'get_controller_extensions'
op|'('
op|')'
op|':'
newline|'\n'
indent|' '
name|'ext_name'
op|'='
name|'extension'
op|'.'
name|'extension'
op|'.'
name|'name'
newline|'\n'
name|'collection'
op|'='
name|'extension'
op|'.'
name|'collection'
newline|'\n'
name|'controller'
op|'='
name|'extension'
op|'.'
name|'controller'
newline|'\n'
nl|'\n'
name|'if'
name|'collection'
name|'not'
name|'in'
name|'self'
op|'.'
name|'resources'
op|':'
newline|'\n'
indent|' '
name|'LOG'
op|'.'
name|'warning'
op|'('
name|'_LW'
op|'('
string|"'Extension %(ext_name)s: Cannot extend '"
nl|'\n'
string|"'resource %(collection)s: No such resource'"
op|')'
op|','
nl|'\n'
op|'{'
string|"'ext_name'"
op|':'
name|'ext_name'
op|','
string|"'collection'"
op|':'
name|'collection'
op|'}'
op|')'
newline|'\n'
name|'continue'
newline|'\n'
nl|'\n'
dedent|''
name|'LOG'
op|'.'
name|'debug'
op|'('
string|"'Extension %(ext_name)s extending resource: '"
nl|'\n'
string|"'%(collection)s'"
op|','
nl|'\n'
op|'{'
string|"'ext_name'"
op|':'
name|'ext_name'
op|','
string|"'collection'"
op|':'
name|'collection'
op|'}'
op|')'
newline|'\n'
nl|'\n'
name|'resource'
op|'='
name|'self'
op|'.'
name|'resources'
op|'['
name|'collection'
op|']'
newline|'\n'
name|'resource'
op|'.'
name|'register_actions'
op|'('
name|'controller'
op|')'
newline|'\n'
name|'resource'
op|'.'
name|'register_extensions'
op|'('
name|'controller'
op|')'
newline|'\n'
dedent|''
dedent|''
dedent|''
endmarker|''
end_unit
| 13.473848 | 186 | 0.60855 |
7955d87e376beba023d3f591341097b0897a100d | 5,469 | py | Python | submodules/torchskeleton/bin/benchmark/efficientnet_throughput_dist.py | khy0809/WeightNet | cd5ea53b42c6169ffd5a0d7d883788fdc871cd1e | [
"MIT"
] | null | null | null | submodules/torchskeleton/bin/benchmark/efficientnet_throughput_dist.py | khy0809/WeightNet | cd5ea53b42c6169ffd5a0d7d883788fdc871cd1e | [
"MIT"
] | null | null | null | submodules/torchskeleton/bin/benchmark/efficientnet_throughput_dist.py | khy0809/WeightNet | cd5ea53b42c6169ffd5a0d7d883788fdc871cd1e | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
import os
import sys
import logging
import torch
import torch.distributed as dist
base_dir = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
sys.path.append(base_dir)
import skeleton
import efficientnet
LOGGER = logging.getLogger(__name__)
def parse_args():
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('-a', '--architecture', type=str, default='efficientnet-b4')
parser.add_argument('--half', action='store_true')
parser.add_argument('--batch', type=int, default=None)
parser.add_argument('--steps', type=int, default=50)
parser.add_argument('--dist-backend', default='nccl', type=str, help='distributed backend')
parser.add_argument('--local_rank', default=0, type=int,
help='Used for multi-process training. Can either be manually set ' +
'or automatically set by using \'python -m multiproc\'.')
parser.add_argument('--log-filename', type=str, default='')
parser.add_argument('--debug', action='store_true')
return parser.parse_args()
def main():
timer = skeleton.utils.Timer()
args = parse_args()
log_format = '[%(asctime)s] [%(levelname)s] [%(filename)s:%(lineno)03d] %(message)s'
level = logging.DEBUG if args.debug else logging.INFO
if not args.log_filename:
logging.basicConfig(level=level, format=log_format, stream=sys.stderr)
else:
logging.basicConfig(level=level, format=log_format, filename=args.log_filename)
torch.backends.cudnn.benchmark = True
assert 'efficientnet' in args.architecture
assert args.architecture.split('-')[1] in ['b0', 'b1', 'b2', 'b3', 'b4', 'b5', 'b6', 'b7']
logging.info('Distributed: wait dist process group:%d', args.local_rank)
dist.init_process_group(backend=args.dist_backend, init_method='env://', world_size=int(os.environ['WORLD_SIZE']))
assert (int(os.environ['WORLD_SIZE']) == dist.get_world_size())
world_size = dist.get_world_size()
logging.info('Distributed: success device:%d (%d/%d)', args.local_rank, dist.get_rank(), dist.get_world_size())
environments = skeleton.utils.Environments()
device = torch.device('cuda', args.local_rank)
torch.cuda.set_device(device)
if args.batch is None:
args.batch = 128 if 'b0' in args.architecture else args.batch
args.batch = 96 if 'b1' in args.architecture else args.batch
args.batch = 64 if 'b2' in args.architecture else args.batch
args.batch = 32 if 'b3' in args.architecture else args.batch
args.batch = 16 if 'b4' in args.architecture else args.batch
args.batch = 8 if 'b5' in args.architecture else args.batch
args.batch = 6 if 'b6' in args.architecture else args.batch
args.batch = 4 if 'b7' in args.architecture else args.batch
args.batch *= 2
input_size = efficientnet.EfficientNet.get_image_size(args.architecture)
model = efficientnet.EfficientNet.from_name(args.architecture).to(device=device)
criterion = torch.nn.CrossEntropyLoss(reduction='mean')
optimizer = torch.optim.SGD(model.parameters(), lr=0.01, momentum=0.9, weight_decay=1e-5, nesterov=True)
profiler = skeleton.nn.Profiler(model)
params = profiler.params()
flops = profiler.flops(torch.ones(1, 3, input_size, input_size, dtype=torch.float, device=device))
LOGGER.info('environemtns\n%s', environments)
LOGGER.info('arechitecture\n%s\ninput:%d\nprarms:%.2fM\nGFLOPs:%.3f', args.architecture, input_size, params / (1024 * 1024), flops / (1024 * 1024 * 1024))
LOGGER.info('optimizers\nloss:%s\noptimizer:%s', str(criterion), str(optimizer))
LOGGER.info('args\n%s', args)
# model = torch.nn.DataParallel(model, device_ids=list(range(args.num_gpus)))
batch = args.batch * (2 if args.half else 1)
inputs = torch.ones(batch, 3, input_size, input_size, dtype=torch.float, device=device)
targets = torch.zeros(batch, dtype=torch.long, device=device)
if args.half:
inputs = inputs.half()
for module in model.modules():
if not isinstance(module, torch.nn.BatchNorm2d):
module.half()
model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[args.local_rank], output_device=args.local_rank)
# warmup
for _ in range(2):
logits = model(inputs)
loss = criterion(logits, targets)
loss.backward()
model.zero_grad()
torch.cuda.synchronize()
timer('init', reset_step=True, exclude_total=True)
for step in range(args.steps):
timer('init', reset_step=True)
logits = model(inputs)
loss = criterion(logits, targets)
timer('forward')
loss.backward()
timer('backward')
optimizer.step()
optimizer.zero_grad()
timer('optimize')
LOGGER.info('[%02d] %s', step, timer)
if dist.get_rank() == 0:
images = args.steps * batch * world_size
LOGGER.info('throughput:%.4f images/sec', images * timer.throughput())
if __name__ == '__main__':
# single node
# > python -m torch.distributed.launch --nproc_per_node=4 --nnodes=1 bin/benchmark/efficientnet_throughput_dist.py
# multi node
# > python -m torch.distributed.launch --nproc_per_node=4 --nnodes=4 --master_addr=MASTER_HOST_NAME --master_port=MASTER_PORT --node_rank=0 bin/benchmark/efficientnet_throughput_dist.py
main()
| 39.345324 | 189 | 0.677638 |
7955da956de5018021d2fdadca197c8efeb6db42 | 2,148 | py | Python | tests/simplelogging.py | chadrosenquist/logging-test-case | abad36fb3da3ccdb290d2b00defeadb9465d9891 | [
"MIT"
] | 4 | 2017-03-05T07:17:51.000Z | 2020-05-20T10:32:48.000Z | tests/simplelogging.py | chadrosenquist/logging-test-case | abad36fb3da3ccdb290d2b00defeadb9465d9891 | [
"MIT"
] | 4 | 2018-04-16T03:14:42.000Z | 2018-12-02T22:16:00.000Z | tests/simplelogging.py | chadrosenquist/logging-test-case | abad36fb3da3ccdb290d2b00defeadb9465d9891 | [
"MIT"
] | 2 | 2018-04-16T01:57:10.000Z | 2020-05-20T10:39:31.000Z | """
MIT License
Copyright (c) 2016 Chad Rosenquist
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
Created on Dec 21, 2016
@author: Chad Rosenquist
"""
import logging
class SimpleLogging(object):
    """Fixture object that emits one canned message at each logging level.

    Used by the test suite to exercise LoggingTestCase.
    """

    CRITICAL_MESSAGE = 'SimpleLogging Critical'
    ERROR_MESSAGE = 'SimpleLogging Error'
    WARNING_MESSAGE = 'SimpleLogging Warning'
    INFO_MESSAGE = 'SimpleLogging Info'
    DEBUG_MESSAGE = 'SimpleLogging Debug'

    def __init__(self):
        """Bind a logger named after this module."""
        self._log = logging.getLogger(__name__)

    @property
    def logger(self):
        """The logging.Logger instance every emit method writes to."""
        return self._log

    def critical(self):
        """Emit the canned CRITICAL-level message."""
        self.logger.critical(self.CRITICAL_MESSAGE)

    def error(self):
        """Emit the canned ERROR-level message."""
        self.logger.error(self.ERROR_MESSAGE)

    def warning(self):
        """Emit the canned WARNING-level message."""
        self.logger.warning(self.WARNING_MESSAGE)

    def info(self):
        """Emit the canned INFO-level message."""
        self.logger.info(self.INFO_MESSAGE)

    def debug(self):
        """Emit the canned DEBUG-level message."""
        self.logger.debug(self.DEBUG_MESSAGE)

    def all(self):
        """Emit every message once, highest severity first."""
        for emit in (self.critical, self.error, self.warning, self.info, self.debug):
            emit()
| 29.424658 | 78 | 0.712756 |
7955dacbb5e3b44880c5a65d09443d366bcedd71 | 5,575 | py | Python | tests/test_modules/test_pandablocks/test_pandabussespart.py | dinojugosloven/pymalcolm | 0b856ee1113efdb42f2f3b15986f8ac5f9e1b35a | [
"Apache-2.0"
] | null | null | null | tests/test_modules/test_pandablocks/test_pandabussespart.py | dinojugosloven/pymalcolm | 0b856ee1113efdb42f2f3b15986f8ac5f9e1b35a | [
"Apache-2.0"
] | null | null | null | tests/test_modules/test_pandablocks/test_pandabussespart.py | dinojugosloven/pymalcolm | 0b856ee1113efdb42f2f3b15986f8ac5f9e1b35a | [
"Apache-2.0"
] | null | null | null | from collections import OrderedDict
import unittest
from mock import call, MagicMock
from malcolm.core import TimeStamp
from malcolm.modules.pandablocks.parts.pandabussespart import PandABussesPart
from malcolm.modules.pandablocks.util import PositionCapture, BitsTable, \
PositionsTable
class PandABussesPartTest(unittest.TestCase):
    """Tests for PandABussesPart: creation of the bits/positions tables and how
    field changes reported by the PandA client are reflected into them."""

    def setUp(self):
        # Part under test, wired to a mocked PandA client
        self.o = PandABussesPart("busses", MagicMock())
        self.o.setup(MagicMock())
        # Two PCAP bit-capture words: 6 named bits in BITS0; 3 named bits
        # (12..14) plus 12 blank entries in BITS1
        pcap_bits_fields = OrderedDict()
        pcap_bits_fields["PCAP.BITS0.CAPTURE"] = [
            "B1.B%d" % i for i in range(6)]
        pcap_bits_fields["PCAP.BITS1.CAPTURE"] = [
            "B2.B%d" % i for i in range(12, 15)] + [""] * 12
        pos_names = ["B1.P%d" % i for i in range(3)] + ["B2.P33"]
        self.o.create_busses(pcap_bits_fields, pos_names)
        # Expected table rows: blank bit entries are dropped
        self.expected_bit_names = [
            'B1.B0', 'B1.B1', 'B1.B2', 'B1.B3', 'B1.B4', 'B1.B5',
            'B2.B12', 'B2.B13', 'B2.B14']
        self.expected_pos_names = [
            'B1.P0', 'B1.P1', 'B1.P2', 'B2.P33']

    def test_init(self):
        # Freshly created tables hold all-default rows with the expected columns
        assert list(self.o.bits.meta.elements) == ["name", "value", "capture"]
        assert self.o.bits.value.name == self.expected_bit_names
        assert self.o.bits.value.value == [False] * 9
        assert self.o.bits.value.value.seq.dtype == bool
        assert self.o.bits.value.capture == [False] * 9
        assert self.o.bits.meta.elements["capture"].tags == ["widget:checkbox"]
        assert list(self.o.positions.meta.elements) == [
            "name", "value", "units", "scale", "offset", "capture"]
        assert self.o.positions.value.name == self.expected_pos_names
        assert self.o.positions.value.value == [0.0] * 4
        assert self.o.positions.value.value.seq.dtype == float
        assert self.o.positions.value.units == [""] * 4
        assert self.o.positions.value.scale == [1.0] * 4
        assert self.o.positions.value.offset == [0.0] * 4
        assert self.o.positions.value.capture == [PositionCapture.NO] * 4

    def test_scale_offset(self):
        # Displayed value tracks raw * scale + offset as each field changes
        ts = TimeStamp()
        changes = {
            "B1.P0.SCALE": "32",
            'B1.P0.OFFSET': "0.1",
            "B1.P0": "100"
        }
        self.o.handle_changes(changes, ts)
        assert self.o.positions.timeStamp is ts
        assert list(self.o.positions.value.rows())[0] == [
            'B1.P0', 3200.1, '', 32.0, 0.1, PositionCapture.NO]
        # Changing scale alone rescales the existing raw value
        self.o.handle_changes({"B1.P0.SCALE": "64"}, ts)
        assert list(self.o.positions.value.rows())[0] == [
            'B1.P0', 6400.1, '', 64.0, 0.1, PositionCapture.NO]
        # Changing the raw value uses the latest scale/offset
        self.o.handle_changes({"B1.P0": "200"}, ts)
        assert list(self.o.positions.value.rows())[0] == [
            'B1.P0', 12800.1, '', 64.0, 0.1, PositionCapture.NO]

    def test_pos_capture(self):
        # A capture-mode string from the PandA maps onto the PositionCapture enum
        ts = TimeStamp()
        changes = {
            "B1.P2.CAPTURE": "Min Max Mean",
            "B1.P2.SCALE": "1",
            "B1.P2": "100"
        }
        self.o.handle_changes(changes, ts)
        assert list(self.o.positions.value.rows())[2] == [
            'B1.P2', 100, '', 1.0, 0.0, PositionCapture.MIN_MAX_MEAN]

    def test_pos_set_capture(self):
        # Setting a partial PositionsTable applies the given row and defaults the
        # rest, pushing CAPTURE/SCALE/UNITS field writes to the client
        value = PositionsTable(
            name=["B1.P2"], value=[23.0], units=["mm"], scale=[0.1],
            offset=[0.0], capture=[PositionCapture.MEAN])
        self.o.set_positions(value)
        assert self.o.positions.value.name == self.expected_pos_names
        # value column is read-only from this direction: stays at defaults
        assert self.o.positions.value.value == [0.0] * 4
        assert self.o.positions.value.units == ['', '', 'mm', '']
        assert self.o.positions.value.scale == [1.0, 1.0, 0.1, 1.0]
        assert self.o.positions.value.offset == [0.0, 0.0, 0.0, 0.0]
        assert self.o.positions.value.capture == [
            PositionCapture.NO, PositionCapture.NO, PositionCapture.MEAN,
            PositionCapture.NO]
        # One set_fields call per attribute group, covering every position
        assert self.o._client.set_fields.call_args_list == [
            call({'B1.P2.CAPTURE': 'Mean',
                  'B1.P1.CAPTURE': 'No',
                  'B2.P33.CAPTURE': 'No',
                  'B1.P0.CAPTURE': 'No'}),
            call({'B1.P0.SCALE': 1.0,
                  'B2.P33.SCALE': 1.0,
                  'B1.P1.SCALE': 1.0,
                  'B1.P2.SCALE': 0.1}),
            call({'B2.P33.UNITS': '',
                  'B1.P2.UNITS': 'mm',
                  'B1.P1.UNITS': '',
                  'B1.P0.UNITS': ''})]

    def test_bits(self):
        # Individual bit changes update only the matching rows
        ts = TimeStamp()
        changes = {
            "B1.B1": True,
            'B1.B3': True,
        }
        self.o.handle_changes(changes, ts)
        assert self.o.bits.timeStamp is ts
        assert list(self.o.bits.value.rows())[1] == [
            'B1.B1', True, False]
        assert list(self.o.bits.value.rows())[2] == [
            'B1.B2', False, False]
        assert list(self.o.bits.value.rows())[3] == [
            'B1.B3', True, False]

    def test_bit_capture_change(self):
        # Capturing a whole PCAP.BITS word marks all 6 of its named bits
        ts = TimeStamp()
        changes = {
            "PCAP.BITS0.CAPTURE": "Value"
        }
        self.o.handle_changes(changes, ts)
        assert self.o.bits.value.capture == [True] * 6 + [False] * 3

    def test_bit_set_capture(self):
        # Capturing any single bit in a word captures the whole BITS0 word
        value = BitsTable(name=["B1.B1"], value=[True], capture=[True])
        self.o.set_bits(value)
        assert self.o.bits.value.name == self.expected_bit_names
        assert self.o.bits.value.capture == [False, True] + [False] * 7
        # value column is not writable this way: stays False
        assert self.o.bits.value.value == [False] * 9
        self.o._client.set_fields.assert_called_once_with(
            {'PCAP.BITS0.CAPTURE': 'Value'})
| 42.234848 | 79 | 0.554978 |
7955db8dfd6a9af1c342558e14bea48beb38eecb | 3,468 | py | Python | lib/modules/powershell/trollsploit/message.py | 1r-f0rhun73r/Empire | 6e2bb66e39c2c02f9a593c64d922e89b0a57ac3e | [
"BSD-3-Clause"
] | null | null | null | lib/modules/powershell/trollsploit/message.py | 1r-f0rhun73r/Empire | 6e2bb66e39c2c02f9a593c64d922e89b0a57ac3e | [
"BSD-3-Clause"
] | null | null | null | lib/modules/powershell/trollsploit/message.py | 1r-f0rhun73r/Empire | 6e2bb66e39c2c02f9a593c64d922e89b0a57ac3e | [
"BSD-3-Clause"
] | null | null | null | from builtins import str
from builtins import object
from lib.common import helpers
class Module(object):
    """Empire module wrapper for Invoke-Message: pops a Windows message box on
    the target by generating and running a PowerShell script."""

    def __init__(self, mainMenu, params=[]):
        # NOTE(review): mutable default `params=[]` is shared across calls;
        # left as-is since it is only read here, never mutated.
        # Static metadata consumed by the Empire framework UI/dispatcher.
        self.info = {
            'Name': 'Invoke-Message',
            'Author': ['@harmj0y'],
            'Description': ("Displays a specified message to the user."),
            'Software': '',
            'Techniques': [''],
            'Background' : True,
            'OutputExtension' : None,
            'NeedsAdmin' : False,
            'OpsecSafe' : False,
            'Language' : 'powershell',
            'MinLanguageVersion' : '2',
            'Comments': [
                'http://blog.logrhythm.com/security/do-you-trust-your-computer/'
            ]
        }
        # any options needed by the module, settable during runtime
        self.options = {
            # format:
            #   value_name : {description, required, default_value}
            'Agent' : {
                'Description'   :   'Agent to run module on.',
                'Required'      :   True,
                'Value'         :   ''
            },
            'MsgText' : {
                'Description'   :   'Message text to display.',
                'Required'      :   True,
                'Value'         :   'Lost contact with the Domain Controller.'
            },
            'IconType' : {
                'Description'   :   'Critical, Question, Exclamation, or Information',
                'Required'      :   True,
                'Value'         :   'Critical'
            },
            'Title' : {
                'Description'   :   'Title of the message box to display.',
                'Required'      :   True,
                'Value'         :   'ERROR - 0xA801B720'
            }
        }

        # save off a copy of the mainMenu object to access external functionality
        # like listeners/agent handlers/etc.
        self.mainMenu = mainMenu

        # Overlay any caller-supplied option values onto the defaults above.
        for param in params:
            # parameter format is [Name, Value]
            option, value = param
            if option in self.options:
                self.options[option]['Value'] = value

    def generate(self, obfuscate=False, obfuscationCommand=""):
        # Build the PowerShell payload: the Invoke-Message function definition
        # followed by a call with each option appended as a -Name "Value" flag.
        script = """
function Invoke-Message {
[CmdletBinding()]
Param (
[Parameter(Mandatory = $True, Position = 0)]
[String] $MsgText,
[Parameter(Mandatory = $False, Position = 1)]
[String] $IconType = 'Critical',
[Parameter(Mandatory = $False, Position = 2)]
[String] $Title = 'ERROR - 0xA801B720'
)
Add-Type -AssemblyName Microsoft.VisualBasic
$null = [Microsoft.VisualBasic.Interaction]::MsgBox($MsgText, "OKOnly,MsgBoxSetForeground,SystemModal,$IconType", $Title)
}
Invoke-Message"""

        # Append every non-empty option except the agent/computer selectors.
        for option,values in self.options.items():
            if option.lower() != "agent" and option.lower() != "computername":
                if values['Value'] and values['Value'] != '':
                    if values['Value'].lower() == "true":
                        # if we're just adding a switch
                        script += " -" + str(option)
                    else:
                        # Quote the value, stripping any pre-existing quotes first
                        script += " -" + str(option) + " \"" + str(values['Value'].strip("\"")) + "\""

        # Optionally run the finished script through Empire's obfuscator.
        if obfuscate:
            script = helpers.obfuscate(self.mainMenu.installPath, psScript=script, obfuscationCommand=obfuscationCommand)
        return script
| 32.716981 | 125 | 0.494521 |
7955dca016a938f5f83cfdfa9131c5f75ba117ac | 3,477 | py | Python | bindings/python/ensmallen/datasets/string/nesterenkoniaspan1.py | AnacletoLAB/ensmallen_graph | b2c1b18fb1e5801712852bcc239f239e03076f09 | [
"MIT"
] | 5 | 2021-02-17T00:44:45.000Z | 2021-08-09T16:41:47.000Z | bindings/python/ensmallen/datasets/string/nesterenkoniaspan1.py | AnacletoLAB/ensmallen_graph | b2c1b18fb1e5801712852bcc239f239e03076f09 | [
"MIT"
] | 18 | 2021-01-07T16:47:39.000Z | 2021-08-12T21:51:32.000Z | bindings/python/ensmallen/datasets/string/nesterenkoniaspan1.py | AnacletoLAB/ensmallen | b2c1b18fb1e5801712852bcc239f239e03076f09 | [
"MIT"
] | 3 | 2021-01-14T02:20:59.000Z | 2021-08-04T19:09:52.000Z | """
This file offers the methods to automatically retrieve the graph Nesterenkonia sp. AN1.
The graph is automatically retrieved from the STRING repository.
References
---------------------
Please cite the following if you use the data:
```bib
@article{szklarczyk2019string,
title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets},
author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others},
journal={Nucleic acids research},
volume={47},
number={D1},
pages={D607--D613},
year={2019},
publisher={Oxford University Press}
}
```
"""
from typing import Dict
from ..automatic_graph_retrieval import AutomaticallyRetrievedGraph
from ...ensmallen import Graph # pylint: disable=import-error
def NesterenkoniaSpAn1(
    directed: bool = False,
    preprocess: bool = True,
    load_nodes: bool = True,
    verbose: int = 2,
    cache: bool = True,
    cache_path: str = "graphs/string",
    version: str = "links.v11.5",
    **additional_graph_kwargs: Dict
) -> Graph:
    """Retrieve the Nesterenkonia sp. AN1 graph from the STRING repository.

    Parameters
    -------------------
    directed: bool = False
        Whether to load the graph as directed (default: undirected).
    preprocess: bool = True
        Whether to preprocess the graph for optimal load time and memory.
    load_nodes: bool = True,
        Whether to load the node vocabulary, or treat nodes as a numeric range.
    verbose: int = 2,
        Whether to show loading bars while retrieving and building the graph.
    cache: bool = True
        Whether to download and preprocess only once.
    cache_path: str = "graphs"
        Directory where downloaded graphs are stored.
    version: str = "links.v11.5"
        Graph version to retrieve; one of "homology.v11.5",
        "physical.links.v11.5" or "links.v11.5".
    additional_graph_kwargs: Dict
        Extra keyword arguments forwarded to the graph loader.

    Returns
    -----------------------
    Instance of the Nesterenkonia sp. AN1 graph.

    References
    ---------------------
    Please cite Szklarczyk et al., "STRING v11", Nucleic Acids Research 47(D1),
    D607-D613, 2019, Oxford University Press, if you use this data.
    """
    # Build the retriever first, then invoke it to perform the actual
    # download/preprocess/load cycle.
    retriever = AutomaticallyRetrievedGraph(
        graph_name="NesterenkoniaSpAn1",
        repository="string",
        version=version,
        directed=directed,
        preprocess=preprocess,
        load_nodes=load_nodes,
        verbose=verbose,
        cache=cache,
        cache_path=cache_path,
        additional_graph_kwargs=additional_graph_kwargs,
    )
    return retriever()
| 33.114286 | 223 | 0.676445 |
7955dd244da2bd30ece9183ef8402f1f2722b34c | 6,947 | py | Python | src/StaticPhotos.py | robdobsn/KitchenProjectorCalPhotoApp | b02c6f375a0c7a5dfc9c60a1194f985098f1884c | [
"MIT"
] | null | null | null | src/StaticPhotos.py | robdobsn/KitchenProjectorCalPhotoApp | b02c6f375a0c7a5dfc9c60a1194f985098f1884c | [
"MIT"
] | 3 | 2019-10-24T07:51:04.000Z | 2020-11-03T10:54:01.000Z | src/StaticPhotos.py | robdobsn/KitchenProjectorCalPhotoApp | b02c6f375a0c7a5dfc9c60a1194f985098f1884c | [
"MIT"
] | null | null | null | from PyQt5 import QtCore, QtGui, QtWidgets
from PyQt5.QtCore import (QParallelAnimationGroup, QTimer, QCoreApplication, Qt, qsrand, QTime, QRectF, QPointF, pyqtSignal, QSize, QObject, pyqtProperty)
from PyQt5.QtGui import (QBrush, QColor, QPainter, QPixmap, QFont, QPalette, QImage, QTransform)
from PyQt5.QtWidgets import (QWidget, QApplication, QGraphicsScene, QGraphicsView, QPushButton, QVBoxLayout, QTextEdit, QGridLayout, QGraphicsRectItem, QGraphicsTextItem, QSizePolicy, QGraphicsPixmapItem, QGraphicsItem)
import datetime
from PhotoFileManager import PhotoFileManager
from PhotoInfo import PhotoInfo
import json
class StaticPhotos(QGraphicsView):
    """Slideshow view that cycles through photos on a timer.

    Photos are discovered under ``photoBaseDir`` by a PhotoFileManager; each
    image is rotated per its metadata, scaled to fit the view, and overlaid
    with a caption showing its star rating and date.
    """

    def __init__(self, photoBaseDir, validPhotoFileExts, photoDeltas, picChangeMs, parent=None):
        QGraphicsView.__init__(self, parent)
        self.setAlignment(Qt.AlignLeft | Qt.AlignTop)
        self.setVerticalScrollBarPolicy(Qt.ScrollBarAlwaysOff)
        self.setHorizontalScrollBarPolicy(Qt.ScrollBarAlwaysOff)
        # Vars
        self.picChangeMs = picChangeMs  # interval between automatic picture changes (ms)
        self.photoBaseDir = photoBaseDir
        # Widget: scene with black background and no frame/border
        self.scene = QGraphicsScene()
        self.setScene(self.scene)
        self.setBackgroundBrush(QColor("black"))
        self.setLineWidth(0)
        self.setFrameShape(QtWidgets.QFrame.NoFrame)
        # Class vars
        self.picChgTimer = None
        # NOTE(review): 7200.00 is presumably a photo-list rescan period in
        # seconds — confirm against PhotoFileManager's signature
        self.photoFileManager = PhotoFileManager(validPhotoFileExts, self.photoBaseDir, 7200.00, photoDeltas)
        self.photoFileManager.startPhotoListUpdate()
        # Set when the user steps back so the next timer tick doesn't advance
        self.userMovedBack = False

    # def sizeHint(self):
    #     return QSize(600, 400)

    def resizeEvent(self, evt=None):
        # Window dimensions are read but currently unused
        xWindowSize = self.width()
        yWindowSize = self.height()
        pass

    def start(self):
        # Begin the slideshow: single-shot timer re-armed by picChangeFn
        self.picChgTimer = QTimer()
        self.picChgTimer.setInterval(self.picChangeMs)
        self.picChgTimer.setSingleShot(True)
        self.picChgTimer.timeout.connect(self.picChangeFn)
        self.picChgTimer.start()
        # self.picChangeFn()
        print("Static Photos - starting")

    def stop(self):
        # Halt the change timer (if started) and the file manager's scanning
        if self.picChgTimer is not None:
            self.picChgTimer.stop()
        print("Static Photos - stopping")
        # self.picChgTimer.disconnect(self.picChangeFn)
        self.photoFileManager.stop()

    def moveNext(self):
        # Manually advance to the next photo
        self.nextPicItem()

    def movePrev(self):
        # Manually step back; suppress the next automatic advance
        self.prevPicItem()
        self.userMovedBack = True

    def reshow(self):
        # Redisplay the current photo (e.g. after a resize)
        self.showImage()

    def getCurPhotoFilename(self):
        return self.photoFileManager.getCurPhotoFilename()

    def picChangeFn(self):
        # Timer callback: advance unless the user just moved back, then re-arm
        # pass
        if self.userMovedBack:
            # Skip this update
            self.userMovedBack = False
        else:
            self.nextPicItem()
        self.picChgTimer.setInterval(self.picChangeMs)
        self.picChgTimer.start()

    def loadImage(self):
        # Load the current photo, apply its rotation, and scale it to the view.
        # Returns (scaled QImage, PhotoInfo metadata).
        self.newImg = QImage()
        self.newImg.load(self.photoFileManager.getCurPhotoFilename())
        self.newImgInfo = self.photoFileManager.getCurPhotoInfo()
        transform = QTransform()
        transform.rotate(self.newImgInfo.rotationAngle)
        self.interImg = self.newImg.transformed(transform, Qt.SmoothTransformation)
        # xReqdSize = self.cellSize.width() * xFactor + self.xBetweenPics * (xFactor-1)
        # yReqdSize = self.cellSize.height() * yFactor + self.yBetweenPics * (yFactor-1)
        self.inter2Img = self.interImg.scaled(QSize(self.width(),self.height()),
                        Qt.KeepAspectRatio, Qt.SmoothTransformation)
        # finalImg = interImg.copy(0,0,xReqdSize,yReqdSize)
        # print("XY Size", xFactor, yFactor, xReqdSize,yReqdSize)
        return self.inter2Img, self.newImgInfo

    def showImage(self):
        # Rebuild the scene with the current photo plus a caption overlay
        (newImg, newImgInfo) = self.loadImage()
        # return PicItem(Pixmap(QPixmap(newImg)), -1, -1, xFactor, yFactor, newImgInfo)
        self.scene.clear()
        imgSz = newImgInfo.imgSize
        self.setSceneRect(QRectF(0,0,imgSz.width(), imgSz.height()))
        pixMap = QPixmap.fromImage(newImg)
        # # pixMap.setWidth(self.width())
        pixMapItem = self.scene.addPixmap(pixMap)
        # pixMapItem.setPos(50,50)
        # self.fitInView(QRectF(0, 0, self.width(), self.height()), Qt.KeepAspectRatio)
        # Add caption: white text near the bottom of the view
        caption = QGraphicsTextItem()
        caption.setDefaultTextColor(QColor(255,255,255))
        caption.setPos(0, self.height()*0.94)
        caption.setFont(QFont("Segoe UI", 30))
        caption.setTextWidth(self.width())
        # caption.setPos(100, 100)
        # caption.setTextWidth(1500)
        # if newImgInfo.createDate is not None:
        #     caption.setPlainText(newImgInfo.createDate.format());
        # else:
        #     caption.setPlainText("Image is called bananas");
        # print("Tags", newImgInfo.tags)
        # tagStr = ""
        # for tag in newImgInfo.tags:
        #     if tag != "Duplicate":
        #         tagStr += (", " if len(tagStr) != 0 else "") + tag
        # if tagStr == "":
        #     tagStr = "NO TAGS"
        # captionStr = '<h1 style="text-align:center;width:100%">' + tagStr + '</h1>'
        # if newImgInfo.createDate is not None:
        #     print(newImgInfo.createDate.format())
        #     captionStr += '<BR><h2>' + newImgInfo.createDate.format() + '</h2>'
        # Caption text: filled stars for the rating, then the photo's date
        captionStr = ""
        try:
            if newImgInfo.rating is not None:
                for i in range(newImgInfo.rating):
                    captionStr += "★"
                for i in range(5-newImgInfo.rating):
                    captionStr += "☆"
            if newImgInfo.mainDate is not None:
                if len(captionStr) != 0:
                    captionStr += " "
                captionStr += newImgInfo.mainDate.strftime("%d %b %Y")
        except Exception as excp:
            # Best-effort: a photo with bad metadata still displays, uncaptioned
            print("StaticPhotos: Cannot set caption")
        captionStr = '<div style="background-color:#000000;text-align: right;padding-right:10dp;">' + captionStr + "</div>"
        print(captionStr)
        caption.setHtml(captionStr)
        self.scene.addItem(caption)
        self.scene.update()

    def prevPicItem(self):
        # Step the file manager back one photo and redisplay; no-op when empty
        if self.photoFileManager.getNumPhotos() == 0:
            return None
        self.photoFileManager.movePrev()
        # print ("Loaded photo", self.sourcePhotoList[self.curPhotoIdx], " w", finalImg.width(), " h", finalImg.height(), " facs", xFactor, yFactor)
        self.showImage()

    def nextPicItem(self):
        # Step the file manager forward one photo and redisplay; no-op when empty
        if self.photoFileManager.getNumPhotos() == 0:
            return None
        # print ("Loaded photo", self.sourcePhotoList[self.curPhotoIdx], " w", finalImg.width(), " h", finalImg.height(), " facs", xFactor, yFactor)
        self.photoFileManager.moveNext()
        self.showImage()

    def keyPressEvent(self, event): #QKeyEvent
        # Pass key events up to the parent widget
        event.ignore()
        # print("keypressStaticPhotos", event.text(), event.key())
| 42.10303 | 219 | 0.632647 |
7955dd5e140c3dffa1de961cf1741b3cdf6ee283 | 132,757 | py | Python | tests/blockchain/test_blockchain.py | Tranzact-Network/tranzact-blockchain | 692362155e46563aa70559123b93bc9379cac111 | [
"Apache-2.0"
] | 8 | 2021-09-19T18:57:49.000Z | 2022-02-09T04:32:50.000Z | tests/blockchain/test_blockchain.py | Tranzact-Network/tranzact-blockchain | 692362155e46563aa70559123b93bc9379cac111 | [
"Apache-2.0"
] | 3 | 2021-09-29T10:56:48.000Z | 2021-11-19T00:09:28.000Z | tests/blockchain/test_blockchain.py | Tranzact-Network/tranzact-blockchain | 692362155e46563aa70559123b93bc9379cac111 | [
"Apache-2.0"
] | null | null | null | # flake8: noqa: F811, F401
import asyncio
import logging
import multiprocessing
import time
from dataclasses import replace
from secrets import token_bytes
from typing import Optional
import pytest
from blspy import AugSchemeMPL, G2Element
from clvm.casts import int_to_bytes
from tranzact.consensus.block_rewards import calculate_base_farmer_reward
from tranzact.consensus.blockchain import ReceiveBlockResult
from tranzact.consensus.coinbase import create_farmer_coin
from tranzact.consensus.pot_iterations import is_overflow_block
from tranzact.full_node.bundle_tools import detect_potential_template_generator
from tranzact.types.blockchain_format.classgroup import ClassgroupElement
from tranzact.types.blockchain_format.coin import Coin
from tranzact.types.blockchain_format.foliage import TransactionsInfo
from tranzact.types.blockchain_format.program import SerializedProgram
from tranzact.types.blockchain_format.sized_bytes import bytes32
from tranzact.types.blockchain_format.slots import InfusedChallengeChainSubSlot
from tranzact.types.blockchain_format.vdf import VDFInfo, VDFProof
from tranzact.types.condition_opcodes import ConditionOpcode
from tranzact.types.condition_with_args import ConditionWithArgs
from tranzact.types.end_of_slot_bundle import EndOfSubSlotBundle
from tranzact.types.full_block import FullBlock
from tranzact.types.generator_types import BlockGenerator
from tranzact.types.spend_bundle import SpendBundle
from tranzact.types.unfinished_block import UnfinishedBlock
from tests.block_tools import create_block_tools_async, get_vdf_info_and_proof
from tranzact.util.errors import Err
from tranzact.util.hash import std_hash
from tranzact.util.ints import uint8, uint64, uint32
from tranzact.util.merkle_set import MerkleSet
from tranzact.util.recursive_replace import recursive_replace
from tests.wallet_tools import WalletTool
from tests.setup_nodes import bt, test_constants
from tests.util.blockchain import create_blockchain
from tests.util.keyring import TempKeyring
from tranzact.wallet.puzzles.p2_delegated_puzzle_or_hidden_puzzle import (
DEFAULT_HIDDEN_PUZZLE_HASH,
calculate_synthetic_secret_key,
)
log = logging.getLogger(__name__)
bad_element = ClassgroupElement.from_bytes(b"\x00")
@pytest.fixture(scope="session")
def event_loop():
loop = asyncio.get_event_loop()
yield loop
class TestGenesisBlock:
    """Tests that the genesis block (and its VDF proofs) validate correctly,
    in both overflow and non-overflow forms, with and without empty slots."""

    @pytest.mark.asyncio
    async def test_block_tools_proofs_400(self, default_400_blocks):
        # Sanity check: block tools produce a valid VDF proof for the genesis challenge
        vdf, proof = get_vdf_info_and_proof(
            test_constants, ClassgroupElement.get_default_element(), test_constants.GENESIS_CHALLENGE, uint64(231)
        )
        if proof.is_valid(test_constants, ClassgroupElement.get_default_element(), vdf) is False:
            raise Exception("invalid proof")

    @pytest.mark.asyncio
    async def test_block_tools_proofs_1000(self, default_1000_blocks):
        # Same sanity check against the 1000-block fixture
        vdf, proof = get_vdf_info_and_proof(
            test_constants, ClassgroupElement.get_default_element(), test_constants.GENESIS_CHALLENGE, uint64(231)
        )
        if proof.is_valid(test_constants, ClassgroupElement.get_default_element(), vdf) is False:
            raise Exception("invalid proof")

    @pytest.mark.asyncio
    async def test_block_tools_proofs(self):
        # Same sanity check with no block fixture at all
        vdf, proof = get_vdf_info_and_proof(
            test_constants, ClassgroupElement.get_default_element(), test_constants.GENESIS_CHALLENGE, uint64(231)
        )
        if proof.is_valid(test_constants, ClassgroupElement.get_default_element(), vdf) is False:
            raise Exception("invalid proof")

    @pytest.mark.asyncio
    async def test_non_overflow_genesis(self, empty_blockchain):
        # A plain (non-overflow) genesis block becomes the new peak at height 0
        assert empty_blockchain.get_peak() is None
        genesis = bt.get_consecutive_blocks(1, force_overflow=False)[0]
        result, err, _, _ = await empty_blockchain.receive_block(genesis)
        assert err is None
        assert result == ReceiveBlockResult.NEW_PEAK
        assert empty_blockchain.get_peak().height == 0

    @pytest.mark.asyncio
    async def test_overflow_genesis(self, empty_blockchain):
        # An overflow genesis block is also accepted as the new peak
        genesis = bt.get_consecutive_blocks(1, force_overflow=True)[0]
        result, err, _, _ = await empty_blockchain.receive_block(genesis)
        assert err is None
        assert result == ReceiveBlockResult.NEW_PEAK

    @pytest.mark.asyncio
    async def test_genesis_empty_slots(self, empty_blockchain):
        # Genesis preceded by 30 empty sub-slots still validates
        genesis = bt.get_consecutive_blocks(1, force_overflow=False, skip_slots=30)[0]
        result, err, _, _ = await empty_blockchain.receive_block(genesis)
        assert err is None
        assert result == ReceiveBlockResult.NEW_PEAK

    @pytest.mark.asyncio
    async def test_overflow_genesis_empty_slots(self, empty_blockchain):
        # Overflow genesis with skipped slots still validates
        genesis = bt.get_consecutive_blocks(1, force_overflow=True, skip_slots=3)[0]
        result, err, _, _ = await empty_blockchain.receive_block(genesis)
        assert err is None
        assert result == ReceiveBlockResult.NEW_PEAK

    @pytest.mark.asyncio
    async def test_genesis_validate_1(self, empty_blockchain):
        # Genesis with a corrupted prev_block_hash must be rejected
        genesis = bt.get_consecutive_blocks(1, force_overflow=False)[0]
        bad_prev = bytes([1] * 32)
        genesis = recursive_replace(genesis, "foliage.prev_block_hash", bad_prev)
        result, err, _, _ = await empty_blockchain.receive_block(genesis)
        assert err == Err.INVALID_PREV_BLOCK_HASH
class TestBlockHeaderValidation:
@pytest.mark.asyncio
async def test_long_chain(self, empty_blockchain, default_1000_blocks):
blocks = default_1000_blocks
for block in blocks:
if (
len(block.finished_sub_slots) > 0
and block.finished_sub_slots[0].challenge_chain.subepoch_summary_hash is not None
):
# Sub/Epoch. Try using a bad ssi and difficulty to test 2m and 2n
new_finished_ss = recursive_replace(
block.finished_sub_slots[0],
"challenge_chain.new_sub_slot_iters",
uint64(10000000),
)
block_bad = recursive_replace(
block, "finished_sub_slots", [new_finished_ss] + block.finished_sub_slots[1:]
)
result, err, _, _ = await empty_blockchain.receive_block(block_bad)
assert err == Err.INVALID_NEW_SUB_SLOT_ITERS
new_finished_ss_2 = recursive_replace(
block.finished_sub_slots[0],
"challenge_chain.new_difficulty",
uint64(10000000),
)
block_bad_2 = recursive_replace(
block, "finished_sub_slots", [new_finished_ss_2] + block.finished_sub_slots[1:]
)
result, err, _, _ = await empty_blockchain.receive_block(block_bad_2)
assert err == Err.INVALID_NEW_DIFFICULTY
# 3c
new_finished_ss_3: EndOfSubSlotBundle = recursive_replace(
block.finished_sub_slots[0],
"challenge_chain.subepoch_summary_hash",
bytes([0] * 32),
)
new_finished_ss_3 = recursive_replace(
new_finished_ss_3,
"reward_chain.challenge_chain_sub_slot_hash",
new_finished_ss_3.challenge_chain.get_hash(),
)
block_bad_3 = recursive_replace(
block, "finished_sub_slots", [new_finished_ss_3] + block.finished_sub_slots[1:]
)
result, err, _, _ = await empty_blockchain.receive_block(block_bad_3)
assert err == Err.INVALID_SUB_EPOCH_SUMMARY
# 3d
new_finished_ss_4 = recursive_replace(
block.finished_sub_slots[0],
"challenge_chain.subepoch_summary_hash",
None,
)
new_finished_ss_4 = recursive_replace(
new_finished_ss_4,
"reward_chain.challenge_chain_sub_slot_hash",
new_finished_ss_4.challenge_chain.get_hash(),
)
block_bad_4 = recursive_replace(
block, "finished_sub_slots", [new_finished_ss_4] + block.finished_sub_slots[1:]
)
result, err, _, _ = await empty_blockchain.receive_block(block_bad_4)
assert err == Err.INVALID_SUB_EPOCH_SUMMARY or err == Err.INVALID_NEW_SUB_SLOT_ITERS
result, err, _, _ = await empty_blockchain.receive_block(block)
assert err is None
assert result == ReceiveBlockResult.NEW_PEAK
log.info(
f"Added block {block.height} total iters {block.total_iters} "
f"new slot? {len(block.finished_sub_slots)}"
)
assert empty_blockchain.get_peak().height == len(blocks) - 1
@pytest.mark.asyncio
async def test_unfinished_blocks(self, empty_blockchain):
blockchain = empty_blockchain
blocks = bt.get_consecutive_blocks(3)
for block in blocks[:-1]:
result, err, _, _ = await blockchain.receive_block(block)
assert result == ReceiveBlockResult.NEW_PEAK
block = blocks[-1]
unf = UnfinishedBlock(
block.finished_sub_slots,
block.reward_chain_block.get_unfinished(),
block.challenge_chain_sp_proof,
block.reward_chain_sp_proof,
block.foliage,
block.foliage_transaction_block,
block.transactions_info,
block.transactions_generator,
[],
)
npc_result = None
if unf.transactions_generator is not None:
block_generator: BlockGenerator = await blockchain.get_block_generator(unf)
block_bytes = bytes(unf)
npc_result = await blockchain.run_generator(block_bytes, block_generator)
validate_res = await blockchain.validate_unfinished_block(unf, npc_result, False)
err = validate_res.error
assert err is None
result, err, _, _ = await blockchain.receive_block(block)
blocks = bt.get_consecutive_blocks(1, block_list_input=blocks, force_overflow=True)
block = blocks[-1]
unf = UnfinishedBlock(
block.finished_sub_slots,
block.reward_chain_block.get_unfinished(),
block.challenge_chain_sp_proof,
block.reward_chain_sp_proof,
block.foliage,
block.foliage_transaction_block,
block.transactions_info,
block.transactions_generator,
[],
)
npc_result = None
if unf.transactions_generator is not None:
block_generator: BlockGenerator = await blockchain.get_block_generator(unf)
block_bytes = bytes(unf)
npc_result = await blockchain.run_generator(block_bytes, block_generator)
validate_res = await blockchain.validate_unfinished_block(unf, npc_result, False)
assert validate_res.error is None
@pytest.mark.asyncio
async def test_empty_genesis(self, empty_blockchain):
blockchain = empty_blockchain
for block in bt.get_consecutive_blocks(2, skip_slots=3):
result, err, _, _ = await blockchain.receive_block(block)
assert err is None
assert result == ReceiveBlockResult.NEW_PEAK
@pytest.mark.asyncio
async def test_empty_slots_non_genesis(self, empty_blockchain):
blockchain = empty_blockchain
blocks = bt.get_consecutive_blocks(10)
for block in blocks:
result, err, _, _ = await blockchain.receive_block(block)
assert err is None
assert result == ReceiveBlockResult.NEW_PEAK
blocks = bt.get_consecutive_blocks(10, skip_slots=2, block_list_input=blocks)
for block in blocks[10:]:
result, err, _, _ = await blockchain.receive_block(block)
assert err is None
assert blockchain.get_peak().height == 19
@pytest.mark.asyncio
async def test_one_sb_per_slot(self, empty_blockchain):
blockchain = empty_blockchain
num_blocks = 20
blocks = []
for i in range(num_blocks):
blocks = bt.get_consecutive_blocks(1, block_list_input=blocks, skip_slots=1)
result, err, _, _ = await blockchain.receive_block(blocks[-1])
assert result == ReceiveBlockResult.NEW_PEAK
assert blockchain.get_peak().height == num_blocks - 1
@pytest.mark.asyncio
async def test_all_overflow(self, empty_blockchain):
blockchain = empty_blockchain
num_rounds = 5
blocks = []
num_blocks = 0
for i in range(1, num_rounds):
num_blocks += i
blocks = bt.get_consecutive_blocks(i, block_list_input=blocks, skip_slots=1, force_overflow=True)
for block in blocks[-i:]:
result, err, _, _ = await blockchain.receive_block(block)
assert result == ReceiveBlockResult.NEW_PEAK
assert err is None
assert blockchain.get_peak().height == num_blocks - 1
@pytest.mark.asyncio
async def test_unf_block_overflow(self, empty_blockchain):
blockchain = empty_blockchain
blocks = []
while True:
# This creates an overflow block, then a normal block, and then an overflow in the next sub-slot
# blocks = bt.get_consecutive_blocks(1, block_list_input=blocks, force_overflow=True)
blocks = bt.get_consecutive_blocks(1, block_list_input=blocks)
blocks = bt.get_consecutive_blocks(1, block_list_input=blocks, force_overflow=True)
await blockchain.receive_block(blocks[-2])
sb_1 = blockchain.block_record(blocks[-2].header_hash)
sb_2_next_ss = blocks[-1].total_iters - blocks[-2].total_iters < sb_1.sub_slot_iters
# We might not get a normal block for sb_2, and we might not get them in the right slots
# So this while loop keeps trying
if sb_1.overflow and sb_2_next_ss:
block = blocks[-1]
unf = UnfinishedBlock(
[],
block.reward_chain_block.get_unfinished(),
block.challenge_chain_sp_proof,
block.reward_chain_sp_proof,
block.foliage,
block.foliage_transaction_block,
block.transactions_info,
block.transactions_generator,
[],
)
npc_result = None
if block.transactions_generator is not None:
block_generator: BlockGenerator = await blockchain.get_block_generator(unf)
block_bytes = bytes(unf)
npc_result = await blockchain.run_generator(block_bytes, block_generator)
validate_res = await blockchain.validate_unfinished_block(
unf, npc_result, skip_overflow_ss_validation=True
)
assert validate_res.error is None
return None
await blockchain.receive_block(blocks[-1])
@pytest.mark.asyncio
async def test_one_sb_per_two_slots(self, empty_blockchain):
    """Build a chain in which every block is preceded by two empty sub-slots."""
    chain = empty_blockchain
    total = 20
    blocks = []
    for _ in range(total):  # Same thing, but 2 sub-slots per block
        blocks = bt.get_consecutive_blocks(1, block_list_input=blocks, skip_slots=2)
        res, error, _, _ = await chain.receive_block(blocks[-1])
        assert res == ReceiveBlockResult.NEW_PEAK
    assert chain.get_peak().height == total - 1
@pytest.mark.asyncio
async def test_one_sb_per_five_slots(self, empty_blockchain):
    """Build a chain in which every block is preceded by five empty sub-slots."""
    chain = empty_blockchain
    total = 10
    blocks = []
    for _ in range(total):  # Same thing, but 5 sub-slots per block
        blocks = bt.get_consecutive_blocks(1, block_list_input=blocks, skip_slots=5)
        res, error, _, _ = await chain.receive_block(blocks[-1])
        assert res == ReceiveBlockResult.NEW_PEAK
    assert chain.get_peak().height == total - 1
@pytest.mark.asyncio
async def test_basic_chain_overflow(self, empty_blockchain):
    """A short chain of forced-overflow blocks is accepted block-by-block."""
    blocks = bt.get_consecutive_blocks(5, force_overflow=True)
    for blk in blocks:
        res, error, _, _ = await empty_blockchain.receive_block(blk)
        assert error is None
        assert res == ReceiveBlockResult.NEW_PEAK
    assert empty_blockchain.get_peak().height == len(blocks) - 1
@pytest.mark.asyncio
async def test_one_sb_per_two_slots_force_overflow(self, empty_blockchain):
    """Every block skips two sub-slots AND is a forced overflow block."""
    chain = empty_blockchain
    total = 10
    blocks = []
    for _ in range(total):
        blocks = bt.get_consecutive_blocks(1, block_list_input=blocks, skip_slots=2, force_overflow=True)
        res, error, _, _ = await chain.receive_block(blocks[-1])
        assert error is None
        assert res == ReceiveBlockResult.NEW_PEAK
    assert chain.get_peak().height == total - 1
@pytest.mark.asyncio
async def test_invalid_prev(self, empty_blockchain):
    """Case 1: a block whose foliage references an unknown previous block hash
    is reported as DISCONNECTED_BLOCK."""
    blocks = bt.get_consecutive_blocks(2, force_overflow=False)
    res, _, _, _ = await empty_blockchain.receive_block(blocks[0])
    assert res == ReceiveBlockResult.NEW_PEAK
    tampered = recursive_replace(blocks[-1], "foliage.prev_block_hash", bytes([0] * 32))
    res, error, _, _ = await empty_blockchain.receive_block(tampered)
    assert res == ReceiveBlockResult.DISCONNECTED_BLOCK
@pytest.mark.asyncio
async def test_invalid_pospace(self, empty_blockchain):
    """Case 2: a corrupted proof-of-space proof is rejected with INVALID_POSPACE."""
    blocks = bt.get_consecutive_blocks(2, force_overflow=False)
    res, _, _, _ = await empty_blockchain.receive_block(blocks[0])
    assert res == ReceiveBlockResult.NEW_PEAK
    tampered = recursive_replace(blocks[-1], "reward_chain_block.proof_of_space.proof", bytes([0] * 32))
    res, error, _, _ = await empty_blockchain.receive_block(tampered)
    assert res == ReceiveBlockResult.INVALID_BLOCK
    assert error == Err.INVALID_POSPACE
@pytest.mark.asyncio
async def test_invalid_sub_slot_challenge_hash_genesis(self, empty_blockchain):
    """Case 2a: genesis block whose first sub-slot CC VDF has a wrong challenge
    is rejected with INVALID_PREV_CHALLENGE_SLOT_HASH."""
    # 2a
    blocks = bt.get_consecutive_blocks(1, force_overflow=False, skip_slots=1)
    # Corrupt the challenge of the challenge-chain end-of-slot VDF.
    new_finished_ss = recursive_replace(
        blocks[0].finished_sub_slots[0],
        "challenge_chain.challenge_chain_end_of_slot_vdf.challenge",
        bytes([2] * 32),
    )
    block_0_bad = recursive_replace(
        blocks[0], "finished_sub_slots", [new_finished_ss] + blocks[0].finished_sub_slots[1:]
    )
    result, err, _, _ = await empty_blockchain.receive_block(block_0_bad)
    assert result == ReceiveBlockResult.INVALID_BLOCK
    assert err == Err.INVALID_PREV_CHALLENGE_SLOT_HASH
@pytest.mark.asyncio
async def test_invalid_sub_slot_challenge_hash_non_genesis(self, empty_blockchain):
    """Case 2b: same corruption as 2a but on the second block of the chain."""
    # 2b
    blocks = bt.get_consecutive_blocks(1, force_overflow=False, skip_slots=0)
    blocks = bt.get_consecutive_blocks(1, force_overflow=False, skip_slots=1, block_list_input=blocks)
    new_finished_ss = recursive_replace(
        blocks[1].finished_sub_slots[0],
        "challenge_chain.challenge_chain_end_of_slot_vdf.challenge",
        bytes([2] * 32),
    )
    block_1_bad = recursive_replace(
        blocks[1], "finished_sub_slots", [new_finished_ss] + blocks[1].finished_sub_slots[1:]
    )
    # Genesis must be in the store before the bad block is evaluated.
    _, _, _, _ = await empty_blockchain.receive_block(blocks[0])
    result, err, _, _ = await empty_blockchain.receive_block(block_1_bad)
    assert result == ReceiveBlockResult.INVALID_BLOCK
    assert err == Err.INVALID_PREV_CHALLENGE_SLOT_HASH
@pytest.mark.asyncio
async def test_invalid_sub_slot_challenge_hash_empty_ss(self, empty_blockchain):
    """Case 2c: corrupt the LAST of multiple empty sub-slots; chaining of
    sub-slot challenges must still be enforced."""
    # 2c
    blocks = bt.get_consecutive_blocks(1, force_overflow=False, skip_slots=0)
    blocks = bt.get_consecutive_blocks(1, force_overflow=False, skip_slots=2, block_list_input=blocks)
    new_finished_ss = recursive_replace(
        blocks[1].finished_sub_slots[-1],
        "challenge_chain.challenge_chain_end_of_slot_vdf.challenge",
        bytes([2] * 32),
    )
    block_1_bad = recursive_replace(
        blocks[1], "finished_sub_slots", blocks[1].finished_sub_slots[:-1] + [new_finished_ss]
    )
    _, _, _, _ = await empty_blockchain.receive_block(blocks[0])
    result, err, _, _ = await empty_blockchain.receive_block(block_1_bad)
    assert result == ReceiveBlockResult.INVALID_BLOCK
    assert err == Err.INVALID_PREV_CHALLENGE_SLOT_HASH
@pytest.mark.asyncio
async def test_genesis_no_icc(self, empty_blockchain):
    """Case 2d: a genesis sub-slot must NOT carry an infused challenge chain;
    injecting one yields SHOULD_NOT_HAVE_ICC."""
    # 2d
    blocks = bt.get_consecutive_blocks(1, force_overflow=False, skip_slots=1)
    # Fabricate an ICC sub-slot where none is allowed.
    new_finished_ss = recursive_replace(
        blocks[0].finished_sub_slots[0],
        "infused_challenge_chain",
        InfusedChallengeChainSubSlot(
            VDFInfo(
                bytes([0] * 32),
                uint64(1200),
                ClassgroupElement.get_default_element(),
            )
        ),
    )
    block_0_bad = recursive_replace(
        blocks[0], "finished_sub_slots", [new_finished_ss] + blocks[0].finished_sub_slots[1:]
    )
    result, err, _, _ = await empty_blockchain.receive_block(block_0_bad)
    assert result == ReceiveBlockResult.INVALID_BLOCK
    assert err == Err.SHOULD_NOT_HAVE_ICC
async def do_test_invalid_icc_sub_slot_vdf(self, keychain):
    """Helper (not a test itself): builds a fresh blockchain with small
    sub-slot iters and corrupts the ICC end-of-slot VDF four different ways
    (iterations, output, challenge, proof); each must fail with
    INVALID_ICC_EOS_VDF, and the untouched block must then be accepted.

    keychain: temporary keychain used by the block tools.
    """
    # Small SUB_SLOT_ITERS so blocks with finished sub-slots appear quickly.
    bt_high_iters = await create_block_tools_async(
        constants=test_constants.replace(SUB_SLOT_ITERS_STARTING=(2 ** 12), DIFFICULTY_STARTING=(2 ** 14)),
        keychain=keychain,
    )
    bc1, connection, db_path = await create_blockchain(bt_high_iters.constants)
    blocks = bt_high_iters.get_consecutive_blocks(10)
    for block in blocks:
        if len(block.finished_sub_slots) > 0 and block.finished_sub_slots[-1].infused_challenge_chain is not None:
            # Bad iters
            new_finished_ss = recursive_replace(
                block.finished_sub_slots[-1],
                "infused_challenge_chain",
                InfusedChallengeChainSubSlot(
                    replace(
                        block.finished_sub_slots[
                            -1
                        ].infused_challenge_chain.infused_challenge_chain_end_of_slot_vdf,
                        number_of_iterations=10000000,
                    )
                ),
            )
            block_bad = recursive_replace(
                block, "finished_sub_slots", block.finished_sub_slots[:-1] + [new_finished_ss]
            )
            result, err, _, _ = await bc1.receive_block(block_bad)
            assert err == Err.INVALID_ICC_EOS_VDF
            # Bad output
            new_finished_ss_2 = recursive_replace(
                block.finished_sub_slots[-1],
                "infused_challenge_chain",
                InfusedChallengeChainSubSlot(
                    replace(
                        block.finished_sub_slots[
                            -1
                        ].infused_challenge_chain.infused_challenge_chain_end_of_slot_vdf,
                        output=ClassgroupElement.get_default_element(),
                    )
                ),
            )
            log.warning(f"Proof: {block.finished_sub_slots[-1].proofs}")
            block_bad_2 = recursive_replace(
                block, "finished_sub_slots", block.finished_sub_slots[:-1] + [new_finished_ss_2]
            )
            result, err, _, _ = await bc1.receive_block(block_bad_2)
            assert err == Err.INVALID_ICC_EOS_VDF
            # Bad challenge hash
            new_finished_ss_3 = recursive_replace(
                block.finished_sub_slots[-1],
                "infused_challenge_chain",
                InfusedChallengeChainSubSlot(
                    replace(
                        block.finished_sub_slots[
                            -1
                        ].infused_challenge_chain.infused_challenge_chain_end_of_slot_vdf,
                        challenge=bytes([0] * 32),
                    )
                ),
            )
            block_bad_3 = recursive_replace(
                block, "finished_sub_slots", block.finished_sub_slots[:-1] + [new_finished_ss_3]
            )
            result, err, _, _ = await bc1.receive_block(block_bad_3)
            assert err == Err.INVALID_ICC_EOS_VDF
            # Bad proof
            new_finished_ss_5 = recursive_replace(
                block.finished_sub_slots[-1],
                "proofs.infused_challenge_chain_slot_proof",
                VDFProof(uint8(0), b"1239819023890", False),
            )
            block_bad_5 = recursive_replace(
                block, "finished_sub_slots", block.finished_sub_slots[:-1] + [new_finished_ss_5]
            )
            result, err, _, _ = await bc1.receive_block(block_bad_5)
            assert err == Err.INVALID_ICC_EOS_VDF
        # The unmodified block must always be accepted so the chain advances.
        result, err, _, _ = await bc1.receive_block(block)
        assert err is None
        assert result == ReceiveBlockResult.NEW_PEAK
    # Tear down the temporary blockchain and its database file.
    await connection.close()
    bc1.shut_down()
    db_path.unlink()
@pytest.mark.asyncio
async def test_invalid_icc_sub_slot_vdf(self):
    """Run the ICC end-of-slot VDF corruption checks under a temp keychain."""
    with TempKeyring() as temp_keychain:
        await self.do_test_invalid_icc_sub_slot_vdf(temp_keychain)
@pytest.mark.asyncio
async def test_invalid_icc_into_cc(self, empty_blockchain):
    """Cases 2g-2k: the ICC sub-slot hash embedded in the challenge chain and
    reward chain must be consistent with whether an ICC actually exists.
    Loops until both the 'ICC present' and 'ICC absent' cases were exercised."""
    blockchain = empty_blockchain
    blocks = bt.get_consecutive_blocks(1)
    assert (await blockchain.receive_block(blocks[0]))[0] == ReceiveBlockResult.NEW_PEAK
    case_1, case_2 = False, False
    while not case_1 or not case_2:
        blocks = bt.get_consecutive_blocks(1, block_list_input=blocks, skip_slots=1)
        block = blocks[-1]
        if len(block.finished_sub_slots) > 0 and block.finished_sub_slots[-1].infused_challenge_chain is not None:
            if block.finished_sub_slots[-1].reward_chain.deficit == test_constants.MIN_BLOCKS_PER_CHALLENGE_BLOCK:
                # 2g
                case_1 = True
                # Wrong ICC hash placed in the challenge chain.
                new_finished_ss = recursive_replace(
                    block.finished_sub_slots[-1],
                    "challenge_chain",
                    replace(
                        block.finished_sub_slots[-1].challenge_chain,
                        infused_challenge_chain_sub_slot_hash=bytes([1] * 32),
                    ),
                )
            else:
                # 2h
                case_2 = True
                # ICC hash present in the CC when the deficit says it must not be.
                new_finished_ss = recursive_replace(
                    block.finished_sub_slots[-1],
                    "challenge_chain",
                    replace(
                        block.finished_sub_slots[-1].challenge_chain,
                        infused_challenge_chain_sub_slot_hash=block.finished_sub_slots[
                            -1
                        ].infused_challenge_chain.get_hash(),
                    ),
                )
            block_bad = recursive_replace(
                block, "finished_sub_slots", block.finished_sub_slots[:-1] + [new_finished_ss]
            )
            result, err, _, _ = await blockchain.receive_block(block_bad)
            assert err == Err.INVALID_ICC_HASH_CC
            # 2i
            # Drop the ICC hash from the reward chain even though an ICC exists.
            new_finished_ss_bad_rc = recursive_replace(
                block.finished_sub_slots[-1],
                "reward_chain",
                replace(block.finished_sub_slots[-1].reward_chain, infused_challenge_chain_sub_slot_hash=None),
            )
            block_bad = recursive_replace(
                block, "finished_sub_slots", block.finished_sub_slots[:-1] + [new_finished_ss_bad_rc]
            )
            result, err, _, _ = await blockchain.receive_block(block_bad)
            assert err == Err.INVALID_ICC_HASH_RC
        elif len(block.finished_sub_slots) > 0 and block.finished_sub_slots[-1].infused_challenge_chain is None:
            # 2j
            # ICC hash set in the CC while no ICC exists.
            new_finished_ss_bad_cc = recursive_replace(
                block.finished_sub_slots[-1],
                "challenge_chain",
                replace(
                    block.finished_sub_slots[-1].challenge_chain,
                    infused_challenge_chain_sub_slot_hash=bytes([1] * 32),
                ),
            )
            block_bad = recursive_replace(
                block, "finished_sub_slots", block.finished_sub_slots[:-1] + [new_finished_ss_bad_cc]
            )
            result, err, _, _ = await blockchain.receive_block(block_bad)
            assert err == Err.INVALID_ICC_HASH_CC
            # 2k
            # ICC hash set in the RC while no ICC exists.
            new_finished_ss_bad_rc = recursive_replace(
                block.finished_sub_slots[-1],
                "reward_chain",
                replace(
                    block.finished_sub_slots[-1].reward_chain, infused_challenge_chain_sub_slot_hash=bytes([1] * 32)
                ),
            )
            block_bad = recursive_replace(
                block, "finished_sub_slots", block.finished_sub_slots[:-1] + [new_finished_ss_bad_rc]
            )
            result, err, _, _ = await blockchain.receive_block(block_bad)
            assert err == Err.INVALID_ICC_HASH_RC
        # Finally, add the block properly
        result, err, _, _ = await blockchain.receive_block(block)
        assert err is None
        assert result == ReceiveBlockResult.NEW_PEAK
@pytest.mark.asyncio
async def test_empty_slot_no_ses(self, empty_blockchain):
    """Case 2l: an empty sub-slot that is not at a sub-epoch boundary must not
    carry a sub-epoch summary hash."""
    # 2l
    blockchain = empty_blockchain
    blocks = bt.get_consecutive_blocks(1)
    assert (await blockchain.receive_block(blocks[0]))[0] == ReceiveBlockResult.NEW_PEAK
    blocks = bt.get_consecutive_blocks(1, block_list_input=blocks, skip_slots=4)
    # Inject a bogus sub-epoch summary hash into the last empty sub-slot.
    new_finished_ss = recursive_replace(
        blocks[-1].finished_sub_slots[-1],
        "challenge_chain",
        replace(blocks[-1].finished_sub_slots[-1].challenge_chain, subepoch_summary_hash=std_hash(b"0")),
    )
    block_bad = recursive_replace(
        blocks[-1], "finished_sub_slots", blocks[-1].finished_sub_slots[:-1] + [new_finished_ss]
    )
    result, err, _, _ = await blockchain.receive_block(block_bad)
    assert err == Err.INVALID_SUB_EPOCH_SUMMARY_HASH
@pytest.mark.asyncio
async def test_empty_sub_slots_epoch(self, empty_blockchain):
    """Case 2m: empty sub-slots right after a sub-epoch/epoch boundary, and
    overflow blocks in the new epoch, must all be valid continuations."""
    # 2m
    # Tests adding an empty sub slot after the sub-epoch / epoch.
    # Also tests overflow block in epoch
    blocks_base = bt.get_consecutive_blocks(test_constants.EPOCH_BLOCKS)
    # Four alternative continuations of the same base chain (0-2 skipped slots).
    blocks_1 = bt.get_consecutive_blocks(1, block_list_input=blocks_base, force_overflow=True)
    blocks_2 = bt.get_consecutive_blocks(1, skip_slots=1, block_list_input=blocks_base, force_overflow=True)
    blocks_3 = bt.get_consecutive_blocks(1, skip_slots=2, block_list_input=blocks_base, force_overflow=True)
    blocks_4 = bt.get_consecutive_blocks(1, block_list_input=blocks_base)
    for block in blocks_base:
        result, err, _, _ = await empty_blockchain.receive_block(block)
        assert err is None
        assert result == ReceiveBlockResult.NEW_PEAK
    for block in [blocks_1[-1], blocks_2[-1], blocks_3[-1], blocks_4[-1]]:
        result, err, _, _ = await empty_blockchain.receive_block(block)
        assert err is None
@pytest.mark.asyncio
async def test_wrong_cc_hash_rc(self, empty_blockchain):
    """Case 2o: the reward chain's copy of the challenge-chain sub-slot hash
    must match; a mismatch yields INVALID_CHALLENGE_SLOT_HASH_RC."""
    # 2o
    blockchain = empty_blockchain
    blocks = bt.get_consecutive_blocks(1, skip_slots=1)
    blocks = bt.get_consecutive_blocks(1, skip_slots=1, block_list_input=blocks)
    assert (await blockchain.receive_block(blocks[0]))[0] == ReceiveBlockResult.NEW_PEAK
    new_finished_ss = recursive_replace(
        blocks[-1].finished_sub_slots[-1],
        "reward_chain",
        replace(blocks[-1].finished_sub_slots[-1].reward_chain, challenge_chain_sub_slot_hash=bytes([3] * 32)),
    )
    block_1_bad = recursive_replace(
        blocks[-1], "finished_sub_slots", blocks[-1].finished_sub_slots[:-1] + [new_finished_ss]
    )
    result, err, _, _ = await blockchain.receive_block(block_1_bad)
    assert result == ReceiveBlockResult.INVALID_BLOCK
    assert err == Err.INVALID_CHALLENGE_SLOT_HASH_RC
@pytest.mark.asyncio
async def test_invalid_cc_sub_slot_vdf(self, empty_blockchain):
    """Case 2q: corrupt the challenge-chain end-of-slot VDF four ways
    (iterations, output, challenge, proof); each must fail, then the clean
    block is accepted so the chain keeps advancing."""
    # 2q
    blocks = bt.get_consecutive_blocks(10)
    for block in blocks:
        if len(block.finished_sub_slots):
            # Bad iters
            new_finished_ss = recursive_replace(
                block.finished_sub_slots[-1],
                "challenge_chain",
                recursive_replace(
                    block.finished_sub_slots[-1].challenge_chain,
                    "challenge_chain_end_of_slot_vdf.number_of_iterations",
                    uint64(10000000),
                ),
            )
            # Keep the RC's CC hash consistent so only the VDF check fires.
            new_finished_ss = recursive_replace(
                new_finished_ss,
                "reward_chain.challenge_chain_sub_slot_hash",
                new_finished_ss.challenge_chain.get_hash(),
            )
            block_bad = recursive_replace(
                block, "finished_sub_slots", block.finished_sub_slots[:-1] + [new_finished_ss]
            )
            result, err, _, _ = await empty_blockchain.receive_block(block_bad)
            assert err == Err.INVALID_CC_EOS_VDF
            # Bad output
            new_finished_ss_2 = recursive_replace(
                block.finished_sub_slots[-1],
                "challenge_chain",
                recursive_replace(
                    block.finished_sub_slots[-1].challenge_chain,
                    "challenge_chain_end_of_slot_vdf.output",
                    ClassgroupElement.get_default_element(),
                ),
            )
            new_finished_ss_2 = recursive_replace(
                new_finished_ss_2,
                "reward_chain.challenge_chain_sub_slot_hash",
                new_finished_ss_2.challenge_chain.get_hash(),
            )
            block_bad_2 = recursive_replace(
                block, "finished_sub_slots", block.finished_sub_slots[:-1] + [new_finished_ss_2]
            )
            result, err, _, _ = await empty_blockchain.receive_block(block_bad_2)
            assert err == Err.INVALID_CC_EOS_VDF
            # Bad challenge hash
            new_finished_ss_3 = recursive_replace(
                block.finished_sub_slots[-1],
                "challenge_chain",
                recursive_replace(
                    block.finished_sub_slots[-1].challenge_chain,
                    "challenge_chain_end_of_slot_vdf.challenge",
                    bytes([1] * 32),
                ),
            )
            new_finished_ss_3 = recursive_replace(
                new_finished_ss_3,
                "reward_chain.challenge_chain_sub_slot_hash",
                new_finished_ss_3.challenge_chain.get_hash(),
            )
            block_bad_3 = recursive_replace(
                block, "finished_sub_slots", block.finished_sub_slots[:-1] + [new_finished_ss_3]
            )
            result, err, _, _ = await empty_blockchain.receive_block(block_bad_3)
            # A bad challenge may instead trip the prev-challenge-hash check.
            assert err == Err.INVALID_CC_EOS_VDF or err == Err.INVALID_PREV_CHALLENGE_SLOT_HASH
            # Bad proof
            new_finished_ss_5 = recursive_replace(
                block.finished_sub_slots[-1],
                "proofs.challenge_chain_slot_proof",
                VDFProof(uint8(0), b"1239819023890", False),
            )
            block_bad_5 = recursive_replace(
                block, "finished_sub_slots", block.finished_sub_slots[:-1] + [new_finished_ss_5]
            )
            result, err, _, _ = await empty_blockchain.receive_block(block_bad_5)
            assert err == Err.INVALID_CC_EOS_VDF
        result, err, _, _ = await empty_blockchain.receive_block(block)
        assert err is None
        assert result == ReceiveBlockResult.NEW_PEAK
@pytest.mark.asyncio
async def test_invalid_rc_sub_slot_vdf(self, empty_blockchain):
    """Case 2p: corrupt the reward-chain end-of-slot VDF four ways
    (iterations, output, challenge, proof); each fails with
    INVALID_RC_EOS_VDF, then the clean block is accepted."""
    # 2p
    blocks = bt.get_consecutive_blocks(10)
    for block in blocks:
        if len(block.finished_sub_slots):
            # Bad iters
            new_finished_ss = recursive_replace(
                block.finished_sub_slots[-1],
                "reward_chain",
                recursive_replace(
                    block.finished_sub_slots[-1].reward_chain,
                    "end_of_slot_vdf.number_of_iterations",
                    uint64(10000000),
                ),
            )
            block_bad = recursive_replace(
                block, "finished_sub_slots", block.finished_sub_slots[:-1] + [new_finished_ss]
            )
            result, err, _, _ = await empty_blockchain.receive_block(block_bad)
            assert err == Err.INVALID_RC_EOS_VDF
            # Bad output
            new_finished_ss_2 = recursive_replace(
                block.finished_sub_slots[-1],
                "reward_chain",
                recursive_replace(
                    block.finished_sub_slots[-1].reward_chain,
                    "end_of_slot_vdf.output",
                    ClassgroupElement.get_default_element(),
                ),
            )
            block_bad_2 = recursive_replace(
                block, "finished_sub_slots", block.finished_sub_slots[:-1] + [new_finished_ss_2]
            )
            result, err, _, _ = await empty_blockchain.receive_block(block_bad_2)
            assert err == Err.INVALID_RC_EOS_VDF
            # Bad challenge hash
            new_finished_ss_3 = recursive_replace(
                block.finished_sub_slots[-1],
                "reward_chain",
                recursive_replace(
                    block.finished_sub_slots[-1].reward_chain,
                    "end_of_slot_vdf.challenge",
                    bytes([1] * 32),
                ),
            )
            block_bad_3 = recursive_replace(
                block, "finished_sub_slots", block.finished_sub_slots[:-1] + [new_finished_ss_3]
            )
            result, err, _, _ = await empty_blockchain.receive_block(block_bad_3)
            assert err == Err.INVALID_RC_EOS_VDF
            # Bad proof
            new_finished_ss_5 = recursive_replace(
                block.finished_sub_slots[-1],
                "proofs.reward_chain_slot_proof",
                VDFProof(uint8(0), b"1239819023890", False),
            )
            block_bad_5 = recursive_replace(
                block, "finished_sub_slots", block.finished_sub_slots[:-1] + [new_finished_ss_5]
            )
            result, err, _, _ = await empty_blockchain.receive_block(block_bad_5)
            assert err == Err.INVALID_RC_EOS_VDF
        result, err, _, _ = await empty_blockchain.receive_block(block)
        assert err is None
        assert result == ReceiveBlockResult.NEW_PEAK
@pytest.mark.asyncio
async def test_genesis_bad_deficit(self, empty_blockchain):
    """Case 2r: a genesis sub-slot whose reward-chain deficit is off by one
    must be rejected with INVALID_DEFICIT."""
    # 2r
    block = bt.get_consecutive_blocks(1, skip_slots=2)[0]
    new_finished_ss = recursive_replace(
        block.finished_sub_slots[-1],
        "reward_chain",
        recursive_replace(
            block.finished_sub_slots[-1].reward_chain,
            "deficit",
            test_constants.MIN_BLOCKS_PER_CHALLENGE_BLOCK - 1,
        ),
    )
    block_bad = recursive_replace(block, "finished_sub_slots", block.finished_sub_slots[:-1] + [new_finished_ss])
    result, err, _, _ = await empty_blockchain.receive_block(block_bad)
    assert err == Err.INVALID_DEFICIT
@pytest.mark.asyncio
async def test_reset_deficit(self, empty_blockchain):
    """Cases 2s/2t: forcing a sub-slot deficit of 0 is rejected both when the
    previous block's deficit was already 0 (case 1) and when it was not
    (case 2). Loops until both cases have been exercised."""
    # 2s, 2t
    blockchain = empty_blockchain
    blocks = bt.get_consecutive_blocks(2)
    await empty_blockchain.receive_block(blocks[0])
    await empty_blockchain.receive_block(blocks[1])
    case_1, case_2 = False, False
    while not case_1 or not case_2:
        blocks = bt.get_consecutive_blocks(1, block_list_input=blocks, skip_slots=1)
        if len(blocks[-1].finished_sub_slots) > 0:
            # Force the reward-chain deficit in the last sub-slot to 0.
            new_finished_ss = recursive_replace(
                blocks[-1].finished_sub_slots[-1],
                "reward_chain",
                recursive_replace(
                    blocks[-1].finished_sub_slots[-1].reward_chain,
                    "deficit",
                    uint8(0),
                ),
            )
            if blockchain.block_record(blocks[-2].header_hash).deficit == 0:
                case_1 = True
            else:
                case_2 = True
            block_bad = recursive_replace(
                blocks[-1], "finished_sub_slots", blocks[-1].finished_sub_slots[:-1] + [new_finished_ss]
            )
            result, err, _, _ = await empty_blockchain.receive_block(block_bad)
            assert err == Err.INVALID_DEFICIT or err == Err.INVALID_ICC_HASH_CC
        # The untampered block keeps the chain advancing for the next round.
        result, err, _, _ = await empty_blockchain.receive_block(blocks[-1])
        assert result == ReceiveBlockResult.NEW_PEAK
@pytest.mark.asyncio
async def test_genesis_has_ses(self, empty_blockchain):
    """Case 3a: a genesis sub-slot must not contain a sub-epoch summary hash."""
    # 3a
    block = bt.get_consecutive_blocks(1, skip_slots=1)[0]
    new_finished_ss = recursive_replace(
        block.finished_sub_slots[0],
        "challenge_chain",
        recursive_replace(
            block.finished_sub_slots[0].challenge_chain,
            "subepoch_summary_hash",
            bytes([0] * 32),
        ),
    )
    # Re-sync the RC's CC hash so only the SES check can fire.
    new_finished_ss = recursive_replace(
        new_finished_ss,
        "reward_chain",
        replace(
            new_finished_ss.reward_chain, challenge_chain_sub_slot_hash=new_finished_ss.challenge_chain.get_hash()
        ),
    )
    block_bad = recursive_replace(block, "finished_sub_slots", [new_finished_ss] + block.finished_sub_slots[1:])
    result, err, _, _ = await empty_blockchain.receive_block(block_bad)
    assert err == Err.INVALID_SUB_EPOCH_SUMMARY_HASH
@pytest.mark.asyncio
async def test_no_ses_if_no_se(self, empty_blockchain):
    """Case 3b: a sub-slot that is not a sub-epoch boundary must not claim a
    sub-epoch summary; loops until a block with a finished sub-slot appears."""
    # 3b
    blocks = bt.get_consecutive_blocks(1)
    assert (await empty_blockchain.receive_block(blocks[0]))[0] == ReceiveBlockResult.NEW_PEAK
    while True:
        blocks = bt.get_consecutive_blocks(1, block_list_input=blocks)
        if len(blocks[-1].finished_sub_slots) > 0:
            new_finished_ss: EndOfSubSlotBundle = recursive_replace(
                blocks[-1].finished_sub_slots[0],
                "challenge_chain",
                recursive_replace(
                    blocks[-1].finished_sub_slots[0].challenge_chain,
                    "subepoch_summary_hash",
                    bytes([0] * 32),
                ),
            )
            # Re-sync the RC's CC hash so only the SES check can fire.
            new_finished_ss = recursive_replace(
                new_finished_ss,
                "reward_chain",
                replace(
                    new_finished_ss.reward_chain,
                    challenge_chain_sub_slot_hash=new_finished_ss.challenge_chain.get_hash(),
                ),
            )
            block_bad = recursive_replace(
                blocks[-1], "finished_sub_slots", [new_finished_ss] + blocks[-1].finished_sub_slots[1:]
            )
            result, err, _, _ = await empty_blockchain.receive_block(block_bad)
            assert err == Err.INVALID_SUB_EPOCH_SUMMARY_HASH
            return None
        await empty_blockchain.receive_block(blocks[-1])
@pytest.mark.asyncio
async def test_too_many_blocks(self, empty_blockchain):
    """Consensus condition 4 — not yet implemented (TODO)."""
    pass
@pytest.mark.asyncio
async def test_bad_pos(self, empty_blockchain):
    """Case 5: every corrupted field of the proof of space (challenge,
    pool contract hash, size, plot public key, proof bytes) must produce
    INVALID_POSPACE."""
    # 5
    blocks = bt.get_consecutive_blocks(2)
    assert (await empty_blockchain.receive_block(blocks[0]))[0] == ReceiveBlockResult.NEW_PEAK
    block_bad = recursive_replace(blocks[-1], "reward_chain_block.proof_of_space.challenge", std_hash(b""))
    assert (await empty_blockchain.receive_block(block_bad))[1] == Err.INVALID_POSPACE
    block_bad = recursive_replace(
        blocks[-1], "reward_chain_block.proof_of_space.pool_contract_puzzle_hash", std_hash(b"")
    )
    assert (await empty_blockchain.receive_block(block_bad))[1] == Err.INVALID_POSPACE
    # Wrong k-size (larger than the plot's actual size).
    block_bad = recursive_replace(blocks[-1], "reward_chain_block.proof_of_space.size", 62)
    assert (await empty_blockchain.receive_block(block_bad))[1] == Err.INVALID_POSPACE
    block_bad = recursive_replace(
        blocks[-1],
        "reward_chain_block.proof_of_space.plot_public_key",
        AugSchemeMPL.key_gen(std_hash(b"1231n")).get_g1(),
    )
    assert (await empty_blockchain.receive_block(block_bad))[1] == Err.INVALID_POSPACE
    block_bad = recursive_replace(
        blocks[-1],
        "reward_chain_block.proof_of_space.size",
        32,
    )
    assert (await empty_blockchain.receive_block(block_bad))[1] == Err.INVALID_POSPACE
    # Proof bytes of the right length (size * 64 / 8) but wrong content.
    block_bad = recursive_replace(
        blocks[-1],
        "reward_chain_block.proof_of_space.proof",
        bytes([1] * int(blocks[-1].reward_chain_block.proof_of_space.size * 64 / 8)),
    )
    assert (await empty_blockchain.receive_block(block_bad))[1] == Err.INVALID_POSPACE
    # TODO: test not passing the plot filter
@pytest.mark.asyncio
async def test_bad_signage_point_index(self, empty_blockchain):
    """Case 6: signage point index >= NUM_SPS_SUB_SLOT is invalid; the
    serialization layer raises ValueError before the blockchain check runs."""
    # 6
    blocks = bt.get_consecutive_blocks(2)
    assert (await empty_blockchain.receive_block(blocks[0]))[0] == ReceiveBlockResult.NEW_PEAK
    # recursive_replace itself raises on the out-of-range uint8, hence pytest.raises.
    with pytest.raises(ValueError):
        block_bad = recursive_replace(
            blocks[-1], "reward_chain_block.signage_point_index", test_constants.NUM_SPS_SUB_SLOT
        )
        assert (await empty_blockchain.receive_block(block_bad))[1] == Err.INVALID_SP_INDEX
    with pytest.raises(ValueError):
        block_bad = recursive_replace(
            blocks[-1], "reward_chain_block.signage_point_index", test_constants.NUM_SPS_SUB_SLOT + 1
        )
        assert (await empty_blockchain.receive_block(block_bad))[1] == Err.INVALID_SP_INDEX
@pytest.mark.asyncio
async def test_sp_0_no_sp(self, empty_blockchain):
    """Case 7: signage point index must be 0 iff the block has no signage
    point. Loops until both directions of the mismatch were tested."""
    # 7
    blocks = []
    case_1, case_2 = False, False
    while not case_1 or not case_2:
        blocks = bt.get_consecutive_blocks(1, block_list_input=blocks)
        if blocks[-1].reward_chain_block.signage_point_index == 0:
            # Block without SP claiming index 1.
            case_1 = True
            block_bad = recursive_replace(blocks[-1], "reward_chain_block.signage_point_index", uint8(1))
            assert (await empty_blockchain.receive_block(block_bad))[1] == Err.INVALID_SP_INDEX
        elif not is_overflow_block(test_constants, blocks[-1].reward_chain_block.signage_point_index):
            # Non-overflow block with an SP forced to index 0.
            case_2 = True
            block_bad = recursive_replace(blocks[-1], "reward_chain_block.signage_point_index", uint8(0))
            error_code = (await empty_blockchain.receive_block(block_bad))[1]
            assert error_code == Err.INVALID_SP_INDEX or error_code == Err.INVALID_POSPACE
        assert (await empty_blockchain.receive_block(blocks[-1]))[0] == ReceiveBlockResult.NEW_PEAK
@pytest.mark.asyncio
async def test_epoch_overflows(self, empty_blockchain):
    """Consensus condition 9 — TODO: hard to test because it requires
    modifying the block tools to make these special blocks."""
    pass
@pytest.mark.asyncio
async def test_bad_total_iters(self, empty_blockchain):
    """Case 10: an off-by-one total_iters value is rejected."""
    blocks = bt.get_consecutive_blocks(2)
    res, _, _, _ = await empty_blockchain.receive_block(blocks[0])
    assert res == ReceiveBlockResult.NEW_PEAK
    tampered = recursive_replace(
        blocks[-1], "reward_chain_block.total_iters", blocks[-1].reward_chain_block.total_iters + 1
    )
    _, error, _, _ = await empty_blockchain.receive_block(tampered)
    assert error == Err.INVALID_TOTAL_ITERS
@pytest.mark.asyncio
async def test_bad_rc_sp_vdf(self, empty_blockchain):
    """Case 11: corrupt the reward-chain signage-point VDF (challenge, output,
    iterations, proof); each fails with INVALID_RC_SP_VDF. Loops until a block
    with a non-zero signage point index appears (so an SP VDF exists)."""
    # 11
    blocks = bt.get_consecutive_blocks(1)
    assert (await empty_blockchain.receive_block(blocks[0]))[0] == ReceiveBlockResult.NEW_PEAK
    while True:
        blocks = bt.get_consecutive_blocks(1, block_list_input=blocks)
        if blocks[-1].reward_chain_block.signage_point_index != 0:
            block_bad = recursive_replace(
                blocks[-1], "reward_chain_block.reward_chain_sp_vdf.challenge", std_hash(b"1")
            )
            assert (await empty_blockchain.receive_block(block_bad))[1] == Err.INVALID_RC_SP_VDF
            block_bad = recursive_replace(
                blocks[-1],
                "reward_chain_block.reward_chain_sp_vdf.output",
                bad_element,
            )
            assert (await empty_blockchain.receive_block(block_bad))[1] == Err.INVALID_RC_SP_VDF
            block_bad = recursive_replace(
                blocks[-1],
                "reward_chain_block.reward_chain_sp_vdf.number_of_iterations",
                uint64(1111111111111),
            )
            assert (await empty_blockchain.receive_block(block_bad))[1] == Err.INVALID_RC_SP_VDF
            block_bad = recursive_replace(
                blocks[-1],
                "reward_chain_sp_proof",
                VDFProof(uint8(0), std_hash(b""), False),
            )
            assert (await empty_blockchain.receive_block(block_bad))[1] == Err.INVALID_RC_SP_VDF
            return None
        assert (await empty_blockchain.receive_block(blocks[-1]))[0] == ReceiveBlockResult.NEW_PEAK
@pytest.mark.asyncio
async def test_bad_rc_sp_sig(self, empty_blockchain):
    """Case 12: a garbage reward-chain SP signature is rejected."""
    blocks = bt.get_consecutive_blocks(2)
    res, _, _, _ = await empty_blockchain.receive_block(blocks[0])
    assert res == ReceiveBlockResult.NEW_PEAK
    tampered = recursive_replace(blocks[-1], "reward_chain_block.reward_chain_sp_signature", G2Element.generator())
    _, error, _, _ = await empty_blockchain.receive_block(tampered)
    assert error == Err.INVALID_RC_SIGNATURE
@pytest.mark.asyncio
async def test_bad_cc_sp_vdf(self, empty_blockchain):
    """Case 13: corrupt the challenge-chain signage-point VDF. Note: the first
    three corruptions only assert INVALID_BLOCK because proof of space is
    validated first; the proof corruption pinpoints INVALID_CC_SP_VDF."""
    # 13. Note: does not validate fully due to proof of space being validated first
    blocks = bt.get_consecutive_blocks(1)
    assert (await empty_blockchain.receive_block(blocks[0]))[0] == ReceiveBlockResult.NEW_PEAK
    while True:
        blocks = bt.get_consecutive_blocks(1, block_list_input=blocks)
        if blocks[-1].reward_chain_block.signage_point_index != 0:
            block_bad = recursive_replace(
                blocks[-1], "reward_chain_block.challenge_chain_sp_vdf.challenge", std_hash(b"1")
            )
            assert (await empty_blockchain.receive_block(block_bad))[0] == ReceiveBlockResult.INVALID_BLOCK
            block_bad = recursive_replace(
                blocks[-1],
                "reward_chain_block.challenge_chain_sp_vdf.output",
                bad_element,
            )
            assert (await empty_blockchain.receive_block(block_bad))[0] == ReceiveBlockResult.INVALID_BLOCK
            block_bad = recursive_replace(
                blocks[-1],
                "reward_chain_block.challenge_chain_sp_vdf.number_of_iterations",
                uint64(1111111111111),
            )
            assert (await empty_blockchain.receive_block(block_bad))[0] == ReceiveBlockResult.INVALID_BLOCK
            block_bad = recursive_replace(
                blocks[-1],
                "challenge_chain_sp_proof",
                VDFProof(uint8(0), std_hash(b""), False),
            )
            assert (await empty_blockchain.receive_block(block_bad))[1] == Err.INVALID_CC_SP_VDF
            return None
        assert (await empty_blockchain.receive_block(blocks[-1]))[0] == ReceiveBlockResult.NEW_PEAK
@pytest.mark.asyncio
async def test_bad_cc_sp_sig(self, empty_blockchain):
    """Case 14: a garbage challenge-chain SP signature is rejected."""
    blocks = bt.get_consecutive_blocks(2)
    res, _, _, _ = await empty_blockchain.receive_block(blocks[0])
    assert res == ReceiveBlockResult.NEW_PEAK
    tampered = recursive_replace(
        blocks[-1], "reward_chain_block.challenge_chain_sp_signature", G2Element.generator()
    )
    _, error, _, _ = await empty_blockchain.receive_block(tampered)
    assert error == Err.INVALID_CC_SIGNATURE
@pytest.mark.asyncio
async def test_is_transaction_block(self, empty_blockchain):
    """Consensus condition 15 — not yet implemented (TODO)."""
    pass
@pytest.mark.asyncio
async def test_bad_foliage_sb_sig(self, empty_blockchain):
    """Case 16: a garbage foliage block-data signature is rejected."""
    blocks = bt.get_consecutive_blocks(2)
    res, _, _, _ = await empty_blockchain.receive_block(blocks[0])
    assert res == ReceiveBlockResult.NEW_PEAK
    tampered = recursive_replace(blocks[-1], "foliage.foliage_block_data_signature", G2Element.generator())
    _, error, _, _ = await empty_blockchain.receive_block(tampered)
    assert error == Err.INVALID_PLOT_SIGNATURE
@pytest.mark.asyncio
async def test_bad_foliage_transaction_block_sig(self, empty_blockchain):
    """Case 17: a garbage foliage transaction-block signature is rejected.
    Loops until a transaction block appears (not every block has one)."""
    # 17
    blocks = bt.get_consecutive_blocks(1)
    assert (await empty_blockchain.receive_block(blocks[0]))[0] == ReceiveBlockResult.NEW_PEAK
    while True:
        blocks = bt.get_consecutive_blocks(1, block_list_input=blocks)
        if blocks[-1].foliage_transaction_block is not None:
            block_bad = recursive_replace(
                blocks[-1], "foliage.foliage_transaction_block_signature", G2Element.generator()
            )
            assert (await empty_blockchain.receive_block(block_bad))[1] == Err.INVALID_PLOT_SIGNATURE
            return None
        assert (await empty_blockchain.receive_block(blocks[-1]))[0] == ReceiveBlockResult.NEW_PEAK
@pytest.mark.asyncio
async def test_unfinished_reward_chain_sb_hash(self, empty_blockchain):
    """Case 18: wrong unfinished_reward_block_hash in the foliage, with the
    plot signature re-computed so only the hash check can fail."""
    # 18
    blocks = bt.get_consecutive_blocks(2)
    assert (await empty_blockchain.receive_block(blocks[0]))[0] == ReceiveBlockResult.NEW_PEAK
    block_bad: FullBlock = recursive_replace(
        blocks[-1], "foliage.foliage_block_data.unfinished_reward_block_hash", std_hash(b"2")
    )
    # Re-sign the tampered foliage data so the signature check passes.
    new_m = block_bad.foliage.foliage_block_data.get_hash()
    new_fsb_sig = bt.get_plot_signature(new_m, blocks[-1].reward_chain_block.proof_of_space.plot_public_key)
    block_bad = recursive_replace(block_bad, "foliage.foliage_block_data_signature", new_fsb_sig)
    assert (await empty_blockchain.receive_block(block_bad))[1] == Err.INVALID_URSB_HASH
@pytest.mark.asyncio
async def test_pool_target_height(self, empty_blockchain):
    """Case 19: a pool target whose max_height is below the block height
    (signature re-computed) is rejected with OLD_POOL_TARGET."""
    # 19
    blocks = bt.get_consecutive_blocks(3)
    assert (await empty_blockchain.receive_block(blocks[0]))[0] == ReceiveBlockResult.NEW_PEAK
    assert (await empty_blockchain.receive_block(blocks[1]))[0] == ReceiveBlockResult.NEW_PEAK
    block_bad: FullBlock = recursive_replace(blocks[-1], "foliage.foliage_block_data.pool_target.max_height", 1)
    # Re-sign so only the pool-target check can fail.
    new_m = block_bad.foliage.foliage_block_data.get_hash()
    new_fsb_sig = bt.get_plot_signature(new_m, blocks[-1].reward_chain_block.proof_of_space.plot_public_key)
    block_bad = recursive_replace(block_bad, "foliage.foliage_block_data_signature", new_fsb_sig)
    assert (await empty_blockchain.receive_block(block_bad))[1] == Err.OLD_POOL_TARGET
@pytest.mark.asyncio
async def test_pool_target_pre_farm(self, empty_blockchain):
    """Case 20a: during the pre-farm period the pool target puzzle hash is
    fixed; changing it (signature re-computed) yields INVALID_PREFARM."""
    # 20a
    blocks = bt.get_consecutive_blocks(1)
    block_bad: FullBlock = recursive_replace(
        blocks[-1], "foliage.foliage_block_data.pool_target.puzzle_hash", std_hash(b"12")
    )
    # Re-sign so only the prefarm check can fail.
    new_m = block_bad.foliage.foliage_block_data.get_hash()
    new_fsb_sig = bt.get_plot_signature(new_m, blocks[-1].reward_chain_block.proof_of_space.plot_public_key)
    block_bad = recursive_replace(block_bad, "foliage.foliage_block_data_signature", new_fsb_sig)
    assert (await empty_blockchain.receive_block(block_bad))[1] == Err.INVALID_PREFARM
    @pytest.mark.asyncio
    async def test_pool_target_signature(self, empty_blockchain):
        """Header validation #20b: a pool target carrying a bogus pool
        signature is rejected with INVALID_POOL_SIGNATURE."""
        # 20b
        blocks_initial = bt.get_consecutive_blocks(2)
        assert (await empty_blockchain.receive_block(blocks_initial[0]))[0] == ReceiveBlockResult.NEW_PEAK
        assert (await empty_blockchain.receive_block(blocks_initial[1]))[0] == ReceiveBlockResult.NEW_PEAK
        attempts = 0
        while True:
            # Go until we get a block that has a pool pk, as opposed to a pool contract
            # (re-seeding with the attempt counter so each try yields a different block).
            blocks = bt.get_consecutive_blocks(
                1, blocks_initial, seed=std_hash(attempts.to_bytes(4, byteorder="big", signed=False))
            )
            if blocks[-1].foliage.foliage_block_data.pool_signature is not None:
                # Swap in a valid-but-wrong G2 point, then re-sign the foliage
                # block data so only the pool signature itself is invalid.
                block_bad: FullBlock = recursive_replace(
                    blocks[-1], "foliage.foliage_block_data.pool_signature", G2Element.generator()
                )
                new_m = block_bad.foliage.foliage_block_data.get_hash()
                new_fsb_sig = bt.get_plot_signature(new_m, blocks[-1].reward_chain_block.proof_of_space.plot_public_key)
                block_bad = recursive_replace(block_bad, "foliage.foliage_block_data_signature", new_fsb_sig)
                assert (await empty_blockchain.receive_block(block_bad))[1] == Err.INVALID_POOL_SIGNATURE
                return None
            attempts += 1
    @pytest.mark.asyncio
    async def test_pool_target_contract(self, empty_blockchain):
        """Header validation #20c: for a block using a pool contract (no pool
        signature present), a random pool target puzzle hash is rejected with
        INVALID_POOL_TARGET."""
        # 20c invalid pool target with contract
        blocks_initial = bt.get_consecutive_blocks(2)
        assert (await empty_blockchain.receive_block(blocks_initial[0]))[0] == ReceiveBlockResult.NEW_PEAK
        assert (await empty_blockchain.receive_block(blocks_initial[1]))[0] == ReceiveBlockResult.NEW_PEAK
        attempts = 0
        while True:
            # Go until we get a block that has a pool contract opposed to a pool pk
            # (re-seeding with the attempt counter so each try yields a different block).
            blocks = bt.get_consecutive_blocks(
                1, blocks_initial, seed=std_hash(attempts.to_bytes(4, byteorder="big", signed=False))
            )
            if blocks[-1].foliage.foliage_block_data.pool_signature is None:
                # Randomize the pool target, then re-sign the foliage block data
                # so only the target itself is invalid.
                block_bad: FullBlock = recursive_replace(
                    blocks[-1], "foliage.foliage_block_data.pool_target.puzzle_hash", bytes32(token_bytes(32))
                )
                new_m = block_bad.foliage.foliage_block_data.get_hash()
                new_fsb_sig = bt.get_plot_signature(new_m, blocks[-1].reward_chain_block.proof_of_space.plot_public_key)
                block_bad = recursive_replace(block_bad, "foliage.foliage_block_data_signature", new_fsb_sig)
                assert (await empty_blockchain.receive_block(block_bad))[1] == Err.INVALID_POOL_TARGET
                return None
            attempts += 1
    @pytest.mark.asyncio
    async def test_foliage_data_presence(self, empty_blockchain):
        """Header validation #22: foliage_transaction_block_hash must be present
        exactly when the block is a transaction block.

        Extends the chain until both cases have been exercised: a tx block with
        the hash stripped (case_1) and a non-tx block with a hash added (case_2).
        """
        # 22
        blocks = bt.get_consecutive_blocks(1)
        assert (await empty_blockchain.receive_block(blocks[0]))[0] == ReceiveBlockResult.NEW_PEAK
        case_1, case_2 = False, False
        while not case_1 or not case_2:
            blocks = bt.get_consecutive_blocks(1, block_list_input=blocks)
            if blocks[-1].foliage_transaction_block is not None:
                case_1 = True
                # Tx block: remove the required hash.
                block_bad: FullBlock = recursive_replace(blocks[-1], "foliage.foliage_transaction_block_hash", None)
            else:
                case_2 = True
                # Non-tx block: add a hash that must not be there.
                block_bad: FullBlock = recursive_replace(
                    blocks[-1], "foliage.foliage_transaction_block_hash", std_hash(b"")
                )
            err_code = (await empty_blockchain.receive_block(block_bad))[1]
            assert err_code == Err.INVALID_FOLIAGE_BLOCK_PRESENCE or err_code == Err.INVALID_IS_TRANSACTION_BLOCK
            # Accept the untampered block so the chain keeps advancing.
            await empty_blockchain.receive_block(blocks[-1])
@pytest.mark.asyncio
async def test_foliage_transaction_block_hash(self, empty_blockchain):
# 23
blocks = bt.get_consecutive_blocks(1)
assert (await empty_blockchain.receive_block(blocks[0]))[0] == ReceiveBlockResult.NEW_PEAK
case_1, case_2 = False, False
while not case_1 or not case_2:
blocks = bt.get_consecutive_blocks(1, block_list_input=blocks)
if blocks[-1].foliage_transaction_block is not None:
block_bad: FullBlock = recursive_replace(
blocks[-1], "foliage.foliage_transaction_block_hash", std_hash(b"2")
)
new_m = block_bad.foliage.foliage_transaction_block_hash
new_fbh_sig = bt.get_plot_signature(new_m, blocks[-1].reward_chain_block.proof_of_space.plot_public_key)
block_bad = recursive_replace(block_bad, "foliage.foliage_transaction_block_signature", new_fbh_sig)
assert (await empty_blockchain.receive_block(block_bad))[1] == Err.INVALID_FOLIAGE_BLOCK_HASH
return None
await empty_blockchain.receive_block(blocks[-1])
@pytest.mark.asyncio
async def test_genesis_bad_prev_block(self, empty_blockchain):
# 24a
blocks = bt.get_consecutive_blocks(1)
block_bad: FullBlock = recursive_replace(
blocks[-1], "foliage_transaction_block.prev_transaction_block_hash", std_hash(b"2")
)
block_bad: FullBlock = recursive_replace(
block_bad, "foliage.foliage_transaction_block_hash", block_bad.foliage_transaction_block.get_hash()
)
new_m = block_bad.foliage.foliage_transaction_block_hash
new_fbh_sig = bt.get_plot_signature(new_m, blocks[-1].reward_chain_block.proof_of_space.plot_public_key)
block_bad = recursive_replace(block_bad, "foliage.foliage_transaction_block_signature", new_fbh_sig)
assert (await empty_blockchain.receive_block(block_bad))[1] == Err.INVALID_PREV_BLOCK_HASH
    @pytest.mark.asyncio
    async def test_bad_prev_block_non_genesis(self, empty_blockchain):
        """Header validation #24b: a non-genesis transaction block must link to
        the previous transaction block; a wrong (re-signed) link is rejected
        with INVALID_PREV_BLOCK_HASH."""
        # 24b
        blocks = bt.get_consecutive_blocks(1)
        assert (await empty_blockchain.receive_block(blocks[0]))[0] == ReceiveBlockResult.NEW_PEAK
        while True:
            blocks = bt.get_consecutive_blocks(1, block_list_input=blocks)
            if blocks[-1].foliage_transaction_block is not None:
                # Corrupt the prev-tx-block link, then re-derive the foliage hash
                # and signature so only the link itself is invalid.
                block_bad: FullBlock = recursive_replace(
                    blocks[-1], "foliage_transaction_block.prev_transaction_block_hash", std_hash(b"2")
                )
                block_bad: FullBlock = recursive_replace(
                    block_bad, "foliage.foliage_transaction_block_hash", block_bad.foliage_transaction_block.get_hash()
                )
                new_m = block_bad.foliage.foliage_transaction_block_hash
                new_fbh_sig = bt.get_plot_signature(new_m, blocks[-1].reward_chain_block.proof_of_space.plot_public_key)
                block_bad = recursive_replace(block_bad, "foliage.foliage_transaction_block_signature", new_fbh_sig)
                assert (await empty_blockchain.receive_block(block_bad))[1] == Err.INVALID_PREV_BLOCK_HASH
                return None
            # Not a tx block yet: accept it and keep extending the chain.
            await empty_blockchain.receive_block(blocks[-1])
    @pytest.mark.asyncio
    async def test_bad_filter_hash(self, empty_blockchain):
        """Header validation #25: the transactions filter hash in the foliage
        transaction block must be correct; a wrong (re-signed) hash is rejected
        with INVALID_TRANSACTIONS_FILTER_HASH."""
        # 25
        blocks = bt.get_consecutive_blocks(1)
        assert (await empty_blockchain.receive_block(blocks[0]))[0] == ReceiveBlockResult.NEW_PEAK
        while True:
            blocks = bt.get_consecutive_blocks(1, block_list_input=blocks)
            if blocks[-1].foliage_transaction_block is not None:
                # Corrupt the filter hash, then re-derive the foliage hash and
                # signature so only the filter hash itself is invalid.
                block_bad: FullBlock = recursive_replace(
                    blocks[-1], "foliage_transaction_block.filter_hash", std_hash(b"2")
                )
                block_bad: FullBlock = recursive_replace(
                    block_bad, "foliage.foliage_transaction_block_hash", block_bad.foliage_transaction_block.get_hash()
                )
                new_m = block_bad.foliage.foliage_transaction_block_hash
                new_fbh_sig = bt.get_plot_signature(new_m, blocks[-1].reward_chain_block.proof_of_space.plot_public_key)
                block_bad = recursive_replace(block_bad, "foliage.foliage_transaction_block_signature", new_fbh_sig)
                assert (await empty_blockchain.receive_block(block_bad))[1] == Err.INVALID_TRANSACTIONS_FILTER_HASH
                return None
            # Not a tx block yet: accept it and keep extending the chain.
            await empty_blockchain.receive_block(blocks[-1])
    @pytest.mark.asyncio
    async def test_bad_timestamp(self, empty_blockchain):
        """Header validation #26: a transaction block's timestamp must fall in
        the allowed window.

        Three sub-cases on the first tx block found: earlier than the previous
        tx block (too far in past), equal to the previous tx block's timestamp
        (also rejected as past), and far in the future.  Each tampered block is
        re-committed and re-signed so only the timestamp itself is invalid.
        """
        # 26
        blocks = bt.get_consecutive_blocks(1)
        assert (await empty_blockchain.receive_block(blocks[0]))[0] == ReceiveBlockResult.NEW_PEAK
        while True:
            blocks = bt.get_consecutive_blocks(1, block_list_input=blocks)
            if blocks[-1].foliage_transaction_block is not None:
                # Case 1: timestamp strictly before the genesis tx block.
                block_bad: FullBlock = recursive_replace(
                    blocks[-1],
                    "foliage_transaction_block.timestamp",
                    blocks[0].foliage_transaction_block.timestamp - 10,
                )
                block_bad: FullBlock = recursive_replace(
                    block_bad, "foliage.foliage_transaction_block_hash", block_bad.foliage_transaction_block.get_hash()
                )
                new_m = block_bad.foliage.foliage_transaction_block_hash
                new_fbh_sig = bt.get_plot_signature(new_m, blocks[-1].reward_chain_block.proof_of_space.plot_public_key)
                block_bad = recursive_replace(block_bad, "foliage.foliage_transaction_block_signature", new_fbh_sig)
                assert (await empty_blockchain.receive_block(block_bad))[1] == Err.TIMESTAMP_TOO_FAR_IN_PAST
                # Case 2: timestamp equal to the genesis tx block's — still rejected.
                block_bad: FullBlock = recursive_replace(
                    blocks[-1],
                    "foliage_transaction_block.timestamp",
                    blocks[0].foliage_transaction_block.timestamp,
                )
                block_bad: FullBlock = recursive_replace(
                    block_bad, "foliage.foliage_transaction_block_hash", block_bad.foliage_transaction_block.get_hash()
                )
                new_m = block_bad.foliage.foliage_transaction_block_hash
                new_fbh_sig = bt.get_plot_signature(new_m, blocks[-1].reward_chain_block.proof_of_space.plot_public_key)
                block_bad = recursive_replace(block_bad, "foliage.foliage_transaction_block_signature", new_fbh_sig)
                assert (await empty_blockchain.receive_block(block_bad))[1] == Err.TIMESTAMP_TOO_FAR_IN_PAST
                # Case 3: timestamp ~10M seconds ahead — too far in the future.
                block_bad: FullBlock = recursive_replace(
                    blocks[-1],
                    "foliage_transaction_block.timestamp",
                    blocks[0].foliage_transaction_block.timestamp + 10000000,
                )
                block_bad: FullBlock = recursive_replace(
                    block_bad, "foliage.foliage_transaction_block_hash", block_bad.foliage_transaction_block.get_hash()
                )
                new_m = block_bad.foliage.foliage_transaction_block_hash
                new_fbh_sig = bt.get_plot_signature(new_m, blocks[-1].reward_chain_block.proof_of_space.plot_public_key)
                block_bad = recursive_replace(block_bad, "foliage.foliage_transaction_block_signature", new_fbh_sig)
                assert (await empty_blockchain.receive_block(block_bad))[1] == Err.TIMESTAMP_TOO_FAR_IN_FUTURE
                return None
            # Not a tx block yet: accept it and keep extending the chain.
            await empty_blockchain.receive_block(blocks[-1])
@pytest.mark.asyncio
async def test_height(self, empty_blockchain):
# 27
blocks = bt.get_consecutive_blocks(2)
assert (await empty_blockchain.receive_block(blocks[0]))[0] == ReceiveBlockResult.NEW_PEAK
block_bad: FullBlock = recursive_replace(blocks[-1], "reward_chain_block.height", 2)
assert (await empty_blockchain.receive_block(block_bad))[1] == Err.INVALID_HEIGHT
@pytest.mark.asyncio
async def test_height_genesis(self, empty_blockchain):
# 27
blocks = bt.get_consecutive_blocks(1)
block_bad: FullBlock = recursive_replace(blocks[-1], "reward_chain_block.height", 1)
assert (await empty_blockchain.receive_block(block_bad))[1] == Err.INVALID_PREV_BLOCK_HASH
@pytest.mark.asyncio
async def test_weight(self, empty_blockchain):
# 28
blocks = bt.get_consecutive_blocks(2)
assert (await empty_blockchain.receive_block(blocks[0]))[0] == ReceiveBlockResult.NEW_PEAK
block_bad: FullBlock = recursive_replace(blocks[-1], "reward_chain_block.weight", 22131)
assert (await empty_blockchain.receive_block(block_bad))[1] == Err.INVALID_WEIGHT
@pytest.mark.asyncio
async def test_weight_genesis(self, empty_blockchain):
# 28
blocks = bt.get_consecutive_blocks(1)
block_bad: FullBlock = recursive_replace(blocks[-1], "reward_chain_block.weight", 0)
assert (await empty_blockchain.receive_block(block_bad))[1] == Err.INVALID_WEIGHT
    @pytest.mark.asyncio
    async def test_bad_cc_ip_vdf(self, empty_blockchain):
        """Header validation #29: each component of the challenge chain infusion
        point VDF (challenge, output, iteration count) and its proof must
        validate; every corruption is rejected with INVALID_CC_IP_VDF."""
        # 29
        blocks = bt.get_consecutive_blocks(1)
        assert (await empty_blockchain.receive_block(blocks[0]))[0] == ReceiveBlockResult.NEW_PEAK
        blocks = bt.get_consecutive_blocks(1, block_list_input=blocks)
        # Wrong challenge.
        block_bad = recursive_replace(blocks[-1], "reward_chain_block.challenge_chain_ip_vdf.challenge", std_hash(b"1"))
        assert (await empty_blockchain.receive_block(block_bad))[1] == Err.INVALID_CC_IP_VDF
        # Invalid output element.
        block_bad = recursive_replace(
            blocks[-1],
            "reward_chain_block.challenge_chain_ip_vdf.output",
            bad_element,
        )
        assert (await empty_blockchain.receive_block(block_bad))[1] == Err.INVALID_CC_IP_VDF
        # Wrong number of iterations.
        block_bad = recursive_replace(
            blocks[-1],
            "reward_chain_block.challenge_chain_ip_vdf.number_of_iterations",
            uint64(1111111111111),
        )
        assert (await empty_blockchain.receive_block(block_bad))[1] == Err.INVALID_CC_IP_VDF
        # Invalid proof.
        block_bad = recursive_replace(
            blocks[-1],
            "challenge_chain_ip_proof",
            VDFProof(uint8(0), std_hash(b""), False),
        )
        assert (await empty_blockchain.receive_block(block_bad))[1] == Err.INVALID_CC_IP_VDF
    @pytest.mark.asyncio
    async def test_bad_rc_ip_vdf(self, empty_blockchain):
        """Header validation #30: each component of the reward chain infusion
        point VDF (challenge, output, iteration count) and its proof must
        validate; every corruption is rejected with INVALID_RC_IP_VDF."""
        # 30
        blocks = bt.get_consecutive_blocks(1)
        assert (await empty_blockchain.receive_block(blocks[0]))[0] == ReceiveBlockResult.NEW_PEAK
        blocks = bt.get_consecutive_blocks(1, block_list_input=blocks)
        # Wrong challenge.
        block_bad = recursive_replace(blocks[-1], "reward_chain_block.reward_chain_ip_vdf.challenge", std_hash(b"1"))
        assert (await empty_blockchain.receive_block(block_bad))[1] == Err.INVALID_RC_IP_VDF
        # Invalid output element.
        block_bad = recursive_replace(
            blocks[-1],
            "reward_chain_block.reward_chain_ip_vdf.output",
            bad_element,
        )
        assert (await empty_blockchain.receive_block(block_bad))[1] == Err.INVALID_RC_IP_VDF
        # Wrong number of iterations.
        block_bad = recursive_replace(
            blocks[-1],
            "reward_chain_block.reward_chain_ip_vdf.number_of_iterations",
            uint64(1111111111111),
        )
        assert (await empty_blockchain.receive_block(block_bad))[1] == Err.INVALID_RC_IP_VDF
        # Invalid proof.
        block_bad = recursive_replace(
            blocks[-1],
            "reward_chain_ip_proof",
            VDFProof(uint8(0), std_hash(b""), False),
        )
        assert (await empty_blockchain.receive_block(block_bad))[1] == Err.INVALID_RC_IP_VDF
@pytest.mark.asyncio
async def test_bad_icc_ip_vdf(self, empty_blockchain):
# 31
blocks = bt.get_consecutive_blocks(1)
assert (await empty_blockchain.receive_block(blocks[0]))[0] == ReceiveBlockResult.NEW_PEAK
blocks = bt.get_consecutive_blocks(1, block_list_input=blocks)
block_bad = recursive_replace(
blocks[-1], "reward_chain_block.infused_challenge_chain_ip_vdf.challenge", std_hash(b"1")
)
assert (await empty_blockchain.receive_block(block_bad))[1] == Err.INVALID_ICC_VDF
assert (await empty_blockchain.receive_block(block_bad))[1] == Err.INVALID_ICC_VDF
block_bad = recursive_replace(
blocks[-1],
"reward_chain_block.infused_challenge_chain_ip_vdf.output",
bad_element,
)
assert (await empty_blockchain.receive_block(block_bad))[1] == Err.INVALID_ICC_VDF
block_bad = recursive_replace(
blocks[-1],
"reward_chain_block.infused_challenge_chain_ip_vdf.number_of_iterations",
uint64(1111111111111),
)
assert (await empty_blockchain.receive_block(block_bad))[1] == Err.INVALID_ICC_VDF
block_bad = recursive_replace(
blocks[-1],
"infused_challenge_chain_ip_proof",
VDFProof(uint8(0), std_hash(b""), False),
)
assert (await empty_blockchain.receive_block(block_bad))[1] == Err.INVALID_ICC_VDF
@pytest.mark.asyncio
async def test_reward_block_hash(self, empty_blockchain):
# 32
blocks = bt.get_consecutive_blocks(2)
assert (await empty_blockchain.receive_block(blocks[0]))[0] == ReceiveBlockResult.NEW_PEAK
block_bad: FullBlock = recursive_replace(blocks[-1], "foliage.reward_block_hash", std_hash(b""))
assert (await empty_blockchain.receive_block(block_bad))[1] == Err.INVALID_REWARD_BLOCK_HASH
    @pytest.mark.asyncio
    async def test_reward_block_hash_2(self, empty_blockchain):
        """Header validation #33: reward_chain_block.is_transaction_block must
        agree with the presence of foliage transaction data.

        Flips a genesis (tx) block to non-tx, and a naturally non-tx block to
        tx, re-committing reward_block_hash each time so only the flag mismatch
        is detected; both are rejected with INVALID_FOLIAGE_BLOCK_PRESENCE.
        """
        # 33
        blocks = bt.get_consecutive_blocks(1)
        block_bad: FullBlock = recursive_replace(blocks[0], "reward_chain_block.is_transaction_block", False)
        # Keep foliage.reward_block_hash consistent with the modified reward chain block.
        block_bad: FullBlock = recursive_replace(
            block_bad, "foliage.reward_block_hash", block_bad.reward_chain_block.get_hash()
        )
        assert (await empty_blockchain.receive_block(block_bad))[1] == Err.INVALID_FOLIAGE_BLOCK_PRESENCE
        assert (await empty_blockchain.receive_block(blocks[0]))[0] == ReceiveBlockResult.NEW_PEAK
        # Test one which should not be a tx block
        while True:
            blocks = bt.get_consecutive_blocks(1, block_list_input=blocks)
            if not blocks[-1].is_transaction_block():
                block_bad: FullBlock = recursive_replace(blocks[-1], "reward_chain_block.is_transaction_block", True)
                block_bad: FullBlock = recursive_replace(
                    block_bad, "foliage.reward_block_hash", block_bad.reward_chain_block.get_hash()
                )
                assert (await empty_blockchain.receive_block(block_bad))[1] == Err.INVALID_FOLIAGE_BLOCK_PRESENCE
                return None
            # Tx block: accept it and keep extending the chain.
            assert (await empty_blockchain.receive_block(blocks[-1]))[0] == ReceiveBlockResult.NEW_PEAK
class TestPreValidation:
    """Tests for Blockchain.pre_validate_blocks_multiprocessing."""

    @pytest.mark.asyncio
    async def test_pre_validation_fails_bad_blocks(self, empty_blockchain):
        """Pre-validation must flag a block whose total_iters was tampered
        with, while still passing the untouched block in the same batch."""
        blocks = bt.get_consecutive_blocks(2)
        assert (await empty_blockchain.receive_block(blocks[0]))[0] == ReceiveBlockResult.NEW_PEAK
        block_bad = recursive_replace(
            blocks[-1], "reward_chain_block.total_iters", blocks[-1].reward_chain_block.total_iters + 1
        )
        res = await empty_blockchain.pre_validate_blocks_multiprocessing([blocks[0], block_bad], {})
        assert res[0].error is None
        assert res[1].error is not None

    @pytest.mark.asyncio
    async def test_pre_validation(self, empty_blockchain, default_1000_blocks):
        """Pre-validate the first 100 cached blocks in CPU-sized batches, then
        receive each block using its pre-validation result, logging timings."""
        blocks = default_1000_blocks[:100]
        start = time.time()
        # Batch size: one block per core, capped at 32.
        n_at_a_time = min(multiprocessing.cpu_count(), 32)
        times_pv = []  # per-batch pre-validation wall times
        times_rb = []  # per-block receive_block wall times
        for i in range(0, len(blocks), n_at_a_time):
            end_i = min(i + n_at_a_time, len(blocks))
            blocks_to_validate = blocks[i:end_i]
            start_pv = time.time()
            res = await empty_blockchain.pre_validate_blocks_multiprocessing(blocks_to_validate, {})
            end_pv = time.time()
            times_pv.append(end_pv - start_pv)
            assert res is not None
            for n in range(end_i - i):
                assert res[n] is not None
                assert res[n].error is None
                block = blocks_to_validate[n]
                start_rb = time.time()
                # Pass the pre-validation result along with the block.
                result, err, _, _ = await empty_blockchain.receive_block(block, res[n])
                end_rb = time.time()
                times_rb.append(end_rb - start_rb)
                assert err is None
                assert result == ReceiveBlockResult.NEW_PEAK
                log.info(
                    f"Added block {block.height} total iters {block.total_iters} "
                    f"new slot? {len(block.finished_sub_slots)}, time {end_rb - start_rb}"
                )
        end = time.time()
        log.info(f"Total time: {end - start} seconds")
        log.info(f"Average pv: {sum(times_pv)/(len(blocks)/n_at_a_time)}")
        log.info(f"Average rb: {sum(times_rb)/(len(blocks))}")
class TestBodyValidation:
@pytest.mark.asyncio
@pytest.mark.parametrize("opcode", [ConditionOpcode.AGG_SIG_ME, ConditionOpcode.AGG_SIG_UNSAFE])
@pytest.mark.parametrize(
"with_garbage,expected",
[
(True, (ReceiveBlockResult.INVALID_BLOCK, Err.INVALID_CONDITION, None)),
(False, (ReceiveBlockResult.NEW_PEAK, None, 2)),
],
)
async def test_aggsig_garbage(self, empty_blockchain, opcode, with_garbage, expected):
b = empty_blockchain
blocks = bt.get_consecutive_blocks(
3,
guarantee_transaction_block=True,
farmer_reward_puzzle_hash=bt.pool_ph,
pool_reward_puzzle_hash=bt.pool_ph,
genesis_timestamp=10000,
time_per_block=10,
)
assert (await b.receive_block(blocks[0]))[0] == ReceiveBlockResult.NEW_PEAK
assert (await b.receive_block(blocks[1]))[0] == ReceiveBlockResult.NEW_PEAK
assert (await b.receive_block(blocks[2]))[0] == ReceiveBlockResult.NEW_PEAK
wt: WalletTool = bt.get_pool_wallet_tool()
tx1: SpendBundle = wt.generate_signed_transaction(
10, wt.get_new_puzzlehash(), list(blocks[-1].get_included_reward_coins())[0]
)
coin1: Coin = tx1.additions()[0]
secret_key = wt.get_private_key_for_puzzle_hash(coin1.puzzle_hash)
synthetic_secret_key = calculate_synthetic_secret_key(secret_key, DEFAULT_HIDDEN_PUZZLE_HASH)
public_key = synthetic_secret_key.get_g1()
args = [public_key, b"msg"]
if with_garbage:
args.append(b"garbage")
conditions = {opcode: [ConditionWithArgs(opcode, args)]}
tx2: SpendBundle = wt.generate_signed_transaction(10, wt.get_new_puzzlehash(), coin1, condition_dic=conditions)
assert coin1 in tx2.removals()
coin2: Coin = tx2.additions()[0]
bundles = SpendBundle.aggregate([tx1, tx2])
blocks = bt.get_consecutive_blocks(
1,
block_list_input=blocks,
guarantee_transaction_block=True,
transaction_data=bundles,
time_per_block=10,
)
assert (await b.receive_block(blocks[-1]))[0:-1] == expected
    @pytest.mark.asyncio
    @pytest.mark.parametrize(
        "opcode,lock_value,expected",
        [
            (ConditionOpcode.ASSERT_SECONDS_RELATIVE, -2, ReceiveBlockResult.NEW_PEAK),
            (ConditionOpcode.ASSERT_SECONDS_RELATIVE, -1, ReceiveBlockResult.NEW_PEAK),
            (ConditionOpcode.ASSERT_SECONDS_RELATIVE, 0, ReceiveBlockResult.NEW_PEAK),
            (ConditionOpcode.ASSERT_SECONDS_RELATIVE, 1, ReceiveBlockResult.INVALID_BLOCK),
            (ConditionOpcode.ASSERT_HEIGHT_RELATIVE, -2, ReceiveBlockResult.NEW_PEAK),
            (ConditionOpcode.ASSERT_HEIGHT_RELATIVE, -1, ReceiveBlockResult.NEW_PEAK),
            (ConditionOpcode.ASSERT_HEIGHT_RELATIVE, 0, ReceiveBlockResult.INVALID_BLOCK),
            (ConditionOpcode.ASSERT_HEIGHT_RELATIVE, 1, ReceiveBlockResult.INVALID_BLOCK),
            (ConditionOpcode.ASSERT_HEIGHT_ABSOLUTE, 2, ReceiveBlockResult.NEW_PEAK),
            (ConditionOpcode.ASSERT_HEIGHT_ABSOLUTE, 3, ReceiveBlockResult.INVALID_BLOCK),
            (ConditionOpcode.ASSERT_HEIGHT_ABSOLUTE, 4, ReceiveBlockResult.INVALID_BLOCK),
            # genesis timestamp is 10000 and each block is 10 seconds
            (ConditionOpcode.ASSERT_SECONDS_ABSOLUTE, 10029, ReceiveBlockResult.NEW_PEAK),
            (ConditionOpcode.ASSERT_SECONDS_ABSOLUTE, 10030, ReceiveBlockResult.NEW_PEAK),
            (ConditionOpcode.ASSERT_SECONDS_ABSOLUTE, 10031, ReceiveBlockResult.INVALID_BLOCK),
            (ConditionOpcode.ASSERT_SECONDS_ABSOLUTE, 10032, ReceiveBlockResult.INVALID_BLOCK),
        ],
    )
    async def test_ephemeral_timelock(self, empty_blockchain, opcode, lock_value, expected):
        """Spend an ephemeral coin (created and spent in the same block) under
        each timelock opcode/value, checking which boundary values validate.

        On acceptance, verifies coin1 (the ephemeral parent) is marked spent
        and coin2 (the final child) is unspent.
        """
        b = empty_blockchain
        blocks = bt.get_consecutive_blocks(
            3,
            guarantee_transaction_block=True,
            farmer_reward_puzzle_hash=bt.pool_ph,
            pool_reward_puzzle_hash=bt.pool_ph,
            genesis_timestamp=10000,
            time_per_block=10,
        )
        assert (await b.receive_block(blocks[0]))[0] == ReceiveBlockResult.NEW_PEAK
        assert (await b.receive_block(blocks[1]))[0] == ReceiveBlockResult.NEW_PEAK
        assert (await b.receive_block(blocks[2]))[0] == ReceiveBlockResult.NEW_PEAK
        wt: WalletTool = bt.get_pool_wallet_tool()
        conditions = {opcode: [ConditionWithArgs(opcode, [int_to_bytes(lock_value)])]}
        # tx1 creates coin1; tx2 spends coin1 in the very same block (ephemeral)
        # under the timelock condition.
        tx1: SpendBundle = wt.generate_signed_transaction(
            10, wt.get_new_puzzlehash(), list(blocks[-1].get_included_reward_coins())[0]
        )
        coin1: Coin = tx1.additions()[0]
        tx2: SpendBundle = wt.generate_signed_transaction(10, wt.get_new_puzzlehash(), coin1, condition_dic=conditions)
        assert coin1 in tx2.removals()
        coin2: Coin = tx2.additions()[0]
        bundles = SpendBundle.aggregate([tx1, tx2])
        blocks = bt.get_consecutive_blocks(
            1,
            block_list_input=blocks,
            guarantee_transaction_block=True,
            transaction_data=bundles,
            time_per_block=10,
        )
        assert (await b.receive_block(blocks[-1]))[0] == expected
        if expected == ReceiveBlockResult.NEW_PEAK:
            # ensure coin1 was in fact spent
            c = await b.coin_store.get_coin_record(coin1.name())
            assert c is not None and c.spent
            # ensure coin2 was NOT spent
            c = await b.coin_store.get_coin_record(coin2.name())
            assert c is not None and not c.spent
    @pytest.mark.asyncio
    async def test_not_tx_block_but_has_data(self, empty_blockchain):
        """Body validation #1: a non-transaction block carrying any transaction
        payload (generator, transactions_info, or generator ref list) must be
        rejected with NOT_BLOCK_BUT_HAS_DATA."""
        # 1
        b = empty_blockchain
        blocks = bt.get_consecutive_blocks(1)
        # Advance until the latest block is NOT a transaction block.
        while blocks[-1].foliage_transaction_block is not None:
            assert (await b.receive_block(blocks[-1]))[0] == ReceiveBlockResult.NEW_PEAK
            blocks = bt.get_consecutive_blocks(1, block_list_input=blocks)
        original_block: FullBlock = blocks[-1]
        # Attach an (empty) generator.
        block = recursive_replace(original_block, "transactions_generator", SerializedProgram())
        assert (await b.receive_block(block))[1] == Err.NOT_BLOCK_BUT_HAS_DATA
        h = std_hash(b"")
        i = uint64(1)
        # Attach a transactions_info.
        block = recursive_replace(
            original_block,
            "transactions_info",
            TransactionsInfo(h, h, G2Element(), uint64(1), uint64(1), []),
        )
        assert (await b.receive_block(block))[1] == Err.NOT_BLOCK_BUT_HAS_DATA
        # Attach a non-empty generator ref list.
        block = recursive_replace(original_block, "transactions_generator_ref_list", [i])
        assert (await b.receive_block(block))[1] == Err.NOT_BLOCK_BUT_HAS_DATA
    @pytest.mark.asyncio
    async def test_tx_block_missing_data(self, empty_blockchain):
        """Body validation #2: a transaction block stripped of its transaction
        data (foliage_transaction_block or transactions_info) must be rejected."""
        # 2
        b = empty_blockchain
        blocks = bt.get_consecutive_blocks(2, guarantee_transaction_block=True)
        assert (await b.receive_block(blocks[0]))[0] == ReceiveBlockResult.NEW_PEAK
        # Strip the foliage transaction block.
        block = recursive_replace(
            blocks[-1],
            "foliage_transaction_block",
            None,
        )
        err = (await b.receive_block(block))[1]
        assert err == Err.IS_TRANSACTION_BLOCK_BUT_NO_DATA or err == Err.INVALID_FOLIAGE_BLOCK_PRESENCE
        # Strip transactions_info.
        block = recursive_replace(
            blocks[-1],
            "transactions_info",
            None,
        )
        # Receiving this block may raise an internal AssertionError before an
        # error code is produced; either outcome is accepted.
        try:
            err = (await b.receive_block(block))[1]
        except AssertionError:
            return None
        assert err == Err.IS_TRANSACTION_BLOCK_BUT_NO_DATA or err == Err.INVALID_FOLIAGE_BLOCK_PRESENCE
    @pytest.mark.asyncio
    async def test_invalid_transactions_info_hash(self, empty_blockchain):
        """Body validation #3: foliage_transaction_block.transactions_info_hash
        must match the actual transactions_info; a wrong hash (with the foliage
        re-committed and re-signed) is rejected with
        INVALID_TRANSACTIONS_INFO_HASH."""
        # 3
        b = empty_blockchain
        blocks = bt.get_consecutive_blocks(2, guarantee_transaction_block=True)
        assert (await b.receive_block(blocks[0]))[0] == ReceiveBlockResult.NEW_PEAK
        h = std_hash(b"")
        block = recursive_replace(
            blocks[-1],
            "foliage_transaction_block.transactions_info_hash",
            h,
        )
        # Re-commit the foliage to the tampered tx block and re-sign it so only
        # the transactions_info hash itself is inconsistent.
        block = recursive_replace(
            block, "foliage.foliage_transaction_block_hash", std_hash(block.foliage_transaction_block)
        )
        new_m = block.foliage.foliage_transaction_block_hash
        new_fsb_sig = bt.get_plot_signature(new_m, blocks[-1].reward_chain_block.proof_of_space.plot_public_key)
        block = recursive_replace(block, "foliage.foliage_transaction_block_signature", new_fsb_sig)
        err = (await b.receive_block(block))[1]
        assert err == Err.INVALID_TRANSACTIONS_INFO_HASH
@pytest.mark.asyncio
async def test_invalid_transactions_block_hash(self, empty_blockchain):
# 4
b = empty_blockchain
blocks = bt.get_consecutive_blocks(2, guarantee_transaction_block=True)
assert (await b.receive_block(blocks[0]))[0] == ReceiveBlockResult.NEW_PEAK
h = std_hash(b"")
block = recursive_replace(blocks[-1], "foliage.foliage_transaction_block_hash", h)
new_m = block.foliage.foliage_transaction_block_hash
new_fsb_sig = bt.get_plot_signature(new_m, blocks[-1].reward_chain_block.proof_of_space.plot_public_key)
block = recursive_replace(block, "foliage.foliage_transaction_block_signature", new_fsb_sig)
err = (await b.receive_block(block))[1]
assert err == Err.INVALID_FOLIAGE_BLOCK_HASH
@pytest.mark.asyncio
async def test_invalid_reward_claims(self, empty_blockchain):
# 5
b = empty_blockchain
blocks = bt.get_consecutive_blocks(2, guarantee_transaction_block=True)
assert (await b.receive_block(blocks[0]))[0] == ReceiveBlockResult.NEW_PEAK
block: FullBlock = blocks[-1]
# Too few
assert block.transactions_info
too_few_reward_claims = block.transactions_info.reward_claims_incorporated[:-1]
block_2: FullBlock = recursive_replace(
block, "transactions_info.reward_claims_incorporated", too_few_reward_claims
)
assert block_2.transactions_info
block_2 = recursive_replace(
block_2, "foliage_transaction_block.transactions_info_hash", block_2.transactions_info.get_hash()
)
assert block_2.foliage_transaction_block
block_2 = recursive_replace(
block_2, "foliage.foliage_transaction_block_hash", block_2.foliage_transaction_block.get_hash()
)
new_m = block_2.foliage.foliage_transaction_block_hash
new_fsb_sig = bt.get_plot_signature(new_m, block.reward_chain_block.proof_of_space.plot_public_key)
block_2 = recursive_replace(block_2, "foliage.foliage_transaction_block_signature", new_fsb_sig)
err = (await b.receive_block(block_2))[1]
assert err == Err.INVALID_REWARD_COINS
# Too many
h = std_hash(b"")
too_many_reward_claims = block.transactions_info.reward_claims_incorporated + [
Coin(h, h, too_few_reward_claims[0].amount)
]
block_2 = recursive_replace(block, "transactions_info.reward_claims_incorporated", too_many_reward_claims)
assert block_2.transactions_info
block_2 = recursive_replace(
block_2, "foliage_transaction_block.transactions_info_hash", block_2.transactions_info.get_hash()
)
assert block_2.foliage_transaction_block
block_2 = recursive_replace(
block_2, "foliage.foliage_transaction_block_hash", block_2.foliage_transaction_block.get_hash()
)
new_m = block_2.foliage.foliage_transaction_block_hash
new_fsb_sig = bt.get_plot_signature(new_m, block.reward_chain_block.proof_of_space.plot_public_key)
block_2 = recursive_replace(block_2, "foliage.foliage_transaction_block_signature", new_fsb_sig)
err = (await b.receive_block(block_2))[1]
assert err == Err.INVALID_REWARD_COINS
# Duplicates
duplicate_reward_claims = block.transactions_info.reward_claims_incorporated + [
block.transactions_info.reward_claims_incorporated[-1]
]
block_2 = recursive_replace(block, "transactions_info.reward_claims_incorporated", duplicate_reward_claims)
assert block_2.transactions_info
block_2 = recursive_replace(
block_2, "foliage_transaction_block.transactions_info_hash", block_2.transactions_info.get_hash()
)
assert block_2.foliage_transaction_block
block_2 = recursive_replace(
block_2, "foliage.foliage_transaction_block_hash", block_2.foliage_transaction_block.get_hash()
)
new_m = block_2.foliage.foliage_transaction_block_hash
new_fsb_sig = bt.get_plot_signature(new_m, block.reward_chain_block.proof_of_space.plot_public_key)
block_2 = recursive_replace(block_2, "foliage.foliage_transaction_block_signature", new_fsb_sig)
err = (await b.receive_block(block_2))[1]
assert err == Err.INVALID_REWARD_COINS
    @pytest.mark.asyncio
    async def test_invalid_transactions_generator_hash(self, empty_blockchain):
        """Body validation #7: transactions_info.generator_root must be all
        zeroes for a block with no generator, and the correct hash when a
        generator is present; both violations are rejected with
        INVALID_TRANSACTIONS_GENERATOR_HASH."""
        # 7
        b = empty_blockchain
        blocks = bt.get_consecutive_blocks(2, guarantee_transaction_block=True)
        assert (await b.receive_block(blocks[0]))[0] == ReceiveBlockResult.NEW_PEAK
        # No tx should have all zeroes
        block: FullBlock = blocks[-1]
        block_2 = recursive_replace(block, "transactions_info.generator_root", bytes([1] * 32))
        # Re-commit and re-sign the foliage so only the generator root is wrong.
        block_2 = recursive_replace(
            block_2, "foliage_transaction_block.transactions_info_hash", block_2.transactions_info.get_hash()
        )
        block_2 = recursive_replace(
            block_2, "foliage.foliage_transaction_block_hash", block_2.foliage_transaction_block.get_hash()
        )
        new_m = block_2.foliage.foliage_transaction_block_hash
        new_fsb_sig = bt.get_plot_signature(new_m, block.reward_chain_block.proof_of_space.plot_public_key)
        block_2 = recursive_replace(block_2, "foliage.foliage_transaction_block_signature", new_fsb_sig)
        err = (await b.receive_block(block_2))[1]
        assert err == Err.INVALID_TRANSACTIONS_GENERATOR_HASH
        assert (await b.receive_block(blocks[1]))[0] == ReceiveBlockResult.NEW_PEAK
        # Build a chain segment whose final block carries an actual transaction generator.
        blocks = bt.get_consecutive_blocks(
            2,
            block_list_input=blocks,
            guarantee_transaction_block=True,
            farmer_reward_puzzle_hash=bt.pool_ph,
            pool_reward_puzzle_hash=bt.pool_ph,
        )
        assert (await b.receive_block(blocks[2]))[0] == ReceiveBlockResult.NEW_PEAK
        assert (await b.receive_block(blocks[3]))[0] == ReceiveBlockResult.NEW_PEAK
        wt: WalletTool = bt.get_pool_wallet_tool()
        tx: SpendBundle = wt.generate_signed_transaction(
            10, wt.get_new_puzzlehash(), list(blocks[-1].get_included_reward_coins())[0]
        )
        blocks = bt.get_consecutive_blocks(
            1, block_list_input=blocks, guarantee_transaction_block=True, transaction_data=tx
        )
        # Non empty generator hash must be correct
        block = blocks[-1]
        block_2 = recursive_replace(block, "transactions_info.generator_root", bytes([0] * 32))
        block_2 = recursive_replace(
            block_2, "foliage_transaction_block.transactions_info_hash", block_2.transactions_info.get_hash()
        )
        block_2 = recursive_replace(
            block_2, "foliage.foliage_transaction_block_hash", block_2.foliage_transaction_block.get_hash()
        )
        new_m = block_2.foliage.foliage_transaction_block_hash
        new_fsb_sig = bt.get_plot_signature(new_m, block.reward_chain_block.proof_of_space.plot_public_key)
        block_2 = recursive_replace(block_2, "foliage.foliage_transaction_block_signature", new_fsb_sig)
        err = (await b.receive_block(block_2))[1]
        assert err == Err.INVALID_TRANSACTIONS_GENERATOR_HASH
@pytest.mark.asyncio
async def test_invalid_transactions_ref_list(self, empty_blockchain):
    """Validate handling of the generator refs root and the generator ref list.

    Covers four cases:
      1. a non-tx-generator block must have an all-zero refs root,
      2. a block without a generator must have an empty ref list,
      3. a block WITH a ref list must have a matching (non-tampered) refs root,
      4. every referenced height must point at an existing transaction block.
    """
    # No generator should have [1]s for the root
    b = empty_blockchain
    blocks = bt.get_consecutive_blocks(
        3,
        guarantee_transaction_block=True,
        farmer_reward_puzzle_hash=bt.pool_ph,
        pool_reward_puzzle_hash=bt.pool_ph,
    )
    assert (await b.receive_block(blocks[0]))[0] == ReceiveBlockResult.NEW_PEAK
    assert (await b.receive_block(blocks[1]))[0] == ReceiveBlockResult.NEW_PEAK
    block: FullBlock = blocks[-1]
    # Tamper with the refs root, then re-derive the dependent hashes and
    # re-sign the foliage so ONLY the refs root is inconsistent.
    block_2 = recursive_replace(block, "transactions_info.generator_refs_root", bytes([0] * 32))
    block_2 = recursive_replace(
        block_2, "foliage_transaction_block.transactions_info_hash", block_2.transactions_info.get_hash()
    )
    block_2 = recursive_replace(
        block_2, "foliage.foliage_transaction_block_hash", block_2.foliage_transaction_block.get_hash()
    )
    new_m = block_2.foliage.foliage_transaction_block_hash
    new_fsb_sig = bt.get_plot_signature(new_m, block.reward_chain_block.proof_of_space.plot_public_key)
    block_2 = recursive_replace(block_2, "foliage.foliage_transaction_block_signature", new_fsb_sig)
    err = (await b.receive_block(block_2))[1]
    assert err == Err.INVALID_TRANSACTIONS_GENERATOR_REFS_ROOT
    # No generator should have no refs list
    block_2 = recursive_replace(block, "transactions_generator_ref_list", [uint32(0)])
    err = (await b.receive_block(block_2))[1]
    assert err == Err.INVALID_TRANSACTIONS_GENERATOR_REFS_ROOT
    # Hash should be correct when there is a ref list
    assert (await b.receive_block(blocks[-1]))[0] == ReceiveBlockResult.NEW_PEAK
    wt: WalletTool = bt.get_pool_wallet_tool()
    tx: SpendBundle = wt.generate_signed_transaction(
        10, wt.get_new_puzzlehash(), list(blocks[-1].get_included_reward_coins())[0]
    )
    # Insert some non-transaction blocks before the tx block so that later
    # ref heights can point at blocks that are NOT transaction blocks.
    blocks = bt.get_consecutive_blocks(5, block_list_input=blocks, guarantee_transaction_block=False)
    for block in blocks[-5:]:
        assert (await b.receive_block(block))[0] == ReceiveBlockResult.NEW_PEAK
    blocks = bt.get_consecutive_blocks(
        1, block_list_input=blocks, guarantee_transaction_block=True, transaction_data=tx
    )
    assert (await b.receive_block(blocks[-1]))[0] == ReceiveBlockResult.NEW_PEAK
    # Reuse the previous generator so the next block carries a non-empty ref list.
    generator_arg = detect_potential_template_generator(blocks[-1].height, blocks[-1].transactions_generator)
    assert generator_arg is not None
    blocks = bt.get_consecutive_blocks(
        1,
        block_list_input=blocks,
        guarantee_transaction_block=True,
        transaction_data=tx,
        previous_generator=generator_arg,
    )
    block = blocks[-1]
    assert len(block.transactions_generator_ref_list) > 0
    block_2 = recursive_replace(block, "transactions_info.generator_refs_root", bytes([1] * 32))
    block_2 = recursive_replace(
        block_2, "foliage_transaction_block.transactions_info_hash", block_2.transactions_info.get_hash()
    )
    block_2 = recursive_replace(
        block_2, "foliage.foliage_transaction_block_hash", block_2.foliage_transaction_block.get_hash()
    )
    new_m = block_2.foliage.foliage_transaction_block_hash
    new_fsb_sig = bt.get_plot_signature(new_m, block.reward_chain_block.proof_of_space.plot_public_key)
    block_2 = recursive_replace(block_2, "foliage.foliage_transaction_block_signature", new_fsb_sig)
    err = (await b.receive_block(block_2))[1]
    assert err == Err.INVALID_TRANSACTIONS_GENERATOR_REFS_ROOT
    # Too many heights
    block_2 = recursive_replace(block, "transactions_generator_ref_list", [block.height - 2, block.height - 1])
    err = (await b.receive_block(block_2))[1]
    assert err == Err.GENERATOR_REF_HAS_NO_GENERATOR
    assert (await b.pre_validate_blocks_multiprocessing([block_2], {})) is None
    # Not tx block
    for h in range(0, block.height - 1):
        block_2 = recursive_replace(block, "transactions_generator_ref_list", [h])
        err = (await b.receive_block(block_2))[1]
        assert err == Err.GENERATOR_REF_HAS_NO_GENERATOR or err == Err.INVALID_TRANSACTIONS_GENERATOR_REFS_ROOT
        assert (await b.pre_validate_blocks_multiprocessing([block_2], {})) is None
@pytest.mark.asyncio
async def test_cost_exceeds_max(self, empty_blockchain):
    """A generator with far too many CREATE_COIN conditions must be rejected (check # 7)."""
    # 7
    chain = empty_blockchain
    blocks = bt.get_consecutive_blocks(
        3,
        guarantee_transaction_block=True,
        farmer_reward_puzzle_hash=bt.pool_ph,
        pool_reward_puzzle_hash=bt.pool_ph,
    )
    # Accept the three initial transaction blocks to fund the wallet.
    for initial_block in blocks[:3]:
        assert (await chain.receive_block(initial_block))[0] == ReceiveBlockResult.NEW_PEAK
    wallet: WalletTool = bt.get_pool_wallet_tool()
    # 7000 distinct outputs blow well past the per-block cost budget.
    condition_dict = {
        ConditionOpcode.CREATE_COIN: [
            ConditionWithArgs(ConditionOpcode.CREATE_COIN, [bt.pool_ph, int_to_bytes(i)])
            for i in range(7000)
        ]
    }
    tx: SpendBundle = wallet.generate_signed_transaction(
        10, wallet.get_new_puzzlehash(), list(blocks[-1].get_included_reward_coins())[0], condition_dic=condition_dict
    )
    blocks = bt.get_consecutive_blocks(
        1, block_list_input=blocks, guarantee_transaction_block=True, transaction_data=tx
    )
    err = (await chain.receive_block(blocks[-1]))[1]
    assert err in [Err.BLOCK_COST_EXCEEDS_MAX, Err.INVALID_BLOCK_COST]
@pytest.mark.asyncio
async def test_clvm_must_not_fail(self, empty_blockchain):
    """Placeholder for validation check # 8 (CLVM program must not fail) — not yet implemented."""
    # 8
    pass
@pytest.mark.asyncio
async def test_invalid_cost_in_block(self, empty_blockchain):
    """A block advertising a wrong cost in transactions_info must be rejected (check # 9).

    Tries three bad values: zero, too low, and too high. The too-high case
    fails with a general runtime error because the CLVM program runs out of
    the advertised cost during execution.
    """
    # 9
    b = empty_blockchain
    blocks = bt.get_consecutive_blocks(
        3,
        guarantee_transaction_block=True,
        farmer_reward_puzzle_hash=bt.pool_ph,
        pool_reward_puzzle_hash=bt.pool_ph,
    )
    assert (await b.receive_block(blocks[0]))[0] == ReceiveBlockResult.NEW_PEAK
    assert (await b.receive_block(blocks[1]))[0] == ReceiveBlockResult.NEW_PEAK
    assert (await b.receive_block(blocks[2]))[0] == ReceiveBlockResult.NEW_PEAK
    wt: WalletTool = bt.get_pool_wallet_tool()
    tx: SpendBundle = wt.generate_signed_transaction(
        10, wt.get_new_puzzlehash(), list(blocks[-1].get_included_reward_coins())[0]
    )
    blocks = bt.get_consecutive_blocks(
        1, block_list_input=blocks, guarantee_transaction_block=True, transaction_data=tx
    )
    block: FullBlock = blocks[-1]

    async def receive_with_cost(cost):
        # Replace the advertised cost, re-derive the dependent hashes and
        # re-sign the foliage so only the cost itself is wrong, then submit
        # the tampered block and return the resulting error (or None).
        block_2: FullBlock = recursive_replace(block, "transactions_info.cost", cost)
        assert block_2.transactions_info
        block_2 = recursive_replace(
            block_2, "foliage_transaction_block.transactions_info_hash", block_2.transactions_info.get_hash()
        )
        assert block_2.foliage_transaction_block
        block_2 = recursive_replace(
            block_2, "foliage.foliage_transaction_block_hash", block_2.foliage_transaction_block.get_hash()
        )
        new_m = block_2.foliage.foliage_transaction_block_hash
        new_fsb_sig = bt.get_plot_signature(new_m, block.reward_chain_block.proof_of_space.plot_public_key)
        block_2 = recursive_replace(block_2, "foliage.foliage_transaction_block_signature", new_fsb_sig)
        return (await b.receive_block(block_2))[1]

    # zero
    assert (await receive_with_cost(uint64(0))) == Err.INVALID_BLOCK_COST
    # too low
    assert (await receive_with_cost(uint64(1))) == Err.INVALID_BLOCK_COST
    # too high: when the CLVM program exceeds cost during execution, it will
    # fail with a general runtime error
    assert (await receive_with_cost(uint64(1000000))) == Err.GENERATOR_RUNTIME_ERROR
    # The untampered block is accepted.
    err = (await b.receive_block(block))[1]
    assert err is None
@pytest.mark.asyncio
async def test_max_coin_amount(self):
    """Placeholder for validation check # 10 (coin amount must fit in uint64).

    Currently unimplemented: the uint64 limit in Coin makes it impossible to
    even construct a block with an over-limit amount, so validation is never
    reached. The commented-out draft below is retained as a starting point.
    """
    # 10
    # TODO: fix, this is not reaching validation. Because we can't create a block with such amounts due to uint64
    # limit in Coin
    pass
    #
    # with TempKeyring() as keychain:
    #     new_test_constants = test_constants.replace(
    #         **{"GENESIS_PRE_FARM_POOL_PUZZLE_HASH": bt.pool_ph, "GENESIS_PRE_FARM_FARMER_PUZZLE_HASH": bt.pool_ph}
    #     )
    #     b, connection, db_path = await create_blockchain(new_test_constants)
    #     bt_2 = await create_block_tools_async(constants=new_test_constants, keychain=keychain)
    #     bt_2.constants = bt_2.constants.replace(
    #         **{"GENESIS_PRE_FARM_POOL_PUZZLE_HASH": bt.pool_ph, "GENESIS_PRE_FARM_FARMER_PUZZLE_HASH": bt.pool_ph}
    #     )
    #     blocks = bt_2.get_consecutive_blocks(
    #         3,
    #         guarantee_transaction_block=True,
    #         farmer_reward_puzzle_hash=bt.pool_ph,
    #         pool_reward_puzzle_hash=bt.pool_ph,
    #     )
    #     assert (await b.receive_block(blocks[0]))[0] == ReceiveBlockResult.NEW_PEAK
    #     assert (await b.receive_block(blocks[1]))[0] == ReceiveBlockResult.NEW_PEAK
    #     assert (await b.receive_block(blocks[2]))[0] == ReceiveBlockResult.NEW_PEAK
    #     wt: WalletTool = bt_2.get_pool_wallet_tool()
    #     condition_dict = {ConditionOpcode.CREATE_COIN: []}
    #     output = ConditionWithArgs(ConditionOpcode.CREATE_COIN, [bt_2.pool_ph, int_to_bytes(2 ** 64)])
    #     condition_dict[ConditionOpcode.CREATE_COIN].append(output)
    #     tx: SpendBundle = wt.generate_signed_transaction_multiple_coins(
    #         10,
    #         wt.get_new_puzzlehash(),
    #         list(blocks[1].get_included_reward_coins()),
    #         condition_dic=condition_dict,
    #     )
    #     try:
    #         blocks = bt_2.get_consecutive_blocks(
    #             1, block_list_input=blocks, guarantee_transaction_block=True, transaction_data=tx
    #         )
    #         assert False
    #     except Exception as e:
    #         pass
    #     await connection.close()
    #     b.shut_down()
    #     db_path.unlink()
@pytest.mark.asyncio
async def test_invalid_merkle_roots(self, empty_blockchain):
    """Blocks with tampered additions/removals Merkle roots are rejected (check # 11)."""
    # 11
    b = empty_blockchain
    blocks = bt.get_consecutive_blocks(
        3,
        guarantee_transaction_block=True,
        farmer_reward_puzzle_hash=bt.pool_ph,
        pool_reward_puzzle_hash=bt.pool_ph,
    )
    assert (await b.receive_block(blocks[0]))[0] == ReceiveBlockResult.NEW_PEAK
    assert (await b.receive_block(blocks[1]))[0] == ReceiveBlockResult.NEW_PEAK
    assert (await b.receive_block(blocks[2]))[0] == ReceiveBlockResult.NEW_PEAK
    wt: WalletTool = bt.get_pool_wallet_tool()
    tx: SpendBundle = wt.generate_signed_transaction(
        10, wt.get_new_puzzlehash(), list(blocks[-1].get_included_reward_coins())[0]
    )
    blocks = bt.get_consecutive_blocks(
        1, block_list_input=blocks, guarantee_transaction_block=True, transaction_data=tx
    )
    block: FullBlock = blocks[-1]
    merkle_set = MerkleSet()
    # additions: an empty Merkle root cannot match a block that creates coins.
    # Re-derive the foliage hash and re-sign so only the root is inconsistent.
    block_2 = recursive_replace(block, "foliage_transaction_block.additions_root", merkle_set.get_root())
    block_2 = recursive_replace(
        block_2, "foliage.foliage_transaction_block_hash", block_2.foliage_transaction_block.get_hash()
    )
    new_m = block_2.foliage.foliage_transaction_block_hash
    new_fsb_sig = bt.get_plot_signature(new_m, block.reward_chain_block.proof_of_space.plot_public_key)
    block_2 = recursive_replace(block_2, "foliage.foliage_transaction_block_signature", new_fsb_sig)
    err = (await b.receive_block(block_2))[1]
    assert err == Err.BAD_ADDITION_ROOT
    # removals: a root over an unrelated hash cannot match the real removals.
    merkle_set.add_already_hashed(std_hash(b"1"))
    block_2 = recursive_replace(block, "foliage_transaction_block.removals_root", merkle_set.get_root())
    block_2 = recursive_replace(
        block_2, "foliage.foliage_transaction_block_hash", block_2.foliage_transaction_block.get_hash()
    )
    new_m = block_2.foliage.foliage_transaction_block_hash
    new_fsb_sig = bt.get_plot_signature(new_m, block.reward_chain_block.proof_of_space.plot_public_key)
    block_2 = recursive_replace(block_2, "foliage.foliage_transaction_block_signature", new_fsb_sig)
    err = (await b.receive_block(block_2))[1]
    assert err == Err.BAD_REMOVAL_ROOT
@pytest.mark.asyncio
async def test_invalid_filter(self, empty_blockchain):
    """A tx block whose transactions filter hash does not match its body is rejected (check # 12)."""
    # 12
    chain = empty_blockchain
    blocks = bt.get_consecutive_blocks(
        3,
        guarantee_transaction_block=True,
        farmer_reward_puzzle_hash=bt.pool_ph,
        pool_reward_puzzle_hash=bt.pool_ph,
    )
    # Accept the funding blocks.
    for initial_block in blocks[:3]:
        assert (await chain.receive_block(initial_block))[0] == ReceiveBlockResult.NEW_PEAK
    pool_wallet: WalletTool = bt.get_pool_wallet_tool()
    tx: SpendBundle = pool_wallet.generate_signed_transaction(
        10, pool_wallet.get_new_puzzlehash(), list(blocks[-1].get_included_reward_coins())[0]
    )
    blocks = bt.get_consecutive_blocks(
        1, block_list_input=blocks, guarantee_transaction_block=True, transaction_data=tx
    )
    original: FullBlock = blocks[-1]
    # Corrupt the filter hash, then re-derive the dependent foliage hash and
    # re-sign so that only the filter hash itself is inconsistent.
    tampered = recursive_replace(original, "foliage_transaction_block.filter_hash", std_hash(b"3"))
    tampered = recursive_replace(
        tampered, "foliage.foliage_transaction_block_hash", tampered.foliage_transaction_block.get_hash()
    )
    signed_msg = tampered.foliage.foliage_transaction_block_hash
    plot_sig = bt.get_plot_signature(signed_msg, original.reward_chain_block.proof_of_space.plot_public_key)
    tampered = recursive_replace(tampered, "foliage.foliage_transaction_block_signature", plot_sig)
    err = (await chain.receive_block(tampered))[1]
    assert err == Err.INVALID_TRANSACTIONS_FILTER_HASH
@pytest.mark.asyncio
async def test_duplicate_outputs(self, empty_blockchain):
    """A transaction creating two identical coins must be rejected (check # 13)."""
    # 13
    chain = empty_blockchain
    blocks = bt.get_consecutive_blocks(
        3,
        guarantee_transaction_block=True,
        farmer_reward_puzzle_hash=bt.pool_ph,
        pool_reward_puzzle_hash=bt.pool_ph,
    )
    for initial_block in blocks[:3]:
        assert (await chain.receive_block(initial_block))[0] == ReceiveBlockResult.NEW_PEAK
    wallet: WalletTool = bt.get_pool_wallet_tool()
    # Two CREATE_COIN conditions with the same puzzle hash and amount — a
    # duplicate output.
    condition_dict = {
        ConditionOpcode.CREATE_COIN: [
            ConditionWithArgs(ConditionOpcode.CREATE_COIN, [bt.pool_ph, int_to_bytes(1)])
            for _ in range(2)
        ]
    }
    tx: SpendBundle = wallet.generate_signed_transaction(
        10, wallet.get_new_puzzlehash(), list(blocks[-1].get_included_reward_coins())[0], condition_dic=condition_dict
    )
    blocks = bt.get_consecutive_blocks(
        1, block_list_input=blocks, guarantee_transaction_block=True, transaction_data=tx
    )
    assert (await chain.receive_block(blocks[-1]))[1] == Err.DUPLICATE_OUTPUT
@pytest.mark.asyncio
async def test_duplicate_removals(self, empty_blockchain):
    """Two transactions spending the same coin in one block are a double spend (check # 14)."""
    # 14
    b = empty_blockchain
    blocks = bt.get_consecutive_blocks(
        3,
        guarantee_transaction_block=True,
        farmer_reward_puzzle_hash=bt.pool_ph,
        pool_reward_puzzle_hash=bt.pool_ph,
    )
    assert (await b.receive_block(blocks[0]))[0] == ReceiveBlockResult.NEW_PEAK
    assert (await b.receive_block(blocks[1]))[0] == ReceiveBlockResult.NEW_PEAK
    assert (await b.receive_block(blocks[2]))[0] == ReceiveBlockResult.NEW_PEAK
    wt: WalletTool = bt.get_pool_wallet_tool()
    # Both bundles spend the same reward coin (different amounts so the
    # bundles themselves differ).
    tx: SpendBundle = wt.generate_signed_transaction(
        10, wt.get_new_puzzlehash(), list(blocks[-1].get_included_reward_coins())[0]
    )
    tx_2: SpendBundle = wt.generate_signed_transaction(
        11, wt.get_new_puzzlehash(), list(blocks[-1].get_included_reward_coins())[0]
    )
    agg = SpendBundle.aggregate([tx, tx_2])
    blocks = bt.get_consecutive_blocks(
        1, block_list_input=blocks, guarantee_transaction_block=True, transaction_data=agg
    )
    assert (await b.receive_block(blocks[-1]))[1] == Err.DOUBLE_SPEND
@pytest.mark.asyncio
async def test_double_spent_in_coin_store(self, empty_blockchain):
    """Spending a coin already marked spent in the coin store is a double spend (check # 15)."""
    # 15
    b = empty_blockchain
    blocks = bt.get_consecutive_blocks(
        3,
        guarantee_transaction_block=True,
        farmer_reward_puzzle_hash=bt.pool_ph,
        pool_reward_puzzle_hash=bt.pool_ph,
    )
    assert (await b.receive_block(blocks[0]))[0] == ReceiveBlockResult.NEW_PEAK
    assert (await b.receive_block(blocks[1]))[0] == ReceiveBlockResult.NEW_PEAK
    assert (await b.receive_block(blocks[2]))[0] == ReceiveBlockResult.NEW_PEAK
    wt: WalletTool = bt.get_pool_wallet_tool()
    # First spend of the reward coin — accepted.
    tx: SpendBundle = wt.generate_signed_transaction(
        10, wt.get_new_puzzlehash(), list(blocks[-1].get_included_reward_coins())[0]
    )
    blocks = bt.get_consecutive_blocks(
        1, block_list_input=blocks, guarantee_transaction_block=True, transaction_data=tx
    )
    assert (await b.receive_block(blocks[-1]))[0] == ReceiveBlockResult.NEW_PEAK
    # Second spend of the SAME coin in a later block — must be rejected.
    tx_2: SpendBundle = wt.generate_signed_transaction(
        10, wt.get_new_puzzlehash(), list(blocks[-2].get_included_reward_coins())[0]
    )
    blocks = bt.get_consecutive_blocks(
        1, block_list_input=blocks, guarantee_transaction_block=True, transaction_data=tx_2
    )
    assert (await b.receive_block(blocks[-1]))[1] == Err.DOUBLE_SPEND
@pytest.mark.asyncio
async def test_double_spent_in_reorg(self, empty_blockchain):
    """Double-spend detection across a reorg, including ephemeral coins (check # 15).

    Builds a main chain with a chained spend (tx creates a coin, tx_2 spends
    it), then forks before those spends and checks:
      * spending a not-yet-existing coin in the fork fails (UNKNOWN_UNSPENT),
      * spending both in one bundle works (the coin is ephemeral),
      * re-spending the ephemeral coin in the fork is DOUBLE_SPEND_IN_FORK,
      * after the fork becomes the peak, coin-store state is consistent.

    Fixes a copy-paste bug in the final assertion, which checked
    ``first_coin is not None`` instead of the just-fetched ``farmer_coin``.
    """
    # 15
    b = empty_blockchain
    blocks = bt.get_consecutive_blocks(
        3,
        guarantee_transaction_block=True,
        farmer_reward_puzzle_hash=bt.pool_ph,
        pool_reward_puzzle_hash=bt.pool_ph,
    )
    assert (await b.receive_block(blocks[0]))[0] == ReceiveBlockResult.NEW_PEAK
    assert (await b.receive_block(blocks[1]))[0] == ReceiveBlockResult.NEW_PEAK
    assert (await b.receive_block(blocks[2]))[0] == ReceiveBlockResult.NEW_PEAK
    wt: WalletTool = bt.get_pool_wallet_tool()
    tx: SpendBundle = wt.generate_signed_transaction(
        10, wt.get_new_puzzlehash(), list(blocks[-1].get_included_reward_coins())[0]
    )
    blocks = bt.get_consecutive_blocks(
        1, block_list_input=blocks, guarantee_transaction_block=True, transaction_data=tx
    )
    assert (await b.receive_block(blocks[-1]))[0] == ReceiveBlockResult.NEW_PEAK
    new_coin: Coin = tx.additions()[0]
    tx_2: SpendBundle = wt.generate_signed_transaction(10, wt.get_new_puzzlehash(), new_coin)
    # This is fine because coin exists
    blocks = bt.get_consecutive_blocks(
        1, block_list_input=blocks, guarantee_transaction_block=True, transaction_data=tx_2
    )
    assert (await b.receive_block(blocks[-1]))[0] == ReceiveBlockResult.NEW_PEAK
    blocks = bt.get_consecutive_blocks(5, block_list_input=blocks, guarantee_transaction_block=True)
    for block in blocks[-5:]:
        assert (await b.receive_block(block))[0] == ReceiveBlockResult.NEW_PEAK
    # Fork off BEFORE tx/tx_2 were included; the fork is lighter, so its
    # blocks are orphans for now.
    blocks_reorg = bt.get_consecutive_blocks(2, block_list_input=blocks[:-7], guarantee_transaction_block=True)
    assert (await b.receive_block(blocks_reorg[-2]))[0] == ReceiveBlockResult.ADDED_AS_ORPHAN
    assert (await b.receive_block(blocks_reorg[-1]))[0] == ReceiveBlockResult.ADDED_AS_ORPHAN
    # Coin does not exist in reorg
    blocks_reorg = bt.get_consecutive_blocks(
        1, block_list_input=blocks_reorg, guarantee_transaction_block=True, transaction_data=tx_2
    )
    assert (await b.receive_block(blocks_reorg[-1]))[1] == Err.UNKNOWN_UNSPENT
    # Finally add the block to the fork (spending both in same bundle, this is ephemeral)
    agg = SpendBundle.aggregate([tx, tx_2])
    blocks_reorg = bt.get_consecutive_blocks(
        1, block_list_input=blocks_reorg[:-1], guarantee_transaction_block=True, transaction_data=agg
    )
    assert (await b.receive_block(blocks_reorg[-1]))[1] is None
    # The ephemeral coin was already spent inside the fork — a second spend fails.
    blocks_reorg = bt.get_consecutive_blocks(
        1, block_list_input=blocks_reorg, guarantee_transaction_block=True, transaction_data=tx_2
    )
    assert (await b.receive_block(blocks_reorg[-1]))[1] == Err.DOUBLE_SPEND_IN_FORK
    # Extend the fork until it overtakes the main chain and becomes the peak.
    rewards_ph = wt.get_new_puzzlehash()
    blocks_reorg = bt.get_consecutive_blocks(
        10,
        block_list_input=blocks_reorg[:-1],
        guarantee_transaction_block=True,
        farmer_reward_puzzle_hash=rewards_ph,
    )
    for block in blocks_reorg[-10:]:
        r, e, _, _ = await b.receive_block(block)
        assert e is None
    # ephemeral coin is spent
    first_coin = await b.coin_store.get_coin_record(new_coin.name())
    assert first_coin is not None and first_coin.spent
    second_coin = await b.coin_store.get_coin_record(tx_2.additions()[0].name())
    assert second_coin is not None and not second_coin.spent
    # Spend a farmer reward coin from the new chain and confirm the coin
    # store records it as spent.
    farmer_coin = create_farmer_coin(
        blocks_reorg[-1].height,
        rewards_ph,
        calculate_base_farmer_reward(blocks_reorg[-1].height),
        bt.constants.GENESIS_CHALLENGE,
    )
    tx_3: SpendBundle = wt.generate_signed_transaction(10, wt.get_new_puzzlehash(), farmer_coin)
    blocks_reorg = bt.get_consecutive_blocks(
        1, block_list_input=blocks_reorg, guarantee_transaction_block=True, transaction_data=tx_3
    )
    assert (await b.receive_block(blocks_reorg[-1]))[1] is None
    farmer_coin = await b.coin_store.get_coin_record(farmer_coin.name())
    # Bug fix: check the record we just fetched, not `first_coin`.
    assert farmer_coin is not None and farmer_coin.spent
@pytest.mark.asyncio
async def test_minting_coin(self, empty_blockchain):
    """A transaction whose outputs exceed its inputs (minting) must be rejected (check # 16)."""
    # 16 Minting coin check
    b = empty_blockchain
    blocks = bt.get_consecutive_blocks(
        3,
        guarantee_transaction_block=True,
        farmer_reward_puzzle_hash=bt.pool_ph,
        pool_reward_puzzle_hash=bt.pool_ph,
    )
    assert (await b.receive_block(blocks[0]))[0] == ReceiveBlockResult.NEW_PEAK
    assert (await b.receive_block(blocks[1]))[0] == ReceiveBlockResult.NEW_PEAK
    assert (await b.receive_block(blocks[2]))[0] == ReceiveBlockResult.NEW_PEAK
    wt: WalletTool = bt.get_pool_wallet_tool()
    spend = list(blocks[-1].get_included_reward_coins())[0]
    print("spend=", spend)
    # this CREATE_COIN spends the coin's entire amount, so the 10 units sent
    # by the transaction below are "minted" (created out of nothing).
    output = ConditionWithArgs(ConditionOpcode.CREATE_COIN, [bt.pool_ph, int_to_bytes(spend.amount)])
    condition_dict = {ConditionOpcode.CREATE_COIN: [output]}
    tx: SpendBundle = wt.generate_signed_transaction(
        10, wt.get_new_puzzlehash(), spend, condition_dic=condition_dict
    )
    blocks = bt.get_consecutive_blocks(
        1, block_list_input=blocks, guarantee_transaction_block=True, transaction_data=tx
    )
    assert (await b.receive_block(blocks[-1]))[1] == Err.MINTING_COIN
    # 17 is tested in mempool tests
    pass
@pytest.mark.asyncio
async def test_max_coin_amount_fee(self):
    """Placeholder for validation check # 18 — such fee amounts cannot be
    constructed due to the uint64 limit, so there is nothing to validate yet."""
    # 18 TODO: we can't create a block with such amounts due to uint64
    pass
@pytest.mark.asyncio
async def test_invalid_fees_in_block(self, empty_blockchain):
    """A block advertising a wrong fee total in transactions_info is rejected (check # 19)."""
    # 19
    b = empty_blockchain
    blocks = bt.get_consecutive_blocks(
        3,
        guarantee_transaction_block=True,
        farmer_reward_puzzle_hash=bt.pool_ph,
        pool_reward_puzzle_hash=bt.pool_ph,
    )
    assert (await b.receive_block(blocks[0]))[0] == ReceiveBlockResult.NEW_PEAK
    assert (await b.receive_block(blocks[1]))[0] == ReceiveBlockResult.NEW_PEAK
    assert (await b.receive_block(blocks[2]))[0] == ReceiveBlockResult.NEW_PEAK
    wt: WalletTool = bt.get_pool_wallet_tool()
    tx: SpendBundle = wt.generate_signed_transaction(
        10, wt.get_new_puzzlehash(), list(blocks[-1].get_included_reward_coins())[0]
    )
    blocks = bt.get_consecutive_blocks(
        1, block_list_input=blocks, guarantee_transaction_block=True, transaction_data=tx
    )
    block: FullBlock = blocks[-1]
    # wrong fees: replace the fee total, then re-derive the dependent hashes
    # and re-sign the foliage so only the fee amount is inconsistent.
    block_2: FullBlock = recursive_replace(block, "transactions_info.fees", uint64(1239))
    assert block_2.transactions_info
    block_2 = recursive_replace(
        block_2, "foliage_transaction_block.transactions_info_hash", block_2.transactions_info.get_hash()
    )
    assert block_2.foliage_transaction_block
    block_2 = recursive_replace(
        block_2, "foliage.foliage_transaction_block_hash", block_2.foliage_transaction_block.get_hash()
    )
    new_m = block_2.foliage.foliage_transaction_block_hash
    new_fsb_sig = bt.get_plot_signature(new_m, block.reward_chain_block.proof_of_space.plot_public_key)
    block_2 = recursive_replace(block_2, "foliage.foliage_transaction_block_signature", new_fsb_sig)
    err = (await b.receive_block(block_2))[1]
    assert err == Err.INVALID_BLOCK_FEE_AMOUNT
class TestReorgs:
    """Chain reorganization tests: basic and long reorgs, reorgs from genesis,
    reorgs across transactions, and header-range queries.

    Changes from the original: removed two unused locals in
    ``test_reorg_from_genesis`` and added a guard assertion in
    ``test_reorg_transaction`` so a missing spend coin fails loudly instead of
    passing ``None`` into ``generate_signed_transaction``.
    """

    @pytest.mark.asyncio
    async def test_basic_reorg(self, empty_blockchain):
        """A longer alternate chain forked at height 10 overtakes the 15-block peak."""
        b = empty_blockchain
        blocks = bt.get_consecutive_blocks(15)
        for block in blocks:
            assert (await b.receive_block(block))[0] == ReceiveBlockResult.NEW_PEAK
        assert b.get_peak().height == 14
        blocks_reorg_chain = bt.get_consecutive_blocks(7, blocks[:10], seed=b"2")
        for reorg_block in blocks_reorg_chain:
            result, error_code, fork_height, _ = await b.receive_block(reorg_block)
            if reorg_block.height < 10:
                # Shared prefix: the chain already has these blocks.
                assert result == ReceiveBlockResult.ALREADY_HAVE_BLOCK
            elif reorg_block.height < 14:
                # Fork blocks below the current peak are orphans until the
                # fork becomes heavier.
                assert result == ReceiveBlockResult.ADDED_AS_ORPHAN
            elif reorg_block.height >= 15:
                assert result == ReceiveBlockResult.NEW_PEAK
            assert error_code is None
        assert b.get_peak().height == 16

    @pytest.mark.asyncio
    async def test_long_reorg(self, empty_blockchain, default_10000_blocks):
        """Reorg longer than a difficulty adjustment; the winning chain is
        heavier but SHORTER (higher weight, lower height)."""
        # Reorg longer than a difficulty adjustment
        # Also tests higher weight chain but lower height
        b = empty_blockchain
        num_blocks_chain_1 = 3 * test_constants.EPOCH_BLOCKS + test_constants.MAX_SUB_SLOT_BLOCKS + 10
        num_blocks_chain_2_start = test_constants.EPOCH_BLOCKS - 20
        num_blocks_chain_2 = 3 * test_constants.EPOCH_BLOCKS + test_constants.MAX_SUB_SLOT_BLOCKS + 8
        assert num_blocks_chain_1 < 10000
        blocks = default_10000_blocks[:num_blocks_chain_1]
        for block in blocks:
            assert (await b.receive_block(block))[0] == ReceiveBlockResult.NEW_PEAK
        chain_1_height = b.get_peak().height
        chain_1_weight = b.get_peak().weight
        assert chain_1_height == (num_blocks_chain_1 - 1)
        # These blocks will have less time between them (timestamp) and therefore will make difficulty go up
        # This means that the weight will grow faster, and we can get a heavier chain with lower height
        blocks_reorg_chain = bt.get_consecutive_blocks(
            num_blocks_chain_2 - num_blocks_chain_2_start,
            blocks[:num_blocks_chain_2_start],
            seed=b"2",
            time_per_block=8,
        )
        found_orphan = False
        for reorg_block in blocks_reorg_chain:
            result, error_code, fork_height, _ = await b.receive_block(reorg_block)
            if reorg_block.height < num_blocks_chain_2_start:
                assert result == ReceiveBlockResult.ALREADY_HAVE_BLOCK
            if reorg_block.weight <= chain_1_weight:
                if result == ReceiveBlockResult.ADDED_AS_ORPHAN:
                    found_orphan = True
                assert error_code is None
                assert result == ReceiveBlockResult.ADDED_AS_ORPHAN or result == ReceiveBlockResult.ALREADY_HAVE_BLOCK
            elif reorg_block.weight > chain_1_weight:
                # Heavier despite being shorter — this is the point of the test.
                assert reorg_block.height < chain_1_height
                assert result == ReceiveBlockResult.NEW_PEAK
                assert error_code is None
        assert found_orphan
        assert b.get_peak().weight > chain_1_weight
        assert b.get_peak().height < chain_1_height

    @pytest.mark.asyncio
    async def test_long_compact_blockchain(self, empty_blockchain, default_10000_blocks_compact):
        """A full chain of compact (compressed-proof) blocks is accepted end to end."""
        b = empty_blockchain
        for block in default_10000_blocks_compact:
            assert (await b.receive_block(block))[0] == ReceiveBlockResult.NEW_PEAK
        assert b.get_peak().height == len(default_10000_blocks_compact) - 1

    @pytest.mark.asyncio
    async def test_reorg_from_genesis(self, empty_blockchain):
        """An alternate chain forked at genesis, 1 block longer, becomes the peak;
        the original chain can then retake the peak by growing past it."""
        b = empty_blockchain
        blocks = bt.get_consecutive_blocks(15)
        for block in blocks:
            assert (await b.receive_block(block))[0] == ReceiveBlockResult.NEW_PEAK
        assert b.get_peak().height == 14
        # Reorg to alternate chain that is 1 height longer
        found_orphan = False
        blocks_reorg_chain = bt.get_consecutive_blocks(16, [], seed=b"2")
        for reorg_block in blocks_reorg_chain:
            result, error_code, fork_height, _ = await b.receive_block(reorg_block)
            if reorg_block.height < 14:
                if result == ReceiveBlockResult.ADDED_AS_ORPHAN:
                    found_orphan = True
                assert result == ReceiveBlockResult.ADDED_AS_ORPHAN or result == ReceiveBlockResult.ALREADY_HAVE_BLOCK
            elif reorg_block.height >= 15:
                assert result == ReceiveBlockResult.NEW_PEAK
            assert error_code is None
        # Back to original chain
        blocks_reorg_chain_2 = bt.get_consecutive_blocks(3, blocks, seed=b"3")
        result, error_code, fork_height, _ = await b.receive_block(blocks_reorg_chain_2[-3])
        assert result == ReceiveBlockResult.ADDED_AS_ORPHAN
        result, error_code, fork_height, _ = await b.receive_block(blocks_reorg_chain_2[-2])
        assert result == ReceiveBlockResult.NEW_PEAK
        result, error_code, fork_height, _ = await b.receive_block(blocks_reorg_chain_2[-1])
        assert result == ReceiveBlockResult.NEW_PEAK
        assert found_orphan
        assert b.get_peak().height == 17

    @pytest.mark.asyncio
    async def test_reorg_transaction(self, empty_blockchain):
        """The same spend bundle is valid on both sides of a fork; the fork
        (which also contains the spend) is accepted without error."""
        b = empty_blockchain
        wallet_a = WalletTool(b.constants)
        WALLET_A_PUZZLE_HASHES = [wallet_a.get_new_puzzlehash() for _ in range(5)]
        coinbase_puzzlehash = WALLET_A_PUZZLE_HASHES[0]
        receiver_puzzlehash = WALLET_A_PUZZLE_HASHES[1]
        blocks = bt.get_consecutive_blocks(10, farmer_reward_puzzle_hash=coinbase_puzzlehash)
        blocks = bt.get_consecutive_blocks(
            2, blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash, guarantee_transaction_block=True
        )
        spend_block = blocks[10]
        spend_coin = None
        for coin in list(spend_block.get_included_reward_coins()):
            if coin.puzzle_hash == coinbase_puzzlehash:
                spend_coin = coin
        # Fail loudly here rather than passing None into the wallet below.
        assert spend_coin is not None
        spend_bundle = wallet_a.generate_signed_transaction(1000, receiver_puzzlehash, spend_coin)
        blocks = bt.get_consecutive_blocks(
            2,
            blocks,
            farmer_reward_puzzle_hash=coinbase_puzzlehash,
            transaction_data=spend_bundle,
            guarantee_transaction_block=True,
        )
        blocks_fork = bt.get_consecutive_blocks(
            1, blocks[:12], farmer_reward_puzzle_hash=coinbase_puzzlehash, seed=b"123", guarantee_transaction_block=True
        )
        blocks_fork = bt.get_consecutive_blocks(
            2,
            blocks_fork,
            farmer_reward_puzzle_hash=coinbase_puzzlehash,
            transaction_data=spend_bundle,
            guarantee_transaction_block=True,
            seed=b"1245",
        )
        for block in blocks:
            result, error_code, _, _ = await b.receive_block(block)
            assert error_code is None and result == ReceiveBlockResult.NEW_PEAK
        for block in blocks_fork:
            result, error_code, _, _ = await b.receive_block(block)
            assert error_code is None

    @pytest.mark.asyncio
    async def test_get_header_blocks_in_range_tx_filter(self, empty_blockchain):
        """get_header_blocks_in_range honors the tx_filter flag: same headers,
        different transactions_filter contents."""
        b = empty_blockchain
        blocks = bt.get_consecutive_blocks(
            3,
            guarantee_transaction_block=True,
            pool_reward_puzzle_hash=bt.pool_ph,
            farmer_reward_puzzle_hash=bt.pool_ph,
        )
        assert (await b.receive_block(blocks[0]))[0] == ReceiveBlockResult.NEW_PEAK
        assert (await b.receive_block(blocks[1]))[0] == ReceiveBlockResult.NEW_PEAK
        assert (await b.receive_block(blocks[2]))[0] == ReceiveBlockResult.NEW_PEAK
        wt: WalletTool = bt.get_pool_wallet_tool()
        tx: SpendBundle = wt.generate_signed_transaction(
            10, wt.get_new_puzzlehash(), list(blocks[2].get_included_reward_coins())[0]
        )
        blocks = bt.get_consecutive_blocks(
            1,
            block_list_input=blocks,
            guarantee_transaction_block=True,
            transaction_data=tx,
        )
        err = (await b.receive_block(blocks[-1]))[1]
        assert not err
        blocks_with_filter = await b.get_header_blocks_in_range(0, 10, tx_filter=True)
        blocks_without_filter = await b.get_header_blocks_in_range(0, 10, tx_filter=False)
        header_hash = blocks[-1].header_hash
        assert (
            blocks_with_filter[header_hash].transactions_filter
            != blocks_without_filter[header_hash].transactions_filter
        )
        assert blocks_with_filter[header_hash].header_hash == blocks_without_filter[header_hash].header_hash

    @pytest.mark.asyncio
    async def test_get_blocks_at(self, empty_blockchain, default_1000_blocks):
        """get_block_records_at returns one record per requested height, in order."""
        b = empty_blockchain
        heights = []
        for block in default_1000_blocks[:200]:
            heights.append(block.height)
            result, error_code, _, _ = await b.receive_block(block)
            assert error_code is None and result == ReceiveBlockResult.NEW_PEAK
        # batch_size=2 exercises the chunked-query path.
        blocks = await b.get_block_records_at(heights, batch_size=2)
        assert blocks
        assert len(blocks) == 200
        assert blocks[-1].height == 199
| 47.961344 | 120 | 0.651348 |
7955dd80f0c82eec004a2b983e643ba9d7876bd9 | 114,111 | py | Python | tests/jobs/test_scheduler_job.py | marwan116/airflow | e7b8eb7f3189fa587c49a2856dfd570f7625ca78 | [
"Apache-2.0"
] | 2 | 2020-03-24T14:47:18.000Z | 2020-03-24T14:48:17.000Z | tests/jobs/test_scheduler_job.py | marwan116/airflow | e7b8eb7f3189fa587c49a2856dfd570f7625ca78 | [
"Apache-2.0"
] | null | null | null | tests/jobs/test_scheduler_job.py | marwan116/airflow | e7b8eb7f3189fa587c49a2856dfd570f7625ca78 | [
"Apache-2.0"
] | null | null | null | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import datetime
import os
import shutil
import unittest
from datetime import timedelta
from tempfile import mkdtemp
import mock
import psutil
import pytest
import six
from mock import MagicMock, patch
from parameterized import parameterized
import airflow.example_dags
from airflow import settings
from airflow.configuration import conf
from airflow.exceptions import AirflowException
from airflow.executors.base_executor import BaseExecutor
from airflow.jobs.backfill_job import BackfillJob
from airflow.jobs.scheduler_job import DagFileProcessor, SchedulerJob
from airflow.models import DAG, DagBag, DagModel, Pool, SlaMiss, TaskInstance, errors
from airflow.models.dagrun import DagRun
from airflow.models.taskinstance import SimpleTaskInstance
from airflow.operators.bash import BashOperator
from airflow.operators.dummy_operator import DummyOperator
from airflow.utils import timezone
from airflow.utils.dag_processing import SimpleDag, SimpleDagBag
from airflow.utils.dates import days_ago
from airflow.utils.file import list_py_file_paths
from airflow.utils.session import create_session, provide_session
from airflow.utils.state import State
from airflow.utils.types import DagRunType
from tests.test_utils.db import (
clear_db_dags, clear_db_errors, clear_db_pools, clear_db_runs, clear_db_sla_miss, set_default_pool_slots,
)
from tests.test_utils.mock_executor import MockExecutor
TEST_DAG_FOLDER = os.environ['AIRFLOW__CORE__DAGS_FOLDER']
DEFAULT_DATE = timezone.datetime(2016, 1, 1)
TRY_NUMBER = 1
# Include the words "airflow" and "dag" in the file contents,
# tricking airflow into thinking these
# files contain a DAG (otherwise Airflow will skip them)
PARSEABLE_DAG_FILE_CONTENTS = '"airflow DAG"'
UNPARSEABLE_DAG_FILE_CONTENTS = 'airflow DAG'
# Filename to be used for dags that are created in an ad-hoc manner and can be removed/
# created at runtime
TEMP_DAG_FILENAME = "temp_dag.py"
class TestDagFileProcessor(unittest.TestCase):
def setUp(self):
clear_db_runs()
clear_db_pools()
clear_db_dags()
clear_db_sla_miss()
clear_db_errors()
# Speed up some tests by not running the tasks, just look at what we
# enqueue!
self.null_exec = MockExecutor()
    def create_test_dag(self, start_date=DEFAULT_DATE, end_date=DEFAULT_DATE + timedelta(hours=1), **kwargs):
        """Build and register a fresh 'test_scheduler_reschedule' DAG.

        The default [start_date, end_date] window is one hour so only a single
        DAG Run can be created.  A matching, un-paused DagModel row is committed
        so the scheduler will consider the DAG.  ``**kwargs`` is currently
        accepted but unused by this helper.
        """
        dag = DAG(
            dag_id='test_scheduler_reschedule',
            start_date=start_date,
            # Make sure it only creates a single DAG Run
            end_date=end_date)
        dag.clear()
        dag.is_subdag = False
        with create_session() as session:
            orm_dag = DagModel(dag_id=dag.dag_id)
            orm_dag.is_paused = False
            session.merge(orm_dag)
            session.commit()
        return dag
@classmethod
def setUpClass(cls):
cls.dagbag = DagBag()
cls.old_val = None
if conf.has_option('core', 'load_examples'):
cls.old_val = conf.get('core', 'load_examples')
conf.set('core', 'load_examples', 'false')
@classmethod
def tearDownClass(cls):
if cls.old_val is not None:
conf.set('core', 'load_examples', cls.old_val)
else:
conf.remove_option('core', 'load_examples')
    def test_dag_file_processor_sla_miss_callback(self):
        """
        Test that the dag file processor calls the sla miss callback
        """
        session = settings.Session()
        sla_callback = MagicMock()
        # Create dag with a start of 1 day ago, but an sla of 0
        # so we'll already have an sla_miss on the books.
        test_start_date = days_ago(1)
        dag = DAG(dag_id='test_sla_miss',
                  sla_miss_callback=sla_callback,
                  default_args={'start_date': test_start_date,
                                'sla': datetime.timedelta()})
        task = DummyOperator(task_id='dummy',
                             dag=dag,
                             owner='airflow')
        # Seed a finished task instance and a pre-existing SlaMiss row so
        # manage_slas has something to notify about.
        session.merge(TaskInstance(task=task, execution_date=test_start_date, state='success'))
        session.merge(SlaMiss(task_id='dummy', dag_id='test_sla_miss', execution_date=test_start_date))
        dag_file_processor = DagFileProcessor(dag_ids=[], log=mock.MagicMock())
        dag_file_processor.manage_slas(dag=dag, session=session)
        # The configured callback must have been invoked for the miss.
        assert sla_callback.called
def test_dag_file_processor_sla_miss_callback_invalid_sla(self):
"""
Test that the dag file processor does not call the sla miss callback when
given an invalid sla
"""
session = settings.Session()
sla_callback = MagicMock()
# Create dag with a start of 1 day ago, but an sla of 0
# so we'll already have an sla_miss on the books.
# Pass anything besides a timedelta object to the sla argument.
test_start_date = days_ago(1)
dag = DAG(dag_id='test_sla_miss',
sla_miss_callback=sla_callback,
default_args={'start_date': test_start_date,
'sla': None})
task = DummyOperator(task_id='dummy', dag=dag, owner='airflow')
session.merge(TaskInstance(task=task, execution_date=test_start_date, state='success'))
session.merge(SlaMiss(task_id='dummy', dag_id='test_sla_miss', execution_date=test_start_date))
dag_file_processor = DagFileProcessor(dag_ids=[], log=mock.MagicMock())
dag_file_processor.manage_slas(dag=dag, session=session)
sla_callback.assert_not_called()
def test_scheduler_executor_overflow(self):
"""
Test that tasks that are set back to scheduled and removed from the executor
queue in the case of an overflow.
"""
executor = MockExecutor(do_update=True, parallelism=3)
with create_session() as session:
dagbag = DagBag(dag_folder=os.path.join(settings.DAGS_FOLDER, "no_dags.py"))
dag = self.create_test_dag()
dag.clear()
dagbag.bag_dag(dag=dag, root_dag=dag, parent_dag=dag)
dag = self.create_test_dag()
dag.clear()
task = DummyOperator(
task_id='dummy',
dag=dag,
owner='airflow')
tis = []
for i in range(1, 10):
ti = TaskInstance(task, DEFAULT_DATE + timedelta(days=i))
ti.state = State.SCHEDULED
tis.append(ti)
session.merge(ti)
# scheduler._process_dags(simple_dag_bag)
@mock.patch('airflow.models.DagBag', return_value=dagbag)
@mock.patch('airflow.models.DagBag.collect_dags')
@mock.patch('airflow.jobs.scheduler_job.SchedulerJob._change_state_for_tis_without_dagrun')
def do_schedule(mock_dagbag, mock_collect_dags, mock_change_state):
# Use a empty file since the above mock will return the
# expected DAGs. Also specify only a single file so that it doesn't
# try to schedule the above DAG repeatedly.
scheduler = SchedulerJob(num_runs=1,
executor=executor,
subdir=os.path.join(settings.DAGS_FOLDER,
"no_dags.py"))
scheduler.heartrate = 0
scheduler.run()
do_schedule() # pylint: disable=no-value-for-parameter
for ti in tis:
ti.refresh_from_db()
self.assertEqual(len(executor.queued_tasks), 0)
successful_tasks = [ti for ti in tis if ti.state == State.SUCCESS]
scheduled_tasks = [ti for ti in tis if ti.state == State.SCHEDULED]
self.assertEqual(3, len(successful_tasks))
self.assertEqual(6, len(scheduled_tasks))
def test_dag_file_processor_sla_miss_callback_sent_notification(self):
"""
Test that the dag file processor does not call the sla_miss_callback when a
notification has already been sent
"""
session = settings.Session()
# Mock the callback function so we can verify that it was not called
sla_callback = MagicMock()
# Create dag with a start of 2 days ago, but an sla of 1 day
# ago so we'll already have an sla_miss on the books
test_start_date = days_ago(2)
dag = DAG(dag_id='test_sla_miss',
sla_miss_callback=sla_callback,
default_args={'start_date': test_start_date,
'sla': datetime.timedelta(days=1)})
task = DummyOperator(task_id='dummy', dag=dag, owner='airflow')
# Create a TaskInstance for two days ago
session.merge(TaskInstance(task=task, execution_date=test_start_date, state='success'))
# Create an SlaMiss where notification was sent, but email was not
session.merge(SlaMiss(task_id='dummy',
dag_id='test_sla_miss',
execution_date=test_start_date,
email_sent=False,
notification_sent=True))
# Now call manage_slas and see if the sla_miss callback gets called
dag_file_processor = DagFileProcessor(dag_ids=[], log=mock.MagicMock())
dag_file_processor.manage_slas(dag=dag, session=session)
sla_callback.assert_not_called()
def test_dag_file_processor_sla_miss_callback_exception(self):
"""
Test that the dag file processor gracefully logs an exception if there is a problem
calling the sla_miss_callback
"""
session = settings.Session()
sla_callback = MagicMock(side_effect=RuntimeError('Could not call function'))
test_start_date = days_ago(2)
dag = DAG(dag_id='test_sla_miss',
sla_miss_callback=sla_callback,
default_args={'start_date': test_start_date})
task = DummyOperator(task_id='dummy',
dag=dag,
owner='airflow',
sla=datetime.timedelta(hours=1))
session.merge(TaskInstance(task=task, execution_date=test_start_date, state='Success'))
# Create an SlaMiss where notification was sent, but email was not
session.merge(SlaMiss(task_id='dummy',
dag_id='test_sla_miss',
execution_date=test_start_date))
# Now call manage_slas and see if the sla_miss callback gets called
mock_log = mock.MagicMock()
dag_file_processor = DagFileProcessor(dag_ids=[], log=mock_log)
dag_file_processor.manage_slas(dag=dag, session=session)
assert sla_callback.called
mock_log.exception.assert_called_once_with(
'Could not call sla_miss_callback for DAG %s',
'test_sla_miss')
@mock.patch('airflow.jobs.scheduler_job.send_email')
def test_dag_file_processor_only_collect_emails_from_sla_missed_tasks(self, mock_send_email):
session = settings.Session()
test_start_date = days_ago(2)
dag = DAG(dag_id='test_sla_miss',
default_args={'start_date': test_start_date,
'sla': datetime.timedelta(days=1)})
email1 = 'test1@test.com'
task = DummyOperator(task_id='sla_missed',
dag=dag,
owner='airflow',
email=email1,
sla=datetime.timedelta(hours=1))
session.merge(TaskInstance(task=task, execution_date=test_start_date, state='Success'))
email2 = 'test2@test.com'
DummyOperator(task_id='sla_not_missed',
dag=dag,
owner='airflow',
email=email2)
session.merge(SlaMiss(task_id='sla_missed', dag_id='test_sla_miss', execution_date=test_start_date))
dag_file_processor = DagFileProcessor(dag_ids=[], log=mock.MagicMock())
dag_file_processor.manage_slas(dag=dag, session=session)
self.assertTrue(len(mock_send_email.call_args_list), 1)
send_email_to = mock_send_email.call_args_list[0][0][0]
self.assertIn(email1, send_email_to)
self.assertNotIn(email2, send_email_to)
@mock.patch("airflow.utils.email.send_email")
def test_dag_file_processor_sla_miss_email_exception(self, mock_send_email):
"""
Test that the dag file processor gracefully logs an exception if there is a problem
sending an email
"""
session = settings.Session()
# Mock the callback function so we can verify that it was not called
mock_send_email.side_effect = RuntimeError('Could not send an email')
test_start_date = days_ago(2)
dag = DAG(dag_id='test_sla_miss',
default_args={'start_date': test_start_date,
'sla': datetime.timedelta(days=1)})
task = DummyOperator(task_id='dummy',
dag=dag,
owner='airflow',
email='test@test.com',
sla=datetime.timedelta(hours=1))
session.merge(TaskInstance(task=task, execution_date=test_start_date, state='Success'))
# Create an SlaMiss where notification was sent, but email was not
session.merge(SlaMiss(task_id='dummy', dag_id='test_sla_miss', execution_date=test_start_date))
mock_log = mock.MagicMock()
dag_file_processor = DagFileProcessor(dag_ids=[], log=mock_log)
dag_file_processor.manage_slas(dag=dag, session=session)
mock_log.exception.assert_called_once_with(
'Could not send SLA Miss email notification for DAG %s',
'test_sla_miss')
def test_dag_file_processor_sla_miss_deleted_task(self):
"""
Test that the dag file processor will not crash when trying to send
sla miss notification for a deleted task
"""
session = settings.Session()
test_start_date = days_ago(2)
dag = DAG(dag_id='test_sla_miss',
default_args={'start_date': test_start_date,
'sla': datetime.timedelta(days=1)})
task = DummyOperator(task_id='dummy',
dag=dag,
owner='airflow',
email='test@test.com',
sla=datetime.timedelta(hours=1))
session.merge(TaskInstance(task=task, execution_date=test_start_date, state='Success'))
# Create an SlaMiss where notification was sent, but email was not
session.merge(SlaMiss(task_id='dummy_deleted', dag_id='test_sla_miss',
execution_date=test_start_date))
mock_log = mock.MagicMock()
dag_file_processor = DagFileProcessor(dag_ids=[], log=mock_log)
dag_file_processor.manage_slas(dag=dag, session=session)
    def test_dag_file_processor_dagrun_once(self):
        """
        Test if the dag file processor does not create multiple dagruns
        if a dag is scheduled with @once and a start_date
        """
        dag = DAG(
            'test_scheduler_dagrun_once',
            start_date=timezone.datetime(2015, 1, 1),
            schedule_interval="@once")
        dag_file_processor = DagFileProcessor(dag_ids=[], log=mock.MagicMock())
        dag.clear()
        # First call creates the single @once run ...
        dr = dag_file_processor.create_dag_run(dag)
        self.assertIsNotNone(dr)
        # ... and a second call must not create another one.
        dr = dag_file_processor.create_dag_run(dag)
        self.assertIsNone(dr)
@parameterized.expand([
[State.NONE, None, None],
[State.UP_FOR_RETRY, timezone.utcnow() - datetime.timedelta(minutes=30),
timezone.utcnow() - datetime.timedelta(minutes=15)],
[State.UP_FOR_RESCHEDULE, timezone.utcnow() - datetime.timedelta(minutes=30),
timezone.utcnow() - datetime.timedelta(minutes=15)],
])
def test_dag_file_processor_process_task_instances(self, state, start_date, end_date):
"""
Test if _process_task_instances puts the right task instances into the
mock_list.
"""
dag = DAG(
dag_id='test_scheduler_process_execute_task',
start_date=DEFAULT_DATE)
dag_task1 = DummyOperator(
task_id='dummy',
dag=dag,
owner='airflow')
with create_session() as session:
orm_dag = DagModel(dag_id=dag.dag_id)
session.merge(orm_dag)
dag_file_processor = DagFileProcessor(dag_ids=[], log=mock.MagicMock())
dag.clear()
dr = dag_file_processor.create_dag_run(dag)
self.assertIsNotNone(dr)
with create_session() as session:
tis = dr.get_task_instances(session=session)
for ti in tis:
ti.state = state
ti.start_date = start_date
ti.end_date = end_date
mock_list = dag_file_processor._process_task_instances(dag, dag_runs=[dr])
self.assertEqual(
[(dag.dag_id, dag_task1.task_id, DEFAULT_DATE, TRY_NUMBER)],
mock_list
)
@parameterized.expand([
[State.NONE, None, None],
[State.UP_FOR_RETRY, timezone.utcnow() - datetime.timedelta(minutes=30),
timezone.utcnow() - datetime.timedelta(minutes=15)],
[State.UP_FOR_RESCHEDULE, timezone.utcnow() - datetime.timedelta(minutes=30),
timezone.utcnow() - datetime.timedelta(minutes=15)],
])
def test_dag_file_processor_process_task_instances_with_task_concurrency(
self, state, start_date, end_date,
):
"""
Test if _process_task_instances puts the right task instances into the
mock_list.
"""
dag = DAG(
dag_id='test_scheduler_process_execute_task_with_task_concurrency',
start_date=DEFAULT_DATE)
dag_task1 = DummyOperator(
task_id='dummy',
task_concurrency=2,
dag=dag,
owner='airflow')
with create_session() as session:
orm_dag = DagModel(dag_id=dag.dag_id)
session.merge(orm_dag)
dag_file_processor = DagFileProcessor(dag_ids=[], log=mock.MagicMock())
dag.clear()
dr = dag_file_processor.create_dag_run(dag)
self.assertIsNotNone(dr)
with create_session() as session:
tis = dr.get_task_instances(session=session)
for ti in tis:
ti.state = state
ti.start_date = start_date
ti.end_date = end_date
ti_to_schedule = dag_file_processor._process_task_instances(dag, dag_runs=[dr])
assert ti_to_schedule == [
(dag.dag_id, dag_task1.task_id, DEFAULT_DATE, TRY_NUMBER),
]
@parameterized.expand([
[State.NONE, None, None],
[State.UP_FOR_RETRY, timezone.utcnow() - datetime.timedelta(minutes=30),
timezone.utcnow() - datetime.timedelta(minutes=15)],
[State.UP_FOR_RESCHEDULE, timezone.utcnow() - datetime.timedelta(minutes=30),
timezone.utcnow() - datetime.timedelta(minutes=15)],
])
def test_dag_file_processor_process_task_instances_depends_on_past(self, state, start_date, end_date):
"""
Test if _process_task_instances puts the right task instances into the
mock_list.
"""
dag = DAG(
dag_id='test_scheduler_process_execute_task_depends_on_past',
start_date=DEFAULT_DATE,
default_args={
'depends_on_past': True,
},
)
dag_task1 = DummyOperator(
task_id='dummy1',
dag=dag,
owner='airflow')
dag_task2 = DummyOperator(
task_id='dummy2',
dag=dag,
owner='airflow')
with create_session() as session:
orm_dag = DagModel(dag_id=dag.dag_id)
session.merge(orm_dag)
dag_file_processor = DagFileProcessor(dag_ids=[], log=mock.MagicMock())
dag.clear()
dr = dag_file_processor.create_dag_run(dag)
self.assertIsNotNone(dr)
with create_session() as session:
tis = dr.get_task_instances(session=session)
for ti in tis:
ti.state = state
ti.start_date = start_date
ti.end_date = end_date
ti_to_schedule = dag_file_processor._process_task_instances(dag, dag_runs=[dr])
assert ti_to_schedule == [
(dag.dag_id, dag_task1.task_id, DEFAULT_DATE, TRY_NUMBER),
(dag.dag_id, dag_task2.task_id, DEFAULT_DATE, TRY_NUMBER),
]
def test_dag_file_processor_do_not_schedule_removed_task(self):
dag = DAG(
dag_id='test_scheduler_do_not_schedule_removed_task',
start_date=DEFAULT_DATE)
DummyOperator(
task_id='dummy',
dag=dag,
owner='airflow')
session = settings.Session()
orm_dag = DagModel(dag_id=dag.dag_id)
session.merge(orm_dag)
session.commit()
session.close()
dag_file_processor = DagFileProcessor(dag_ids=[], log=mock.MagicMock())
dag.clear()
dr = dag_file_processor.create_dag_run(dag)
self.assertIsNotNone(dr)
dr = DagRun.find(run_id=dr.run_id)[0]
dag = DAG(
dag_id='test_scheduler_do_not_schedule_removed_task',
start_date=DEFAULT_DATE)
mock_list = dag_file_processor._process_task_instances(dag, dag_runs=[dr])
self.assertEqual([], mock_list)
def test_dag_file_processor_do_not_schedule_too_early(self):
dag = DAG(
dag_id='test_scheduler_do_not_schedule_too_early',
start_date=timezone.datetime(2200, 1, 1))
DummyOperator(
task_id='dummy',
dag=dag,
owner='airflow')
session = settings.Session()
orm_dag = DagModel(dag_id=dag.dag_id)
session.merge(orm_dag)
session.commit()
session.close()
dag_file_processor = DagFileProcessor(dag_ids=[], log=mock.MagicMock())
dag.clear()
dr = dag_file_processor.create_dag_run(dag)
self.assertIsNone(dr)
mock_list = dag_file_processor._process_task_instances(dag, dag_runs=[])
self.assertEqual([], mock_list)
def test_dag_file_processor_do_not_schedule_without_tasks(self):
dag = DAG(
dag_id='test_scheduler_do_not_schedule_without_tasks',
start_date=DEFAULT_DATE)
with create_session() as session:
orm_dag = DagModel(dag_id=dag.dag_id)
session.merge(orm_dag)
dag_file_processor = DagFileProcessor(dag_ids=[], log=mock.MagicMock())
dag.clear(session=session)
dag.start_date = None
dr = dag_file_processor.create_dag_run(dag, session=session)
self.assertIsNone(dr)
def test_dag_file_processor_do_not_run_finished(self):
dag = DAG(
dag_id='test_scheduler_do_not_run_finished',
start_date=DEFAULT_DATE)
DummyOperator(
task_id='dummy',
dag=dag,
owner='airflow')
session = settings.Session()
orm_dag = DagModel(dag_id=dag.dag_id)
session.merge(orm_dag)
session.commit()
dag_file_processor = DagFileProcessor(dag_ids=[], log=mock.MagicMock())
dag.clear()
dr = dag_file_processor.create_dag_run(dag)
self.assertIsNotNone(dr)
tis = dr.get_task_instances(session=session)
for ti in tis:
ti.state = State.SUCCESS
session.commit()
session.close()
mock_list = dag_file_processor._process_task_instances(dag, dag_runs=[dr])
self.assertEqual([], mock_list)
def test_dag_file_processor_add_new_task(self):
"""
Test if a task instance will be added if the dag is updated
"""
dag = DAG(
dag_id='test_scheduler_add_new_task',
start_date=DEFAULT_DATE)
DummyOperator(
task_id='dummy',
dag=dag,
owner='airflow')
session = settings.Session()
orm_dag = DagModel(dag_id=dag.dag_id)
session.merge(orm_dag)
session.commit()
session.close()
dag_file_processor = DagFileProcessor(dag_ids=[], log=mock.MagicMock())
dag.clear()
dr = dag_file_processor.create_dag_run(dag)
self.assertIsNotNone(dr)
tis = dr.get_task_instances()
self.assertEqual(len(tis), 1)
DummyOperator(
task_id='dummy2',
dag=dag,
owner='airflow')
dag_file_processor._process_task_instances(dag, dag_runs=[dr])
tis = dr.get_task_instances()
self.assertEqual(len(tis), 2)
def test_dag_file_processor_verify_max_active_runs(self):
"""
Test if a a dagrun will not be scheduled if max_dag_runs has been reached
"""
dag = DAG(
dag_id='test_scheduler_verify_max_active_runs',
start_date=DEFAULT_DATE)
dag.max_active_runs = 1
DummyOperator(
task_id='dummy',
dag=dag,
owner='airflow')
session = settings.Session()
orm_dag = DagModel(dag_id=dag.dag_id)
session.merge(orm_dag)
session.commit()
session.close()
dag_file_processor = DagFileProcessor(dag_ids=[], log=mock.MagicMock())
dag.clear()
dr = dag_file_processor.create_dag_run(dag)
self.assertIsNotNone(dr)
dr = dag_file_processor.create_dag_run(dag)
self.assertIsNone(dr)
    def test_dag_file_processor_fail_dagrun_timeout(self):
        """
        Test that a dagrun will be set to FAILED once its dagrun_timeout has passed.
        """
        dag = DAG(
            dag_id='test_scheduler_fail_dagrun_timeout',
            start_date=DEFAULT_DATE)
        dag.dagrun_timeout = datetime.timedelta(seconds=60)
        DummyOperator(
            task_id='dummy',
            dag=dag,
            owner='airflow')
        session = settings.Session()
        orm_dag = DagModel(dag_id=dag.dag_id)
        session.merge(orm_dag)
        session.commit()
        dag_file_processor = DagFileProcessor(dag_ids=[], log=mock.MagicMock())
        dag.clear()
        dr = dag_file_processor.create_dag_run(dag)
        self.assertIsNotNone(dr)
        # Backdate the run's start far beyond the 60-second timeout.
        dr.start_date = timezone.utcnow() - datetime.timedelta(days=1)
        session.merge(dr)
        session.commit()
        # Creating the next run should still succeed ...
        dr2 = dag_file_processor.create_dag_run(dag)
        self.assertIsNotNone(dr2)
        # ... and the stale first run must now be marked FAILED.
        dr.refresh_from_db(session=session)
        self.assertEqual(dr.state, State.FAILED)
def test_dag_file_processor_verify_max_active_runs_and_dagrun_timeout(self):
"""
Test if a a dagrun will not be scheduled if max_dag_runs
has been reached and dagrun_timeout is not reached
Test if a a dagrun will be scheduled if max_dag_runs has
been reached but dagrun_timeout is also reached
"""
dag = DAG(
dag_id='test_scheduler_verify_max_active_runs_and_dagrun_timeout',
start_date=DEFAULT_DATE)
dag.max_active_runs = 1
dag.dagrun_timeout = datetime.timedelta(seconds=60)
DummyOperator(
task_id='dummy',
dag=dag,
owner='airflow')
session = settings.Session()
orm_dag = DagModel(dag_id=dag.dag_id)
session.merge(orm_dag)
session.commit()
session.close()
dag_file_processor = DagFileProcessor(dag_ids=[], log=mock.MagicMock())
dag.clear()
dr = dag_file_processor.create_dag_run(dag)
self.assertIsNotNone(dr)
# Should not be scheduled as DagRun has not timedout and max_active_runs is reached
new_dr = dag_file_processor.create_dag_run(dag)
self.assertIsNone(new_dr)
# Should be scheduled as dagrun_timeout has passed
dr.start_date = timezone.utcnow() - datetime.timedelta(days=1)
session.merge(dr)
session.commit()
new_dr = dag_file_processor.create_dag_run(dag)
self.assertIsNotNone(new_dr)
def test_dag_file_processor_max_active_runs_respected_after_clear(self):
"""
Test if _process_task_instances only schedules ti's up to max_active_runs
(related to issue AIRFLOW-137)
"""
dag = DAG(
dag_id='test_scheduler_max_active_runs_respected_after_clear',
start_date=DEFAULT_DATE)
dag.max_active_runs = 3
dag_task1 = DummyOperator(
task_id='dummy',
dag=dag,
owner='airflow')
session = settings.Session()
orm_dag = DagModel(dag_id=dag.dag_id)
session.merge(orm_dag)
session.commit()
session.close()
dag_file_processor = DagFileProcessor(dag_ids=[], log=mock.MagicMock())
dag.clear()
# First create up to 3 dagruns in RUNNING state.
dr1 = dag_file_processor.create_dag_run(dag)
assert dr1 is not None
dr2 = dag_file_processor.create_dag_run(dag)
assert dr2 is not None
dr3 = dag_file_processor.create_dag_run(dag)
assert dr3 is not None
assert len(DagRun.find(dag_id=dag.dag_id, state=State.RUNNING, session=session)) == 3
# Reduce max_active_runs to 1
dag.max_active_runs = 1
# and schedule them in, so we can check how many
# tasks are put on the task_instances_list (should be one, not 3)
task_instances_list = dag_file_processor._process_task_instances(dag, dag_runs=[dr1, dr2, dr3])
self.assertEqual([(dag.dag_id, dag_task1.task_id, DEFAULT_DATE, TRY_NUMBER)], task_instances_list)
def test_find_dags_to_run_includes_subdags(self):
dag = self.dagbag.get_dag('test_subdag_operator')
print(self.dagbag.dag_ids)
print(self.dagbag.dag_folder)
self.assertGreater(len(dag.subdags), 0)
dag_file_processor = DagFileProcessor(dag_ids=[], log=mock.MagicMock())
dags = dag_file_processor._find_dags_to_process(self.dagbag.dags.values(), paused_dag_ids=())
self.assertIn(dag, dags)
for subdag in dag.subdags:
self.assertIn(subdag, dags)
def test_find_dags_to_run_skip_paused_dags(self):
dagbag = DagBag(include_examples=False)
dag = dagbag.get_dag('test_subdag_operator')
dag_file_processor = DagFileProcessor(dag_ids=[], log=mock.MagicMock())
dags = dag_file_processor._find_dags_to_process(dagbag.dags.values(), paused_dag_ids=[dag.dag_id])
self.assertNotIn(dag, dags)
def test_dag_catchup_option(self):
"""
Test to check that a DAG with catchup = False only schedules beginning now, not back to the start date
"""
def setup_dag(dag_id, schedule_interval, start_date, catchup):
default_args = {
'owner': 'airflow',
'depends_on_past': False,
'start_date': start_date
}
dag = DAG(dag_id,
schedule_interval=schedule_interval,
max_active_runs=1,
catchup=catchup,
default_args=default_args)
op1 = DummyOperator(task_id='t1', dag=dag)
op2 = DummyOperator(task_id='t2', dag=dag)
op2.set_upstream(op1)
op3 = DummyOperator(task_id='t3', dag=dag)
op3.set_upstream(op2)
session = settings.Session()
orm_dag = DagModel(dag_id=dag.dag_id)
session.merge(orm_dag)
session.commit()
session.close()
return dag
now = timezone.utcnow()
six_hours_ago_to_the_hour = (now - datetime.timedelta(hours=6)).replace(
minute=0, second=0, microsecond=0)
half_an_hour_ago = now - datetime.timedelta(minutes=30)
two_hours_ago = now - datetime.timedelta(hours=2)
dag_file_processor = DagFileProcessor(dag_ids=[], log=mock.MagicMock())
dag1 = setup_dag(dag_id='dag_with_catchup',
schedule_interval='* * * * *',
start_date=six_hours_ago_to_the_hour,
catchup=True)
default_catchup = conf.getboolean('scheduler', 'catchup_by_default')
self.assertEqual(default_catchup, True)
self.assertEqual(dag1.catchup, True)
dag2 = setup_dag(dag_id='dag_without_catchup_ten_minute',
schedule_interval='*/10 * * * *',
start_date=six_hours_ago_to_the_hour,
catchup=False)
dr = dag_file_processor.create_dag_run(dag2)
# We had better get a dag run
self.assertIsNotNone(dr)
# The DR should be scheduled in the last half an hour, not 6 hours ago
self.assertGreater(dr.execution_date, half_an_hour_ago)
# The DR should be scheduled BEFORE now
self.assertLess(dr.execution_date, timezone.utcnow())
dag3 = setup_dag(dag_id='dag_without_catchup_hourly',
schedule_interval='@hourly',
start_date=six_hours_ago_to_the_hour,
catchup=False)
dr = dag_file_processor.create_dag_run(dag3)
# We had better get a dag run
self.assertIsNotNone(dr)
# The DR should be scheduled in the last 2 hours, not 6 hours ago
self.assertGreater(dr.execution_date, two_hours_ago)
# The DR should be scheduled BEFORE now
self.assertLess(dr.execution_date, timezone.utcnow())
dag4 = setup_dag(dag_id='dag_without_catchup_once',
schedule_interval='@once',
start_date=six_hours_ago_to_the_hour,
catchup=False)
dr = dag_file_processor.create_dag_run(dag4)
self.assertIsNotNone(dr)
def test_dag_file_processor_auto_align(self):
"""
Test if the schedule_interval will be auto aligned with the start_date
such that if the start_date coincides with the schedule the first
execution_date will be start_date, otherwise it will be start_date +
interval.
"""
dag = DAG(
dag_id='test_scheduler_auto_align_1',
start_date=timezone.datetime(2016, 1, 1, 10, 10, 0),
schedule_interval="4 5 * * *"
)
DummyOperator(
task_id='dummy',
dag=dag,
owner='airflow')
session = settings.Session()
orm_dag = DagModel(dag_id=dag.dag_id)
session.merge(orm_dag)
session.commit()
dag_file_processor = DagFileProcessor(dag_ids=[], log=mock.MagicMock())
dr = dag_file_processor.create_dag_run(dag)
self.assertIsNotNone(dr)
self.assertEqual(dr.execution_date, timezone.datetime(2016, 1, 2, 5, 4))
dag = DAG(
dag_id='test_scheduler_auto_align_2',
start_date=timezone.datetime(2016, 1, 1, 10, 10, 0),
schedule_interval="10 10 * * *"
)
DummyOperator(
task_id='dummy',
dag=dag,
owner='airflow')
session = settings.Session()
orm_dag = DagModel(dag_id=dag.dag_id)
session.merge(orm_dag)
session.commit()
dag.clear()
dr = dag_file_processor.create_dag_run(dag)
self.assertIsNotNone(dr)
self.assertEqual(dr.execution_date, timezone.datetime(2016, 1, 1, 10, 10))
def test_process_dags_not_create_dagrun_for_subdags(self):
dag = self.dagbag.get_dag('test_subdag_operator')
scheduler = DagFileProcessor(dag_ids=[dag.dag_id], log=mock.MagicMock())
scheduler._process_task_instances = mock.MagicMock()
scheduler.manage_slas = mock.MagicMock()
scheduler._process_dags([dag] + dag.subdags)
with create_session() as session:
sub_dagruns = (
session.query(DagRun).filter(DagRun.dag_id == dag.subdags[0].dag_id).count()
)
self.assertEqual(0, sub_dagruns)
parent_dagruns = (
session.query(DagRun).filter(DagRun.dag_id == dag.dag_id).count()
)
self.assertGreater(parent_dagruns, 0)
@patch.object(TaskInstance, 'handle_failure')
def test_kill_zombies(self, mock_ti_handle_failure):
"""
Test that kill zombies call TaskInstances failure handler with proper context
"""
dagbag = DagBag(dag_folder="/dev/null", include_examples=True)
dag_file_processor = DagFileProcessor(dag_ids=[], log=mock.MagicMock())
with create_session() as session:
session.query(TaskInstance).delete()
dag = dagbag.get_dag('example_branch_operator')
task = dag.get_task(task_id='run_this_first')
ti = TaskInstance(task, DEFAULT_DATE, State.RUNNING)
session.add(ti)
session.commit()
zombies = [SimpleTaskInstance(ti)]
dag_file_processor.kill_zombies(dagbag, zombies)
mock_ti_handle_failure.assert_called_once_with(
mock.ANY,
conf.getboolean('core', 'unit_test_mode'),
mock.ANY
)
class TestSchedulerJob(unittest.TestCase):
def setUp(self):
clear_db_runs()
clear_db_pools()
clear_db_dags()
clear_db_sla_miss()
clear_db_errors()
# Speed up some tests by not running the tasks, just look at what we
# enqueue!
self.null_exec = MockExecutor()
@classmethod
def setUpClass(cls):
cls.dagbag = DagBag()
cls.old_val = None
if conf.has_option('core', 'load_examples'):
cls.old_val = conf.get('core', 'load_examples')
conf.set('core', 'load_examples', 'false')
@classmethod
def tearDownClass(cls):
if cls.old_val is not None:
conf.set('core', 'load_examples', cls.old_val)
else:
conf.remove_option('core', 'load_examples')
def test_is_alive(self):
job = SchedulerJob(None, heartrate=10, state=State.RUNNING)
self.assertTrue(job.is_alive())
job.latest_heartbeat = timezone.utcnow() - datetime.timedelta(seconds=20)
self.assertTrue(job.is_alive())
job.latest_heartbeat = timezone.utcnow() - datetime.timedelta(seconds=31)
self.assertFalse(job.is_alive())
# test because .seconds was used before instead of total_seconds
# internal repr of datetime is (days, seconds)
job.latest_heartbeat = timezone.utcnow() - datetime.timedelta(days=1)
self.assertFalse(job.is_alive())
job.state = State.SUCCESS
job.latest_heartbeat = timezone.utcnow() - datetime.timedelta(seconds=10)
self.assertFalse(job.is_alive(), "Completed jobs even with recent heartbeat should not be alive")
    def run_single_scheduler_loop_with_no_dags(self, dags_folder):
        """
        Utility function that runs a single scheduler loop without actually
        changing/scheduling any dags. This is useful to simulate the other side effects of
        running a scheduler loop, e.g. to see what parse errors there are in the
        dags_folder.

        :param dags_folder: the directory to traverse
        :type dags_folder: str
        """
        scheduler = SchedulerJob(
            executor=self.null_exec,
            dag_id='this_dag_doesnt_exist',  # We don't want to actually run anything
            num_runs=1,
            subdir=os.path.join(dags_folder))
        # heartrate 0 so the single loop returns immediately.
        scheduler.heartrate = 0
        scheduler.run()
def _make_simple_dag_bag(self, dags):
return SimpleDagBag([SimpleDag(dag) for dag in dags])
def test_no_orphan_process_will_be_left(self):
empty_dir = mkdtemp()
current_process = psutil.Process()
old_children = current_process.children(recursive=True)
scheduler = SchedulerJob(subdir=empty_dir,
num_runs=1,
executor=MockExecutor(do_update=False))
scheduler.run()
shutil.rmtree(empty_dir)
# Remove potential noise created by previous tests.
current_children = set(current_process.children(recursive=True)) - set(
old_children)
self.assertFalse(current_children)
@mock.patch('airflow.jobs.scheduler_job.Stats.incr')
def test_process_executor_events(self, mock_stats_incr):
    """Executor events only affect TIs whose dag is in the provided dag bag;
    a QUEUED TI reported FAILED is failed and 'killed_externally' is counted."""
    dag_id = "test_process_executor_events"
    dag_id2 = "test_process_executor_events_2"
    task_id_1 = 'dummy_task'

    dag = DAG(dag_id=dag_id, start_date=DEFAULT_DATE)
    dag2 = DAG(dag_id=dag_id2, start_date=DEFAULT_DATE)
    task1 = DummyOperator(dag=dag, task_id=task_id_1)
    DummyOperator(dag=dag2, task_id=task_id_1)

    dagbag1 = self._make_simple_dag_bag([dag])
    dagbag2 = self._make_simple_dag_bag([dag2])

    scheduler = SchedulerJob()
    session = settings.Session()

    ti1 = TaskInstance(task1, DEFAULT_DATE)
    ti1.state = State.QUEUED
    session.merge(ti1)
    session.commit()

    executor = MockExecutor(do_update=False)
    executor.event_buffer[ti1.key] = State.FAILED
    scheduler.executor = executor

    # dag bag does not contain dag_id
    scheduler._process_executor_events(simple_dag_bag=dagbag2)
    ti1.refresh_from_db()
    self.assertEqual(ti1.state, State.QUEUED)

    # dag bag does contain dag_id
    scheduler._process_executor_events(simple_dag_bag=dagbag1)
    ti1.refresh_from_db()
    self.assertEqual(ti1.state, State.FAILED)

    # A SUCCESS event for a TI already in SUCCESS leaves it unchanged.
    ti1.state = State.SUCCESS
    session.merge(ti1)
    session.commit()
    executor.event_buffer[ti1.key] = State.SUCCESS

    scheduler._process_executor_events(simple_dag_bag=dagbag1)
    ti1.refresh_from_db()
    self.assertEqual(ti1.state, State.SUCCESS)

    mock_stats_incr.assert_called_once_with('scheduler.tasks.killed_externally')
def test_execute_task_instances_is_paused_wont_execute(self):
    """SCHEDULED TIs of a paused DAG must not be picked up by the scheduler."""
    dag_id = 'SchedulerJobTest.test_execute_task_instances_is_paused_wont_execute'
    task_id_1 = 'dummy_task'

    dag = DAG(dag_id=dag_id, start_date=DEFAULT_DATE)
    task1 = DummyOperator(dag=dag, task_id=task_id_1)
    dagbag = self._make_simple_dag_bag([dag])

    dag_file_processor = DagFileProcessor(dag_ids=[], log=mock.MagicMock())
    scheduler = SchedulerJob()
    session = settings.Session()

    dr1 = dag_file_processor.create_dag_run(dag)
    ti1 = TaskInstance(task1, DEFAULT_DATE)
    ti1.state = State.SCHEDULED
    dr1.state = State.RUNNING

    # Pause the DAG at the DagModel level so the scheduler must skip it.
    dagmodel = DagModel()
    dagmodel.dag_id = dag_id
    dagmodel.is_paused = True
    session.merge(ti1)
    session.merge(dr1)
    session.add(dagmodel)
    session.commit()

    scheduler._execute_task_instances(dagbag)

    # The TI stays SCHEDULED, i.e. it was not queued.
    ti1.refresh_from_db()
    self.assertEqual(State.SCHEDULED, ti1.state)
def test_execute_task_instances_no_dagrun_task_will_execute(self):
    """
    Tests that tasks without dagrun still get executed.
    """
    dag_id = 'SchedulerJobTest.test_execute_task_instances_no_dagrun_task_will_execute'
    task_id_1 = 'dummy_task'

    dag = DAG(dag_id=dag_id, start_date=DEFAULT_DATE)
    task1 = DummyOperator(dag=dag, task_id=task_id_1)
    dagbag = self._make_simple_dag_bag([dag])

    dag_file_processor = DagFileProcessor(dag_ids=[], log=mock.MagicMock())
    scheduler = SchedulerJob()
    session = settings.Session()

    dag_file_processor.create_dag_run(dag)
    ti1 = TaskInstance(task1, DEFAULT_DATE)
    ti1.state = State.SCHEDULED
    # Shift the TI one day ahead so it has no matching dagrun.
    ti1.execution_date = ti1.execution_date + datetime.timedelta(days=1)
    session.merge(ti1)
    session.commit()

    scheduler._execute_task_instances(dagbag)

    # The orphan TI is still queued despite having no dagrun.
    ti1.refresh_from_db()
    self.assertEqual(State.QUEUED, ti1.state)
def test_execute_task_instances_backfill_tasks_wont_execute(self):
    """
    Tests that backfill tasks won't get executed.
    """
    dag_id = 'SchedulerJobTest.test_execute_task_instances_backfill_tasks_wont_execute'
    task_id_1 = 'dummy_task'

    dag = DAG(dag_id=dag_id, start_date=DEFAULT_DATE)
    task1 = DummyOperator(dag=dag, task_id=task_id_1)
    dagbag = self._make_simple_dag_bag([dag])

    dag_file_processor = DagFileProcessor(dag_ids=[], log=mock.MagicMock())
    scheduler = SchedulerJob()
    session = settings.Session()

    dr1 = dag_file_processor.create_dag_run(dag)
    # A backfill run is identified by its run_id prefix.
    dr1.run_id = DagRunType.BACKFILL_JOB.value + '_blah'

    ti1 = TaskInstance(task1, dr1.execution_date)
    ti1.refresh_from_db()
    ti1.state = State.SCHEDULED
    session.merge(ti1)
    session.merge(dr1)
    session.commit()

    self.assertTrue(dr1.is_backfill)

    scheduler._execute_task_instances(dagbag)

    # Backfill TI was left alone by the scheduler.
    ti1.refresh_from_db()
    self.assertEqual(State.SCHEDULED, ti1.state)
def test_find_executable_task_instances_backfill_nodagrun(self):
    """Backfill-run TIs are excluded from execution; SCHEDULED TIs with no
    dagrun or with a regular dagrun are both returned."""
    dag_id = 'SchedulerJobTest.test_find_executable_task_instances_backfill_nodagrun'
    task_id_1 = 'dummy'
    dag = DAG(dag_id=dag_id, start_date=DEFAULT_DATE, concurrency=16)
    task1 = DummyOperator(dag=dag, task_id=task_id_1)
    dagbag = self._make_simple_dag_bag([dag])

    dag_file_processor = DagFileProcessor(dag_ids=[], log=mock.MagicMock())
    scheduler = SchedulerJob()
    session = settings.Session()

    dr1 = dag_file_processor.create_dag_run(dag)
    dr2 = dag_file_processor.create_dag_run(dag)
    # Mark the second run as a backfill via its run_id prefix.
    dr2.run_id = DagRunType.BACKFILL_JOB.value + 'asdf'

    ti_no_dagrun = TaskInstance(task1, DEFAULT_DATE - datetime.timedelta(days=1))
    ti_backfill = TaskInstance(task1, dr2.execution_date)
    ti_with_dagrun = TaskInstance(task1, dr1.execution_date)
    # ti_with_paused
    ti_no_dagrun.state = State.SCHEDULED
    ti_backfill.state = State.SCHEDULED
    ti_with_dagrun.state = State.SCHEDULED

    session.merge(dr2)
    session.merge(ti_no_dagrun)
    session.merge(ti_backfill)
    session.merge(ti_with_dagrun)
    session.commit()

    res = scheduler._find_executable_task_instances(
        dagbag,
        session=session)

    self.assertEqual(2, len(res))
    # Materialize to a list. A `map` iterator (as previously used) is single-pass:
    # the first assertIn consumes elements up to its match, so the second
    # membership check silently depends on result ordering and can flake.
    res_keys = [x.key for x in res]
    self.assertIn(ti_no_dagrun.key, res_keys)
    self.assertIn(ti_with_dagrun.key, res_keys)
def test_find_executable_task_instances_pool(self):
    """Per-pool slot limits gate execution: pool 'a' (1 slot) admits one TI,
    pool 'b' (100 slots) admits both of its TIs."""
    dag_id = 'SchedulerJobTest.test_find_executable_task_instances_pool'
    task_id_1 = 'dummy'
    task_id_2 = 'dummydummy'
    dag = DAG(dag_id=dag_id, start_date=DEFAULT_DATE, concurrency=16)
    task1 = DummyOperator(dag=dag, task_id=task_id_1, pool='a')
    task2 = DummyOperator(dag=dag, task_id=task_id_2, pool='b')
    dagbag = self._make_simple_dag_bag([dag])

    dag_file_processor = DagFileProcessor(dag_ids=[], log=mock.MagicMock())
    scheduler = SchedulerJob()
    session = settings.Session()

    dr1 = dag_file_processor.create_dag_run(dag)
    dr2 = dag_file_processor.create_dag_run(dag)

    tis = ([
        TaskInstance(task1, dr1.execution_date),
        TaskInstance(task2, dr1.execution_date),
        TaskInstance(task1, dr2.execution_date),
        TaskInstance(task2, dr2.execution_date)
    ])
    for ti in tis:
        ti.state = State.SCHEDULED
        session.merge(ti)
    pool = Pool(pool='a', slots=1, description='haha')
    pool2 = Pool(pool='b', slots=100, description='haha')
    session.add(pool)
    session.add(pool2)
    session.commit()

    res = scheduler._find_executable_task_instances(
        dagbag,
        session=session)
    session.commit()
    self.assertEqual(3, len(res))
    res_keys = []
    for ti in res:
        res_keys.append(ti.key)
    # Pool 'a' has one slot, so only one task1 TI fits; both task2 TIs fit in 'b'.
    self.assertIn(tis[0].key, res_keys)
    self.assertIn(tis[1].key, res_keys)
    self.assertIn(tis[3].key, res_keys)
def test_find_executable_task_instances_in_default_pool(self):
    """TIs without an explicit pool are limited by the default pool's slot count."""
    set_default_pool_slots(1)

    dag_id = 'SchedulerJobTest.test_find_executable_task_instances_in_default_pool'
    dag = DAG(dag_id=dag_id, start_date=DEFAULT_DATE)
    op1 = DummyOperator(dag=dag, task_id='dummy1')
    op2 = DummyOperator(dag=dag, task_id='dummy2')
    dagbag = self._make_simple_dag_bag([dag])

    executor = MockExecutor(do_update=True)
    dag_file_processor = DagFileProcessor(dag_ids=[], log=mock.MagicMock())
    scheduler = SchedulerJob(executor=executor)

    dr1 = dag_file_processor.create_dag_run(dag)
    dr2 = dag_file_processor.create_dag_run(dag)

    ti1 = TaskInstance(task=op1, execution_date=dr1.execution_date)
    ti2 = TaskInstance(task=op2, execution_date=dr2.execution_date)
    ti1.state = State.SCHEDULED
    ti2.state = State.SCHEDULED

    session = settings.Session()
    session.merge(ti1)
    session.merge(ti2)
    session.commit()

    # Two tasks w/o pool up for execution and our default pool size is 1
    res = scheduler._find_executable_task_instances(
        dagbag,
        session=session)
    self.assertEqual(1, len(res))

    ti2.state = State.RUNNING
    session.merge(ti2)
    session.commit()

    # One task w/o pool up for execution and one task task running
    res = scheduler._find_executable_task_instances(
        dagbag,
        session=session)
    self.assertEqual(0, len(res))

    session.close()
def test_nonexistent_pool(self):
    """A TI assigned to a pool that does not exist is never selected for execution."""
    dag_id = 'SchedulerJobTest.test_nonexistent_pool'
    task_id = 'dummy_wrong_pool'
    dag = DAG(dag_id=dag_id, start_date=DEFAULT_DATE, concurrency=16)
    task = DummyOperator(dag=dag, task_id=task_id, pool="this_pool_doesnt_exist")
    dagbag = self._make_simple_dag_bag([dag])

    dag_file_processor = DagFileProcessor(dag_ids=[], log=mock.MagicMock())
    scheduler = SchedulerJob()
    session = settings.Session()

    dr = dag_file_processor.create_dag_run(dag)

    ti = TaskInstance(task, dr.execution_date)
    ti.state = State.SCHEDULED
    session.merge(ti)
    session.commit()

    res = scheduler._find_executable_task_instances(
        dagbag,
        session=session)
    session.commit()
    self.assertEqual(0, len(res))
def test_find_executable_task_instances_none(self):
    """With no SCHEDULED task instances, nothing is executable."""
    dag_id = 'SchedulerJobTest.test_find_executable_task_instances_none'
    task_id_1 = 'dummy'
    dag = DAG(dag_id=dag_id, start_date=DEFAULT_DATE, concurrency=16)
    DummyOperator(dag=dag, task_id=task_id_1)
    dagbag = self._make_simple_dag_bag([dag])

    dag_file_processor = DagFileProcessor(dag_ids=[], log=mock.MagicMock())
    scheduler = SchedulerJob()
    session = settings.Session()

    # A dag run exists, but no TI is put into the SCHEDULED state.
    dag_file_processor.create_dag_run(dag)
    session.commit()

    self.assertEqual(0, len(scheduler._find_executable_task_instances(
        dagbag,
        session=session)))
def test_find_executable_task_instances_concurrency(self):
    """DAG-level concurrency=2 with one RUNNING TI admits only one more
    SCHEDULED TI; once two are RUNNING nothing else is executable."""
    dag_id = 'SchedulerJobTest.test_find_executable_task_instances_concurrency'
    task_id_1 = 'dummy'
    dag = DAG(dag_id=dag_id, start_date=DEFAULT_DATE, concurrency=2)
    task1 = DummyOperator(dag=dag, task_id=task_id_1)
    dagbag = self._make_simple_dag_bag([dag])

    dag_file_processor = DagFileProcessor(dag_ids=[], log=mock.MagicMock())
    scheduler = SchedulerJob()
    session = settings.Session()

    dr1 = dag_file_processor.create_dag_run(dag)
    dr2 = dag_file_processor.create_dag_run(dag)
    dr3 = dag_file_processor.create_dag_run(dag)

    ti1 = TaskInstance(task1, dr1.execution_date)
    ti2 = TaskInstance(task1, dr2.execution_date)
    ti3 = TaskInstance(task1, dr3.execution_date)
    ti1.state = State.RUNNING
    ti2.state = State.SCHEDULED
    ti3.state = State.SCHEDULED
    session.merge(ti1)
    session.merge(ti2)
    session.merge(ti3)
    session.commit()

    res = scheduler._find_executable_task_instances(
        dagbag,
        session=session)

    self.assertEqual(1, len(res))
    # Use a list, not a `map` iterator: `in` consumes an iterator, which makes
    # membership assertions fragile if more checks are ever added.
    res_keys = [x.key for x in res]
    self.assertIn(ti2.key, res_keys)

    # Concurrency limit reached (2 running): nothing else is executable.
    ti2.state = State.RUNNING
    session.merge(ti2)
    session.commit()

    res = scheduler._find_executable_task_instances(
        dagbag,
        session=session)

    self.assertEqual(0, len(res))
def test_find_executable_task_instances_concurrency_queued(self):
    """QUEUED TIs count toward DAG concurrency: with concurrency=3 and one
    RUNNING + one QUEUED TI, only the remaining SCHEDULED TI is returned."""
    dag_id = 'SchedulerJobTest.test_find_executable_task_instances_concurrency_queued'
    dag = DAG(dag_id=dag_id, start_date=DEFAULT_DATE, concurrency=3)
    task1 = DummyOperator(dag=dag, task_id='dummy1')
    task2 = DummyOperator(dag=dag, task_id='dummy2')
    task3 = DummyOperator(dag=dag, task_id='dummy3')
    dagbag = self._make_simple_dag_bag([dag])

    dag_file_processor = DagFileProcessor(dag_ids=[], log=mock.MagicMock())
    scheduler = SchedulerJob()
    session = settings.Session()

    dag_run = dag_file_processor.create_dag_run(dag)

    ti1 = TaskInstance(task1, dag_run.execution_date)
    ti2 = TaskInstance(task2, dag_run.execution_date)
    ti3 = TaskInstance(task3, dag_run.execution_date)
    ti1.state = State.RUNNING
    ti2.state = State.QUEUED
    ti3.state = State.SCHEDULED
    session.merge(ti1)
    session.merge(ti2)
    session.merge(ti3)
    session.commit()

    res = scheduler._find_executable_task_instances(
        dagbag,
        session=session)

    self.assertEqual(1, len(res))
    self.assertEqual(res[0].key, ti3.key)
def test_find_executable_task_instances_task_concurrency(self):  # pylint: disable=too-many-statements
    """task_concurrency=2 on task1 caps how many of its TIs (running + selected)
    can be admitted at once, independent of the DAG-level concurrency."""
    dag_id = 'SchedulerJobTest.test_find_executable_task_instances_task_concurrency'
    task_id_1 = 'dummy'
    task_id_2 = 'dummy2'
    dag = DAG(dag_id=dag_id, start_date=DEFAULT_DATE, concurrency=16)
    task1 = DummyOperator(dag=dag, task_id=task_id_1, task_concurrency=2)
    task2 = DummyOperator(dag=dag, task_id=task_id_2)
    dagbag = self._make_simple_dag_bag([dag])

    executor = MockExecutor(do_update=True)
    dag_file_processor = DagFileProcessor(dag_ids=[], log=mock.MagicMock())
    scheduler = SchedulerJob(executor=executor)
    session = settings.Session()

    dr1 = dag_file_processor.create_dag_run(dag)
    dr2 = dag_file_processor.create_dag_run(dag)
    dr3 = dag_file_processor.create_dag_run(dag)

    # Phase 1: nothing running yet -> both SCHEDULED TIs are executable.
    ti1_1 = TaskInstance(task1, dr1.execution_date)
    ti2 = TaskInstance(task2, dr1.execution_date)

    ti1_1.state = State.SCHEDULED
    ti2.state = State.SCHEDULED
    session.merge(ti1_1)
    session.merge(ti2)
    session.commit()

    res = scheduler._find_executable_task_instances(
        dagbag,
        session=session)

    self.assertEqual(2, len(res))

    # Phase 2: one task1 TI running -> only one more task1 TI admitted.
    ti1_1.state = State.RUNNING
    ti2.state = State.RUNNING
    ti1_2 = TaskInstance(task1, dr2.execution_date)
    ti1_2.state = State.SCHEDULED
    session.merge(ti1_1)
    session.merge(ti2)
    session.merge(ti1_2)
    session.commit()

    res = scheduler._find_executable_task_instances(
        dagbag,
        session=session)

    self.assertEqual(1, len(res))

    # Phase 3: two task1 TIs running -> task_concurrency=2 reached, none admitted.
    ti1_2.state = State.RUNNING
    ti1_3 = TaskInstance(task1, dr3.execution_date)
    ti1_3.state = State.SCHEDULED
    session.merge(ti1_2)
    session.merge(ti1_3)
    session.commit()

    res = scheduler._find_executable_task_instances(
        dagbag,
        session=session)

    self.assertEqual(0, len(res))

    # Phase 4: all three back to SCHEDULED -> only 2 of 3 admitted.
    ti1_1.state = State.SCHEDULED
    ti1_2.state = State.SCHEDULED
    ti1_3.state = State.SCHEDULED
    session.merge(ti1_1)
    session.merge(ti1_2)
    session.merge(ti1_3)
    session.commit()

    res = scheduler._find_executable_task_instances(
        dagbag,
        session=session)

    self.assertEqual(2, len(res))

    # Phase 5: one running, two scheduled -> exactly one more admitted.
    ti1_1.state = State.RUNNING
    ti1_2.state = State.SCHEDULED
    ti1_3.state = State.SCHEDULED
    session.merge(ti1_1)
    session.merge(ti1_2)
    session.merge(ti1_3)
    session.commit()

    res = scheduler._find_executable_task_instances(
        dagbag,
        session=session)

    self.assertEqual(1, len(res))
def test_change_state_for_executable_task_instances_no_tis(self):
    """An empty TI list yields an empty result and no state changes."""
    scheduler = SchedulerJob()
    session = settings.Session()
    res = scheduler._change_state_for_executable_task_instances(
        [], session)
    self.assertEqual(0, len(res))
def test_change_state_for_executable_task_instances_no_tis_with_state(self):
    """TIs already in RUNNING state are not transitioned to QUEUED."""
    dag_id = 'SchedulerJobTest.test_change_state_for__no_tis_with_state'
    task_id_1 = 'dummy'
    dag = DAG(dag_id=dag_id, start_date=DEFAULT_DATE, concurrency=2)
    task1 = DummyOperator(dag=dag, task_id=task_id_1)
    self._make_simple_dag_bag([dag])

    scheduler = SchedulerJob()
    session = settings.Session()

    dag_file_processor = DagFileProcessor(dag_ids=[], log=mock.MagicMock())
    dr1 = dag_file_processor.create_dag_run(dag)
    dr2 = dag_file_processor.create_dag_run(dag)
    dr3 = dag_file_processor.create_dag_run(dag)

    ti1 = TaskInstance(task1, dr1.execution_date)
    ti2 = TaskInstance(task1, dr2.execution_date)
    ti3 = TaskInstance(task1, dr3.execution_date)
    ti1.state = State.RUNNING
    ti2.state = State.RUNNING
    ti3.state = State.RUNNING
    session.merge(ti1)
    session.merge(ti2)
    session.merge(ti3)
    session.commit()

    res = scheduler._change_state_for_executable_task_instances(
        [ti1, ti2, ti3],
        session)
    self.assertEqual(0, len(res))
def test_enqueue_task_instances_with_queued_state(self):
    """Enqueueing TIs results in a queue_command call on the executor."""
    dag_id = 'SchedulerJobTest.test_enqueue_task_instances_with_queued_state'
    task_id_1 = 'dummy'
    dag = DAG(dag_id=dag_id, start_date=DEFAULT_DATE)
    task1 = DummyOperator(dag=dag, task_id=task_id_1)
    dagbag = self._make_simple_dag_bag([dag])

    dag_file_processor = DagFileProcessor(dag_ids=[], log=mock.MagicMock())
    scheduler = SchedulerJob()
    session = settings.Session()

    dr1 = dag_file_processor.create_dag_run(dag)
    ti1 = TaskInstance(task1, dr1.execution_date)
    session.merge(ti1)
    session.commit()

    # Patch at the BaseExecutor level so any executor implementation is covered.
    with patch.object(BaseExecutor, 'queue_command') as mock_queue_command:
        scheduler._enqueue_task_instances_with_queued_state(dagbag, [ti1])

    assert mock_queue_command.called
def test_execute_task_instances_nothing(self):
    """An empty dag bag means no TIs are executed, even if SCHEDULED TIs exist."""
    dag_id = 'SchedulerJobTest.test_execute_task_instances_nothing'
    task_id_1 = 'dummy'
    dag = DAG(dag_id=dag_id, start_date=DEFAULT_DATE, concurrency=2)
    task1 = DummyOperator(dag=dag, task_id=task_id_1)
    # Deliberately empty: the scheduler only acts on dags in the bag.
    dagbag = SimpleDagBag([])

    dag_file_processor = DagFileProcessor(dag_ids=[], log=mock.MagicMock())
    scheduler = SchedulerJob()
    session = settings.Session()

    dr1 = dag_file_processor.create_dag_run(dag)
    ti1 = TaskInstance(task1, dr1.execution_date)
    ti1.state = State.SCHEDULED
    session.merge(ti1)
    session.commit()

    self.assertEqual(0, scheduler._execute_task_instances(dagbag))
def test_execute_task_instances(self):
    """With concurrency=3 and 2 TIs already RUNNING, only 1 of 2 SCHEDULED TIs
    may be queued in a scheduler pass."""
    dag_id = 'SchedulerJobTest.test_execute_task_instances'
    task_id_1 = 'dummy_task'
    task_id_2 = 'dummy_task_nonexistent_queue'
    # important that len(tasks) is less than concurrency
    # because before scheduler._execute_task_instances would only
    # check the num tasks once so if concurrency was 3,
    # we could execute arbitrarily many tasks in the second run
    dag = DAG(dag_id=dag_id, start_date=DEFAULT_DATE, concurrency=3)
    task1 = DummyOperator(dag=dag, task_id=task_id_1)
    task2 = DummyOperator(dag=dag, task_id=task_id_2)
    dagbag = self._make_simple_dag_bag([dag])

    dag_file_processor = DagFileProcessor(dag_ids=[], log=mock.MagicMock())
    scheduler = SchedulerJob()
    session = settings.Session()

    # create first dag run with 1 running and 1 queued
    dr1 = dag_file_processor.create_dag_run(dag)
    ti1 = TaskInstance(task1, dr1.execution_date)
    ti2 = TaskInstance(task2, dr1.execution_date)
    ti1.refresh_from_db()
    ti2.refresh_from_db()
    ti1.state = State.RUNNING
    ti2.state = State.RUNNING
    session.merge(ti1)
    session.merge(ti2)
    session.commit()

    self.assertEqual(State.RUNNING, dr1.state)
    self.assertEqual(
        2,
        DAG.get_num_task_instances(
            dag_id, dag.task_ids, states=[State.RUNNING], session=session
        )
    )

    # create second dag run
    dr2 = dag_file_processor.create_dag_run(dag)
    ti3 = TaskInstance(task1, dr2.execution_date)
    ti4 = TaskInstance(task2, dr2.execution_date)
    ti3.refresh_from_db()
    ti4.refresh_from_db()
    # manually set to scheduled so we can pick them up
    ti3.state = State.SCHEDULED
    ti4.state = State.SCHEDULED
    session.merge(ti3)
    session.merge(ti4)
    session.commit()

    self.assertEqual(State.RUNNING, dr2.state)

    res = scheduler._execute_task_instances(dagbag)

    # check that concurrency is respected
    ti1.refresh_from_db()
    ti2.refresh_from_db()
    ti3.refresh_from_db()
    ti4.refresh_from_db()
    self.assertEqual(
        3,
        DAG.get_num_task_instances(
            dag_id, dag.task_ids, states=[State.RUNNING, State.QUEUED], session=session
        )
    )
    self.assertEqual(State.RUNNING, ti1.state)
    self.assertEqual(State.RUNNING, ti2.state)
    # Exactly one of the two scheduled TIs was queued (either may be picked).
    six.assertCountEqual(self, [State.QUEUED, State.SCHEDULED], [ti3.state, ti4.state])
    self.assertEqual(1, res)
def test_execute_task_instances_limit(self):
    """max_tis_per_query=3 only batches the work: all 8 eligible TIs still end
    up QUEUED after one _execute_task_instances call."""
    dag_id = 'SchedulerJobTest.test_execute_task_instances_limit'
    task_id_1 = 'dummy_task'
    task_id_2 = 'dummy_task_2'
    # important that len(tasks) is less than concurrency
    # because before scheduler._execute_task_instances would only
    # check the num tasks once so if concurrency was 3,
    # we could execute arbitrarily many tasks in the second run
    dag = DAG(dag_id=dag_id, start_date=DEFAULT_DATE, concurrency=16)
    task1 = DummyOperator(dag=dag, task_id=task_id_1)
    task2 = DummyOperator(dag=dag, task_id=task_id_2)
    dagbag = self._make_simple_dag_bag([dag])

    dag_file_processor = DagFileProcessor(dag_ids=[], log=mock.MagicMock())
    scheduler = SchedulerJob()
    scheduler.max_tis_per_query = 3
    session = settings.Session()

    tis = []
    for _ in range(0, 4):
        dr = dag_file_processor.create_dag_run(dag)
        ti1 = TaskInstance(task1, dr.execution_date)
        ti2 = TaskInstance(task2, dr.execution_date)
        tis.append(ti1)
        tis.append(ti2)
        ti1.refresh_from_db()
        ti2.refresh_from_db()
        ti1.state = State.SCHEDULED
        ti2.state = State.SCHEDULED
        session.merge(ti1)
        session.merge(ti2)
        session.commit()
    res = scheduler._execute_task_instances(dagbag)

    self.assertEqual(8, res)
    for ti in tis:
        ti.refresh_from_db()
        self.assertEqual(State.QUEUED, ti.state)
@pytest.mark.xfail(condition=True, reason="The test is flaky with nondeterministic result")
def test_change_state_for_tis_without_dagrun(self):
    """Only SCHEDULED/QUEUED TIs whose dagrun is missing or not RUNNING are
    reset to NONE; TIs of healthy running dagruns and finished TIs are untouched."""
    dag1 = DAG(dag_id='test_change_state_for_tis_without_dagrun', start_date=DEFAULT_DATE)

    DummyOperator(task_id='dummy', dag=dag1, owner='airflow')

    DummyOperator(task_id='dummy_b', dag=dag1, owner='airflow')

    dag2 = DAG(dag_id='test_change_state_for_tis_without_dagrun_dont_change', start_date=DEFAULT_DATE)

    DummyOperator(task_id='dummy', dag=dag2, owner='airflow')

    # dag3 gets no dagrun at all: its TI must be reset.
    dag3 = DAG(dag_id='test_change_state_for_tis_without_dagrun_no_dagrun', start_date=DEFAULT_DATE)

    DummyOperator(task_id='dummy', dag=dag3, owner='airflow')

    session = settings.Session()
    dr1 = dag1.create_dagrun(run_id=DagRunType.SCHEDULED.value,
                             state=State.RUNNING,
                             execution_date=DEFAULT_DATE,
                             start_date=DEFAULT_DATE,
                             session=session)

    dr2 = dag2.create_dagrun(run_id=DagRunType.SCHEDULED.value,
                             state=State.RUNNING,
                             execution_date=DEFAULT_DATE,
                             start_date=DEFAULT_DATE,
                             session=session)

    ti1a = dr1.get_task_instance(task_id='dummy', session=session)
    ti1a.state = State.SCHEDULED
    ti1b = dr1.get_task_instance(task_id='dummy_b', session=session)
    ti1b.state = State.SUCCESS
    session.commit()

    ti2 = dr2.get_task_instance(task_id='dummy', session=session)
    ti2.state = State.SCHEDULED
    session.commit()

    ti3 = TaskInstance(dag3.get_task('dummy'), DEFAULT_DATE)
    ti3.state = State.SCHEDULED
    session.merge(ti3)
    session.commit()

    dagbag = self._make_simple_dag_bag([dag1, dag2, dag3])
    scheduler = SchedulerJob(num_runs=0)
    scheduler._change_state_for_tis_without_dagrun(
        simple_dag_bag=dagbag,
        old_states=[State.SCHEDULED, State.QUEUED],
        new_state=State.NONE,
        session=session)

    ti1a = dr1.get_task_instance(task_id='dummy', session=session)
    ti1a.refresh_from_db(session=session)
    self.assertEqual(ti1a.state, State.SCHEDULED)

    ti1b = dr1.get_task_instance(task_id='dummy_b', session=session)
    ti1b.refresh_from_db(session=session)
    self.assertEqual(ti1b.state, State.SUCCESS)

    ti2 = dr2.get_task_instance(task_id='dummy', session=session)
    ti2.refresh_from_db(session=session)
    self.assertEqual(ti2.state, State.SCHEDULED)

    # Only the dagrun-less TI was reset.
    ti3.refresh_from_db(session=session)
    self.assertEqual(ti3.state, State.NONE)

    # Now fail dr1: its SCHEDULED TI still keeps its state on a second pass.
    dr1.refresh_from_db(session=session)
    dr1.state = State.FAILED

    # why o why
    session.merge(dr1)
    session.commit()

    scheduler._change_state_for_tis_without_dagrun(
        simple_dag_bag=dagbag,
        old_states=[State.SCHEDULED, State.QUEUED],
        new_state=State.NONE,
        session=session)
    ti1a.refresh_from_db(session=session)
    self.assertEqual(ti1a.state, State.SCHEDULED)

    # don't touch ti1b
    ti1b.refresh_from_db(session=session)
    self.assertEqual(ti1b.state, State.SUCCESS)

    # don't touch ti2
    ti2.refresh_from_db(session=session)
    self.assertEqual(ti2.state, State.SCHEDULED)
def test_change_state_for_tasks_failed_to_execute(self):
    """QUEUED TIs still sitting in executor.queued_tasks are reset to SCHEDULED;
    RUNNING TIs are left alone, and nothing happens with an empty queue."""
    dag = DAG(
        dag_id='dag_id',
        start_date=DEFAULT_DATE)

    task = DummyOperator(
        task_id='task_id',
        dag=dag,
        owner='airflow')

    # If there's no left over task in executor.queued_tasks, nothing happens
    session = settings.Session()
    scheduler_job = SchedulerJob()
    mock_logger = mock.MagicMock()
    test_executor = MockExecutor(do_update=False)
    scheduler_job.executor = test_executor
    scheduler_job._logger = mock_logger
    scheduler_job._change_state_for_tasks_failed_to_execute()  # pylint: disable=no-value-for-parameter
    mock_logger.info.assert_not_called()

    # Tasks failed to execute with QUEUED state will be set to SCHEDULED state.
    session.query(TaskInstance).delete()
    session.commit()
    # Executor TI key: (dag_id, task_id, execution_date, try_number).
    key = 'dag_id', 'task_id', DEFAULT_DATE, 1
    test_executor.queued_tasks[key] = 'value'
    ti = TaskInstance(task, DEFAULT_DATE)
    ti.state = State.QUEUED
    session.merge(ti)  # pylint: disable=no-value-for-parameter
    session.commit()

    scheduler_job._change_state_for_tasks_failed_to_execute()  # pylint: disable=no-value-for-parameter

    ti.refresh_from_db()
    self.assertEqual(State.SCHEDULED, ti.state)

    # Tasks failed to execute with RUNNING state will not be set to SCHEDULED state.
    session.query(TaskInstance).delete()
    session.commit()
    ti.state = State.RUNNING

    session.merge(ti)
    session.commit()

    scheduler_job._change_state_for_tasks_failed_to_execute()  # pylint: disable=no-value-for-parameter

    ti.refresh_from_db()
    self.assertEqual(State.RUNNING, ti.state)
def test_execute_helper_reset_orphaned_tasks(self):
    """A scheduler loop resets orphaned SCHEDULED TIs of regular runs to NONE,
    while backfill-run TIs keep their state."""
    session = settings.Session()
    dag = DAG(
        'test_execute_helper_reset_orphaned_tasks',
        start_date=DEFAULT_DATE,
        default_args={'owner': 'owner1'})

    with dag:
        op1 = DummyOperator(task_id='op1')

    dag.clear()
    dr = dag.create_dagrun(run_id=DagRunType.SCHEDULED.value,
                           state=State.RUNNING,
                           execution_date=DEFAULT_DATE,
                           start_date=DEFAULT_DATE,
                           session=session)
    # Second run is a backfill: its TIs must not be touched.
    dr2 = dag.create_dagrun(run_id=DagRunType.BACKFILL_JOB.value,
                            state=State.RUNNING,
                            execution_date=DEFAULT_DATE + datetime.timedelta(1),
                            start_date=DEFAULT_DATE,
                            session=session)
    ti = dr.get_task_instance(task_id=op1.task_id, session=session)
    ti.state = State.SCHEDULED
    ti2 = dr2.get_task_instance(task_id=op1.task_id, session=session)
    ti2.state = State.SCHEDULED
    session.commit()

    processor = mock.MagicMock()

    scheduler = SchedulerJob(num_runs=0)
    executor = MockExecutor(do_update=False)
    scheduler.executor = executor
    scheduler.processor_agent = processor

    scheduler._execute_helper()

    ti = dr.get_task_instance(task_id=op1.task_id, session=session)
    self.assertEqual(ti.state, State.NONE)

    ti2 = dr2.get_task_instance(task_id=op1.task_id, session=session)
    self.assertEqual(ti2.state, State.SCHEDULED)
@parameterized.expand([
    [State.UP_FOR_RETRY, State.FAILED],
    [State.QUEUED, State.NONE],
    [State.SCHEDULED, State.NONE],
    [State.UP_FOR_RESCHEDULE, State.NONE],
])
def test_execute_helper_should_change_state_for_tis_without_dagrun(self,
                                                                   initial_task_state,
                                                                   expected_task_state):
    """A scheduler loop moves a TI of a FAILED dagrun from `initial_task_state`
    to `expected_task_state`."""
    session = settings.Session()
    dag = DAG(
        'test_execute_helper_should_change_state_for_tis_without_dagrun',
        start_date=DEFAULT_DATE,
        default_args={'owner': 'owner1'})

    with dag:
        op1 = DummyOperator(task_id='op1')

    # Create DAG run with FAILED state
    dag.clear()
    dr = dag.create_dagrun(run_id=DagRunType.SCHEDULED.value,
                           state=State.FAILED,
                           execution_date=DEFAULT_DATE,
                           start_date=DEFAULT_DATE,
                           session=session)
    ti = dr.get_task_instance(task_id=op1.task_id, session=session)
    ti.state = initial_task_state
    session.commit()

    # Create scheduler and mock calls to processor. Run duration is set
    # to a high value to ensure loop is entered. Poll interval is 0 to
    # avoid sleep. Done flag is set to true to exit the loop immediately.
    scheduler = SchedulerJob(num_runs=0, processor_poll_interval=0)
    executor = MockExecutor(do_update=False)
    # NOTE: the original had a bare `executor.queued_tasks` expression here —
    # a no-op attribute access with no side effect; removed as dead code.
    scheduler.executor = executor
    processor = mock.MagicMock()
    processor.harvest_simple_dags.return_value = [dag]
    processor.done = True
    scheduler.processor_agent = processor

    scheduler._execute_helper()

    ti = dr.get_task_instance(task_id=op1.task_id, session=session)
    self.assertEqual(ti.state, expected_task_state)
@provide_session
def evaluate_dagrun(
        self,
        dag_id,
        expected_task_states,  # dict of task_id: state
        dagrun_state,
        run_kwargs=None,
        advance_execution_date=False,
        session=None):  # pylint: disable=unused-argument
    """
    Helper for testing DagRun states with simple two-task DAGS.
    This is hackish: a dag run is created but its tasks are
    run by a backfill.

    :param dag_id: id of a DAG available in self.dagbag
    :param expected_task_states: dict of task_id to expected terminal state
    :param dagrun_state: expected final DagRun state
    :param run_kwargs: extra kwargs forwarded to dag.run()
    :param advance_execution_date: if True, create a second dag run and
        evaluate that one instead of the first
    """
    if run_kwargs is None:
        run_kwargs = {}

    dag_file_processor = DagFileProcessor(dag_ids=[], log=mock.MagicMock())
    dag = self.dagbag.get_dag(dag_id)
    dr = dag_file_processor.create_dag_run(dag)

    if advance_execution_date:
        # run a second time to schedule a dagrun after the start_date
        dr = dag_file_processor.create_dag_run(dag)
    ex_date = dr.execution_date

    # Pre-register a mock failure for every task expected to end FAILED.
    for tid, state in expected_task_states.items():
        if state != State.FAILED:
            continue
        self.null_exec.mock_task_fail(dag_id, tid, ex_date)

    try:
        dag.run(start_date=ex_date, end_date=ex_date, executor=self.null_exec, **run_kwargs)
    except AirflowException:
        pass

    # test tasks
    for task_id, expected_state in expected_task_states.items():
        task = dag.get_task(task_id)
        ti = TaskInstance(task, ex_date)
        ti.refresh_from_db()
        self.assertEqual(ti.state, expected_state)

    # load dagrun
    dr = DagRun.find(dag_id=dag_id, execution_date=ex_date)
    dr = dr[0]
    dr.dag = dag

    self.assertEqual(dr.state, dagrun_state)
def test_dagrun_fail(self):
    """
    DagRuns with one failed and one incomplete root task -> FAILED
    """
    # 'test_dagrun_succeed' never runs because its upstream fails.
    self.evaluate_dagrun(
        dag_id='test_dagrun_states_fail',
        expected_task_states={
            'test_dagrun_fail': State.FAILED,
            'test_dagrun_succeed': State.UPSTREAM_FAILED,
        },
        dagrun_state=State.FAILED)
def test_dagrun_success(self):
    """
    DagRuns with one failed and one successful root task -> SUCCESS
    """
    # The failing task is tolerated by this DAG's trigger rules.
    self.evaluate_dagrun(
        dag_id='test_dagrun_states_success',
        expected_task_states={
            'test_dagrun_fail': State.FAILED,
            'test_dagrun_succeed': State.SUCCESS,
        },
        dagrun_state=State.SUCCESS)
def test_dagrun_root_fail(self):
    """
    DagRuns with one successful and one failed root task -> FAILED
    """
    self.evaluate_dagrun(
        dag_id='test_dagrun_states_root_fail',
        expected_task_states={
            'test_dagrun_succeed': State.SUCCESS,
            'test_dagrun_fail': State.FAILED,
        },
        dagrun_state=State.FAILED)
def test_dagrun_root_fail_unfinished(self):
    """
    DagRuns with one unfinished and one failed root task -> RUNNING
    """
    # TODO: this should live in test_dagrun.py
    # Run both the failed and successful tasks
    dag_file_processor = DagFileProcessor(dag_ids=[], log=mock.MagicMock())
    dag_id = 'test_dagrun_states_root_fail_unfinished'
    dag = self.dagbag.get_dag(dag_id)
    dr = dag_file_processor.create_dag_run(dag)
    self.null_exec.mock_task_fail(dag_id, 'test_dagrun_fail', DEFAULT_DATE)

    # The backfill raises because of the mocked task failure.
    with self.assertRaises(AirflowException):
        dag.run(start_date=dr.execution_date, end_date=dr.execution_date, executor=self.null_exec)

    # Mark the successful task as never having run since we want to see if the
    # dagrun will be in a running state despite haveing an unfinished task.
    with create_session() as session:
        ti = dr.get_task_instance('test_dagrun_unfinished', session=session)
        ti.state = State.NONE
        session.commit()
    dr.update_state()
    self.assertEqual(dr.state, State.RUNNING)
def test_dagrun_root_after_dagrun_unfinished(self):
    """
    DagRuns with one successful and one future root task -> SUCCESS

    Noted: the DagRun state could be still in running state during CI.
    """
    dag_id = 'test_dagrun_states_root_future'
    dag = self.dagbag.get_dag(dag_id)
    scheduler = SchedulerJob(
        dag_id,
        num_runs=1,
        executor=self.null_exec,
        subdir=dag.fileloc)
    scheduler.run()

    first_run = DagRun.find(dag_id=dag_id, execution_date=DEFAULT_DATE)[0]
    ti_ids = [(ti.task_id, ti.state) for ti in first_run.get_task_instances()]

    # Only the 'current' task ran; the future-dated root produced no TI.
    self.assertEqual(ti_ids, [('current', State.SUCCESS)])
    self.assertIn(first_run.state, [State.SUCCESS, State.RUNNING])
def test_dagrun_deadlock_ignore_depends_on_past_advance_ex_date(self):
    """
    DagRun is marked a success if ignore_first_depends_on_past=True

    Test that an otherwise-deadlocked dagrun is marked as a success
    if ignore_first_depends_on_past=True and the dagrun execution_date
    is after the start_date.
    """
    self.evaluate_dagrun(
        dag_id='test_dagrun_states_deadlock',
        expected_task_states={
            'test_depends_on_past': State.SUCCESS,
            'test_depends_on_past_2': State.SUCCESS,
        },
        dagrun_state=State.SUCCESS,
        advance_execution_date=True,
        run_kwargs=dict(ignore_first_depends_on_past=True))
def test_dagrun_deadlock_ignore_depends_on_past(self):
    """
    Test that ignore_first_depends_on_past doesn't affect results
    (this is the same test as
    test_dagrun_deadlock_ignore_depends_on_past_advance_ex_date except
    that start_date == execution_date so depends_on_past is irrelevant).
    """
    self.evaluate_dagrun(
        dag_id='test_dagrun_states_deadlock',
        expected_task_states={
            'test_depends_on_past': State.SUCCESS,
            'test_depends_on_past_2': State.SUCCESS,
        },
        dagrun_state=State.SUCCESS,
        run_kwargs=dict(ignore_first_depends_on_past=True))
def test_scheduler_start_date(self):
    """
    Test that the scheduler respects start_dates, even when DAGS have run
    """
    with create_session() as session:
        dag_id = 'test_start_date_scheduling'
        dag = self.dagbag.get_dag(dag_id)
        dag.clear()
        # Precondition: this DAG's start_date lies in the future.
        self.assertTrue(dag.start_date > datetime.datetime.utcnow())

        scheduler = SchedulerJob(dag_id,
                                 executor=self.null_exec,
                                 subdir=dag.fileloc,
                                 num_runs=1)
        scheduler.run()

        # zero tasks ran
        self.assertEqual(
            len(session.query(TaskInstance).filter(TaskInstance.dag_id == dag_id).all()), 0)
        session.commit()
        self.assertListEqual([], self.null_exec.sorted_tasks)

        # previously, running this backfill would kick off the Scheduler
        # because it would take the most recent run and start from there
        # That behavior still exists, but now it will only do so if after the
        # start date
        bf_exec = MockExecutor()
        backfill = BackfillJob(
            executor=bf_exec,
            dag=dag,
            start_date=DEFAULT_DATE,
            end_date=DEFAULT_DATE)
        backfill.run()

        # one task ran
        self.assertEqual(
            len(session.query(TaskInstance).filter(TaskInstance.dag_id == dag_id).all()), 1)
        self.assertListEqual(
            [
                ((dag.dag_id, 'dummy', DEFAULT_DATE, 1), State.SUCCESS),
            ],
            bf_exec.sorted_tasks
        )
        session.commit()

        scheduler = SchedulerJob(dag_id,
                                 executor=self.null_exec,
                                 subdir=dag.fileloc,
                                 num_runs=1)
        scheduler.run()

        # still one task
        self.assertEqual(
            len(session.query(TaskInstance).filter(TaskInstance.dag_id == dag_id).all()), 1)
        session.commit()
        self.assertListEqual([], self.null_exec.sorted_tasks)
    def test_scheduler_task_start_date(self):
        """
        Test that the scheduler respects task start dates that are different from DAG start dates
        """
        dag_id = 'test_task_start_date_scheduling'
        dag = self.dagbag.get_dag(dag_id)
        dag.clear()
        scheduler = SchedulerJob(dag_id,
                                 executor=self.null_exec,
                                 subdir=os.path.join(TEST_DAG_FOLDER, 'test_scheduler_dags.py'),
                                 num_runs=2)
        scheduler.run()
        session = settings.Session()
        tiq = session.query(TaskInstance).filter(TaskInstance.dag_id == dag_id)
        ti1s = tiq.filter(TaskInstance.task_id == 'dummy1').all()
        ti2s = tiq.filter(TaskInstance.task_id == 'dummy2').all()
        # dummy1 never runs while dummy2 runs in both scheduler loops —
        # presumably dummy1 carries a task-level start_date in the future;
        # confirm against test_scheduler_dags.py.
        self.assertEqual(len(ti1s), 0)
        self.assertEqual(len(ti2s), 2)
        for task in ti2s:
            self.assertEqual(task.state, State.SUCCESS)
    def test_scheduler_multiprocessing(self):
        """
        Test that the scheduler can successfully queue multiple dags in parallel
        """
        dag_ids = ['test_start_date_scheduling', 'test_dagrun_states_success']
        for dag_id in dag_ids:
            dag = self.dagbag.get_dag(dag_id)
            dag.clear()
        scheduler = SchedulerJob(dag_ids=dag_ids,
                                 executor=self.null_exec,
                                 subdir=os.path.join(TEST_DAG_FOLDER, 'test_scheduler_dags.py'),
                                 num_runs=1)
        scheduler.run()
        # zero tasks ran
        # test_start_date_scheduling has a future start date, so even though
        # both DAGs were processed in parallel, no task instance may exist for it.
        dag_id = 'test_start_date_scheduling'
        session = settings.Session()
        self.assertEqual(
            len(session.query(TaskInstance).filter(TaskInstance.dag_id == dag_id).all()), 0)
def test_scheduler_verify_pool_full(self):
"""
Test task instances not queued when pool is full
"""
dag = DAG(
dag_id='test_scheduler_verify_pool_full',
start_date=DEFAULT_DATE)
DummyOperator(
task_id='dummy',
dag=dag,
owner='airflow',
pool='test_scheduler_verify_pool_full')
session = settings.Session()
pool = Pool(pool='test_scheduler_verify_pool_full', slots=1)
session.add(pool)
orm_dag = DagModel(dag_id=dag.dag_id)
orm_dag.is_paused = False
session.merge(orm_dag)
session.commit()
dag_file_processor = DagFileProcessor(dag_ids=[], log=mock.MagicMock())
scheduler = SchedulerJob(executor=self.null_exec)
# Create 2 dagruns, which will create 2 task instances.
dag_file_processor = DagFileProcessor(dag_ids=[], log=mock.MagicMock())
dr = dag_file_processor.create_dag_run(dag)
self.assertIsNotNone(dr)
self.assertEqual(dr.execution_date, DEFAULT_DATE)
dr = dag_file_processor.create_dag_run(dag)
self.assertIsNotNone(dr)
dag_runs = DagRun.find(dag_id="test_scheduler_verify_pool_full")
task_instances_list = dag_file_processor._process_task_instances(dag, dag_runs=dag_runs)
self.assertEqual(len(task_instances_list), 2)
dagbag = self._make_simple_dag_bag([dag])
# Recreated part of the scheduler here, to kick off tasks -> executor
for ti_key in task_instances_list:
task = dag.get_task(ti_key[1])
ti = TaskInstance(task, ti_key[2])
# Task starts out in the scheduled state. All tasks in the
# scheduled state will be sent to the executor
ti.state = State.SCHEDULED
# Also save this task instance to the DB.
session.merge(ti)
session.commit()
self.assertEqual(len(scheduler.executor.queued_tasks), 0, "Check test pre-condition")
scheduler._execute_task_instances(dagbag, session=session)
self.assertEqual(len(scheduler.executor.queued_tasks), 1)
    def test_scheduler_reschedule(self):
        """
        Checks if tasks that are not taken up by the executor
        get rescheduled
        """
        # do_update=False: the executor accepts queued tasks but never runs
        # them, simulating an executor that fails to pick work up.
        executor = MockExecutor(do_update=False)
        dagbag = DagBag(dag_folder=os.path.join(settings.DAGS_FOLDER, "no_dags.py"))
        dagbag.dags.clear()
        dag = DAG(
            dag_id='test_scheduler_reschedule',
            start_date=DEFAULT_DATE)
        dummy_task = DummyOperator(
            task_id='dummy',
            dag=dag,
            owner='airflow')
        dag.clear()
        dag.is_subdag = False
        with create_session() as session:
            orm_dag = DagModel(dag_id=dag.dag_id)
            orm_dag.is_paused = False
            session.merge(orm_dag)
        dagbag.bag_dag(dag=dag, root_dag=dag, parent_dag=dag)
        @mock.patch('airflow.models.DagBag', return_value=dagbag)
        @mock.patch('airflow.models.DagBag.collect_dags')
        def do_schedule(mock_dagbag, mock_collect_dags):
            # Use a empty file since the above mock will return the
            # expected DAGs. Also specify only a single file so that it doesn't
            # try to schedule the above DAG repeatedly.
            scheduler = SchedulerJob(num_runs=1,
                                     executor=executor,
                                     subdir=os.path.join(settings.DAGS_FOLDER,
                                                         "no_dags.py"))
            scheduler.heartrate = 0
            scheduler.run()
        do_schedule()  # pylint: disable=no-value-for-parameter
        with create_session() as session:
            ti = session.query(TaskInstance).filter(TaskInstance.dag_id == dag.dag_id,
                                                    TaskInstance.task_id == dummy_task.task_id).first()
        # The executor never ran the task, so the scheduler must put it back
        # into SCHEDULED rather than leaving it queued.
        self.assertEqual(0, len(executor.queued_tasks))
        self.assertEqual(State.SCHEDULED, ti.state)
        # Let the executor actually execute on the next pass.
        executor.do_update = True
        do_schedule()  # pylint: disable=no-value-for-parameter
        self.assertEqual(0, len(executor.queued_tasks))
        ti.refresh_from_db()
        self.assertEqual(State.SUCCESS, ti.state)
    def test_retry_still_in_executor(self):
        """
        Checks if the scheduler does not put a task in limbo, when a task is retried
        but is still present in the executor.
        """
        # do_update=False: the executor accepts tasks but never runs them.
        executor = MockExecutor(do_update=False)
        dagbag = DagBag(dag_folder=os.path.join(settings.DAGS_FOLDER, "no_dags.py"))
        dagbag.dags.clear()
        dag = DAG(
            dag_id='test_retry_still_in_executor',
            start_date=DEFAULT_DATE,
            schedule_interval="@once")
        dag_task1 = BashOperator(
            task_id='test_retry_handling_op',
            bash_command='exit 1',
            retries=1,
            dag=dag,
            owner='airflow')
        dag.clear()
        dag.is_subdag = False
        with create_session() as session:
            orm_dag = DagModel(dag_id=dag.dag_id)
            orm_dag.is_paused = False
            session.merge(orm_dag)
        dagbag.bag_dag(dag=dag, root_dag=dag, parent_dag=dag)
        @mock.patch('airflow.models.DagBag', return_value=dagbag)
        @mock.patch('airflow.models.DagBag.collect_dags')
        def do_schedule(mock_dagbag, mock_collect_dags):
            # Use a empty file since the above mock will return the
            # expected DAGs. Also specify only a single file so that it doesn't
            # try to schedule the above DAG repeatedly.
            scheduler = SchedulerJob(num_runs=1,
                                     executor=executor,
                                     subdir=os.path.join(settings.DAGS_FOLDER,
                                                         "no_dags.py"))
            scheduler.heartrate = 0
            scheduler.run()
        do_schedule()  # pylint: disable=no-value-for-parameter
        with create_session() as session:
            ti = session.query(TaskInstance).filter(TaskInstance.dag_id == 'test_retry_still_in_executor',
                                                    TaskInstance.task_id == 'test_retry_handling_op').first()
        ti.task = dag_task1
        # Nothing should be left in the queued_tasks as we don't do update in MockExecutor yet,
        # and the queued_tasks will be cleared by scheduler job.
        self.assertEqual(0, len(executor.queued_tasks))
        def run_with_error(ti, ignore_ti_state=False):
            # Run the task instance, swallowing the expected failure
            # ('exit 1' raises AirflowException).
            try:
                ti.run(ignore_ti_state=ignore_ti_state)
            except AirflowException:
                pass
        self.assertEqual(ti.try_number, 1)
        # At this point, scheduler has tried to schedule the task once and
        # heartbeated the executor once, which moved the state of the task from
        # SCHEDULED to QUEUED and then to SCHEDULED, to fail the task execution
        # we need to ignore the TaskInstance state as SCHEDULED is not a valid state to start
        # executing task.
        run_with_error(ti, ignore_ti_state=True)
        self.assertEqual(ti.state, State.UP_FOR_RETRY)
        self.assertEqual(ti.try_number, 2)
        with create_session() as session:
            ti.refresh_from_db(lock_for_update=True, session=session)
            ti.state = State.SCHEDULED
            session.merge(ti)
        # do schedule
        do_schedule()  # pylint: disable=no-value-for-parameter
        # MockExecutor is not aware of the TaskInstance since we don't do update yet
        # and no trace of this TaskInstance will be left in the executor.
        self.assertFalse(executor.has_task(ti))
        self.assertEqual(ti.state, State.SCHEDULED)
        # To verify that task does get re-queued.
        executor.do_update = True
        do_schedule()  # pylint: disable=no-value-for-parameter
        ti.refresh_from_db()
        self.assertEqual(ti.state, State.SUCCESS)
    @pytest.mark.xfail(condition=True, reason="This test is failing!")
    def test_retry_handling_job(self):
        """
        Integration test of the scheduler not accidentally resetting
        the try_numbers for a task
        """
        dag = self.dagbag.get_dag('test_retry_handling_job')
        dag_task1 = dag.get_task("test_retry_handling_op")
        dag.clear()
        scheduler = SchedulerJob(dag_id=dag.dag_id,
                                 num_runs=1)
        scheduler.heartrate = 0
        scheduler.run()
        session = settings.Session()
        ti = session.query(TaskInstance).filter(TaskInstance.dag_id == dag.dag_id,
                                                TaskInstance.task_id == dag_task1.task_id).first()
        # make sure the counter has increased
        # (a failed attempt followed by a pending retry, not a reset to 1)
        self.assertEqual(ti.try_number, 2)
        self.assertEqual(ti.state, State.UP_FOR_RETRY)
    def test_dag_with_system_exit(self):
        """
        Test to check that a DAG with a system.exit() doesn't break the scheduler.
        """
        dag_id = 'exit_test_dag'
        dag_ids = [dag_id]
        # The DAG lives in a dedicated folder of deliberately exiting DAG files.
        dag_directory = os.path.join(settings.DAGS_FOLDER, "..", "dags_with_system_exit")
        dag_file = os.path.join(dag_directory,
                                'b_test_scheduler_dags.py')
        dagbag = DagBag(dag_folder=dag_file)
        for dag_id in dag_ids:
            dag = dagbag.get_dag(dag_id)
            dag.clear()
        scheduler = SchedulerJob(dag_ids=dag_ids,
                                 executor=self.null_exec,
                                 subdir=dag_directory,
                                 num_runs=1)
        scheduler.run()
        with create_session() as session:
            # The scheduler survived the sys.exit() in a sibling DAG file and
            # still scheduled the one task of exit_test_dag.
            self.assertEqual(
                len(session.query(TaskInstance).filter(TaskInstance.dag_id == dag_id).all()), 1)
    def test_dag_get_active_runs(self):
        """
        Test to check that a DAG returns its active runs
        """
        now = timezone.utcnow()
        # Align to the hour so the per-minute schedule has a clean anchor.
        six_hours_ago_to_the_hour = \
            (now - datetime.timedelta(hours=6)).replace(minute=0, second=0, microsecond=0)
        start_date = six_hours_ago_to_the_hour
        dag_name1 = 'get_active_runs_test'
        default_args = {
            'owner': 'airflow',
            'depends_on_past': False,
            'start_date': start_date
        }
        dag1 = DAG(dag_name1,
                   schedule_interval='* * * * *',
                   max_active_runs=1,
                   default_args=default_args
                   )
        # Simple linear chain: run_this_1 -> run_this_2 -> run_this_3.
        run_this_1 = DummyOperator(task_id='run_this_1', dag=dag1)
        run_this_2 = DummyOperator(task_id='run_this_2', dag=dag1)
        run_this_2.set_upstream(run_this_1)
        run_this_3 = DummyOperator(task_id='run_this_3', dag=dag1)
        run_this_3.set_upstream(run_this_2)
        session = settings.Session()
        orm_dag = DagModel(dag_id=dag1.dag_id)
        session.merge(orm_dag)
        session.commit()
        session.close()
        dag_file_processor = DagFileProcessor(dag_ids=[], log=mock.MagicMock())
        dag1.clear()
        dr = dag_file_processor.create_dag_run(dag1)
        # We had better get a dag run
        self.assertIsNotNone(dr)
        execution_date = dr.execution_date
        running_dates = dag1.get_active_runs()
        try:
            running_date = running_dates[0]
        except Exception:  # pylint: disable=broad-except
            # Sentinel so the assertion below fails with a readable message
            # instead of raising IndexError.
            running_date = 'Except'
        self.assertEqual(execution_date, running_date, 'Running Date must match Execution Date')
    def test_add_unparseable_file_before_sched_start_creates_import_error(self):
        """A DAG file that is unparseable before the scheduler starts yields one import error."""
        dags_folder = mkdtemp()
        try:
            unparseable_filename = os.path.join(dags_folder, TEMP_DAG_FILENAME)
            with open(unparseable_filename, 'w') as unparseable_file:
                unparseable_file.writelines(UNPARSEABLE_DAG_FILE_CONTENTS)
            self.run_single_scheduler_loop_with_no_dags(dags_folder)
        finally:
            # Always clean up the temp folder, even if the scheduler loop fails.
            shutil.rmtree(dags_folder)
        with create_session() as session:
            import_errors = session.query(errors.ImportError).all()
            self.assertEqual(len(import_errors), 1)
            import_error = import_errors[0]
            self.assertEqual(import_error.filename,
                             unparseable_filename)
            self.assertEqual(import_error.stacktrace,
                             "invalid syntax ({}, line 1)".format(TEMP_DAG_FILENAME))
    def test_add_unparseable_file_after_sched_start_creates_import_error(self):
        """A DAG file that becomes unparseable after a scheduler pass yields one import error."""
        dags_folder = mkdtemp()
        try:
            unparseable_filename = os.path.join(dags_folder, TEMP_DAG_FILENAME)
            # First loop runs with no DAG files at all.
            self.run_single_scheduler_loop_with_no_dags(dags_folder)
            with open(unparseable_filename, 'w') as unparseable_file:
                unparseable_file.writelines(UNPARSEABLE_DAG_FILE_CONTENTS)
            # Second loop sees the broken file.
            self.run_single_scheduler_loop_with_no_dags(dags_folder)
        finally:
            shutil.rmtree(dags_folder)
        with create_session() as session:
            import_errors = session.query(errors.ImportError).all()
            self.assertEqual(len(import_errors), 1)
            import_error = import_errors[0]
            self.assertEqual(import_error.filename,
                             unparseable_filename)
            self.assertEqual(import_error.stacktrace,
                             "invalid syntax ({}, line 1)".format(TEMP_DAG_FILENAME))
    def test_no_import_errors_with_parseable_dag(self):
        """A syntactically valid DAG file produces no import errors."""
        try:
            dags_folder = mkdtemp()
            parseable_filename = os.path.join(dags_folder, TEMP_DAG_FILENAME)
            with open(parseable_filename, 'w') as parseable_file:
                parseable_file.writelines(PARSEABLE_DAG_FILE_CONTENTS)
            self.run_single_scheduler_loop_with_no_dags(dags_folder)
        finally:
            shutil.rmtree(dags_folder)
        with create_session() as session:
            import_errors = session.query(errors.ImportError).all()
            self.assertEqual(len(import_errors), 0)
    def test_new_import_error_replaces_old(self):
        """Re-parsing a still-broken file replaces its import error rather than adding a second one."""
        try:
            dags_folder = mkdtemp()
            unparseable_filename = os.path.join(dags_folder, TEMP_DAG_FILENAME)
            # Generate original import error
            with open(unparseable_filename, 'w') as unparseable_file:
                unparseable_file.writelines(UNPARSEABLE_DAG_FILE_CONTENTS)
            self.run_single_scheduler_loop_with_no_dags(dags_folder)
            # Generate replacement import error (the error will be on the second line now)
            with open(unparseable_filename, 'w') as unparseable_file:
                unparseable_file.writelines(
                    PARSEABLE_DAG_FILE_CONTENTS +
                    os.linesep +
                    UNPARSEABLE_DAG_FILE_CONTENTS)
            self.run_single_scheduler_loop_with_no_dags(dags_folder)
        finally:
            shutil.rmtree(dags_folder)
        session = settings.Session()
        import_errors = session.query(errors.ImportError).all()
        # Still exactly one error, but pointing at line 2 now.
        self.assertEqual(len(import_errors), 1)
        import_error = import_errors[0]
        self.assertEqual(import_error.filename,
                         unparseable_filename)
        self.assertEqual(import_error.stacktrace,
                         "invalid syntax ({}, line 2)".format(TEMP_DAG_FILENAME))
    def test_remove_error_clears_import_error(self):
        """Fixing the syntax error in a DAG file clears its import error on the next pass."""
        try:
            dags_folder = mkdtemp()
            filename_to_parse = os.path.join(dags_folder, TEMP_DAG_FILENAME)
            # Generate original import error
            with open(filename_to_parse, 'w') as file_to_parse:
                file_to_parse.writelines(UNPARSEABLE_DAG_FILE_CONTENTS)
            self.run_single_scheduler_loop_with_no_dags(dags_folder)
            # Remove the import error from the file
            with open(filename_to_parse, 'w') as file_to_parse:
                file_to_parse.writelines(
                    PARSEABLE_DAG_FILE_CONTENTS)
            self.run_single_scheduler_loop_with_no_dags(dags_folder)
        finally:
            shutil.rmtree(dags_folder)
        session = settings.Session()
        import_errors = session.query(errors.ImportError).all()
        self.assertEqual(len(import_errors), 0)
    def test_remove_file_clears_import_error(self):
        """Deleting a broken DAG file clears its import error on the next pass."""
        try:
            dags_folder = mkdtemp()
            filename_to_parse = os.path.join(dags_folder, TEMP_DAG_FILENAME)
            # Generate original import error
            with open(filename_to_parse, 'w') as file_to_parse:
                file_to_parse.writelines(UNPARSEABLE_DAG_FILE_CONTENTS)
            self.run_single_scheduler_loop_with_no_dags(dags_folder)
        finally:
            shutil.rmtree(dags_folder)
        # Rerun the scheduler once the dag file has been removed
        self.run_single_scheduler_loop_with_no_dags(dags_folder)
        with create_session() as session:
            import_errors = session.query(errors.ImportError).all()
            self.assertEqual(len(import_errors), 0)
    def test_list_py_file_paths(self):
        """
        [JIRA-1357] Test the 'list_py_file_paths' function used by the
        scheduler to list and load DAGs.
        """
        detected_files = set()
        expected_files = set()
        # No_dags is empty, _invalid_ is ignored by .airflowignore
        ignored_files = {
            'no_dags.py',
            'test_invalid_cron.py',
            'test_zip_invalid_cron.zip',
            'test_ignore_this.py',
        }
        # Build the expected set from a raw walk of the test DAG folder.
        for root, _, files in os.walk(TEST_DAG_FOLDER):  # pylint: disable=too-many-nested-blocks
            for file_name in files:
                if file_name.endswith('.py') or file_name.endswith('.zip'):
                    if file_name not in ignored_files:
                        expected_files.add(
                            '{}/{}'.format(root, file_name))
        for file_path in list_py_file_paths(TEST_DAG_FOLDER, include_examples=False):
            detected_files.add(file_path)
        self.assertEqual(detected_files, expected_files)
        # Second pass: include the bundled example DAGs as well.
        ignored_files = {
            'helper.py',
        }
        example_dag_folder = airflow.example_dags.__path__[0]
        for root, _, files in os.walk(example_dag_folder):  # pylint: disable=too-many-nested-blocks
            for file_name in files:
                if file_name.endswith('.py') or file_name.endswith('.zip'):
                    if file_name not in ['__init__.py'] and file_name not in ignored_files:
                        expected_files.add(os.path.join(root, file_name))
        detected_files.clear()
        for file_path in list_py_file_paths(TEST_DAG_FOLDER, include_examples=True):
            detected_files.add(file_path)
        self.assertEqual(detected_files, expected_files)
    def test_reset_orphaned_tasks_nothing(self):
        """Try with nothing: with no dagruns or task instances, nothing is reset."""
        scheduler = SchedulerJob()
        session = settings.Session()
        self.assertEqual(
            0, len(scheduler.reset_state_for_orphaned_tasks(session=session)))
    def test_reset_orphaned_tasks_external_triggered_dag(self):
        """A SCHEDULED task in a running, externally triggered dagrun is reset."""
        dag_id = 'test_reset_orphaned_tasks_external_triggered_dag'
        dag = DAG(dag_id=dag_id, start_date=DEFAULT_DATE, schedule_interval='@daily')
        task_id = dag_id + '_task'
        DummyOperator(task_id=task_id, dag=dag)
        dag_file_processor = DagFileProcessor(dag_ids=[], log=mock.MagicMock())
        scheduler = SchedulerJob()
        session = settings.Session()
        dr1 = dag_file_processor.create_dag_run(dag, session=session)
        ti = dr1.get_task_instances(session=session)[0]
        # Running run + SCHEDULED ti + external trigger -> still considered orphaned.
        dr1.state = State.RUNNING
        ti.state = State.SCHEDULED
        dr1.external_trigger = True
        session.merge(ti)
        session.merge(dr1)
        session.commit()
        reset_tis = scheduler.reset_state_for_orphaned_tasks(session=session)
        self.assertEqual(1, len(reset_tis))
    def test_reset_orphaned_tasks_backfill_dag(self):
        """Tasks belonging to a backfill dagrun are never reset as orphans."""
        dag_id = 'test_reset_orphaned_tasks_backfill_dag'
        dag = DAG(dag_id=dag_id, start_date=DEFAULT_DATE, schedule_interval='@daily')
        task_id = dag_id + '_task'
        DummyOperator(task_id=task_id, dag=dag)
        dag_file_processor = DagFileProcessor(dag_ids=[], log=mock.MagicMock())
        scheduler = SchedulerJob()
        session = settings.Session()
        dr1 = dag_file_processor.create_dag_run(dag, session=session)
        ti = dr1.get_task_instances(session=session)[0]
        ti.state = State.SCHEDULED
        dr1.state = State.RUNNING
        # A run_id with the backfill prefix marks the run as a backfill.
        dr1.run_id = DagRunType.BACKFILL_JOB.value + '_sdfsfdfsd'
        session.merge(ti)
        session.merge(dr1)
        session.commit()
        self.assertTrue(dr1.is_backfill)
        self.assertEqual(0, len(scheduler.reset_state_for_orphaned_tasks(session=session)))
    def test_reset_orphaned_tasks_specified_dagrun(self):
        """Try to reset when we specify a dagrun and ensure nothing else is."""
        dag_id = 'test_reset_orphaned_tasks_specified_dagrun'
        dag = DAG(dag_id=dag_id, start_date=DEFAULT_DATE, schedule_interval='@daily')
        task_id = dag_id + '_task'
        DummyOperator(task_id=task_id, dag=dag)
        scheduler = SchedulerJob()
        session = settings.Session()
        # make two dagruns, only reset for one
        dag_file_processor = DagFileProcessor(dag_ids=[], log=mock.MagicMock())
        dr1 = dag_file_processor.create_dag_run(dag)
        dr2 = dag_file_processor.create_dag_run(dag)
        dr1.state = State.SUCCESS
        dr2.state = State.RUNNING
        ti1 = dr1.get_task_instances(session=session)[0]
        ti2 = dr2.get_task_instances(session=session)[0]
        ti1.state = State.SCHEDULED
        ti2.state = State.SCHEDULED
        session.merge(ti1)
        session.merge(ti2)
        session.merge(dr1)
        session.merge(dr2)
        session.commit()
        # Filtering by dr2 must reset only dr2's task instance.
        reset_tis = scheduler.reset_state_for_orphaned_tasks(filter_by_dag_run=dr2, session=session)
        self.assertEqual(1, len(reset_tis))
        ti1.refresh_from_db(session=session)
        ti2.refresh_from_db(session=session)
        # ti1 untouched; ti2 cleared back to NONE.
        self.assertEqual(State.SCHEDULED, ti1.state)
        self.assertEqual(State.NONE, ti2.state)
    def test_reset_orphaned_tasks_nonexistent_dagrun(self):
        """Make sure a task in an orphaned state is not reset if it has no dagrun. """
        dag_id = 'test_reset_orphaned_tasks_nonexistent_dagrun'
        dag = DAG(dag_id=dag_id, start_date=DEFAULT_DATE, schedule_interval='@daily')
        task_id = dag_id + '_task'
        task = DummyOperator(task_id=task_id, dag=dag)
        scheduler = SchedulerJob()
        session = settings.Session()
        # Create the task instance directly, without a corresponding DagRun row.
        ti = TaskInstance(task=task, execution_date=DEFAULT_DATE)
        session.add(ti)
        session.commit()
        ti.refresh_from_db()
        ti.state = State.SCHEDULED
        session.merge(ti)
        session.commit()
        self.assertEqual(0, len(scheduler.reset_state_for_orphaned_tasks(session=session)))
    def test_reset_orphaned_tasks_no_orphans(self):
        """A RUNNING task instance in a running dagrun is not an orphan and keeps its state."""
        dag_id = 'test_reset_orphaned_tasks_no_orphans'
        dag = DAG(dag_id=dag_id, start_date=DEFAULT_DATE, schedule_interval='@daily')
        task_id = dag_id + '_task'
        DummyOperator(task_id=task_id, dag=dag)
        dag_file_processor = DagFileProcessor(dag_ids=[], log=mock.MagicMock())
        scheduler = SchedulerJob()
        session = settings.Session()
        dr1 = dag_file_processor.create_dag_run(dag)
        dr1.state = State.RUNNING
        tis = dr1.get_task_instances(session=session)
        tis[0].state = State.RUNNING
        session.merge(dr1)
        session.merge(tis[0])
        session.commit()
        self.assertEqual(0, len(scheduler.reset_state_for_orphaned_tasks(session=session)))
        tis[0].refresh_from_db()
        self.assertEqual(State.RUNNING, tis[0].state)
    def test_reset_orphaned_tasks_non_running_dagruns(self):
        """Ensure orphaned tasks with non-running dagruns are not reset."""
        dag_id = 'test_reset_orphaned_tasks_non_running_dagruns'
        dag = DAG(dag_id=dag_id, start_date=DEFAULT_DATE, schedule_interval='@daily')
        task_id = dag_id + '_task'
        DummyOperator(task_id=task_id, dag=dag)
        dag_file_processor = DagFileProcessor(dag_ids=[], log=mock.MagicMock())
        scheduler = SchedulerJob()
        session = settings.Session()
        dr1 = dag_file_processor.create_dag_run(dag)
        # A finished (SUCCESS) run is out of scope for orphan resetting.
        dr1.state = State.SUCCESS
        tis = dr1.get_task_instances(session=session)
        self.assertEqual(1, len(tis))
        tis[0].state = State.SCHEDULED
        session.merge(dr1)
        session.merge(tis[0])
        session.commit()
        self.assertEqual(0, len(scheduler.reset_state_for_orphaned_tasks(session=session)))
    def test_reset_orphaned_tasks_with_orphans(self):
        """Create dagruns and ensure only ones with correct states are reset."""
        prefix = 'scheduler_job_test_test_reset_orphaned_tasks'
        # One task per state; only QUEUED/SCHEDULED/NONE count as orphanable.
        states = [State.QUEUED, State.SCHEDULED, State.NONE, State.RUNNING, State.SUCCESS]
        states_to_reset = [State.QUEUED, State.SCHEDULED, State.NONE]
        dag = DAG(dag_id=prefix,
                  start_date=DEFAULT_DATE,
                  schedule_interval="@daily")
        tasks = []
        for i in range(len(states)):
            task_id = "{}_task_{}".format(prefix, i)
            task = DummyOperator(task_id=task_id, dag=dag)
            tasks.append(task)
        scheduler = SchedulerJob()
        session = settings.Session()
        # create dagruns
        dag_file_processor = DagFileProcessor(dag_ids=[], log=mock.MagicMock())
        dr1 = dag_file_processor.create_dag_run(dag)
        dr2 = dag_file_processor.create_dag_run(dag)
        # dr1 is running (eligible for resets); dr2 finished (not eligible).
        dr1.state = State.RUNNING
        dr2.state = State.SUCCESS
        session.merge(dr1)
        session.merge(dr2)
        session.commit()
        # create taskinstances and set states
        dr1_tis = []
        dr2_tis = []
        for i, (task, state) in enumerate(zip(tasks, states)):
            ti1 = TaskInstance(task, dr1.execution_date)
            ti2 = TaskInstance(task, dr2.execution_date)
            ti1.refresh_from_db()
            ti2.refresh_from_db()
            ti1.state = state
            ti2.state = state
            dr1_tis.append(ti1)
            dr2_tis.append(ti2)
            session.merge(ti1)
            session.merge(ti2)
        session.commit()
        # QUEUED and SCHEDULED tis of the running run are reported as reset
        # (the NONE ti is already "reset" and does not appear in the return).
        self.assertEqual(2, len(scheduler.reset_state_for_orphaned_tasks(session=session)))
        for ti in dr1_tis + dr2_tis:
            ti.refresh_from_db()
        # running dagrun should be reset
        for state, ti in zip(states, dr1_tis):
            if state in states_to_reset:
                self.assertIsNone(ti.state)
            else:
                self.assertEqual(state, ti.state)
        # otherwise not
        for state, ti in zip(states, dr2_tis):
            self.assertEqual(state, ti.state)
        for state, ti in zip(states, dr1_tis):
            ti.state = state
        session.commit()
        scheduler.reset_state_for_orphaned_tasks(filter_by_dag_run=dr1, session=session)
        # check same for dag_run version
        for state, ti in zip(states, dr2_tis):
            self.assertEqual(state, ti.state)
        session.close()
def test_task_with_upstream_skip_process_task_instances():
    """
    Test if _process_task_instances puts a task instance into SKIPPED state if any of its
    upstream tasks are skipped according to TriggerRuleDep.
    """
    clear_db_runs()
    with DAG(
        dag_id='test_task_with_upstream_skip_dag',
        start_date=DEFAULT_DATE,
        schedule_interval=None
    ) as dag:
        # dummy3 depends on both dummy1 and dummy2 (default trigger rule,
        # presumably all_success — confirm against the operator default).
        dummy1 = DummyOperator(task_id='dummy1')
        dummy2 = DummyOperator(task_id="dummy2")
        dummy3 = DummyOperator(task_id="dummy3")
        [dummy1, dummy2] >> dummy3
    dag_file_processor = DagFileProcessor(dag_ids=[], log=mock.MagicMock())
    dag.clear()
    dr = dag.create_dagrun(run_id=f"manual__{DEFAULT_DATE.isoformat()}",
                           state=State.RUNNING,
                           execution_date=DEFAULT_DATE)
    assert dr is not None
    with create_session() as session:
        tis = {ti.task_id: ti for ti in dr.get_task_instances(session=session)}
        # Set dummy1 to skipped and dummy2 to success. dummy3 remains as none.
        tis[dummy1.task_id].state = State.SKIPPED
        tis[dummy2.task_id].state = State.SUCCESS
        assert tis[dummy3.task_id].state == State.NONE
    dag_runs = DagRun.find(dag_id='test_task_with_upstream_skip_dag')
    dag_file_processor._process_task_instances(dag, dag_runs=dag_runs)
    with create_session() as session:
        tis = {ti.task_id: ti for ti in dr.get_task_instances(session=session)}
        assert tis[dummy1.task_id].state == State.SKIPPED
        assert tis[dummy2.task_id].state == State.SUCCESS
        # dummy3 should be skipped because dummy1 is skipped.
        assert tis[dummy3.task_id].state == State.SKIPPED
| 38.240952 | 110 | 0.631718 |
7955ded745f4cf12f654d76be29680230fb0d132 | 20,027 | py | Python | argoverse/evaluation/detection/utils.py | venice-liong/argoverse-api | 61c9bdc87f33feb84f2d8766dded1614b56cd0fe | [
"MIT"
] | null | null | null | argoverse/evaluation/detection/utils.py | venice-liong/argoverse-api | 61c9bdc87f33feb84f2d8766dded1614b56cd0fe | [
"MIT"
] | null | null | null | argoverse/evaluation/detection/utils.py | venice-liong/argoverse-api | 61c9bdc87f33feb84f2d8766dded1614b56cd0fe | [
"MIT"
] | null | null | null | # <Copyright 2020, Argo AI, LLC. Released under the MIT license.>
"""Detection utilities for the Argoverse detection leaderboard.
Accepts detections (in Argoverse ground truth format) and ground truth labels
for computing evaluation metrics for 3d object detection. We have five different
metrics: mAP, ATE, ASE, AOE, and CDS. A true positive for mAP is defined as the
highest confidence prediction within a specified Euclidean distance threshold
from a bird's-eye view. We prefer these metrics instead of IoU due to the
increased interpretability of the error modes in a set of detections.
"""
import copy
import logging
import os
from collections import defaultdict
from enum import Enum, auto
from pathlib import Path
from typing import DefaultDict, List, NamedTuple, Optional, Tuple
import matplotlib
import numpy as np
import pandas as pd
from scipy.spatial.distance import cdist
from scipy.spatial.transform import Rotation as R
from argoverse.data_loading.object_label_record import ObjectLabelRecord, read_label
from argoverse.data_loading.pose_loader import get_city_SE3_egovehicle_at_sensor_t, read_city_name
from argoverse.evaluation.detection.constants import (
COMPETITION_CLASSES,
MAX_NORMALIZED_AOE,
MAX_NORMALIZED_ASE,
MAX_NUM_BOXES,
MAX_SCALE_ERROR,
MAX_YAW_ERROR,
MIN_AP,
MIN_CDS,
N_TP_ERRORS,
)
from argoverse.map_representation.map_api import ArgoverseMap
from argoverse.utils.se3 import SE3
from argoverse.utils.transform import quat_argo2scipy_vectorized
# Use the non-interactive Agg backend so evaluation can run headless (no display);
# must be set before pyplot is imported, hence the isort/E402 suppressions.
matplotlib.use("Agg")  # isort:skip
import matplotlib.pyplot as plt  # isort:skip # noqa: E402
logger = logging.getLogger(__name__)
class AffFnType(Enum):
    """Affinity function used to match detections to ground truth (center distance only)."""

    CENTER = auto()
class DistFnType(Enum):
    """True-positive error type: translation (ATE), scale (ASE), or orientation (AOE)."""

    TRANSLATION = auto()
    SCALE = auto()
    ORIENTATION = auto()
class InterpType(Enum):
    """Precision interpolation scheme for AP computation."""

    ALL = auto()
class FilterMetric(Enum):
    """Distance metric used to filter detections/annotations by range."""

    EUCLIDEAN = auto()
class DetectionCfg(NamedTuple):
    """Instantiates a DetectionCfg object for configuring a DetectionEvaluator.

    Args:
        affinity_threshs: Affinity thresholds for determining a true positive.
        affinity_fn_type: Type of affinity function to be used for calculating average precision.
        n_rec_samples: Number of recall points to sample uniformly in [0, 1]. Default to 101 recall samples.
        tp_thresh: Center distance threshold for the true positive metrics (in meters).
        dt_classes: Detection classes for evaluation.
        dt_metric: Detection metric to use for filtering of both detections and ground truth annotations.
        max_dt_range: The max distance (under a specific metric in meters) for a detection or ground truth to be
            considered for evaluation.
        save_figs: Flag to save figures.
        tp_normalization_terms: Normalization constants for ATE, ASE, and AOE.
        summary_default_vals: Evaluation summary default values.
        eval_only_roi_instances: only use dets and ground truth that lie within region of interest during eval.

    NOTE: the list/ndarray defaults are evaluated once at class creation and
    shared across instances; they are treated as read-only configuration.
    """

    affinity_threshs: List[float] = [0.5, 1.0, 2.0, 4.0]  # Meters
    affinity_fn_type: AffFnType = AffFnType.CENTER
    n_rec_samples: int = 101
    tp_thresh: float = 2.0  # Meters
    dt_classes: List[str] = COMPETITION_CLASSES
    dt_metric: FilterMetric = FilterMetric.EUCLIDEAN
    max_dt_range: float = 100.0  # Meters
    save_figs: bool = False
    # Order matches the TP error columns: [ATE, ASE, AOE].
    tp_normalization_terms: np.ndarray = np.array([tp_thresh, MAX_SCALE_ERROR, MAX_YAW_ERROR])
    summary_default_vals: np.ndarray = np.array([MIN_AP, tp_thresh, MAX_NORMALIZED_ASE, MAX_NORMALIZED_AOE, MIN_CDS])
    eval_only_roi_instances: bool = True
def accumulate(
    dt_root_fpath: Path, gt_fpath: Path, cfg: DetectionCfg, avm: Optional[ArgoverseMap]
) -> Tuple[DefaultDict[str, np.ndarray], DefaultDict[str, int]]:
    """Accumulate the true/false positives (boolean flags) and true positive errors for each class.

    Args:
        dt_root_fpath: Detections root folder file path.
        gt_fpath: Ground truth file path.
        cfg: Detection configuration.
        avm: Map object used to restrict evaluation to the region of interest;
            only used when `cfg.eval_only_roi_instances` is set.

    Returns:
        cls_to_accum: Class to accumulated statistics dictionary of shape |C| -> (N, K + S) where C
            is the number of detection classes, K is the number of true positive thresholds used for
            AP computation, and S is the number of true positive errors.
        cls_to_ninst: Mapping of shape |C| -> (1,) the class names to the number of instances in the ground
            truth dataset.
    """
    # The log id and timestamp are encoded in the ground-truth file path and
    # locate the matching detection file.
    log_id = gt_fpath.parents[1].stem
    logger.info(f"log_id = {log_id}")
    ts = int(gt_fpath.stem.split("_")[-1])
    dt_fpath = dt_root_fpath / f"{log_id}/per_sweep_annotations_amodal/" f"tracked_object_labels_{ts}.json"
    dts = np.array(read_label(str(dt_fpath)))
    gts = np.array(read_label(str(gt_fpath)))
    if cfg.eval_only_roi_instances and avm is not None:
        # go up 3 levels, because hierarchy is as follows:
        # {gt_root_fpath}/{log_id}/per_sweep_annotations_amodal/{gt_root_fname}
        gt_root_fpath = Path(gt_fpath).parents[2]
        city_SE3_egovehicle = get_city_SE3_egovehicle_at_sensor_t(ts, str(gt_root_fpath), log_id)
        if city_SE3_egovehicle is not None:
            log_city_name = read_city_name(os.path.join(gt_root_fpath, log_id, "city_info.json"))
            dts = filter_objs_to_roi(dts, avm, city_SE3_egovehicle, log_city_name)
            gts = filter_objs_to_roi(gts, avm, city_SE3_egovehicle, log_city_name)
    cls_to_accum = defaultdict(list)
    cls_to_ninst = defaultdict(int)
    for class_name in cfg.dt_classes:
        # Keep only this class, within the configured evaluation range.
        dt_filtered = filter_instances(
            dts,
            class_name,
            filter_metric=cfg.dt_metric,
            max_detection_range=cfg.max_dt_range,
        )
        gt_filtered = filter_instances(
            gts,
            class_name,
            filter_metric=cfg.dt_metric,
            max_detection_range=cfg.max_dt_range,
        )
        gt_filtered = remove_duplicate_instances(gt_filtered, cfg)
        logger.info(f"{dt_filtered.shape[0]} detections")
        logger.info(f"{gt_filtered.shape[0]} ground truth")
        if dt_filtered.shape[0] > 0:
            # Sort detections by confidence before assignment, then append the
            # scores as the final column of the per-class accumulator.
            ranked_detections, scores = rank(dt_filtered)
            metrics = assign(ranked_detections, gt_filtered, cfg)
            cls_to_accum[class_name] = np.hstack((metrics, scores))
        cls_to_ninst[class_name] = gt_filtered.shape[0]
    return cls_to_accum, cls_to_ninst
def remove_duplicate_instances(instances: np.ndarray, cfg: DetectionCfg) -> np.ndarray:
    """Remove any duplicate cuboids in ground truth.

    Any ground truth cuboid of the same object class that shares the same centroid
    with another is considered a duplicate instance.

    We first form an (N,N) affinity matrix with entries equal to negative distance.
    We then find rows in the affinity matrix with more than one zero, and
    then for each such row, we choose only the first column index with value zero.

    Args:
        instances: array of length (M,), each entry is an ObjectLabelRecord
        cfg: Detection configuration.

    Returns:
        array of length (N,) where N <= M, each entry is a unique ObjectLabelRecord
    """
    if len(instances) == 0:
        return instances
    assert isinstance(instances, np.ndarray)

    # create affinity matrix as inverse distance to other objects
    # (deep copies guard against any mutation inside the affinity computation)
    affinity_matrix = compute_affinity_matrix(copy.deepcopy(instances), copy.deepcopy(instances), cfg.affinity_fn_type)

    # Note: the diagonal is zero (self-distance), so every row has at least one
    # zero entry; non-duplicates simply select themselves below.
    row_idxs, col_idxs = np.where(affinity_matrix == 0)
    # find the indices where each row index appears for the first time
    unique_row_idxs, unique_element_idxs = np.unique(row_idxs, return_index=True)
    # choose the first instance in each column where repeat occurs
    first_col_idxs = col_idxs[unique_element_idxs]
    # eliminate redundant column indices
    unique_ids = np.unique(first_col_idxs)
    return instances[unique_ids]
def assign(dts: np.ndarray, gts: np.ndarray, cfg: DetectionCfg) -> np.ndarray:
    """Attempt assignment of each detection to a ground truth label.

    Args:
        dts: Detections of shape (N,).
        gts: Ground truth labels of shape (M,).
        cfg: Detection configuration.

    Returns:
        metrics: Matrix of true/false positive concatenated with true positive errors (N, K + S) where K is the number
            of true positive thresholds used for AP computation and S is the number of true positive errors.
    """
    # Ensure the number of boxes considered per class is at most `MAX_NUM_BOXES`.
    # NOTE(review): assumes `dts` is already ranked by confidence so the
    # truncation keeps the highest-scoring boxes — confirm with callers.
    if dts.shape[0] > MAX_NUM_BOXES:
        dts = dts[:MAX_NUM_BOXES]

    n_threshs = len(cfg.affinity_threshs)
    # One row per detection: K true/false-positive flags + S error columns.
    metrics = np.zeros((dts.shape[0], n_threshs + N_TP_ERRORS))

    # Set the true positive metrics to np.nan since error is undefined on false positives.
    metrics[:, n_threshs : n_threshs + N_TP_ERRORS] = np.nan

    # No ground truth in this frame: every detection stays a false positive (zeros).
    if gts.shape[0] == 0:
        return metrics

    affinity_matrix = compute_affinity_matrix(dts, gts, cfg.affinity_fn_type)

    # Get the GT label for each max-affinity GT label, detection pair.
    gt_matches = affinity_matrix.argmax(axis=1)[np.newaxis, :]

    # The affinity matrix is an N by M matrix of the detections and ground truth labels respectively.
    # We want to take the corresponding affinity for each of the initial assignments using `gt_matches`.
    # The following line grabs the max affinity for each detection to a ground truth label.
    affinities = np.take_along_axis(affinity_matrix.T, gt_matches, axis=0).squeeze(0)

    # Find the indices of the "first" detection assigned to each GT.
    unique_gt_matches, unique_dt_matches = np.unique(gt_matches, return_index=True)
    for i, thresh in enumerate(cfg.affinity_threshs):
        # `tp_mask` may need to be defined differently with other affinities.
        # Affinity is negative distance, so `affinity > -thresh` means the
        # detection lies within `thresh` of its matched ground truth.
        tp_mask = affinities[unique_dt_matches] > -thresh
        metrics[unique_dt_matches, i] = tp_mask

        # Only compute true positive error when `thresh` is equal to the tp threshold.
        is_tp_thresh = thresh == cfg.tp_thresh
        # Ensure that there are true positives of the respective class in the frame.
        has_true_positives = np.count_nonzero(tp_mask) > 0

        if is_tp_thresh and has_true_positives:
            dt_tp_indices = unique_dt_matches[tp_mask]
            gt_tp_indices = unique_gt_matches[tp_mask]

            # Form DataFrame of shape (N, D) where D is the number of attributes in `ObjectLabelRecord`.
            dt_df = pd.DataFrame([dt.__dict__ for dt in dts[dt_tp_indices]])
            gt_df = pd.DataFrame([gt.__dict__ for gt in gts[gt_tp_indices]])

            # Translation / scale / orientation errors for the true positives only.
            trans_error = dist_fn(dt_df, gt_df, DistFnType.TRANSLATION)
            scale_error = dist_fn(dt_df, gt_df, DistFnType.SCALE)
            orient_error = dist_fn(dt_df, gt_df, DistFnType.ORIENTATION)

            metrics[dt_tp_indices, n_threshs : n_threshs + N_TP_ERRORS] = np.vstack(
                (trans_error, scale_error, orient_error)
            ).T
    return metrics
def filter_objs_to_roi(
    instances: np.ndarray, avm: ArgoverseMap, city_SE3_egovehicle: SE3, city_name: str
) -> np.ndarray:
    """Filter objects to the region of interest (5 meter dilation of driveable area).

    Instances outside the region of interest (ROI) are ignored during evaluation.

    Args:
        instances: Numpy array of shape (N,) with ObjectLabelRecord entries.
        avm: Argoverse map object.
        city_SE3_egovehicle: Pose of egovehicle within the city map at sweep time.
        city_name: Name of the city where the log was captured.

    Returns:
        Objects with at least one of their 4 cuboid corners located within the ROI.
    """
    # Stack the 4 bounding-box corners of every cuboid, in the egovehicle frame.
    ego_corners = np.vstack([obj.as_2d_bbox() for obj in instances])
    # Move the corners into the city frame so they can be tested against the map raster.
    city_corners = city_SE3_egovehicle.transform_point_cloud(ego_corners)
    corner_hits = avm.get_raster_layer_points_boolean(city_corners, city_name, "roi")
    # A cuboid survives if any of its 4 corners lies within the ROI.
    keep_mask = corner_hits.reshape(-1, 4).any(axis=1)
    return instances[keep_mask]
def filter_instances(
    instances: List[ObjectLabelRecord],
    target_class_name: str,
    filter_metric: FilterMetric,
    max_detection_range: float,
) -> np.ndarray:
    """Filter object instances by class name and distance from the egovehicle.

    Args:
        instances: Instances to be filtered (N,), either detections or ground truth labels.
        target_class_name: Name of the class of interest.
        filter_metric: Range metric used for filtering.
        max_detection_range: Maximum distance for range filtering.

    Returns:
        Filtered annotations.

    Raises:
        NotImplementedError: If `filter_metric` is not a supported metric.
    """
    # Keep only instances of the requested class before range filtering.
    same_class = np.array([obj for obj in instances if obj.label_class == target_class_name])

    if filter_metric != FilterMetric.EUCLIDEAN:
        raise NotImplementedError("This filter metric is not implemented!")

    kept = np.array([])
    centers = np.array([obj.translation for obj in same_class])
    if centers.shape[0] > 0:
        # Distance of each centroid from the egovehicle origin.
        ranges = np.linalg.norm(centers, axis=1)
        kept = same_class[ranges < max_detection_range]
    return kept
def rank(dts: np.ndarray) -> Tuple[np.ndarray, np.ndarray]:
    """Rank the detections by decreasing detector confidence.

    Args:
        dts: Detections (N,), a numpy array of objects exposing a `score` attribute.

    Returns:
        ranked_detections: Detections sorted by score, highest first (N,).
        scores: Detection scores as a column vector (N, 1), permuted with the
            same ranking so row i matches `ranked_detections[i]`.
    """
    scores = np.array([dt.score for dt in dts])
    # argsort is ascending; reverse for highest-confidence-first ordering.
    ranks = scores.argsort()[::-1]
    ranked_detections = dts[ranks]
    # BUGFIX: the scores must be permuted with the same ranking as the
    # detections — callers hstack these side by side with per-detection
    # metrics computed on the ranked order.
    return ranked_detections, scores[ranks][:, np.newaxis]
def interp(prec: np.ndarray, method: InterpType = InterpType.ALL) -> np.ndarray:
    """Interpolate the precision over all recall levels.

    See equation 2 in
    http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.167.6629&rep=rep1&type=pdf
    for more information.

    Args:
        prec: Precision at all recall levels (N,).
        method: Accumulation method.

    Returns:
        Interpolated precision at all recall levels (N,).

    Raises:
        NotImplementedError: If an unsupported interpolation method is given.
    """
    if method != InterpType.ALL:
        raise NotImplementedError("This interpolation method is not implemented!")
    # Sweep from the highest recall backwards carrying the running maximum,
    # then restore the original orientation: precision becomes monotonically
    # non-increasing in recall.
    return np.maximum.accumulate(prec[::-1])[::-1]
def compute_affinity_matrix(
    dts: List[ObjectLabelRecord], gts: List[ObjectLabelRecord], metric: AffFnType
) -> np.ndarray:
    """Calculate the affinity matrix between detections and ground truth labels.

    Args:
        dts: Detections (N,).
        gts: Ground truth labels (M,).
        metric: Affinity metric type.

    Returns:
        Affinity scores between detections and ground truth annotations (N, M).

    Raises:
        NotImplementedError: If an unsupported affinity metric is given.
    """
    if metric != AffFnType.CENTER:
        raise NotImplementedError("This similarity metric is not implemented!")
    # Negate the pairwise center distances so that closer pairs score higher.
    det_centers = np.array([det.translation for det in dts])
    lab_centers = np.array([lab.translation for lab in gts])
    return -cdist(det_centers, lab_centers)
def calc_ap(gt_ranked: np.ndarray, recalls_interp: np.ndarray, ninst: int) -> Tuple[float, np.ndarray]:
    """Compute precision and recall, interpolated over n fixed recall points.

    Args:
        gt_ranked: Per-detection true-positive outcomes, ranked by confidence.
            NOTE(review): assumed to be a boolean array — `~tp` below is a
            logical NOT only for booleans; confirm with callers.
        recalls_interp: Interpolated recall values.
        ninst: Number of instances of this class.

    Returns:
        avg_precision: Average precision.
        precisions_interp: Interpolated precision values.
    """
    tp = gt_ranked

    # BUGFIX: `np.int` was deprecated in NumPy 1.20 and removed in 1.24;
    # the builtin `int` is the documented replacement.
    cumulative_tp = np.cumsum(tp, dtype=int)
    cumulative_fp = np.cumsum(~tp, dtype=int)
    cumulative_fn = ninst - cumulative_tp

    # Tiny epsilon in the denominator guards against 0/0 when there are
    # neither true nor false positives yet.
    precisions = cumulative_tp / (cumulative_tp + cumulative_fp + np.finfo(float).eps)
    recalls = cumulative_tp / (cumulative_tp + cumulative_fn)

    # Enforce monotonically non-increasing precision, then sample it at the
    # fixed recall points; precision is 0 beyond the largest achieved recall.
    precisions = interp(precisions)
    precisions_interp = np.interp(recalls_interp, recalls, precisions, right=0)
    avg_precision = precisions_interp.mean()
    return avg_precision, precisions_interp
def dist_fn(dts: pd.DataFrame, gts: pd.DataFrame, metric: DistFnType) -> np.ndarray:
    """Distance functions between detections and ground truth.

    Args:
        dts: Detections (N, D) where D is the number of attributes in `ObjectLabelRecord`.
        gts: Ground truth labels (N, D) where D is the number of attributes in `ObjectLabelRecord`.
        metric: Distance function type.

    Returns:
        Distance between the detections and ground truth, using the provided metric (N,).

    Raises:
        NotImplementedError: If an unsupported distance metric is given.
    """
    if metric == DistFnType.TRANSLATION:
        # Row-wise Euclidean distance between matched detection/GT centers.
        dt_centers = np.vstack(dts["translation"].array)
        gt_centers = np.vstack(gts["translation"].array)
        trans_errors = np.linalg.norm(dt_centers - gt_centers, axis=1)
        return trans_errors
    elif metric == DistFnType.SCALE:
        # Scale error is 1 - IoU of the axis-aligned, origin-centered boxes.
        dt_dims = dts[["width", "length", "height"]]
        gt_dims = gts[["width", "length", "height"]]
        scale_errors = 1 - iou_aligned_3d(dt_dims, gt_dims)
        return scale_errors
    elif metric == DistFnType.ORIENTATION:
        # Re-order quaternions to go from Argoverse format to scipy format, then the third euler angle (z) is yaw.
        dt_quats = np.vstack(dts["quaternion"].array)
        dt_yaws = R.from_quat(quat_argo2scipy_vectorized(dt_quats)).as_euler("xyz")[:, 2]
        gt_quats = np.vstack(gts["quaternion"].array)
        gt_yaws = R.from_quat(quat_argo2scipy_vectorized(gt_quats)).as_euler("xyz")[:, 2]
        # Wrap the yaw difference so the reported error lies in [0, pi).
        orientation_errors = wrap_angle(dt_yaws - gt_yaws)
        return orientation_errors
    else:
        raise NotImplementedError("This distance metric is not implemented!")
def iou_aligned_3d(dt_dims: pd.DataFrame, gt_dims: pd.DataFrame) -> np.ndarray:
    """Calculate the 3d, axis-aligned (vertical axis alignment) IoU.

    Both boxes are aligned to their +x axis and their centroids are placed at
    the origin before computation of the intersection-over-union, so the
    overlap along each axis is simply the smaller of the two extents.

    Args:
        dt_dims: Detections (N, 3).
        gt_dims: Ground truth labels (N, 3).

    Returns:
        Intersection-over-union between the detections and their assigned
        ground truth labels (N,).
    """
    # Element-wise min gives the per-axis overlap; max gives the bounding extent.
    intersection = np.minimum(dt_dims, gt_dims).prod(axis=1)
    union = np.maximum(dt_dims, gt_dims).prod(axis=1)
    iou = intersection / union
    return iou.values
def wrap_angle(angles: np.ndarray, period: float = np.pi) -> np.ndarray:
    """Map angles (in radians) from domain [-inf, inf] to [0, pi).

    This function is the inverse of `np.unwrap`.

    Args:
        angles: Input angles, any real values.
        period: Wrapping period; defaults to pi.

    Returns:
        Angles (in radians) mapped to the interval [0, pi).
    """
    # Fold negative angles onto the non-negative half-line first.
    wrapped = np.abs(angles)
    # Whole periods and remainder in a single pass.
    periods, remainder = np.divmod(wrapped, period)
    # Wherever at least one full period was consumed, take the set complement
    # of the (nonzero) remainder w.r.t. [0, period] so the image stays in
    # [0, period).
    overflow = np.nonzero(periods)
    wrapped[overflow] = period - remainder[overflow]
    return wrapped
def plot(rec_interp: np.ndarray, prec_interp: np.ndarray, cls_name: str, figs_fpath: Path) -> Path:
    """Plot and save the precision/recall curve for one class.

    Args:
        rec_interp: Interpolated recall data of shape (N,).
        prec_interp: Interpolated precision data of shape (N,).
        cls_name: Class name.
        figs_fpath: Path to the folder which will contain the output figures.

    Returns:
        Path of the saved plot file.
    """
    plt.plot(rec_interp, prec_interp)
    plt.title("PR Curve")
    plt.xlabel("Recall")
    plt.ylabel("Precision")

    out_fpath = Path(f"{figs_fpath}/{cls_name}.png")
    plt.savefig(out_fpath)
    # Close the figure so repeated calls do not draw onto the same axes.
    plt.close()
    return out_fpath
| 40.054 | 119 | 0.702352 |
7955ded7fe8612798256b6cf763cd0e64b5573c1 | 2,697 | py | Python | src/streamlink/plugins/tv4play.py | Billy2011/streamlink | 5f99ec52e0a9c315aeee00b96287edc45adaccd3 | [
"BSD-2-Clause"
] | 1 | 2019-09-14T10:19:47.000Z | 2019-09-14T10:19:47.000Z | src/streamlink/plugins/tv4play.py | Billy2011/streamlink | 5f99ec52e0a9c315aeee00b96287edc45adaccd3 | [
"BSD-2-Clause"
] | 1 | 2018-07-12T18:18:05.000Z | 2018-07-12T18:18:05.000Z | src/streamlink/plugins/tv4play.py | Billy2011/streamlink | 5f99ec52e0a9c315aeee00b96287edc45adaccd3 | [
"BSD-2-Clause"
] | null | null | null | """
$description Live TV channels and video on-demand service from TV4, a Swedish free-to-air broadcaster.
$url tv4play.se
$url fotbollskanalen.se
$type live, vod
$region Sweden
$notes Only non-premium streams are supported
"""
import logging
import re
from streamlink.compat import urljoin
from streamlink.plugin import Plugin, PluginError, pluginmatcher
from streamlink.plugin.api import validate
from streamlink.stream.hls import HLSStream
log = logging.getLogger(__name__)
@pluginmatcher(re.compile(r"""
    https?://(?:www\.)?
    (?:
        tv4play\.se/program/[^?/]+/[^?/]+
        |
        fotbollskanalen\.se/video
    )
    /(?P<video_id>\d+)
""", re.VERBOSE))
class TV4Play(Plugin):
    # Numeric video id extracted from the URL; resolved lazily by `get_video_id`.
    video_id = None
    api_url = "https://playback-api.b17g.net"
    # Asset-metadata endpoint template, formatted with the video id.
    api_assets = urljoin(api_url, "/asset/{0}")

    # Schema for the asset-metadata response: only the title and the
    # playback-manifest URI are consumed.
    _meta_schema = validate.Schema(
        {
            "metadata": {
                "title": validate.text
            },
            "mediaUri": validate.text
        }
    )

    @property
    def get_video_id(self):
        """Return the video id from the matched URL, caching it on first access."""
        if self.video_id is None:
            self.video_id = self.match.group("video_id")
            log.debug("Found video ID: {0}".format(self.video_id))
        return self.video_id

    def get_metadata(self):
        """Fetch the asset metadata, set `self.title`, and return the parsed payload.

        Raises:
            PluginError: If the asset does not exist (API answers HTTP 404).
        """
        params = {
            "device": "browser",
            "protocol": "hls",
            "service": "tv4",
        }
        try:
            res = self.session.http.get(
                self.api_assets.format(self.get_video_id),
                params=params
            )
        except Exception as e:
            # The API answers 404 for unknown or removed assets; surface a
            # friendlier message, otherwise re-raise untouched.
            if "404 Client Error" in str(e):
                raise PluginError("This Video is not available")
            raise e
        log.debug("Found metadata")
        metadata = self.session.http.json(res, schema=self._meta_schema)
        self.title = metadata["metadata"]["title"]
        return metadata

    def get_title(self):
        """Return the stream title, fetching metadata on demand.

        NOTE(review): assumes the `Plugin` base class provides a default
        `title` attribute before `get_metadata` has run — confirm.
        """
        if self.title is None:
            self.get_metadata()
        return self.title

    def _get_streams(self):
        """Yield HLS stream variants for the matched video.

        Raises:
            PluginError: If the playback endpoint rejects the request with
                HTTP 401 (geo-blocked outside Sweden).
        """
        metadata = self.get_metadata()
        try:
            res = self.session.http.get(urljoin(self.api_url, metadata["mediaUri"]))
        except Exception as e:
            # A 401 from the playback endpoint indicates geo-blocking.
            if "401 Client Error" in str(e):
                raise PluginError("This Video is not available in your country")
            raise e
        log.debug("Found stream data")
        data = self.session.http.json(res)
        hls_url = data["playbackItem"]["manifestUrl"]
        log.debug("URL={0}".format(hls_url))
        for s in HLSStream.parse_variant_playlist(self.session,
                                                  hls_url).items():
            yield s


# Entry point picked up by streamlink's plugin loader.
__plugin__ = TV4Play
| 27.804124 | 102 | 0.579162 |
7955def331c9fca840f719f3c0e832bb6e237649 | 749 | py | Python | 2020/src/j2.py | Kytabyte/CCC | 6f98e81c7fef38bf70e68188db38863cc0cba2f4 | [
"Apache-2.0"
] | 8 | 2020-12-13T01:29:14.000Z | 2022-02-15T09:02:27.000Z | 2020/src/j2.py | Kytabyte/CCC | 6f98e81c7fef38bf70e68188db38863cc0cba2f4 | [
"Apache-2.0"
] | null | null | null | 2020/src/j2.py | Kytabyte/CCC | 6f98e81c7fef38bf70e68188db38863cc0cba2f4 | [
"Apache-2.0"
] | 2 | 2021-02-05T19:59:33.000Z | 2021-09-14T23:25:52.000Z | import collections
import itertools
import functools
import math
import re
import bisect
import random
# Contest I/O helpers. Converted from named lambdas to `def`s (PEP 8 E731):
# same names and call signatures, but with docstrings and real tracebacks.
def rint():
    """Read one line from stdin and return it as an int."""
    return int(input())


def rstr():
    """Read one line from stdin as a string."""
    return input()


def rints():
    """Read one line of whitespace-separated ints from stdin."""
    return list(map(int, input().split()))


def rstrs():
    """Read one line of whitespace-separated tokens from stdin."""
    return input().split()


def wmat(n, mat, sep):
    """Format header value `n` followed by the rows of `mat`, one row per
    line, with cells joined by `sep`."""
    return '{}\n{}'.format(n, '\n'.join(sep.join(map(str, row)) for row in mat))


def warr(n, arr, sep):
    """Format header value `n` followed by the elements of `arr` joined by `sep`."""
    return '{}\n{}'.format(n, sep.join(map(str, arr)))


def wl(sep, *arr):
    """Join arbitrary positional values into a single `sep`-separated string."""
    return sep.join(map(str, arr))
def main():
    """Solve one test case read from stdin.

    Reads a limit, an initial step size, and a growth factor, then prints the
    zero-based index of the step at which the running total first exceeds the
    limit.
    """
    limit = rint()
    step = rint()
    factor = rint()

    # With no growth the sequence is arithmetic and the answer is a single
    # integer division.
    if factor == 1:
        print(limit // step)
        return

    total = 0
    rounds = 0
    while True:
        total += step
        step *= factor
        if total > limit:
            print(rounds)
            break
        rounds += 1


if __name__ == '__main__':
    main()
| 19.205128 | 96 | 0.555407 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.