hexsha stringlengths 40 40 | size int64 4 996k | ext stringclasses 8
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 4 245 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 245 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 245 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 4 996k | avg_line_length float64 1.33 58.2k | max_line_length int64 2 323k | alphanum_fraction float64 0 0.97 | content_no_comment stringlengths 0 946k | is_comment_constant_removed bool 2
classes | is_sharp_comment_removed bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f71df2573cd5a2a55288807df292984fea16b131 | 4,046 | py | Python | lib/python2.7/site-packages/wrapt/arguments.py | nishaero/wifi-userseg-ryu | 1132f2c813b79eff755bdd1a9e73e7ad3980af7c | [
"Apache-2.0"
] | 99 | 2015-02-27T02:21:41.000Z | 2021-02-09T15:13:25.000Z | lib/python2.7/site-packages/wrapt/arguments.py | nishaero/wifi-userseg-ryu | 1132f2c813b79eff755bdd1a9e73e7ad3980af7c | [
"Apache-2.0"
] | 114 | 2015-01-16T15:06:49.000Z | 2018-04-13T20:29:18.000Z | lib/python2.7/site-packages/wrapt/arguments.py | nishaero/wifi-userseg-ryu | 1132f2c813b79eff755bdd1a9e73e7ad3980af7c | [
"Apache-2.0"
] | 88 | 2016-11-27T02:16:11.000Z | 2020-02-28T05:10:26.000Z | # This is a copy of the inspect.getcallargs() function from Python 2.7
# so we can provide it for use under Python 2.6. As the code in this
# file derives from the Python distribution, it falls under the version
# of the PSF license used for Python 2.7.
from inspect import getargspec, ismethod
def getcallargs(func, *positional, **named):
"""Get the mapping of arguments to values.
A dict is returned, with keys the function argument names (including the
names of the * and ** arguments, if any), and values the respective bound
values from 'positional' and 'named'."""
args, varargs, varkw, defaults = getargspec(func)
f_name = func.__name__
arg2value = {}
# The following closures are basically because of tuple parameter unpacking.
assigned_tuple_params = []
def assign(arg, value):
if isinstance(arg, str):
arg2value[arg] = value
else:
assigned_tuple_params.append(arg)
value = iter(value)
for i, subarg in enumerate(arg):
try:
subvalue = next(value)
except StopIteration:
raise ValueError('need more than %d %s to unpack' %
(i, 'values' if i > 1 else 'value'))
assign(subarg,subvalue)
try:
next(value)
except StopIteration:
pass
else:
raise ValueError('too many values to unpack')
def is_assigned(arg):
if isinstance(arg,str):
return arg in arg2value
return arg in assigned_tuple_params
if ismethod(func) and func.im_self is not None:
# implicit 'self' (or 'cls' for classmethods) argument
positional = (func.im_self,) + positional
num_pos = len(positional)
num_total = num_pos + len(named)
num_args = len(args)
num_defaults = len(defaults) if defaults else 0
for arg, value in zip(args, positional):
assign(arg, value)
if varargs:
if num_pos > num_args:
assign(varargs, positional[-(num_pos-num_args):])
else:
assign(varargs, ())
elif 0 < num_args < num_pos:
raise TypeError('%s() takes %s %d %s (%d given)' % (
f_name, 'at most' if defaults else 'exactly', num_args,
'arguments' if num_args > 1 else 'argument', num_total))
elif num_args == 0 and num_total:
if varkw:
if num_pos:
# XXX: We should use num_pos, but Python also uses num_total:
raise TypeError('%s() takes exactly 0 arguments '
'(%d given)' % (f_name, num_total))
else:
raise TypeError('%s() takes no arguments (%d given)' %
(f_name, num_total))
for arg in args:
if isinstance(arg, str) and arg in named:
if is_assigned(arg):
raise TypeError("%s() got multiple values for keyword "
"argument '%s'" % (f_name, arg))
else:
assign(arg, named.pop(arg))
if defaults: # fill in any missing values with the defaults
for arg, value in zip(args[-num_defaults:], defaults):
if not is_assigned(arg):
assign(arg, value)
if varkw:
assign(varkw, named)
elif named:
unexpected = next(iter(named))
if isinstance(unexpected, unicode):
unexpected = unexpected.encode(sys.getdefaultencoding(), 'replace')
raise TypeError("%s() got an unexpected keyword argument '%s'" %
(f_name, unexpected))
unassigned = num_args - len([arg for arg in args if is_assigned(arg)])
if unassigned:
num_required = num_args - num_defaults
raise TypeError('%s() takes %s %d %s (%d given)' % (
f_name, 'at least' if defaults else 'exactly', num_required,
'arguments' if num_required > 1 else 'argument', num_total))
return arg2value
| 42.145833 | 80 | 0.578843 |
from inspect import getargspec, ismethod
def getcallargs(func, *positional, **named):
args, varargs, varkw, defaults = getargspec(func)
f_name = func.__name__
arg2value = {}
assigned_tuple_params = []
def assign(arg, value):
if isinstance(arg, str):
arg2value[arg] = value
else:
assigned_tuple_params.append(arg)
value = iter(value)
for i, subarg in enumerate(arg):
try:
subvalue = next(value)
except StopIteration:
raise ValueError('need more than %d %s to unpack' %
(i, 'values' if i > 1 else 'value'))
assign(subarg,subvalue)
try:
next(value)
except StopIteration:
pass
else:
raise ValueError('too many values to unpack')
def is_assigned(arg):
if isinstance(arg,str):
return arg in arg2value
return arg in assigned_tuple_params
if ismethod(func) and func.im_self is not None:
positional = (func.im_self,) + positional
num_pos = len(positional)
num_total = num_pos + len(named)
num_args = len(args)
num_defaults = len(defaults) if defaults else 0
for arg, value in zip(args, positional):
assign(arg, value)
if varargs:
if num_pos > num_args:
assign(varargs, positional[-(num_pos-num_args):])
else:
assign(varargs, ())
elif 0 < num_args < num_pos:
raise TypeError('%s() takes %s %d %s (%d given)' % (
f_name, 'at most' if defaults else 'exactly', num_args,
'arguments' if num_args > 1 else 'argument', num_total))
elif num_args == 0 and num_total:
if varkw:
if num_pos:
raise TypeError('%s() takes exactly 0 arguments '
'(%d given)' % (f_name, num_total))
else:
raise TypeError('%s() takes no arguments (%d given)' %
(f_name, num_total))
for arg in args:
if isinstance(arg, str) and arg in named:
if is_assigned(arg):
raise TypeError("%s() got multiple values for keyword "
"argument '%s'" % (f_name, arg))
else:
assign(arg, named.pop(arg))
if defaults:
for arg, value in zip(args[-num_defaults:], defaults):
if not is_assigned(arg):
assign(arg, value)
if varkw:
assign(varkw, named)
elif named:
unexpected = next(iter(named))
if isinstance(unexpected, unicode):
unexpected = unexpected.encode(sys.getdefaultencoding(), 'replace')
raise TypeError("%s() got an unexpected keyword argument '%s'" %
(f_name, unexpected))
unassigned = num_args - len([arg for arg in args if is_assigned(arg)])
if unassigned:
num_required = num_args - num_defaults
raise TypeError('%s() takes %s %d %s (%d given)' % (
f_name, 'at least' if defaults else 'exactly', num_required,
'arguments' if num_required > 1 else 'argument', num_total))
return arg2value
| true | true |
f71df272eade1b9d37dd1ad57e3ad4edcedf453e | 4,349 | py | Python | tensorflow/examples/learn/text_classification_cnn.py | toptaldev92/tensorflow | 1fd1f65d1b0896149e44a1f105267c27994010d9 | [
"Apache-2.0"
] | null | null | null | tensorflow/examples/learn/text_classification_cnn.py | toptaldev92/tensorflow | 1fd1f65d1b0896149e44a1f105267c27994010d9 | [
"Apache-2.0"
] | null | null | null | tensorflow/examples/learn/text_classification_cnn.py | toptaldev92/tensorflow | 1fd1f65d1b0896149e44a1f105267c27994010d9 | [
"Apache-2.0"
] | 1 | 2021-04-22T09:17:52.000Z | 2021-04-22T09:17:52.000Z | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Example of Estimator for CNN-based text classification with DBpedia data."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import sys
import numpy as np
import pandas
from sklearn import metrics
import tensorflow as tf
from tensorflow.contrib import learn
FLAGS = None
MAX_DOCUMENT_LENGTH = 100
EMBEDDING_SIZE = 20
N_FILTERS = 10
WINDOW_SIZE = 20
FILTER_SHAPE1 = [WINDOW_SIZE, EMBEDDING_SIZE]
FILTER_SHAPE2 = [WINDOW_SIZE, N_FILTERS]
POOLING_WINDOW = 4
POOLING_STRIDE = 2
n_words = 0
def cnn_model(x, y):
"""2 layer Convolutional network to predict from sequence of words
to a class."""
# Convert indexes of words into embeddings.
# This creates embeddings matrix of [n_words, EMBEDDING_SIZE] and then
# maps word indexes of the sequence into [batch_size, sequence_length,
# EMBEDDING_SIZE].
y = tf.one_hot(y, 15, 1, 0)
word_vectors = learn.ops.categorical_variable(x, n_classes=n_words,
embedding_size=EMBEDDING_SIZE, name='words')
word_vectors = tf.expand_dims(word_vectors, 3)
with tf.variable_scope('CNN_Layer1'):
# Apply Convolution filtering on input sequence.
conv1 = tf.contrib.layers.convolution2d(word_vectors, N_FILTERS,
FILTER_SHAPE1, padding='VALID')
# Add a RELU for non linearity.
conv1 = tf.nn.relu(conv1)
# Max pooling across output of Convolution+Relu.
pool1 = tf.nn.max_pool(conv1, ksize=[1, POOLING_WINDOW, 1, 1],
strides=[1, POOLING_STRIDE, 1, 1], padding='SAME')
# Transpose matrix so that n_filters from convolution becomes width.
pool1 = tf.transpose(pool1, [0, 1, 3, 2])
with tf.variable_scope('CNN_Layer2'):
# Second level of convolution filtering.
conv2 = tf.contrib.layers.convolution2d(pool1, N_FILTERS,
FILTER_SHAPE2, padding='VALID')
# Max across each filter to get useful features for classification.
pool2 = tf.squeeze(tf.reduce_max(conv2, 1), squeeze_dims=[1])
# Apply regular WX + B and classification.
prediction, loss = learn.models.logistic_regression(pool2, y)
train_op = tf.contrib.layers.optimize_loss(
loss, tf.contrib.framework.get_global_step(),
optimizer='Adam', learning_rate=0.01)
return {'class': tf.argmax(prediction, 1), 'prob': prediction}, loss, train_op
def main(unused_argv):
global n_words
# Prepare training and testing data
dbpedia = learn.datasets.load_dataset(
'dbpedia', test_with_fake_data=FLAGS.test_with_fake_data)
x_train = pandas.DataFrame(dbpedia.train.data)[1]
y_train = pandas.Series(dbpedia.train.target)
x_test = pandas.DataFrame(dbpedia.test.data)[1]
y_test = pandas.Series(dbpedia.test.target)
# Process vocabulary
vocab_processor = learn.preprocessing.VocabularyProcessor(MAX_DOCUMENT_LENGTH)
x_train = np.array(list(vocab_processor.fit_transform(x_train)))
x_test = np.array(list(vocab_processor.transform(x_test)))
n_words = len(vocab_processor.vocabulary_)
print('Total words: %d' % n_words)
# Build model
classifier = learn.Estimator(model_fn=cnn_model)
# Train and predict
classifier.fit(x_train, y_train, steps=100)
y_predicted = [
p['class'] for p in classifier.predict(x_test, as_iterable=True)]
score = metrics.accuracy_score(y_test, y_predicted)
print('Accuracy: {0:f}'.format(score))
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument(
'--test_with_fake_data',
default=False,
help='Test the example code with fake data.',
action='store_true'
)
FLAGS, unparsed = parser.parse_known_args()
tf.app.run(main=main, argv=[sys.argv[0]] + unparsed)
| 36.241667 | 80 | 0.727983 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import sys
import numpy as np
import pandas
from sklearn import metrics
import tensorflow as tf
from tensorflow.contrib import learn
FLAGS = None
MAX_DOCUMENT_LENGTH = 100
EMBEDDING_SIZE = 20
N_FILTERS = 10
WINDOW_SIZE = 20
FILTER_SHAPE1 = [WINDOW_SIZE, EMBEDDING_SIZE]
FILTER_SHAPE2 = [WINDOW_SIZE, N_FILTERS]
POOLING_WINDOW = 4
POOLING_STRIDE = 2
n_words = 0
def cnn_model(x, y):
y = tf.one_hot(y, 15, 1, 0)
word_vectors = learn.ops.categorical_variable(x, n_classes=n_words,
embedding_size=EMBEDDING_SIZE, name='words')
word_vectors = tf.expand_dims(word_vectors, 3)
with tf.variable_scope('CNN_Layer1'):
conv1 = tf.contrib.layers.convolution2d(word_vectors, N_FILTERS,
FILTER_SHAPE1, padding='VALID')
conv1 = tf.nn.relu(conv1)
pool1 = tf.nn.max_pool(conv1, ksize=[1, POOLING_WINDOW, 1, 1],
strides=[1, POOLING_STRIDE, 1, 1], padding='SAME')
pool1 = tf.transpose(pool1, [0, 1, 3, 2])
with tf.variable_scope('CNN_Layer2'):
conv2 = tf.contrib.layers.convolution2d(pool1, N_FILTERS,
FILTER_SHAPE2, padding='VALID')
pool2 = tf.squeeze(tf.reduce_max(conv2, 1), squeeze_dims=[1])
prediction, loss = learn.models.logistic_regression(pool2, y)
train_op = tf.contrib.layers.optimize_loss(
loss, tf.contrib.framework.get_global_step(),
optimizer='Adam', learning_rate=0.01)
return {'class': tf.argmax(prediction, 1), 'prob': prediction}, loss, train_op
def main(unused_argv):
global n_words
dbpedia = learn.datasets.load_dataset(
'dbpedia', test_with_fake_data=FLAGS.test_with_fake_data)
x_train = pandas.DataFrame(dbpedia.train.data)[1]
y_train = pandas.Series(dbpedia.train.target)
x_test = pandas.DataFrame(dbpedia.test.data)[1]
y_test = pandas.Series(dbpedia.test.target)
vocab_processor = learn.preprocessing.VocabularyProcessor(MAX_DOCUMENT_LENGTH)
x_train = np.array(list(vocab_processor.fit_transform(x_train)))
x_test = np.array(list(vocab_processor.transform(x_test)))
n_words = len(vocab_processor.vocabulary_)
print('Total words: %d' % n_words)
classifier = learn.Estimator(model_fn=cnn_model)
classifier.fit(x_train, y_train, steps=100)
y_predicted = [
p['class'] for p in classifier.predict(x_test, as_iterable=True)]
score = metrics.accuracy_score(y_test, y_predicted)
print('Accuracy: {0:f}'.format(score))
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument(
'--test_with_fake_data',
default=False,
help='Test the example code with fake data.',
action='store_true'
)
FLAGS, unparsed = parser.parse_known_args()
tf.app.run(main=main, argv=[sys.argv[0]] + unparsed)
| true | true |
f71df295ebee6a0945c2a0d4147b9909a35dfeca | 3,622 | py | Python | trainer.py | filipesouzacit/RL-with-MCTS | cca1a8a79e5973a30b423c45a090e2473975c189 | [
"MIT"
] | 1 | 2021-01-13T00:24:16.000Z | 2021-01-13T00:24:16.000Z | trainer.py | filipesouzacit/RL-with-MCTS | cca1a8a79e5973a30b423c45a090e2473975c189 | [
"MIT"
] | null | null | null | trainer.py | filipesouzacit/RL-with-MCTS | cca1a8a79e5973a30b423c45a090e2473975c189 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thus Jan 07 15:54:13 2021
@author: Filipe Souza
Based on Josh Varty (https://github.com/JoshVarty/AlphaZeroSimple)
"""
import numpy as np
from random import shuffle
import keras
from gym_go import gogame
from monte_carlo_tree_search import MCTS
class Trainer:
def __init__(self, game, model, args):
self.game = game
self.model = model
self.args = args
self.mcts = MCTS(self.game, self.model, self.args)
def exceute_episode(self):
train_examples = []
current_player = 1
state = gogame.init_state(self.args['boardSize'])
while True:
#print("while True")
canonical_board = gogame.canonical_form(state)
self.mcts = MCTS(self.game, self.model, self.args)
root = self.mcts.run(self.model, canonical_board, to_play=1)
action_probs = [0 for _ in range((self.args['boardSize']* self.args['boardSize'])+1)]
for k, v in root.children.items():
action_probs[k] = v.visit_count
action_probs = action_probs / np.sum(action_probs)
train_examples.append((canonical_board, current_player, action_probs))
action = root.select_action(temperature=1)
state = gogame.next_state(state, action, canonical=False)
current_player = - current_player
reward = gogame.winning(state)*current_player if gogame.game_ended(state) else None
if reward is not None:
ret = []
for hist_state, hist_current_player, hist_action_probs in train_examples:
# [Board, currentPlayer, actionProbabilities, Reward]
tfBoard = np.array([hist_state[0],hist_state[1],hist_state[3]]).transpose().tolist()
#ret.append(np.array([tfBoard,tfBoard, hist_action_probs, reward * ((-1) ** (hist_current_player != current_player))]))
ret.append((tfBoard,hist_action_probs, reward * ((-1) ** (hist_current_player != current_player))))
return ret
def learn(self):
for i in range(1, self.args['numIters'] + 1):
print("numIters: {}/{}".format(i, self.args['numIters']))
train_examples = []
for eps in range(self.args['numEps']):
print("numEps: {}/{}".format(eps, self.args['numEps']))
iteration_train_examples = self.exceute_episode()
train_examples.extend(iteration_train_examples)
shuffle(train_examples)
self.train(train_examples)
def train(self, trainD):
# Define the checkpoint
checkpoint = keras.callbacks.ModelCheckpoint(self.args['checkpointPath'], monitor="val_loss",
mode="min", save_best_only=True, verbose=0)
# train the network
print("Training network...")
x = [i[0] for i in trainD]
x = np.array(x)
y1 = [i[1] for i in trainD]
y2 = [i[2] for i in trainD]
y1 = np.array(y1)
y2 = np.array(y2)
history = self.model.model.fit(x,y={"action_output": y1, "Value_output": y2},
validation_split=0.2,
batch_size=self.args['batchSize'], epochs=self.args['epochs'],
verbose=1, callbacks=[checkpoint])
# print accurary of the best epoch
self.model.model.load_weights(self.args['checkpointPath'])
| 36.959184 | 139 | 0.57841 |
import numpy as np
from random import shuffle
import keras
from gym_go import gogame
from monte_carlo_tree_search import MCTS
class Trainer:
def __init__(self, game, model, args):
self.game = game
self.model = model
self.args = args
self.mcts = MCTS(self.game, self.model, self.args)
def exceute_episode(self):
train_examples = []
current_player = 1
state = gogame.init_state(self.args['boardSize'])
while True:
canonical_board = gogame.canonical_form(state)
self.mcts = MCTS(self.game, self.model, self.args)
root = self.mcts.run(self.model, canonical_board, to_play=1)
action_probs = [0 for _ in range((self.args['boardSize']* self.args['boardSize'])+1)]
for k, v in root.children.items():
action_probs[k] = v.visit_count
action_probs = action_probs / np.sum(action_probs)
train_examples.append((canonical_board, current_player, action_probs))
action = root.select_action(temperature=1)
state = gogame.next_state(state, action, canonical=False)
current_player = - current_player
reward = gogame.winning(state)*current_player if gogame.game_ended(state) else None
if reward is not None:
ret = []
for hist_state, hist_current_player, hist_action_probs in train_examples:
tfBoard = np.array([hist_state[0],hist_state[1],hist_state[3]]).transpose().tolist()
ret.append((tfBoard,hist_action_probs, reward * ((-1) ** (hist_current_player != current_player))))
return ret
def learn(self):
for i in range(1, self.args['numIters'] + 1):
print("numIters: {}/{}".format(i, self.args['numIters']))
train_examples = []
for eps in range(self.args['numEps']):
print("numEps: {}/{}".format(eps, self.args['numEps']))
iteration_train_examples = self.exceute_episode()
train_examples.extend(iteration_train_examples)
shuffle(train_examples)
self.train(train_examples)
def train(self, trainD):
checkpoint = keras.callbacks.ModelCheckpoint(self.args['checkpointPath'], monitor="val_loss",
mode="min", save_best_only=True, verbose=0)
print("Training network...")
x = [i[0] for i in trainD]
x = np.array(x)
y1 = [i[1] for i in trainD]
y2 = [i[2] for i in trainD]
y1 = np.array(y1)
y2 = np.array(y2)
history = self.model.model.fit(x,y={"action_output": y1, "Value_output": y2},
validation_split=0.2,
batch_size=self.args['batchSize'], epochs=self.args['epochs'],
verbose=1, callbacks=[checkpoint])
self.model.model.load_weights(self.args['checkpointPath'])
| true | true |
f71df2c133cd5cfc48196609f04f3fb72dd5de7f | 1,297 | py | Python | phrase_guess/genetic.py | chauhanjatin10/Genetic_alogs | 1d2a7cd4b2f14e0d79eaaa2eefee48a2fab608bd | [
"MIT"
] | null | null | null | phrase_guess/genetic.py | chauhanjatin10/Genetic_alogs | 1d2a7cd4b2f14e0d79eaaa2eefee48a2fab608bd | [
"MIT"
] | null | null | null | phrase_guess/genetic.py | chauhanjatin10/Genetic_alogs | 1d2a7cd4b2f14e0d79eaaa2eefee48a2fab608bd | [
"MIT"
] | null | null | null | import random
import statistics
import time
def _generate_gene(length,geneset):
genes = []
while len(genes) < length:
sample = min(length - len(genes), len(geneset))
genes.extend(random.sample(geneset,sample))
return ''.join(genes)
def _mutate(parent,geneset):
childgene = list(parent)
index = random.randrange(0,len(parent))
newgene, alternative_gene = random.sample(geneset,2)
childgene[index] = alternative_gene if newgene==childgene[index] else newgene
return ''.join(childgene)
def get_best(fitness_gene, targetlength, optimalfitness,geneset,display):
random.seed()
bestparent = _generate_gene(targetlength,geneset)
fitness = fitness_gene(bestparent)
display(bestparent)
if fitness >= optimalfitness:
return bestparent
while True:
child = _mutate(bestparent,geneset)
childfitness = fitness_gene(child)
if fitness >= childfitness:
continue
display(child)
if childfitness>= optimalfitness:
return child
fitness = childfitness
bestparent = child
class Benchmark:
@staticmethod
def run(function):
timings = []
for i in range(10):
starttime = time.time()
function()
seconds = time.time() - starttime
timings.append(seconds)
mean = statistics.mean(timings)
print(1+i," ",mean, " ",statistics.stdev(timings,mean) if i>1 else 0)
| 25.431373 | 78 | 0.737086 | import random
import statistics
import time
def _generate_gene(length,geneset):
genes = []
while len(genes) < length:
sample = min(length - len(genes), len(geneset))
genes.extend(random.sample(geneset,sample))
return ''.join(genes)
def _mutate(parent,geneset):
childgene = list(parent)
index = random.randrange(0,len(parent))
newgene, alternative_gene = random.sample(geneset,2)
childgene[index] = alternative_gene if newgene==childgene[index] else newgene
return ''.join(childgene)
def get_best(fitness_gene, targetlength, optimalfitness,geneset,display):
random.seed()
bestparent = _generate_gene(targetlength,geneset)
fitness = fitness_gene(bestparent)
display(bestparent)
if fitness >= optimalfitness:
return bestparent
while True:
child = _mutate(bestparent,geneset)
childfitness = fitness_gene(child)
if fitness >= childfitness:
continue
display(child)
if childfitness>= optimalfitness:
return child
fitness = childfitness
bestparent = child
class Benchmark:
@staticmethod
def run(function):
timings = []
for i in range(10):
starttime = time.time()
function()
seconds = time.time() - starttime
timings.append(seconds)
mean = statistics.mean(timings)
print(1+i," ",mean, " ",statistics.stdev(timings,mean) if i>1 else 0)
| true | true |
f71df387bcaccd4339c5f6848567d11bb20038fe | 179 | py | Python | sim/config/color_palette.py | sebamenabar/oc-fewshot-public | 2dad8c9f24cb1bfe72d8b13b33d28f6788d86ca8 | [
"MIT"
] | 18 | 2020-07-12T11:07:27.000Z | 2022-02-06T04:17:23.000Z | sim/config/color_palette.py | renmengye/oc-fewshot | eb12bd5b426518fd8353304f0760f5c24f1b3c12 | [
"MIT"
] | 2 | 2021-10-14T17:38:50.000Z | 2021-11-10T14:07:04.000Z | sim/config/color_palette.py | renmengye/oc-fewshot | eb12bd5b426518fd8353304f0760f5c24f1b3c12 | [
"MIT"
] | 6 | 2020-11-11T19:18:28.000Z | 2021-06-04T14:20:03.000Z | colors = {
"blue" : "#256EFF",
"violet" : "#46237A",
"green" : "#3DDC97",
"white" : "#FCFCFC",
"red" : "#FF495C",
"gray" : "#E8E8E8" #"#8D99AE"
} | 22.375 | 35 | 0.418994 | colors = {
"blue" : "#256EFF",
"violet" : "#46237A",
"green" : "#3DDC97",
"white" : "#FCFCFC",
"red" : "#FF495C",
"gray" : "#E8E8E8"
} | true | true |
f71df3a6fa58f8584dfdc62f31e6a1ab73062b3a | 6,754 | py | Python | tf3d/object_detection/preprocessor_test.py | gunpowder78/google-research | d41bbaca1eb9bfd980ec2b3fd201c3ddb4d1f2e5 | [
"Apache-2.0"
] | 1 | 2022-03-13T21:48:52.000Z | 2022-03-13T21:48:52.000Z | tf3d/object_detection/preprocessor_test.py | gunpowder78/google-research | d41bbaca1eb9bfd980ec2b3fd201c3ddb4d1f2e5 | [
"Apache-2.0"
] | null | null | null | tf3d/object_detection/preprocessor_test.py | gunpowder78/google-research | d41bbaca1eb9bfd980ec2b3fd201c3ddb4d1f2e5 | [
"Apache-2.0"
] | 1 | 2022-03-30T07:20:29.000Z | 2022-03-30T07:20:29.000Z | # coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for ...object_detection.preprocessor."""
import tensorflow as tf
from tf3d import standard_fields
from tf3d.object_detection import preprocessor
class ObjectDetectionPreprocessorTest(tf.test.TestCase):
def _image_correspondence_fn(self, inputs):
return {
'view_images': {
'rgb_view':
tf.cast(
tf.zeros([5, 200, 300, 3], dtype=tf.int32), dtype=tf.uint8),
},
'view_indices_2d': {
'rgb_view':
tf.random.uniform([5, 100, 2],
minval=-10,
maxval=1000,
dtype=tf.int32)
}
}
def _get_input_dict(self, height=240, width=320):
return {
standard_fields.InputDataFields.camera_image:
tf.zeros((height, width, 3), dtype=tf.uint8),
standard_fields.InputDataFields.point_positions:
tf.random.uniform((100, 3), minval=-1, maxval=1),
standard_fields.InputDataFields.camera_intrinsics:
tf.constant([
[160.0, 0.0, 160.0], # fx, s, cx
[0.0, 160.0, 120.0], # 0, fy, cy
[0.0, 0.0, 1.0], # 0, 0, 1
]),
standard_fields.InputDataFields.camera_rotation_matrix:
tf.eye(3),
standard_fields.InputDataFields.camera_translation:
tf.constant([0., 0., 2.]),
standard_fields.InputDataFields.objects_class:
tf.constant([1, 4, 5]),
standard_fields.InputDataFields.objects_length:
tf.constant([[4.0], [1.0], [1.0]]),
standard_fields.InputDataFields.objects_height:
tf.constant([[2.0], [1.0], [4.0]]),
standard_fields.InputDataFields.objects_width:
tf.constant([[2.0], [1.0], [1.0]]),
standard_fields.InputDataFields.objects_rotation_matrix:
tf.constant([[[1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, 1.0]],
[[1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, 1.0]],
[[1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, 1.0]]]),
standard_fields.InputDataFields.objects_center:
tf.constant([[4.0, 4.0, 4.0], [2.5, 2.5, 2.5], [0.5, 1.5, 9.5]]),
standard_fields.InputDataFields.objects_difficulty:
tf.constant([[1], [1], [1]]),
standard_fields.InputDataFields.objects_instance_id:
tf.constant([[1], [2], [1]]),
standard_fields.InputDataFields.objects_has_3d_info:
tf.constant([1, 1, 0]),
standard_fields.InputDataFields.camera_image_name:
tf.convert_to_tensor('image', tf.string),
}
def test_preprocess_output_shapes(self):
height, width = (240, 320)
input_dict = self._get_input_dict(height, width)
object_keys = preprocessor._OBJECT_KEYS
output_keys = [
standard_fields.InputDataFields.camera_intrinsics,
standard_fields.InputDataFields.camera_rotation_matrix,
standard_fields.InputDataFields.camera_translation,
standard_fields.InputDataFields.point_positions,
standard_fields.InputDataFields.num_valid_points,
standard_fields.InputDataFields.object_class_points,
standard_fields.InputDataFields.object_center_points,
standard_fields.InputDataFields.object_height_points,
standard_fields.InputDataFields.object_width_points,
standard_fields.InputDataFields.object_rotation_matrix_points,
standard_fields.InputDataFields.object_length_points,
standard_fields.InputDataFields.object_instance_id_points,
]
output_dict = preprocessor.preprocess(
inputs=input_dict,
images_points_correspondence_fn=self._image_correspondence_fn,
image_preprocess_fn_dic=None)
for key in output_keys:
self.assertIn(key, output_dict)
self.assertEqual(
output_dict[standard_fields.InputDataFields.camera_intrinsics].shape,
(3, 3))
self.assertEqual(
output_dict[
standard_fields.InputDataFields.camera_rotation_matrix].shape,
(3, 3))
self.assertEqual(
output_dict[standard_fields.InputDataFields.camera_translation].shape,
(3,))
self.assertEqual(
output_dict[standard_fields.InputDataFields.point_positions].shape,
(100, 3))
self.assertEqual(
output_dict[standard_fields.InputDataFields.num_valid_points].numpy(),
100)
self.assertEqual(
output_dict[standard_fields.InputDataFields.object_class_points].shape,
(100,))
self.assertEqual(
output_dict[standard_fields.InputDataFields.object_center_points].shape,
(100, 3))
self.assertEqual(
output_dict[standard_fields.InputDataFields.object_height_points].shape,
(100, 1))
self.assertEqual(
output_dict[standard_fields.InputDataFields.object_width_points].shape,
(100, 1))
self.assertEqual(
output_dict[standard_fields.InputDataFields.object_length_points].shape,
(100, 1))
self.assertEqual(
output_dict[standard_fields.InputDataFields
.object_rotation_matrix_points].shape, (100, 3, 3))
self.assertEqual(
output_dict[
standard_fields.InputDataFields.object_instance_id_points].shape,
(100,))
for key in object_keys:
self.assertEqual(output_dict[key].shape[0], 2)
def test_preprocess_output_keys(self):
height, width = (240, 320)
input_dict = self._get_input_dict(height, width)
output_dict = preprocessor.preprocess(
inputs=input_dict,
images_points_correspondence_fn=self._image_correspondence_fn,
output_keys=[standard_fields.InputDataFields.camera_image],
image_preprocess_fn_dic=None)
self.assertIn(standard_fields.InputDataFields.camera_image, output_dict)
self.assertEqual(len(output_dict.keys()), 1)
def test_preprocess_missing_input_raises(self):
with self.assertRaises(ValueError):
empty_input = {}
preprocessor.preprocess(inputs=empty_input)
if __name__ == '__main__':
tf.test.main()
| 40.933333 | 80 | 0.656352 |
import tensorflow as tf
from tf3d import standard_fields
from tf3d.object_detection import preprocessor
class ObjectDetectionPreprocessorTest(tf.test.TestCase):
  """Tests for tf3d.object_detection.preprocessor.preprocess."""

  def _image_correspondence_fn(self, inputs):
    """Returns a fixed 5-view image set and random per-view 2d point indices.

    The returned dict mimics the output of an images/points correspondence
    function: 5 zeroed 200x300 RGB views plus [5, 100, 2] integer indices
    (intentionally including out-of-range values from -10 to 1000).
    """
    return {
        'view_images': {
            'rgb_view':
                tf.cast(
                    tf.zeros([5, 200, 300, 3], dtype=tf.int32), dtype=tf.uint8),
        },
        'view_indices_2d': {
            'rgb_view':
                tf.random.uniform([5, 100, 2],
                                  minval=-10,
                                  maxval=1000,
                                  dtype=tf.int32)
        }
    }

  def _get_input_dict(self, height=240, width=320):
    """Builds a synthetic preprocess() input: 100 points and 3 boxes.

    The third object has objects_has_3d_info == 0, so preprocess() is
    expected to drop it from the object-level outputs.
    """
    return {
        standard_fields.InputDataFields.camera_image:
            tf.zeros((height, width, 3), dtype=tf.uint8),
        standard_fields.InputDataFields.point_positions:
            tf.random.uniform((100, 3), minval=-1, maxval=1),
        standard_fields.InputDataFields.camera_intrinsics:
            tf.constant([
                [160.0, 0.0, 160.0],
                [0.0, 160.0, 120.0],
                [0.0, 0.0, 1.0],
            ]),
        standard_fields.InputDataFields.camera_rotation_matrix:
            tf.eye(3),
        standard_fields.InputDataFields.camera_translation:
            tf.constant([0., 0., 2.]),
        standard_fields.InputDataFields.objects_class:
            tf.constant([1, 4, 5]),
        standard_fields.InputDataFields.objects_length:
            tf.constant([[4.0], [1.0], [1.0]]),
        standard_fields.InputDataFields.objects_height:
            tf.constant([[2.0], [1.0], [4.0]]),
        standard_fields.InputDataFields.objects_width:
            tf.constant([[2.0], [1.0], [1.0]]),
        standard_fields.InputDataFields.objects_rotation_matrix:
            tf.constant([[[1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, 1.0]],
                         [[1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, 1.0]],
                         [[1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, 1.0]]]),
        standard_fields.InputDataFields.objects_center:
            tf.constant([[4.0, 4.0, 4.0], [2.5, 2.5, 2.5], [0.5, 1.5, 9.5]]),
        standard_fields.InputDataFields.objects_difficulty:
            tf.constant([[1], [1], [1]]),
        standard_fields.InputDataFields.objects_instance_id:
            tf.constant([[1], [2], [1]]),
        standard_fields.InputDataFields.objects_has_3d_info:
            tf.constant([1, 1, 0]),
        standard_fields.InputDataFields.camera_image_name:
            tf.convert_to_tensor('image', tf.string),
    }

  def test_preprocess_output_shapes(self):
    """preprocess() emits every expected key with the expected shape."""
    height, width = (240, 320)
    input_dict = self._get_input_dict(height, width)
    object_keys = preprocessor._OBJECT_KEYS
    output_keys = [
        standard_fields.InputDataFields.camera_intrinsics,
        standard_fields.InputDataFields.camera_rotation_matrix,
        standard_fields.InputDataFields.camera_translation,
        standard_fields.InputDataFields.point_positions,
        standard_fields.InputDataFields.num_valid_points,
        standard_fields.InputDataFields.object_class_points,
        standard_fields.InputDataFields.object_center_points,
        standard_fields.InputDataFields.object_height_points,
        standard_fields.InputDataFields.object_width_points,
        standard_fields.InputDataFields.object_rotation_matrix_points,
        standard_fields.InputDataFields.object_length_points,
        standard_fields.InputDataFields.object_instance_id_points,
    ]
    output_dict = preprocessor.preprocess(
        inputs=input_dict,
        images_points_correspondence_fn=self._image_correspondence_fn,
        image_preprocess_fn_dic=None)
    for key in output_keys:
      self.assertIn(key, output_dict)
    self.assertEqual(
        output_dict[standard_fields.InputDataFields.camera_intrinsics].shape,
        (3, 3))
    self.assertEqual(
        output_dict[
            standard_fields.InputDataFields.camera_rotation_matrix].shape,
        (3, 3))
    self.assertEqual(
        output_dict[standard_fields.InputDataFields.camera_translation].shape,
        (3,))
    self.assertEqual(
        output_dict[standard_fields.InputDataFields.point_positions].shape,
        (100, 3))
    self.assertEqual(
        output_dict[standard_fields.InputDataFields.num_valid_points].numpy(),
        100)
    self.assertEqual(
        output_dict[standard_fields.InputDataFields.object_class_points].shape,
        (100,))
    self.assertEqual(
        output_dict[standard_fields.InputDataFields.object_center_points].shape,
        (100, 3))
    self.assertEqual(
        output_dict[standard_fields.InputDataFields.object_height_points].shape,
        (100, 1))
    self.assertEqual(
        output_dict[standard_fields.InputDataFields.object_width_points].shape,
        (100, 1))
    self.assertEqual(
        output_dict[standard_fields.InputDataFields.object_length_points].shape,
        (100, 1))
    self.assertEqual(
        output_dict[standard_fields.InputDataFields
                    .object_rotation_matrix_points].shape, (100, 3, 3))
    self.assertEqual(
        output_dict[
            standard_fields.InputDataFields.object_instance_id_points].shape,
        (100,))
    # only the 2 objects with valid 3d info survive preprocessing
    for key in object_keys:
      self.assertEqual(output_dict[key].shape[0], 2)

  def test_preprocess_output_keys(self):
    """output_keys filters the result down to exactly the requested keys."""
    height, width = (240, 320)
    input_dict = self._get_input_dict(height, width)
    output_dict = preprocessor.preprocess(
        inputs=input_dict,
        images_points_correspondence_fn=self._image_correspondence_fn,
        output_keys=[standard_fields.InputDataFields.camera_image],
        image_preprocess_fn_dic=None)
    self.assertIn(standard_fields.InputDataFields.camera_image, output_dict)
    self.assertEqual(len(output_dict.keys()), 1)

  def test_preprocess_missing_input_raises(self):
    """preprocess() rejects an empty input dict."""
    with self.assertRaises(ValueError):
      empty_input = {}
      preprocessor.preprocess(inputs=empty_input)
if __name__ == '__main__':
tf.test.main()
| true | true |
f71df3f459ff6917944a00897a64d96a40e9755e | 23,397 | py | Python | misc/config_tools/library/launch_cfg_lib.py | jackwhich/acrn-hypervisor-1 | 2ff11c2ef04a2668979b3e363e25f13cf48376ac | [
"BSD-3-Clause"
] | null | null | null | misc/config_tools/library/launch_cfg_lib.py | jackwhich/acrn-hypervisor-1 | 2ff11c2ef04a2668979b3e363e25f13cf48376ac | [
"BSD-3-Clause"
] | null | null | null | misc/config_tools/library/launch_cfg_lib.py | jackwhich/acrn-hypervisor-1 | 2ff11c2ef04a2668979b3e363e25f13cf48376ac | [
"BSD-3-Clause"
] | null | null | null | # Copyright (C) 2019 Intel Corporation.
#
# SPDX-License-Identifier: BSD-3-Clause
#
import os
import getopt
import re
import common
import board_cfg_lib
import scenario_cfg_lib
import lxml
import lxml.etree
# Module-wide accumulator of validation errors: {error_key: message}.
ERR_LIST = {}

# Selectable values exposed to the configuration UI.
BOOT_TYPE = ['no', 'ovmf']
RTOS_TYPE = ['no', 'Soft RT', 'Hard RT']
DM_VUART0 = ['Disable', 'Enable']
y_n = ['y', 'n']
USER_VM_TYPES = ['CLEARLINUX', 'ANDROID', 'ALIOS', 'PREEMPT-RT LINUX', 'VXWORKS', 'WINDOWS', 'ZEPHYR', 'YOCTO', 'UBUNTU', 'GENERIC LINUX']
# Subset of USER_VM_TYPES treated as Linux-like by is_linux_like().
LINUX_LIKE_OS = ['CLEARLINUX', 'PREEMPT-RT LINUX', 'YOCTO', 'UBUNTU', 'GENERIC LINUX']

# PCI class descriptions accepted for each pass-through device category.
PT_SUB_PCI = {}
PT_SUB_PCI['usb_xdci'] = ['USB controller']
PT_SUB_PCI['gpu'] = ['VGA compatible controller']
PT_SUB_PCI['ipu'] = ['Multimedia controller']
PT_SUB_PCI['ipu_i2c'] = ['Signal processing controller']
PT_SUB_PCI['cse'] = ['Communication controller']
PT_SUB_PCI['audio'] = ['Audio device', 'Multimedia audio controller']
PT_SUB_PCI['audio_codec'] = ['Signal processing controller']
PT_SUB_PCI['sd_card'] = ['SD Host controller']
PT_SUB_PCI['wifi'] = ['Ethernet controller', 'Network controller', '802.1a controller',
                        '802.1b controller', 'Wireless controller']
PT_SUB_PCI['bluetooth'] = ['Signal processing controller']
PT_SUB_PCI['ethernet'] = ['Ethernet controller', 'Network controller']
PT_SUB_PCI['sata'] = ['SATA controller']
PT_SUB_PCI['nvme'] = ['Non-Volatile memory controller']

# passthrough devices for board
PASSTHRU_DEVS = ['usb_xdci', 'gpu', 'ipu', 'ipu_i2c', 'cse', 'audio', 'sata',
                    'nvme', 'audio_codec', 'sd_card', 'ethernet', 'wifi', 'bluetooth']

# Fixed virtual PCI slot assignments; slots between 3 and 30 are handed out
# dynamically by virtual_dev_slot().
PT_SLOT = {
        "hostbridge":0,
        "lpc":1,
        "pci-gvt":2,
        "virtio-blk":3,
        "igd-lpc":31,
    }

# {user_vmid: [bool per virtio-blk entry]} -- filled by check_block_mount().
MOUNT_FLAG_DIC = {}
def usage(file_name):
    """Print the command-line usage text for this script."""
    print("usage= {} [h]".format(file_name), end="")
    help_lines = (
        "--board <board_info_file> --scenario <scenario_info_file> --launch <launch_info_file> --user_vmid <user_vmid id> --out [output folder]",
        'board_info_file : file name of the board info',
        'scenario_info_file : file name of the scenario info',
        'launch_info_file : file name of the launch info',
        'user_vmid : this is the relative id for post launch vm in scenario info XML:[1..max post launch vm]',
        'output folder : path to acrn-hypervisor_folder',
    )
    for help_line in help_lines:
        print(help_line)
def get_param(args):
    """
    Parse the script's command line.

    :param args: full argv list (args[0] is the script name)
    :return: 6-tuple (err_dic, board_info_file, scenario_info_file,
             launch_info_file, user_vmid, output_folder); on any usage
             error err_dic is non-empty and the file entries are False
    """
    vm_th = '0'
    err_dic = {}
    board_info_file = False
    scenario_info_file = False
    launch_info_file = False
    output_folder = False
    param_list = ['--board', '--scenario', '--launch', '--user_vmid']

    # every mandatory option must be present before detailed parsing
    for arg_str in param_list:
        if arg_str not in args:
            usage(args[0])
            err_dic['common error: wrong parameter'] = "wrong usage"
            return (err_dic, board_info_file, scenario_info_file, launch_info_file, int(vm_th), output_folder)

    args_list = args[1:]
    (optlist, args_list) = getopt.getopt(args_list, '', ['board=', 'scenario=', 'launch=', 'user_vmid=', 'out='])
    for arg_k, arg_v in optlist:
        if arg_k == '--board':
            board_info_file = arg_v
        if arg_k == '--scenario':
            scenario_info_file = arg_v
        if arg_k == '--launch':
            launch_info_file = arg_v
        if arg_k == '--out':
            output_folder = arg_v
        if '--user_vmid' in args:
            if arg_k == '--user_vmid':
                vm_th = arg_v
                if not vm_th.isnumeric():
                    err_dic['common error: wrong parameter'] = "--user_vmid should be a number"
                    # bug fix: the original returned int(vm_th) here, which
                    # raises ValueError on the very non-numeric string being
                    # reported; return 0 so the caller sees the error dict
                    return (err_dic, board_info_file, scenario_info_file, launch_info_file, 0, output_folder)

    if not board_info_file or not scenario_info_file or not launch_info_file:
        usage(args[0])
        err_dic['common error: wrong parameter'] = "wrong usage"
        return (err_dic, board_info_file, scenario_info_file, launch_info_file, int(vm_th), output_folder)

    # each input XML must exist on disk
    if not os.path.exists(board_info_file):
        err_dic['common error: wrong parameter'] = "{} does not exist!".format(board_info_file)
        return (err_dic, board_info_file, scenario_info_file, launch_info_file, int(vm_th), output_folder)

    if not os.path.exists(scenario_info_file):
        err_dic['common error: wrong parameter'] = "{} does not exist!".format(scenario_info_file)
        return (err_dic, board_info_file, scenario_info_file, launch_info_file, int(vm_th), output_folder)

    if not os.path.exists(launch_info_file):
        err_dic['common error: wrong parameter'] = "{} does not exist!".format(launch_info_file)
        return (err_dic, board_info_file, scenario_info_file, launch_info_file, int(vm_th), output_folder)

    return (err_dic, board_info_file, scenario_info_file, launch_info_file, int(vm_th), output_folder)
def launch_vm_cnt(config_file):
    """
    Count the <user_vm> entries in a launch XML file.

    :param config_file: path of the launch XML to read
    :return: number of post-launch VMs described in the file
    """
    root = common.get_config_root(config_file)
    return sum(1 for node in root if node.tag == "user_vm")
def get_post_num_list():
    """
    Collect the ids of all <user_vm> nodes from the launch XML.

    :return: list of integer post-launch VM ids
    """
    root = common.get_config_root(common.LAUNCH_INFO_FILE)
    return [int(node.attrib['id']) for node in root if node.tag == "user_vm"]
def post_vm_cnt(config_file):
    """
    Count the POST_LAUNCHED_VM entries in the scenario load order.

    :param config_file: unused; kept for interface compatibility
    :return: number of post-launched VMs in the scenario
    """
    return list(common.LOAD_ORDER.values()).count("POST_LAUNCHED_VM")
def get_post_vm_cnt():
    """
    Return (post-VM count from the launch XML, post-VM count from the
    scenario XML) as a 2-tuple.
    """
    from_launch = launch_vm_cnt(common.LAUNCH_INFO_FILE)
    from_scenario = post_vm_cnt(common.SCENARIO_INFO_FILE)
    return (from_launch, from_scenario)
def get_sos_vmid():
    """Return the vm id of the Service VM, or '' if none is configured."""
    for vm_id, load_order in common.LOAD_ORDER.items():
        if load_order == "SERVICE_VM":
            return vm_id
    return ''
def get_bdf_from_tag(config_file, branch_tag, tag_str):
    """
    Read per-VM device descriptions from the XML and keep only the leading
    'bb:dd.f' BDF token of each non-empty value.

    :param config_file: path of the XML to read
    :param branch_tag: parent tag to search under
    :param tag_str: leaf tag whose text holds the device description
    :return: {vm id: bdf string or original falsy value}
    """
    raw_map = common.get_leaf_tag_map(config_file, branch_tag, tag_str)
    return {idx: (desc.split()[0] if desc else desc)
            for idx, desc in raw_map.items()}
def get_vpid_from_bdf(bdf_vpid_map, bdf_list):
    """
    Map each post-launch VM id to the 'vid pid' string of its selected BDF.

    :param bdf_vpid_map: {bdf: 'vid:pid'} lookup built from the board XML
    :param bdf_list: {vm id: bdf string} selections ('' when unselected)
    :return: {vm id: 'vid pid'} -- unselected VMs map to ''; a selected BDF
             with no match in bdf_vpid_map produces no entry at all
    """
    vpid_list = {}
    for p_id in get_post_num_list():
        selected_bdf = bdf_list[p_id]
        for bdf_key, vpid_val in bdf_vpid_map.items():
            if bdf_key == selected_bdf:
                # convert "vvvv:pppp" to "vvvv pppp"
                vpid_list[p_id] = " ".join(vpid_val.split(':'))
            elif not selected_bdf:
                vpid_list[p_id] = ''
    return vpid_list
def get_user_vm_type():
    """Return {vm id: user_vm_type} read from the launch XML."""
    return common.get_leaf_tag_map(common.LAUNCH_INFO_FILE, "user_vm_type")
def get_user_vm_names():
    """Return {vm id: vm_name} read from the launch XML."""
    return common.get_leaf_tag_map(common.LAUNCH_INFO_FILE, "vm_name")
def is_bdf_format(bdf_str):
    """
    Loosely validate a PCI BDF string of the form 'bb:dd.f'.

    An empty/falsy value is treated as valid (the device is simply not
    selected). Otherwise the string must be exactly 7 characters and
    contain both ':' and '.'.
    """
    if not bdf_str:
        return True
    return ':' in bdf_str and '.' in bdf_str and len(bdf_str) == 7
def is_vpid_format(vpid_str):
    """
    Loosely validate a 'vvvv pppp' vendor/product-id string.

    An empty/falsy value is treated as valid (device not selected).
    Otherwise the string must be exactly 9 characters and contain a space.
    """
    if not vpid_str:
        return True
    return ' ' in vpid_str and len(vpid_str) == 9
def pt_devs_check(bdf_list, vpid_list, item):
    """
    Validate the BDF and Vendor:Product strings selected for one
    pass-through device category, recording format errors in ERR_LIST.

    :param bdf_list: {vm id: 'bb:dd.f'} selections from the launch XML
    :param vpid_list: {vm id: 'vvvv pppp'} strings matching bdf_list
    :param item: the device category name (used in the error key)
    """
    # check bdf
    i_cnt = 1
    for bdf_str in bdf_list.values():
        if not is_bdf_format(bdf_str):
            key = "user_vm:id={},passthrough_devices,{}".format(i_cnt, item)
            # bug fix: corrected misspelled "Unkonw" in the error message
            ERR_LIST[key] = "Unknown BDF format of {} device".format(item)
        i_cnt += 1

    # check vpid
    i_cnt = 1
    for vpid_str in vpid_list.values():
        if not is_vpid_format(vpid_str):
            key = "user_vm:id={},passthrough_devices,{}".format(i_cnt, item)
            ERR_LIST[key] = "Unknown Vendor:Product ID format of {} device".format(item)
        i_cnt += 1
def empty_err(i_cnt, item):
    """
    Record an 'empty parameter' error in ERR_LIST.

    :param i_cnt: the launch vm index from config xml
    :param item: the item of tag from config xml
    """
    err_key = "user_vm:id={},{}".format(i_cnt, item)
    ERR_LIST[err_key] = "The parameter should not be empty"
def args_aval_check(arg_list, item, avl_list):
    """
    Check that every per-VM value for *item* is non-empty and one of the
    allowed choices, recording errors in ERR_LIST.

    :param arg_list: {vm id: value} read from the launch XML
    :param item: the tag name being checked (used in error keys)
    :param avl_list: the set of allowed values
    """
    # items listed here are exempt from validation
    if item in ['']:
        return

    vm_index = 1
    for value in arg_list.values():
        if value is None or not value.strip():
            empty_err(vm_index, item)
        elif value not in avl_list:
            err_key = "user_vm:id={},{}".format(vm_index, item)
            ERR_LIST[err_key] = "The {} is invalidate".format(item)
        vm_index += 1
def mem_size_check(arg_list, item):
    """
    Check each VM's requested memory size against the board's total memory,
    recording errors in ERR_LIST.

    :param arg_list: {vm id: memory size string in MB} from the launch XML
    :param item: the tag name being checked (used in error keys)
    """
    # total platform memory in MB from the board XML
    total_mem_mb = board_cfg_lib.get_total_mem()

    i_cnt = 1
    for arg_str in arg_list.values():
        if arg_str is None or not arg_str.strip():
            empty_err(i_cnt, item)
            i_cnt += 1
            continue

        mem_size_set = int(arg_str.strip())
        if mem_size_set > total_mem_mb:
            key = "user_vm:id={},{}".format(i_cnt, item)
            # bug fix: the original passed one argument to a two-placeholder
            # format string, raising IndexError instead of reporting the error
            ERR_LIST[key] = "{}MB should be less than total memory {}MB".format(
                mem_size_set, total_mem_mb)

        i_cnt += 1
def virtual_dev_slot(dev):
    """
    Return the virtual PCI slot assigned to *dev*, allocating a free slot
    in the 3..30 range and recording it in PT_SLOT when needed.

    Slots 6/14 and 7/15 are treated as conflicting pairs: if one of a pair
    is taken, the other is skipped.
    """
    # device already has an assigned slot: reuse it
    if dev in PT_SLOT:
        return PT_SLOT[dev]

    # otherwise scan the dynamic range for a free, non-conflicting slot
    for candidate in range(3, 31):
        taken = list(PT_SLOT.values())
        if candidate in taken:
            continue
        if (candidate == 6 and 14 in taken) or (candidate == 14 and 6 in taken):
            continue
        if (candidate == 7 and 15 in taken) or (candidate == 15 and 7 in taken):
            continue
        PT_SLOT[dev] = candidate
        break

    return candidate
def get_slot(bdf_list, dev):
    """
    Build a {vm id: virtual slot} map for *dev* across all post-launch VMs.

    VMs that did not select the device map to ''.
    """
    slot_list = {}
    for p_id in get_post_num_list():
        if bdf_list[p_id]:
            assigned = virtual_dev_slot(dev)
            PT_SLOT[dev] = assigned
            slot_list[p_id] = assigned
        else:
            slot_list[p_id] = ''
    return slot_list
def reset_pt_slot():
    """Restore PT_SLOT to its fixed default device-to-slot mapping."""
    global PT_SLOT
    PT_SLOT = {
        "hostbridge": 0,
        "lpc": 1,
        "pci-gvt": 2,
        "virtio-blk": 3,
        "igd-lpc": 31,
    }
def get_pt_dev():
    """Return the list of supported pass-through device categories."""
    return PASSTHRU_DEVS
def get_vuart1_from_scenario(vmid):
    """Return the vuart1 'base' configured for *vmid* in the scenario XML."""
    vuart1_map = common.get_vuart_info_id(common.SCENARIO_INFO_FILE, 1)
    return vuart1_map[vmid]['base']
def pt_devs_check_audio(audio_map, audio_codec_map):
    """
    Check the audio/audio_codec pass-through pairing: selecting the codec
    requires the audio device itself to be passed through as well.

    :param audio_map: {vm id: bdf} of the audio device
    :param audio_codec_map: {vm id: bdf} of the audio codec device
    """
    for vmid, bdf_audio in audio_map.items():
        bdf_codec = audio_codec_map[vmid]
        if bdf_codec and not bdf_audio:
            err_key = "user_vm:id={},passthrough_devices,{}".format(vmid, 'audio_codec')
            ERR_LIST[err_key] = "Audio codec device should be pass through together with Audio devcie!"
def check_block_mount(virtio_blk_dic):
    """
    Decide, per virtio-blk entry, whether it refers to a mountable rootfs
    ('device:image' where device is a known rootfs block device), and store
    the per-VM boolean lists in MOUNT_FLAG_DIC.

    :param virtio_blk_dic: {vm id: [virtio-blk entry strings]}
    """
    (blk_dev_list, num) = board_cfg_lib.get_rootfs(common.BOARD_INFO_FILE)
    for vmid, entries in virtio_blk_dic.items():
        mount_flags = []
        for entry in entries:
            if not entry:
                mount_flags.append(False)
                continue
            # "device:image" -- both parts must be present and the device
            # must be one of the board's rootfs block devices
            device, sep, image = entry.partition(':')
            mount_flags.append(bool(sep) and device in blk_dev_list and bool(image))
        MOUNT_FLAG_DIC[vmid] = mount_flags
def check_sriov_param(sriov_dev, pt_sel):
    """
    Validate SR-IOV gpu/network selections: a VM may not combine SR-IOV
    with the matching pass-through device, and the BDF must be well-formed.
    Errors are recorded in ERR_LIST.

    :param sriov_dev: {'gpu': {vm id: bdf}, 'network': {vm id: bdf}}
    :param pt_sel: pass-through selection object with a .bdf mapping
    """
    bdf_pattern = re.compile(r'^[\da-fA-F]{2}:[0-3][\da-fA-F]\.[0-7]$')
    for dev_type in ('gpu', 'network'):
        # SR-IOV 'network' corresponds to the 'ethernet' pass-through entry
        pt_devname = 'ethernet' if dev_type == 'network' else dev_type
        for vm_id, dev_bdf in sriov_dev[dev_type].items():
            if not dev_bdf:
                continue
            err_key = 'vmid:{} sriov {}'.format(vm_id, dev_type)
            if pt_sel.bdf[pt_devname][vm_id]:
                ERR_LIST[err_key] = 'this vm has {} passthrough and sriov {} at same time!'.format(pt_devname, dev_type)
            if not bdf_pattern.match(dev_bdf):
                ERR_LIST[err_key] = 'sriov {} bdf error'.format(dev_type)
def bdf_duplicate_check(bdf_dic):
    """
    Report an error (and stop) if the same BDF was selected for more than
    one pass-through device across all VMs.

    :param bdf_dic: {dev name: {vm id: bdf}} of all selections
    """
    seen_bdfs = []
    for dev, per_vm_bdfs in bdf_dic.items():
        for vm_i, dev_bdf in per_vm_bdfs.items():
            if not dev_bdf:
                continue
            if dev_bdf in seen_bdfs:
                err_key = "user_vm:id={},{},{}".format(vm_i, 'passthrough_devices', dev)
                ERR_LIST[err_key] = "You select the same device for {} pass-through !".format(dev)
                return
            seen_bdfs.append(dev_bdf)
def get_gpu_bdf():
    """
    Return the 'bb:dd.f' BDF of the board's VGA compatible controller.

    Returns '' when the board XML lists no VGA compatible controller
    (the original code raised NameError in that case because gpu_bdf
    was never assigned).
    """
    global gpu_bdf
    gpu_bdf = ''
    pci_lines = board_cfg_lib.get_info(common.BOARD_INFO_FILE, "<PCI_DEVICE>", "</PCI_DEVICE>")
    for line in pci_lines:
        if "VGA compatible controller" in line:
            # second tab-separated field holds the BDF; keep 'bb:dd.f'
            gpu_bdf = line.split('\t')[1]
            gpu_bdf = gpu_bdf[0:7]
    return gpu_bdf
def get_vpid_by_bdf(bdf):
    """
    Look up the 'vvvv pppp' vendor/product string for *bdf* in the board
    XML; returns '' when no matching line exists.
    """
    vpid = ''
    vpid_lines = board_cfg_lib.get_info(common.BOARD_INFO_FILE, "<PCI_VID_PID>", "</PCI_VID_PID>")
    for vpid_line in vpid_lines:
        if bdf in vpid_line:
            # third whitespace field is 'vvvv:pppp'; convert ':' to ' '
            vpid = " ".join(vpid_line.split()[2].split(':'))
    return vpid
def get_gpu_vpid():
    """Return the 'vvvv pppp' vendor/product string of the board's GPU."""
    return get_vpid_by_bdf(get_gpu_bdf())
def user_vm_cpu_affinity(user_vmid_cpu_affinity):
    """
    Translate launch-XML relative post-VM ids into absolute scenario VM ids
    by offsetting with the Service VM id.

    :param user_vmid_cpu_affinity: {relative user vm id: cpu affinity list}
    :return: {absolute vm id: cpu affinity list}
    """
    sos_vm_id = get_sos_vmid()
    return {int(rel_id) + int(sos_vm_id): affinity
            for rel_id, affinity in user_vmid_cpu_affinity.items()}
def check_slot(slot_db):
    """
    Normalize user-selected virtual slot values for pass-through devices.

    For each post-launch VM, any 'bus:slot.fun'-style value whose
    function-0 sibling is not already among that VM's selected slots is
    rewritten to function 0 (a PCI device must expose function 0).
    The 'gpu' category is skipped throughout.

    :param slot_db: {dev name: {user vm id: slot value}} -- mutated in place
    """
    slot_values = {}
    # init list of slot values for Post VM (the VM ids of the first device
    # entry cover all post VMs, so one pass is enough)
    for dev in slot_db.keys():
        for user_vmid in slot_db[dev].keys():
            slot_values[user_vmid] = []
        break

    # get slot values for Passthrough devices
    for dev in PASSTHRU_DEVS:
        if dev == 'gpu':
            continue
        for user_vmid,slot_str in slot_db[dev].items():
            if not slot_str:
                continue
            slot_values[user_vmid].append(slot_str)

    # update slot values and replace the fun=0 if there is no fun 0 in bdf list
    for dev in PASSTHRU_DEVS:
        if dev == 'gpu':
            continue
        for user_vmid,slot_str in slot_db[dev].items():
            # only 'bus:slot.fun' style strings are candidates for rewriting
            if not slot_str or ':' not in str(slot_str):
                continue
            bus_slot = slot_str[0:-1]
            bus_slot_fun0 = bus_slot + "0"
            if bus_slot_fun0 not in slot_values[user_vmid]:
                slot_db[dev][user_vmid] = bus_slot_fun0
                slot_values[user_vmid].append(bus_slot_fun0)
def is_linux_like(user_vm_type):
    """Return True when *user_vm_type* is one of the Linux-like OS names."""
    return user_vm_type in LINUX_LIKE_OS
def set_shm_regions(launch_item_values, scenario_info):
    """
    Populate the selectable IVSHMEM region values for each post-launch VM.

    Reads the scenario XML's FEATURES/IVSHMEM settings and, for every
    POST_LAUNCHED_VM, adds a 'user_vm:id=N,shm_regions,shm_region' entry
    to *launch_item_values* listing '' plus each 'name,size' pair the VM
    participates in.

    :param launch_item_values: dict of selectable values -- mutated in place
    :param scenario_info: path of the scenario XML file
    """
    try:
        raw_shmem_regions = common.get_hv_item_tag(scenario_info, "FEATURES", "IVSHMEM", "IVSHMEM_REGION")
        load_orders = common.get_leaf_tag_map(scenario_info, "load_order")
        shm_enabled = common.get_hv_item_tag(scenario_info, "FEATURES", "IVSHMEM", "IVSHMEM_ENABLED")
    except:
        # scenario XML without the expected IVSHMEM layout: nothing to add
        return
    sos_vm_id = 0
    for vm_id,load_order in load_orders.items():
        if load_order in ['SERVICE_VM']:
            sos_vm_id = vm_id
        elif load_order in ['POST_LAUNCHED_VM']:
            # launch-XML post-VM ids are relative to the Service VM id
            user_vmid = vm_id - sos_vm_id
            shm_region_key = 'user_vm:id={},shm_regions,shm_region'.format(user_vmid)
            launch_item_values[shm_region_key] = ['']
            if shm_enabled == 'y':
                for shmem_region in raw_shmem_regions:
                    if shmem_region is None or shmem_region.strip() == '':
                        continue
                    try:
                        # region format: "name, size, vmid1:vmid2:..."
                        shm_splited = shmem_region.split(',')
                        name = shm_splited[0].strip()
                        size = shm_splited[1].strip()
                        vm_id_list = [x.strip() for x in shm_splited[2].split(':')]
                        if str(vm_id) in vm_id_list:
                            launch_item_values[shm_region_key].append(','.join([name, size]))
                    except Exception as e:
                        print(e)
def set_pci_vuarts(launch_item_values, scenario_info):
    """
    Populate the selectable PCI communication-vuart ids for post-launch VMs.

    Adds the 'user_vm,console_vuart' choices and, for every POST_LAUNCHED_VM
    whose communication_vuart has base PCI_VUART in the scenario XML, a
    'user_vm:id=N,communication_vuarts,communication_vuart' entry listing ''
    plus the matching vuart ids.

    :param launch_item_values: dict of selectable values -- mutated in place
    :param scenario_info: path of the scenario XML file
    """
    try:
        launch_item_values['user_vm,console_vuart'] = DM_VUART0
        load_orders = common.get_leaf_tag_map(scenario_info, 'load_order')
        sos_vm_id = 0
        for vm_id,load_order in load_orders.items():
            if load_order in ['SERVICE_VM']:
                sos_vm_id = vm_id
        for vm in list(common.get_config_root(scenario_info)):
            if vm.tag == 'vm' and load_orders[int(vm.attrib['id'])] == 'POST_LAUNCHED_VM':
                # launch-XML post-VM ids are relative to the Service VM id
                user_vmid = int(vm.attrib['id']) - sos_vm_id
                pci_vuart_key = 'user_vm:id={},communication_vuarts,communication_vuart'.format(user_vmid)
                for elem in list(vm):
                    if elem.tag == 'communication_vuart':
                        for sub_elem in list(elem):
                            if sub_elem.tag == 'base' and sub_elem.text == 'PCI_VUART':
                                if pci_vuart_key not in launch_item_values.keys():
                                    launch_item_values[pci_vuart_key] = ['', elem.attrib['id']]
                                else:
                                    launch_item_values[pci_vuart_key].append(elem.attrib['id'])
    except:
        # scenario XML without the expected vuart layout: nothing to add
        return
def check_shm_regions(launch_shm_regions, scenario_info):
    """
    Validate that every IVSHMEM region selected in the launch XML is also
    configured in the scenario XML; on mismatch record one error in
    ERR_LIST and stop.

    :param launch_shm_regions: {user vm id: ['name,size' strings]}
    :param scenario_info: path of the scenario XML file
    """
    launch_item_values = {}
    set_shm_regions(launch_item_values, scenario_info)

    for user_vmid, shm_regions in launch_shm_regions.items():
        shm_region_key = 'user_vm:id={},shm_regions,shm_region'.format(user_vmid)
        for shm_region in shm_regions:
            if shm_region_key not in launch_item_values.keys() or shm_region not in launch_item_values[shm_region_key]:
                # bug fix: the two adjacent string literals used to concatenate
                # as "...decimalin MB..." -- a separating space was missing
                ERR_LIST[shm_region_key] = "shm {} should be configured in scenario setting and the size should be decimal " \
                                           "in MB and spaces should not exist.".format(shm_region)
                return
def check_console_vuart(launch_console_vuart, vuart0, scenario_info):
    """
    Validate per-VM console vuart selections; record at most one error in
    ERR_LIST and stop at the first problem.

    :param launch_console_vuart: {user vm id: 'Enable'/'Disable'} from launch XML
    :param vuart0: {user vm id: 'Enable'/'Disable'} legacy vuart0 selections
    :param scenario_info: path of the scenario XML file
    """
    vuarts = common.get_vuart_info(scenario_info)

    for user_vmid, console_vuart_enable in launch_console_vuart.items():
        key = 'user_vm:id={},console_vuart'.format(user_vmid)
        # legacy vuart0 and the PCI console vuart are mutually exclusive
        if console_vuart_enable == "Enable" and vuart0[user_vmid] == "Enable":
            ERR_LIST[key] = "vuart0 and console_vuart of user_vm {} should not be enabled " \
                "at the same time".format(user_vmid)
            return
        # NOTE(review): membership uses int(user_vmid) but indexing uses
        # user_vmid as-is -- presumably the keys are already ints; verify
        # against common.get_vuart_info().
        if console_vuart_enable == "Enable" and int(user_vmid) in vuarts.keys() \
             and 0 in vuarts[user_vmid] and vuarts[user_vmid][0]['base'] == "INVALID_PCI_BASE":
            ERR_LIST[key] = "console_vuart of user_vm {} should be enabled in scenario setting".format(user_vmid)
            return
def check_communication_vuart(launch_communication_vuarts, scenario_info):
    """
    Validate per-VM communication vuart id selections; record at most one
    error in ERR_LIST and stop at the first problem.

    :param launch_communication_vuarts: {user vm id: [vuart id strings]}
    :param scenario_info: path of the scenario XML file
    """
    vuarts = common.get_vuart_info(scenario_info)
    vuart1_setting = common.get_vuart_info_id(common.SCENARIO_INFO_FILE, 1)

    for user_vmid, vuart_list in launch_communication_vuarts.items():
        vuart_key = 'user_vm:id={},communication_vuarts,communication_vuart'.format(user_vmid)
        for vuart_id in vuart_list:
            if not vuart_id:
                return
            if int(vuart_id) not in vuarts[user_vmid].keys():
                # bug fix: the adjacent string literals used to concatenate as
                # "...configuredin scenario setting." -- a space was missing
                ERR_LIST[vuart_key] = "communication_vuart {} of user_vm {} should be configured " \
                    "in scenario setting.".format(vuart_id, user_vmid)
                return
            # PCI communication vuart 1 conflicts with legacy vuart 1
            if int(vuart_id) == 1 and vuarts[user_vmid][1]['base'] != "INVALID_PCI_BASE":
                if user_vmid in vuart1_setting.keys() and vuart1_setting[user_vmid]['base'] != "INVALID_COM_BASE":
                    ERR_LIST[vuart_key] = "user_vm {}'s communication_vuart 1 and legacy_vuart 1 should " \
                        "not be configured at the same time.".format(user_vmid)
                    return
def check_enable_ptm(launch_enable_ptm, scenario_info):
    """
    Check that every VM enabling PTM in the launch XML also has PTM set to
    'y' in the scenario XML; mismatches are recorded in ERR_LIST.

    :param launch_enable_ptm: {user vm id: 'y'/'n'} from the launch XML
    :param scenario_info: path of the scenario XML file
    """
    scenario_etree = lxml.etree.parse(scenario_info)
    ptm_enabled_ids = scenario_etree.xpath("//vm[PTM = 'y']/@id")
    for user_vmid, enable_ptm in launch_enable_ptm.items():
        if enable_ptm != 'y':
            continue
        if str(user_vmid) not in ptm_enabled_ids:
            err_key = 'user_vm:id={},enable_ptm'.format(user_vmid)
            ERR_LIST[err_key] = "PTM of user_vm:{} set to 'n' in scenario xml".format(user_vmid)
| 34.306452 | 147 | 0.622986 |
import os
import getopt
import re
import common
import board_cfg_lib
import scenario_cfg_lib
import lxml
import lxml.etree
ERR_LIST = {}
BOOT_TYPE = ['no', 'ovmf']
RTOS_TYPE = ['no', 'Soft RT', 'Hard RT']
DM_VUART0 = ['Disable', 'Enable']
y_n = ['y', 'n']
USER_VM_TYPES = ['CLEARLINUX', 'ANDROID', 'ALIOS', 'PREEMPT-RT LINUX', 'VXWORKS', 'WINDOWS', 'ZEPHYR', 'YOCTO', 'UBUNTU', 'GENERIC LINUX']
LINUX_LIKE_OS = ['CLEARLINUX', 'PREEMPT-RT LINUX', 'YOCTO', 'UBUNTU', 'GENERIC LINUX']
PT_SUB_PCI = {}
PT_SUB_PCI['usb_xdci'] = ['USB controller']
PT_SUB_PCI['gpu'] = ['VGA compatible controller']
PT_SUB_PCI['ipu'] = ['Multimedia controller']
PT_SUB_PCI['ipu_i2c'] = ['Signal processing controller']
PT_SUB_PCI['cse'] = ['Communication controller']
PT_SUB_PCI['audio'] = ['Audio device', 'Multimedia audio controller']
PT_SUB_PCI['audio_codec'] = ['Signal processing controller']
PT_SUB_PCI['sd_card'] = ['SD Host controller']
PT_SUB_PCI['wifi'] = ['Ethernet controller', 'Network controller', '802.1a controller',
'802.1b controller', 'Wireless controller']
PT_SUB_PCI['bluetooth'] = ['Signal processing controller']
PT_SUB_PCI['ethernet'] = ['Ethernet controller', 'Network controller']
PT_SUB_PCI['sata'] = ['SATA controller']
PT_SUB_PCI['nvme'] = ['Non-Volatile memory controller']
PASSTHRU_DEVS = ['usb_xdci', 'gpu', 'ipu', 'ipu_i2c', 'cse', 'audio', 'sata',
'nvme', 'audio_codec', 'sd_card', 'ethernet', 'wifi', 'bluetooth']
PT_SLOT = {
"hostbridge":0,
"lpc":1,
"pci-gvt":2,
"virtio-blk":3,
"igd-lpc":31,
}
MOUNT_FLAG_DIC = {}
def usage(file_name):
print("usage= {} [h]".format(file_name), end="")
print("--board <board_info_file> --scenario <scenario_info_file> --launch <launch_info_file> --user_vmid <user_vmid id> --out [output folder]")
print('board_info_file : file name of the board info')
print('scenario_info_file : file name of the scenario info')
print('launch_info_file : file name of the launch info')
print('user_vmid : this is the relative id for post launch vm in scenario info XML:[1..max post launch vm]')
print('output folder : path to acrn-hypervisor_folder')
def get_param(args):
vm_th = '0'
err_dic = {}
board_info_file = False
scenario_info_file = False
launch_info_file = False
output_folder = False
param_list = ['--board', '--scenario', '--launch', '--user_vmid']
for arg_str in param_list:
if arg_str not in args:
usage(args[0])
err_dic['common error: wrong parameter'] = "wrong usage"
return (err_dic, board_info_file, scenario_info_file, launch_info_file, int(vm_th), output_folder)
args_list = args[1:]
(optlist, args_list) = getopt.getopt(args_list, '', ['board=', 'scenario=', 'launch=', 'user_vmid=', 'out='])
for arg_k, arg_v in optlist:
if arg_k == '--board':
board_info_file = arg_v
if arg_k == '--scenario':
scenario_info_file = arg_v
if arg_k == '--launch':
launch_info_file = arg_v
if arg_k == '--out':
output_folder = arg_v
if '--user_vmid' in args:
if arg_k == '--user_vmid':
vm_th = arg_v
if not vm_th.isnumeric():
err_dic['common error: wrong parameter'] = "--user_vmid should be a number"
return (err_dic, board_info_file, scenario_info_file, launch_info_file, int(vm_th), output_folder)
if not board_info_file or not scenario_info_file or not launch_info_file:
usage(args[0])
err_dic['common error: wrong parameter'] = "wrong usage"
return (err_dic, board_info_file, scenario_info_file, launch_info_file, int(vm_th), output_folder)
if not os.path.exists(board_info_file):
err_dic['common error: wrong parameter'] = "{} does not exist!".format(board_info_file)
return (err_dic, board_info_file, scenario_info_file, launch_info_file, int(vm_th), output_folder)
if not os.path.exists(scenario_info_file):
err_dic['common error: wrong parameter'] = "{} does not exist!".format(scenario_info_file)
return (err_dic, board_info_file, scenario_info_file, launch_info_file, int(vm_th), output_folder)
if not os.path.exists(launch_info_file):
err_dic['common error: wrong parameter'] = "{} does not exist!".format(launch_info_file)
return (err_dic, board_info_file, scenario_info_file, launch_info_file, int(vm_th), output_folder)
return (err_dic, board_info_file, scenario_info_file, launch_info_file, int(vm_th), output_folder)
def launch_vm_cnt(config_file):
post_vm_count = 0
root = common.get_config_root(config_file)
for item in root:
if item.tag == "user_vm":
post_vm_count += 1
return post_vm_count
def get_post_num_list():
post_vm_list = []
root = common.get_config_root(common.LAUNCH_INFO_FILE)
for item in root:
if item.tag == "user_vm":
post_vm_list.append(int(item.attrib['id']))
return post_vm_list
def post_vm_cnt(config_file):
post_launch_cnt = 0
for load_order in common.LOAD_ORDER.values():
if load_order == "POST_LAUNCHED_VM":
post_launch_cnt += 1
return post_launch_cnt
def get_post_vm_cnt():
launch_vm_count = launch_vm_cnt(common.LAUNCH_INFO_FILE)
post_vm_count = post_vm_cnt(common.SCENARIO_INFO_FILE)
return (launch_vm_count, post_vm_count)
def get_sos_vmid():
sos_id = ''
for vm_i,load_order in common.LOAD_ORDER.items():
if load_order == "SERVICE_VM":
sos_id = vm_i
break
return sos_id
def get_bdf_from_tag(config_file, branch_tag, tag_str):
bdf_list = {}
bdf_list = common.get_leaf_tag_map(config_file, branch_tag, tag_str)
for idx, bdf_v in bdf_list.items():
if bdf_v:
bdf_list[idx] = bdf_v.split()[0]
return bdf_list
def get_vpid_from_bdf(bdf_vpid_map, bdf_list):
vpid_list = {}
post_vm_list = get_post_num_list()
for p_id in post_vm_list:
for bdf_k, vpid_v in bdf_vpid_map.items():
if bdf_k == bdf_list[p_id]:
tmp_vpid = " ".join(vpid_v.split(':'))
vpid_list[p_id] = tmp_vpid
elif not bdf_list[p_id]:
vpid_list[p_id] = ''
return vpid_list
def get_user_vm_type():
user_vm_types = common.get_leaf_tag_map(common.LAUNCH_INFO_FILE, "user_vm_type")
return user_vm_types
def get_user_vm_names():
user_vm_names = common.get_leaf_tag_map(common.LAUNCH_INFO_FILE, "vm_name")
return user_vm_names
def is_bdf_format(bdf_str):
bdf_len = 7
status = True
if not bdf_str:
return status
bdf_str_len = len(bdf_str)
if ':' in bdf_str and '.' in bdf_str and bdf_len == bdf_str_len:
status = True
else:
status = False
return status
def is_vpid_format(vpid_str):
status = True
if not vpid_str:
return status
vpid_len = 9
vpid_str_len = len(vpid_str)
if ' ' in vpid_str and vpid_len == vpid_str_len:
status = True
else:
status = False
return status
def pt_devs_check(bdf_list, vpid_list, item):
i_cnt = 1
for bdf_str in bdf_list.values():
if is_bdf_format(bdf_str):
continue
else:
key = "user_vm:id={},passthrough_devices,{}".format(i_cnt, item)
ERR_LIST[key] = "Unkonw the BDF format of {} device".format(item)
i_cnt += 1
i_cnt = 1
for vpid_str in vpid_list.values():
if is_vpid_format(vpid_str):
continue
else:
key = "user_vm:id={},passthrough_devices,{}".format(i_cnt, item)
ERR_LIST[key] = "Unkonw the Vendor:Product ID format of {} device".format(item)
i_cnt += 1
def empty_err(i_cnt, item):
key = "user_vm:id={},{}".format(i_cnt, item)
ERR_LIST[key] = "The parameter should not be empty"
def args_aval_check(arg_list, item, avl_list):
i_cnt = 1
skip_check_list = ['']
if item in skip_check_list:
return
for arg_str in arg_list.values():
if arg_str == None or not arg_str.strip():
empty_err(i_cnt, item)
i_cnt += 1
continue
if arg_str not in avl_list:
key = "user_vm:id={},{}".format(i_cnt, item)
ERR_LIST[key] = "The {} is invalidate".format(item)
i_cnt += 1
def mem_size_check(arg_list, item):
total_mem_mb = board_cfg_lib.get_total_mem()
i_cnt = 1
for arg_str in arg_list.values():
if arg_str == None or not arg_str.strip():
empty_err(i_cnt, item)
i_cnt += 1
continue
mem_size_set = int(arg_str.strip())
if mem_size_set > total_mem_mb:
key = "user_vm:id={},{}".format(i_cnt, item)
ERR_LIST[key] = "{}MB should be less than total memory {}MB".format(item)
i_cnt += 1
def virtual_dev_slot(dev):
max_slot = 31
base_slot = 3
if dev in list(PT_SLOT.keys()):
return PT_SLOT[dev]
for slot_num in range(base_slot, max_slot):
if slot_num not in list(PT_SLOT.values()):
if (slot_num == 6 and 14 in list(PT_SLOT.values())) or (slot_num == 14 and 6 in list(PT_SLOT.values())):
continue
if (slot_num == 7 and 15 in list(PT_SLOT.values())) or (slot_num == 15 and 7 in list(PT_SLOT.values())):
continue
PT_SLOT[dev] = slot_num
break
return slot_num
def get_slot(bdf_list, dev):
slot_list = {}
post_vm_list = get_post_num_list()
for p_id in post_vm_list:
if not bdf_list[p_id]:
slot_list[p_id] = ''
else:
slot_fun = virtual_dev_slot(dev)
PT_SLOT[dev] = slot_fun
slot_list[p_id] = slot_fun
return slot_list
def reset_pt_slot():
global PT_SLOT
PT_SLOT = {
"hostbridge":0,
"lpc":1,
"pci-gvt":2,
"virtio-blk":3,
"igd-lpc":31,
}
def get_pt_dev():
cap_pt = PASSTHRU_DEVS
return cap_pt
def get_vuart1_from_scenario(vmid):
vuart1 = common.get_vuart_info_id(common.SCENARIO_INFO_FILE, 1)
return vuart1[vmid]['base']
def pt_devs_check_audio(audio_map, audio_codec_map):
for vmid in list(audio_map.keys()):
bdf_audio = audio_map[vmid]
bdf_codec = audio_codec_map[vmid]
if not bdf_audio and bdf_codec:
key = "user_vm:id={},passthrough_devices,{}".format(vmid, 'audio_codec')
ERR_LIST[key] = "Audio codec device should be pass through together with Audio devcie!"
def check_block_mount(virtio_blk_dic):
    """Record, per VM, which virtio-blk entries are mountable rootfs images.

    virtio_blk_dic maps vm_id -> list of "blk_dev[:rootfs_img]" strings.
    For each entry a True/False flag is appended and the per-VM flag list is
    stored in the global MOUNT_FLAG_DIC.
    """
    # Known rootfs-capable block devices from the board description.
    (blk_dev_list, num) = board_cfg_lib.get_rootfs(common.BOARD_INFO_FILE)
    for vmid in list(virtio_blk_dic.keys()):
        mount_flags = []
        for blk in virtio_blk_dic[vmid]:
            rootfs_img = ''
            # Empty entry: not mountable.
            if not blk:
                mount_flags.append(False)
                continue

            # "dev:image" form carries an explicit rootfs image name.
            if ':' in blk:
                blk_dev = blk.split(':')[0]
                rootfs_img = blk.split(':')[1]
            else:
                blk_dev = blk

            # Mountable only when the device is known AND an image is given.
            if blk_dev in blk_dev_list and rootfs_img:
                mount_flags.append(True)
            else:
                mount_flags.append(False)

        MOUNT_FLAG_DIC[vmid] = mount_flags
def check_sriov_param(sriov_dev, pt_sel):
    """Validate SR-IOV selections: no clash with passthrough, and a well-formed BDF."""
    for dev_type in ('gpu', 'network'):
        for vm_id, dev_bdf in sriov_dev[dev_type].items():
            if not dev_bdf:
                continue
            # Passthrough config calls the network device 'ethernet'.
            pt_devname = 'ethernet' if dev_type == 'network' else dev_type
            if pt_sel.bdf[pt_devname][vm_id]:
                err_key = 'vmid:{} sriov {}'.format(vm_id, dev_type)
                ERR_LIST[err_key] = 'this vm has {} passthrough and sriov {} at same time!'.format(pt_devname, dev_type)
            # BDF must look like "bb:dd.f" (device number limited to 0x00-0x3f).
            if not re.match(r'^[\da-fA-F]{2}:[0-3][\da-fA-F]\.[0-7]$', dev_bdf):
                ERR_LIST['vmid:{} sriov {}'.format(vm_id, dev_type)] = 'sriov {} bdf error'.format(dev_type)
def bdf_duplicate_check(bdf_dic):
    """Report an error (and stop) if the same BDF is selected for two passthrough devices."""
    seen = []
    for dev, per_vm in bdf_dic.items():
        for vm_i, dev_bdf in per_vm.items():
            if not dev_bdf:
                continue
            if dev_bdf in seen:
                key = "user_vm:id={},{},{}".format(vm_i, 'passthrough_devices', dev)
                ERR_LIST[key] = "You select the same device for {} pass-through !".format(dev)
                # First duplicate is enough; stop checking.
                return
            seen.append(dev_bdf)
def get_gpu_bdf():
    """Scan the board PCI device list and return the GPU's BDF string.

    The result is also stored in the module-level `gpu_bdf`. Returns '' when
    no VGA compatible controller is listed (previously this case raised
    NameError because the global was never assigned).
    """
    global gpu_bdf

    gpu_bdf = ''
    pci_lines = board_cfg_lib.get_info(common.BOARD_INFO_FILE, "<PCI_DEVICE>", "</PCI_DEVICE>")
    for line in pci_lines:
        if "VGA compatible controller" in line:
            # BDF is the first 7 chars of the second tab-separated field;
            # the last matching line wins (original behavior).
            gpu_bdf = line.split('\t')[1][0:7]
    return gpu_bdf
def get_vpid_by_bdf(bdf):
    """Return the "vid pid" string for `bdf` from the board info ('' when absent)."""
    result = ''
    for line in board_cfg_lib.get_info(common.BOARD_INFO_FILE, "<PCI_VID_PID>", "</PCI_VID_PID>"):
        if bdf in line:
            # Field 2 is "vid:pid"; present it space-separated
            # (last matching line wins, as in the original).
            result = " ".join(line.split()[2].split(':'))
    return result
def get_gpu_vpid():
    """Return the vendor/product id string of the board's GPU."""
    return get_vpid_by_bdf(get_gpu_bdf())
def user_vm_cpu_affinity(user_vmid_cpu_affinity):
    """Shift user-VM-relative ids by the Service VM id to absolute VM ids."""
    sos_vm_id = get_sos_vmid()
    return {
        int(user_vmid) + int(sos_vm_id): affinity
        for user_vmid, affinity in user_vmid_cpu_affinity.items()
    }
def check_slot(slot_db):
    """Normalize user-assigned virtual slots in place.

    slot_db maps device name -> {user_vmid: slot string}. For "bus:dev.fun"
    style entries, the entry is rewritten to function 0 of the same bus/dev
    unless that function-0 slot is already taken by another device.
    """
    slot_values = {}
    # Seed an empty per-VM list from the first key set encountered.
    # NOTE(review): the break exits the inner loop, so only one user_vmid
    # per device gets seeded here -- confirm the intended placement
    # (the upstream variant breaks out of the OUTER loop instead).
    for dev in slot_db.keys():
        for user_vmid in slot_db[dev].keys():
            slot_values[user_vmid] = []
            break
    # Collect every non-empty slot value per VM (GPU excluded throughout).
    for dev in PASSTHRU_DEVS:
        if dev == 'gpu':
            continue
        for user_vmid,slot_str in slot_db[dev].items():
            if not slot_str:
                continue
            slot_values[user_vmid].append(slot_str)
    # Rewrite "b:d.f" entries to function 0 when that slot is still free.
    for dev in PASSTHRU_DEVS:
        if dev == 'gpu':
            continue
        for user_vmid,slot_str in slot_db[dev].items():
            if not slot_str or ':' not in str(slot_str):
                continue
            bus_slot = slot_str[0:-1]
            bus_slot_fun0 = bus_slot + "0"
            if bus_slot_fun0 not in slot_values[user_vmid]:
                slot_db[dev][user_vmid] = bus_slot_fun0
                slot_values[user_vmid].append(bus_slot_fun0)
def is_linux_like(user_vm_type):
    """Return True when `user_vm_type` names a Linux-like guest OS."""
    return user_vm_type in LINUX_LIKE_OS
def set_shm_regions(launch_item_values, scenario_info):
    """Populate per-VM selectable ivshmem region values from the scenario XML.

    For each post-launched VM, launch_item_values gains a
    'user_vm:id=N,shm_regions,shm_region' key whose list holds '' plus every
    "name,size" region the VM participates in (when IVSHMEM is enabled).
    """
    try:
        raw_shmem_regions = common.get_hv_item_tag(scenario_info, "FEATURES", "IVSHMEM", "IVSHMEM_REGION")
        load_orders = common.get_leaf_tag_map(scenario_info, "load_order")
        shm_enabled = common.get_hv_item_tag(scenario_info, "FEATURES", "IVSHMEM", "IVSHMEM_ENABLED")
    except:
        # Best-effort: missing/old scenario schema means nothing to populate.
        return
    sos_vm_id = 0
    for vm_id,load_order in load_orders.items():
        if load_order in ['SERVICE_VM']:
            sos_vm_id = vm_id
        elif load_order in ['POST_LAUNCHED_VM']:
            # User-VM ids are relative to the Service VM id.
            user_vmid = vm_id - sos_vm_id
            shm_region_key = 'user_vm:id={},shm_regions,shm_region'.format(user_vmid)
            launch_item_values[shm_region_key] = ['']
            if shm_enabled == 'y':
                for shmem_region in raw_shmem_regions:
                    if shmem_region is None or shmem_region.strip() == '':
                        continue
                    try:
                        # Region format: "name, size, vmid1:vmid2:..."
                        shm_splited = shmem_region.split(',')
                        name = shm_splited[0].strip()
                        size = shm_splited[1].strip()
                        vm_id_list = [x.strip() for x in shm_splited[2].split(':')]
                        if str(vm_id) in vm_id_list:
                            launch_item_values[shm_region_key].append(','.join([name, size]))
                    except Exception as e:
                        # A malformed region string only affects itself.
                        print(e)
def set_pci_vuarts(launch_item_values, scenario_info):
    """Populate per-VM selectable PCI communication-vuart ids from the scenario XML.

    For each post-launched VM, launch_item_values gains a
    'user_vm:id=N,communication_vuarts,communication_vuart' key whose list
    holds '' plus the id of every communication_vuart with base PCI_VUART.
    """
    try:
        launch_item_values['user_vm,console_vuart'] = DM_VUART0
        load_orders = common.get_leaf_tag_map(scenario_info, 'load_order')
        sos_vm_id = 0
        for vm_id,load_order in load_orders.items():
            if load_order in ['SERVICE_VM']:
                sos_vm_id = vm_id
        for vm in list(common.get_config_root(scenario_info)):
            if vm.tag == 'vm' and load_orders[int(vm.attrib['id'])] == 'POST_LAUNCHED_VM':
                # User-VM ids are relative to the Service VM id.
                user_vmid = int(vm.attrib['id']) - sos_vm_id
                pci_vuart_key = 'user_vm:id={},communication_vuarts,communication_vuart'.format(user_vmid)
                for elem in list(vm):
                    if elem.tag == 'communication_vuart':
                        for sub_elem in list(elem):
                            if sub_elem.tag == 'base' and sub_elem.text == 'PCI_VUART':
                                # First entry seeds the list with the empty choice.
                                if pci_vuart_key not in launch_item_values.keys():
                                    launch_item_values[pci_vuart_key] = ['', elem.attrib['id']]
                                else:
                                    launch_item_values[pci_vuart_key].append(elem.attrib['id'])
    except:
        # Best-effort: missing/old scenario schema means nothing to populate.
        return
def check_shm_regions(launch_shm_regions, scenario_info):
    """Verify every selected shm region is defined in the scenario configuration.

    :param launch_shm_regions: dict user_vmid -> list of "name,size" strings
    :param scenario_info: path to the scenario XML file
    Errors are recorded in the module-level ERR_LIST; checking stops at the
    first invalid region.
    """
    valid_values = {}
    set_shm_regions(valid_values, scenario_info)

    for user_vmid, shm_regions in launch_shm_regions.items():
        shm_region_key = 'user_vm:id={},shm_regions,shm_region'.format(user_vmid)
        for shm_region in shm_regions:
            if shm_region_key not in valid_values.keys() or shm_region not in valid_values[shm_region_key]:
                # Fixed: missing space between the concatenated fragments
                # ("...decimalin MB..." -> "...decimal in MB...").
                ERR_LIST[shm_region_key] = "shm {} should be configured in scenario setting and the size should be decimal " \
                    "in MB and spaces should not exist.".format(shm_region)
                return
def check_console_vuart(launch_console_vuart, vuart0, scenario_info):
    """Check console-vuart selections against legacy vuart0 and the scenario config."""
    vuarts = common.get_vuart_info(scenario_info)
    for user_vmid, enabled in launch_console_vuart.items():
        key = 'user_vm:id={},console_vuart'.format(user_vmid)
        if enabled != "Enable":
            continue
        # Legacy vuart0 and the PCI console vuart are mutually exclusive.
        if vuart0[user_vmid] == "Enable":
            ERR_LIST[key] = ("vuart0 and console_vuart of user_vm {} should not be enabled "
                             "at the same time".format(user_vmid))
            return
        # NOTE(review): the containment test uses int(user_vmid) but the
        # value lookup uses user_vmid directly -- presumably vuarts is keyed
        # by int and user_vmid already is one; confirm.
        if int(user_vmid) in vuarts.keys() \
                and 0 in vuarts[user_vmid] and vuarts[user_vmid][0]['base'] == "INVALID_PCI_BASE":
            ERR_LIST[key] = "console_vuart of user_vm {} should be enabled in scenario setting".format(user_vmid)
            return
def check_communication_vuart(launch_communication_vuarts, scenario_info):
    """Validate selected communication vuarts against the scenario configuration.

    :param launch_communication_vuarts: dict user_vmid -> list of vuart id strings
    :param scenario_info: path to the scenario XML file
    Reports (via ERR_LIST) ids missing from the scenario, and the illegal
    combination of communication_vuart 1 with legacy vuart 1. Checking stops
    at the first problem or the first empty selection (original behavior).
    """
    vuarts = common.get_vuart_info(scenario_info)
    vuart1_setting = common.get_vuart_info_id(common.SCENARIO_INFO_FILE, 1)
    for user_vmid, vuart_list in launch_communication_vuarts.items():
        vuart_key = 'user_vm:id={},communication_vuarts,communication_vuart'.format(user_vmid)
        for vuart_id in vuart_list:
            if not vuart_id:
                return
            if int(vuart_id) not in vuarts[user_vmid].keys():
                # Fixed: missing space between the concatenated fragments
                # ("...configuredin scenario..." -> "...configured in scenario...").
                ERR_LIST[vuart_key] = "communication_vuart {} of user_vm {} should be configured " \
                    "in scenario setting.".format(vuart_id, user_vmid)
                return
            if int(vuart_id) == 1 and vuarts[user_vmid][1]['base'] != "INVALID_PCI_BASE":
                if user_vmid in vuart1_setting.keys() and vuart1_setting[user_vmid]['base'] != "INVALID_COM_BASE":
                    ERR_LIST[vuart_key] = "user_vm {}'s communication_vuart 1 and legacy_vuart 1 should " \
                        "not be configured at the same time.".format(user_vmid)
                    return
def check_enable_ptm(launch_enable_ptm, scenario_info):
    """Flag user VMs that enable PTM in launch config but not in the scenario XML."""
    scenario_etree = lxml.etree.parse(scenario_info)
    ptm_enabled_ids = scenario_etree.xpath("//vm[PTM = 'y']/@id")
    for user_vmid, enable_ptm in launch_enable_ptm.items():
        if enable_ptm != 'y':
            continue
        if str(user_vmid) not in ptm_enabled_ids:
            key = 'user_vm:id={},enable_ptm'.format(user_vmid)
            ERR_LIST[key] = "PTM of user_vm:{} set to 'n' in scenario xml".format(user_vmid)
| true | true |
f71df485d3408330f790929d7b0da668a541e0f4 | 11,467 | py | Python | doc/conf.py | kvdblom/AutoFolio | 60a38a485e832b4e9bd2d06cbe7e1aecc994bb32 | [
"BSD-2-Clause"
] | 24 | 2017-05-17T15:51:44.000Z | 2021-05-14T20:24:44.000Z | doc/conf.py | kvdblom/AutoFolio | 60a38a485e832b4e9bd2d06cbe7e1aecc994bb32 | [
"BSD-2-Clause"
] | 10 | 2017-02-21T13:36:25.000Z | 2021-04-10T01:35:35.000Z | doc/conf.py | kvdblom/AutoFolio | 60a38a485e832b4e9bd2d06cbe7e1aecc994bb32 | [
"BSD-2-Clause"
] | 12 | 2016-07-21T08:59:36.000Z | 2021-01-28T13:51:16.000Z | # -*- coding: utf-8 -*-
#
# AutoFolio documentation build configuration file, created by
# sphinx-quickstart on Mon Sep 14 12:36:21 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import datetime
import sys
import os
import shlex
import sphinx_bootstrap_theme
sys.path.insert(0, '..')
import autofolio
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.coverage',
'sphinx.ext.mathjax',
'sphinx.ext.viewcode',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'AutoFolio'
copyright = '2015-%s, %s' % (datetime.datetime.now().year, autofolio.AUTHORS)
author = autofolio.AUTHORS
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = autofolio.VERSION
# The full version, including alpha/beta/rc tags.
release = autofolio.VERSION
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_static']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'bootstrap'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {
# Navigation bar title. (Default: ``project`` value)
'navbar_title': "AutoFolio",
# Tab name for entire site. (Default: "Site")
# 'navbar_site_name': "Site",
# A list of tuples containting pages to link to. The value should
# be in the form [(name, page), ..]
'navbar_links': [
('Start', 'index'),
('Installation', 'installation'),
('Manual', 'manual'),
('Contact', 'contact'),
('License', 'license'),
],
# Render the next and previous page links in navbar. (Default: true)
'navbar_sidebarrel': False,
# Render the current pages TOC in the navbar. (Default: true)
'navbar_pagenav': False,
# Tab name for the current pages TOC. (Default: "Page")
'navbar_pagenav_name': "On this page",
# Global TOC depth for "site" navbar tab. (Default: 1)
# Switching to -1 shows all levels.
'globaltoc_depth': 1,
# Include hidden TOCs in Site navbar?
#
# Note: If this is "false", you cannot have mixed ``:hidden:`` and
# non-hidden ``toctree`` directives in the same page, or else the build
# will break.
#
# Values: "true" (default) or "false"
'globaltoc_includehidden': "false",
# HTML navbar class (Default: "navbar") to attach to <div> element.
# For black navbar, do "navbar navbar-inverse"
'navbar_class': "navbar",
# Fix navigation bar to top of page?
# Values: "true" (default) or "false"
'navbar_fixed_top': "true",
# Location of link to source.
# Options are "nav" (default), "footer" or anything else to exclude.
'source_link_position': "footer",
# Bootswatch (http://bootswatch.com/) theme.
#
# Options are nothing with "" (default) or the name of a valid theme
# such as "amelia" or "cosmo".
'bootswatch_theme': "cosmo",
# Choose Bootstrap version.
# Values: "3" (default) or "2" (in quotes)
'bootstrap_version': "3",
}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = sphinx_bootstrap_theme.get_html_theme_path()
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
html_sidebars = {'**': ['localtoc.html']}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'AutoFoliodoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'AutoFolio.tex', u'AutoFolio Documentation', autofolio.AUTHORS, 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'autofolio', u'AutoFolio Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'AutoFolio', u'AutoFolio Documentation',
author, 'AutoFolio', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
| 32.210674 | 89 | 0.703148 |
import datetime
import sys
import os
import shlex
import sphinx_bootstrap_theme
sys.path.insert(0, '..')
import autofolio
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.coverage',
'sphinx.ext.mathjax',
'sphinx.ext.viewcode',
]
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'AutoFolio'
copyright = '2015-%s, %s' % (datetime.datetime.now().year, autofolio.AUTHORS)
author = autofolio.AUTHORS
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = autofolio.VERSION
# The full version, including alpha/beta/rc tags.
release = autofolio.VERSION
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_static']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'bootstrap'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {
# Navigation bar title. (Default: ``project`` value)
'navbar_title': "AutoFolio",
# Tab name for entire site. (Default: "Site")
# 'navbar_site_name': "Site",
# A list of tuples containting pages to link to. The value should
# be in the form [(name, page), ..]
'navbar_links': [
('Start', 'index'),
('Installation', 'installation'),
('Manual', 'manual'),
('Contact', 'contact'),
('License', 'license'),
],
# Render the next and previous page links in navbar. (Default: true)
'navbar_sidebarrel': False,
# Render the current pages TOC in the navbar. (Default: true)
'navbar_pagenav': False,
# Tab name for the current pages TOC. (Default: "Page")
'navbar_pagenav_name': "On this page",
# Global TOC depth for "site" navbar tab. (Default: 1)
# Switching to -1 shows all levels.
'globaltoc_depth': 1,
# Include hidden TOCs in Site navbar?
#
# Note: If this is "false", you cannot have mixed ``:hidden:`` and
# non-hidden ``toctree`` directives in the same page, or else the build
# will break.
#
# Values: "true" (default) or "false"
'globaltoc_includehidden': "false",
# HTML navbar class (Default: "navbar") to attach to <div> element.
# For black navbar, do "navbar navbar-inverse"
'navbar_class': "navbar",
# Fix navigation bar to top of page?
# Values: "true" (default) or "false"
'navbar_fixed_top': "true",
# Location of link to source.
# Options are "nav" (default), "footer" or anything else to exclude.
'source_link_position': "footer",
# Bootswatch (http://bootswatch.com/) theme.
#
# Options are nothing with "" (default) or the name of a valid theme
# such as "amelia" or "cosmo".
'bootswatch_theme': "cosmo",
# Choose Bootstrap version.
# Values: "3" (default) or "2" (in quotes)
'bootstrap_version': "3",
}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = sphinx_bootstrap_theme.get_html_theme_path()
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
html_sidebars = {'**': ['localtoc.html']}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'AutoFoliodoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'AutoFolio.tex', u'AutoFolio Documentation', autofolio.AUTHORS, 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'autofolio', u'AutoFolio Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'AutoFolio', u'AutoFolio Documentation',
author, 'AutoFolio', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
| true | true |
f71df493deda24849909a1ce67f6f55f0aa47381 | 1,385 | py | Python | Numercial_Examples/Examples_China.py | Lemon-Nation/PyLMDI | 54d15ec44b84bd84b960003b1fd6690057240565 | [
"Apache-2.0"
] | 3 | 2021-12-23T12:19:18.000Z | 2022-01-14T03:49:51.000Z | Numercial_Examples/Examples_China.py | Lemon-Nation/PyLMDI | 54d15ec44b84bd84b960003b1fd6690057240565 | [
"Apache-2.0"
] | null | null | null | Numercial_Examples/Examples_China.py | Lemon-Nation/PyLMDI | 54d15ec44b84bd84b960003b1fd6690057240565 | [
"Apache-2.0"
] | 3 | 2021-12-23T11:57:35.000Z | 2022-02-28T13:43:55.000Z |
# =============================================================================
# Step1: Input
# =============================================================================
import numpy as np
from PyLMDI import PyLMDI
if __name__=='__main__':
    #--- Step1: Input
    # Baseline (2017, subscript 0) and current (2018, subscript t) values.
    # The remaining factors g/s/i/e/k are presumably the Kaya-style driving
    # factors of the LMDI decomposition -- confirm exact definitions against
    # the PyLMDI documentation.
    Ct = 794.6119504871361 # Carbon emission from China's commercial buildings in 2018
    C0 = 761.984276581356 # Carbon emission from China's commercial buildings in 2017
    Pt = 1395.38 # Population size in 2018
    P0 = 1390.08 # in 2017
    gt = 64.52073987
    g0 = 59.04367375
    st = 0.521570193
    s0 = 0.51892765
    it = 0.002743568
    i0 = 0.002876626
    et = 3.053397862
    e0 = 3.004500526
    kt = 2.02
    k0 = 2.07
    # PyLMDI expects emissions as sequences and factors as column vectors.
    Ct,C0 = [Ct],[C0]
    Xt = np.array([Pt,gt,st,it,et,kt]).reshape([-1,1])
    X0 = np.array([P0,g0,s0,i0,e0,k0]).reshape([-1,1])
    #--- Step2-4: LMDI decomposition analysis
    LMDI = PyLMDI(Ct,C0,Xt,X0)
    ans = LMDI.Add() # additive decomposition: ans[0] is the total change
    # --- Step 5: Output
    print("The change of carbon emission of China's commercial buildings from 2017 to 2018 is: ",ans[0])
    print("The various driving forces contribute as follows:")
    print("P: ",ans[1])
    print("g: ",ans[2])
    print("s: ",ans[3])
    print("i: ",ans[4])
    print("e: ",ans[5])
    print("K: ",ans[6])
import numpy as np
from PyLMDI import PyLMDI
if __name__=='__main__':
Ct = 794.6119504871361
C0 = 761.984276581356 # Carbon emission from China's commercial buildings in 2017
Pt = 1395.38
P0 = 1390.08
gt = 64.52073987
g0 = 59.04367375
st = 0.521570193
s0 = 0.51892765
it = 0.002743568
i0 = 0.002876626
et = 3.053397862
e0 = 3.004500526
kt = 2.02
k0 = 2.07
Ct,C0 = [Ct],[C0]
Xt = np.array([Pt,gt,st,it,et,kt]).reshape([-1,1])
X0 = np.array([P0,g0,s0,i0,e0,k0]).reshape([-1,1])
LMDI = PyLMDI(Ct,C0,Xt,X0)
ans = LMDI.Add()
print("The change of carbon emission of China's commercial buildings from 2017 to 2018 is: ",ans[0])
print("The various driving forces contribute as follows:")
print("P: ",ans[1])
print("g: ",ans[2])
print("s: ",ans[3])
print("i: ",ans[4])
print("e: ",ans[5])
print("K: ",ans[6]) | true | true |
f71df509d4fc284e6d1ded8236536c35e1a10c7b | 4,573 | py | Python | arcsight/utils/arcsight_ssl.py | mayurdhamecha-crest/ta_cloud_exchange_plugins | 8d64c92909f28bcb2067587ec3361499de5d5723 | [
"BSD-3-Clause"
] | null | null | null | arcsight/utils/arcsight_ssl.py | mayurdhamecha-crest/ta_cloud_exchange_plugins | 8d64c92909f28bcb2067587ec3361499de5d5723 | [
"BSD-3-Clause"
] | null | null | null | arcsight/utils/arcsight_ssl.py | mayurdhamecha-crest/ta_cloud_exchange_plugins | 8d64c92909f28bcb2067587ec3361499de5d5723 | [
"BSD-3-Clause"
] | null | null | null | """ArcSight Plugin SSL Log Handler."""
import os
import codecs
import logging
import logging.handlers
import ssl
import socket
from tempfile import NamedTemporaryFile
class SSLArcSightHandler(logging.handlers.SysLogHandler):
    """Syslog-style log handler that ships records over a TLS socket.

    Intended for forwarding events to an ArcSight receiver.  The syslog
    severity/facility tables below are pasted in from
    ``logging.handlers.SysLogHandler`` because ``__init__`` is overridden
    and the inherited helpers (``encodePriority``, ``mapPriority``) read
    them from ``self``.
    """

    # severity codes
    LOG_EMERG = 0  # system is unusable
    LOG_ALERT = 1  # action must be taken immediately
    LOG_CRIT = 2  # critical conditions
    LOG_ERR = 3  # error conditions
    LOG_WARNING = 4  # warning conditions
    LOG_NOTICE = 5  # normal but significant condition
    LOG_INFO = 6  # informational
    LOG_DEBUG = 7  # debug-level messages

    # facility codes
    LOG_KERN = 0  # kernel messages
    LOG_USER = 1  # random user-level messages
    LOG_MAIL = 2  # mail system
    LOG_DAEMON = 3  # system daemons
    LOG_AUTH = 4  # security/authorization messages
    LOG_SYSLOG = 5  # messages generated internally by syslogd
    LOG_LPR = 6  # line printer subsystem
    LOG_NEWS = 7  # network news subsystem
    LOG_UUCP = 8  # UUCP subsystem
    LOG_CRON = 9  # clock daemon
    LOG_AUTHPRIV = 10  # security/authorization messages (private)
    LOG_FTP = 11  # FTP daemon
    # other codes through 15 reserved for system use
    LOG_LOCAL0 = 16  # reserved for local use
    LOG_LOCAL1 = 17  # reserved for local use
    LOG_LOCAL2 = 18  # reserved for local use
    LOG_LOCAL3 = 19  # reserved for local use
    LOG_LOCAL4 = 20  # reserved for local use
    LOG_LOCAL5 = 21  # reserved for local use
    LOG_LOCAL6 = 22  # reserved for local use
    LOG_LOCAL7 = 23  # reserved for local use

    priority_names = {
        "alert": LOG_ALERT,
        "crit": LOG_CRIT,
        "critical": LOG_CRIT,
        "debug": LOG_DEBUG,
        "emerg": LOG_EMERG,
        "err": LOG_ERR,
        "error": LOG_ERR,  # DEPRECATED
        "info": LOG_INFO,
        "notice": LOG_NOTICE,
        "panic": LOG_EMERG,  # DEPRECATED
        "warn": LOG_WARNING,  # DEPRECATED
        "warning": LOG_WARNING,
    }

    facility_names = {
        "auth": LOG_AUTH,
        "authpriv": LOG_AUTHPRIV,
        "cron": LOG_CRON,
        "daemon": LOG_DAEMON,
        "ftp": LOG_FTP,
        "kern": LOG_KERN,
        "lpr": LOG_LPR,
        "mail": LOG_MAIL,
        "news": LOG_NEWS,
        "security": LOG_AUTH,  # DEPRECATED
        "syslog": LOG_SYSLOG,
        "user": LOG_USER,
        "uucp": LOG_UUCP,
        "local0": LOG_LOCAL0,
        "local1": LOG_LOCAL1,
        "local2": LOG_LOCAL2,
        "local3": LOG_LOCAL3,
        "local4": LOG_LOCAL4,
        "local5": LOG_LOCAL5,
        "local6": LOG_LOCAL6,
        "local7": LOG_LOCAL7,
    }

    # The map below appears to be trivially lowercase the key. However,
    # there's more to it than meets the eye - in some locales, lowercase
    # gives unexpected results. See SF #1524081: in the Turkish locale,
    # "INFO".lower() != "info"
    priority_map = {
        "DEBUG": "debug",
        "INFO": "info",
        "WARNING": "warning",
        "ERROR": "error",
        "CRITICAL": "critical",
    }

    def __init__(self, address, certs=None, facility=LOG_USER):
        """Open a TLS connection to ``address`` for log forwarding.

        Args:
            address: ``(host, port)`` tuple of the receiver.
            certs: Optional PEM CA bundle as a string.  When given, the
                server certificate is verified against it; otherwise
                verification is disabled (the original behaviour).
            facility: syslog facility code, default ``LOG_USER``.
        """
        logging.Handler.__init__(self)
        self.address = address
        self.facility = facility
        self.unixsocket = 0  # kept for SysLogHandler compatibility
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        # Bug fix / modernization: ssl.wrap_socket() was deprecated and then
        # removed in Python 3.12.  Build an explicit SSLContext with the same
        # semantics: CA verification when a bundle is supplied, none
        # otherwise, and no hostname checking (wrap_socket never checked it).
        context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
        context.check_hostname = False
        if certs:
            context.verify_mode = ssl.CERT_REQUIRED
            # cadata accepts the PEM text directly, so the temporary
            # certificate file the original wrote is no longer needed.
            context.load_verify_locations(cadata=certs)
        else:
            context.verify_mode = ssl.CERT_NONE
        self.socket = context.wrap_socket(sock)
        self.socket.connect(address)

    def close(self):
        """Close the TLS socket and the underlying handler."""
        self.socket.close()
        logging.Handler.close(self)

    def emit(self, record):
        """Format ``record``, prefix the syslog priority, and send it.

        Errors other than KeyboardInterrupt/SystemExit are routed through
        ``handleError`` so logging never crashes the application.
        """
        msg = self.format(record) + "\n"
        prio = "<%d>" % self.encodePriority(
            self.facility, self.mapPriority(record.levelname)
        )
        # Bug fix: the original had an `if type(msg) == "unicode":` branch,
        # which compares a type object against a string and is therefore
        # always False on Python 3 (and would have raised str+bytes if it
        # ever ran).  On Python 3 `msg` is always str; just encode it once.
        msg = prio + msg
        try:
            self.socket.write(msg.encode("utf-8"))
        except (KeyboardInterrupt, SystemExit):
            raise
        except Exception:
            self.handleError(record)
| 30.898649 | 72 | 0.597201 | import os
import codecs
import logging
import logging.handlers
import ssl
import socket
from tempfile import NamedTemporaryFile
class SSLArcSightHandler(logging.handlers.SysLogHandler):
LOG_EMERG = 0
LOG_ALERT = 1
LOG_CRIT = 2
LOG_ERR = 3
LOG_WARNING = 4
LOG_NOTICE = 5
LOG_INFO = 6
LOG_DEBUG = 7
LOG_KERN = 0
LOG_USER = 1
LOG_MAIL = 2
LOG_DAEMON = 3
LOG_AUTH = 4
LOG_SYSLOG = 5
LOG_LPR = 6
LOG_NEWS = 7
LOG_UUCP = 8
LOG_CRON = 9
LOG_AUTHPRIV = 10
LOG_FTP = 11
LOG_LOCAL0 = 16
LOG_LOCAL1 = 17
LOG_LOCAL2 = 18
LOG_LOCAL3 = 19
LOG_LOCAL4 = 20
LOG_LOCAL5 = 21
LOG_LOCAL6 = 22
LOG_LOCAL7 = 23
priority_names = {
"alert": LOG_ALERT,
"crit": LOG_CRIT,
"critical": LOG_CRIT,
"debug": LOG_DEBUG,
"emerg": LOG_EMERG,
"err": LOG_ERR,
"error": LOG_ERR,
"info": LOG_INFO,
"notice": LOG_NOTICE,
"panic": LOG_EMERG,
"warn": LOG_WARNING,
"warning": LOG_WARNING,
}
facility_names = {
"auth": LOG_AUTH,
"authpriv": LOG_AUTHPRIV,
"cron": LOG_CRON,
"daemon": LOG_DAEMON,
"ftp": LOG_FTP,
"kern": LOG_KERN,
"lpr": LOG_LPR,
"mail": LOG_MAIL,
"news": LOG_NEWS,
"security": LOG_AUTH,
"syslog": LOG_SYSLOG,
"user": LOG_USER,
"uucp": LOG_UUCP,
"local0": LOG_LOCAL0,
"local1": LOG_LOCAL1,
"local2": LOG_LOCAL2,
"local3": LOG_LOCAL3,
"local4": LOG_LOCAL4,
"local5": LOG_LOCAL5,
"local6": LOG_LOCAL6,
"local7": LOG_LOCAL7,
}
# gives unexpected results. See SF #1524081: in the Turkish locale,
# "INFO".lower() != "info"
priority_map = {
"DEBUG": "debug",
"INFO": "info",
"WARNING": "warning",
"ERROR": "error",
"CRITICAL": "critical",
}
def __init__(self, address, certs=None, facility=LOG_USER):
logging.Handler.__init__(self)
self.address = address
self.facility = facility
self.unixsocket = 0
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
if certs:
cert = NamedTemporaryFile(delete=False)
cert.write(str.encode(certs))
cert.flush()
self.socket = ssl.wrap_socket(
s, ca_certs=cert.name, cert_reqs=ssl.CERT_REQUIRED
)
cert.close()
os.unlink(cert.name)
else:
self.socket = ssl.wrap_socket(s, cert_reqs=ssl.CERT_NONE)
self.socket.connect(address)
def close(self):
self.socket.close()
logging.Handler.close(self)
def emit(self, record):
msg = self.format(record) + "\n"
prio = "<%d>" % self.encodePriority(
self.facility, self.mapPriority(record.levelname)
)
if type(msg) == "unicode":
msg = msg.encode("utf-8")
if codecs:
msg = codecs.BOM_UTF8 + msg
msg = prio + msg
try:
self.socket.write(str.encode(msg))
except (KeyboardInterrupt, SystemExit):
raise
except Exception:
self.handleError(record)
| true | true |
f71df5e138f560490f9f1eee06cf96b17622e978 | 270 | py | Python | DiscordShopBot/utils.py | chanchan69/shopper | dc7a9bba325d753c2454640d201514647628c01e | [
"Apache-2.0"
] | 1 | 2021-12-21T21:29:15.000Z | 2021-12-21T21:29:15.000Z | DiscordShopBot/utils.py | chanchan69/shopper | dc7a9bba325d753c2454640d201514647628c01e | [
"Apache-2.0"
] | null | null | null | DiscordShopBot/utils.py | chanchan69/shopper | dc7a9bba325d753c2454640d201514647628c01e | [
"Apache-2.0"
] | 1 | 2021-12-21T12:52:53.000Z | 2021-12-21T12:52:53.000Z | from tkinter import filedialog, Tk
from os import getcwd
def epic_file_dialog(title: str) -> str:
    """Show a native "open file" dialog and return the selected path.

    A hidden Tk root window is created so the dialog can be shown, and is
    destroyed afterwards so repeated calls do not leak Tcl interpreters.

    Args:
        title: Window title for the dialog.

    Returns:
        The selected file path, or '' if the user cancelled.
    """
    root = Tk()
    root.attributes('-topmost', True)  # keep the dialog above other windows
    root.withdraw()  # hide the empty root window itself
    try:
        path = filedialog.askopenfilename(title=title, initialdir=getcwd())
    finally:
        root.destroy()  # bug fix: the root window was previously leaked
    return path
from os import getcwd
def epic_file_dialog(title: str) -> str:
root = Tk()
root.attributes('-topmost',True)
root.withdraw()
path = filedialog.askopenfilename(title=title, initialdir=getcwd())
return path | true | true |
f71df5fc0f6719789acb2f43aed9c9cb11f18ee5 | 1,508 | py | Python | dags/exercise6.py | tisako/airflow-training-skeleton | 5bebe784d69f115df352ad5185320653eaa78eee | [
"Apache-2.0"
] | null | null | null | dags/exercise6.py | tisako/airflow-training-skeleton | 5bebe784d69f115df352ad5185320653eaa78eee | [
"Apache-2.0"
] | null | null | null | dags/exercise6.py | tisako/airflow-training-skeleton | 5bebe784d69f115df352ad5185320653eaa78eee | [
"Apache-2.0"
] | null | null | null | from datetime import timedelta, datetime
# noinspection PyPackageRequirements
import airflow
# noinspection PyPackageRequirements
from airflow import DAG
# noinspection PyPackageRequirements
from airflow.operators.dummy_operator import DummyOperator
# noinspection PyPackageRequirements
from airflow.operators.python_operator import PythonOperator, BranchPythonOperator
# noinspection PyUnresolvedReferences
from airflow.utils.trigger_rule import TriggerRule
# Default arguments applied to every task in the DAG below.
args = {
    'owner': 'Airflow',
    # backfill window: start nine days before "now" at parse time
    'start_date': airflow.utils.dates.days_ago(9),
}
# One task name per weekday index (0=Monday .. 6=Sunday).  Duplicates are
# collapsed with set() when the tasks are created, so several weekdays map
# to the same task.
nameList = ['jan', 'peter', 'klaas', 'fred', 'jan', 'klaas', 'blob']
def print_date(**context):
    """Print the weekday index (0=Monday .. 6=Sunday) of the run's execution date."""
    weekday = get_week_day(context)
    print(weekday)
def get_week_day(context):
    """Return the weekday index (0=Monday .. 6=Sunday) of the execution date in *context*."""
    execution_date = context['execution_date']
    return execution_date.weekday()
def email(name: str) -> DummyOperator:
    """Create a placeholder task whose task_id is *name*."""
    task_name = str(name)
    return DummyOperator(task_id=task_name)
def branch_func(**context):
    """Pick the follow-up task: the name mapped to the run's weekday."""
    weekday = get_week_day(context)
    return nameList[weekday]
with DAG(
        dag_id='exercise6',
        default_args=args,
        schedule_interval=timedelta(hours=2.5)
) as dag:
    # Prints the weekday of the execution date.
    # NOTE(review): this assignment shadows the print_date *function* defined
    # above; the callable was captured before the rebinding, so the DAG still
    # works, but the shadowing is easy to trip over.
    print_date = PythonOperator(
        task_id='task1',
        provide_context=True,
        python_callable=print_date
    )
    # Chooses which per-name task runs, based on the weekday (branch_func
    # returns a task_id).
    branching = BranchPythonOperator(
        task_id='branch_task',
        provide_context=True,
        python_callable=branch_func
    )
    # One dummy task per unique name; the branch selects exactly one of them.
    sleep = list(map(email, set(nameList)))
    # Join point: NONE_FAILED lets it run even though the non-selected
    # branches end up skipped.
    the_end = DummyOperator(
        task_id='the_end',
        trigger_rule=TriggerRule.NONE_FAILED
    )
    print_date >> branching >> sleep >> the_end
| 23.2 | 82 | 0.720159 | from datetime import timedelta, datetime
import airflow
from airflow import DAG
from airflow.operators.dummy_operator import DummyOperator
from airflow.operators.python_operator import PythonOperator, BranchPythonOperator
from airflow.utils.trigger_rule import TriggerRule
args = {
'owner': 'Airflow',
'start_date': airflow.utils.dates.days_ago(9),
}
nameList = ['jan', 'peter', 'klaas', 'fred', 'jan', 'klaas', 'blob']
def print_date(**context):
print(get_week_day(context))
def get_week_day(context):
return context['execution_date'].weekday()
def email(name: str) -> DummyOperator:
return DummyOperator(task_id=str(name))
def branch_func(**context):
return nameList[get_week_day(context)]
with DAG(
dag_id='exercise6',
default_args=args,
schedule_interval=timedelta(hours=2.5)
) as dag:
print_date = PythonOperator(
task_id='task1',
provide_context=True,
python_callable=print_date
)
branching = BranchPythonOperator(
task_id='branch_task',
provide_context=True,
python_callable=branch_func
)
sleep = list(map(email, set(nameList)))
the_end = DummyOperator(
task_id='the_end',
trigger_rule=TriggerRule.NONE_FAILED
)
print_date >> branching >> sleep >> the_end
| true | true |
f71df6209d55d9883c908aef33562038a34b55fc | 357 | py | Python | lib/rm_comments.py | guilyx/rm-comments-cmakelists | c5a957a34258f7fca55dd67d1955f107fd94f8fe | [
"MIT"
] | null | null | null | lib/rm_comments.py | guilyx/rm-comments-cmakelists | c5a957a34258f7fca55dd67d1955f107fd94f8fe | [
"MIT"
] | null | null | null | lib/rm_comments.py | guilyx/rm-comments-cmakelists | c5a957a34258f7fca55dd67d1955f107fd94f8fe | [
"MIT"
] | null | null | null | import os
def removeComments(filename):
    """Strip '#' comments from *filename* (a .txt file), keeping a backup.

    The original file is renamed to ``<name>Copy.txt`` and a cleaned version
    is written back to *filename*.

    Bug fix: the previous version dropped any *entire line* that contained a
    '#', so code carrying an inline comment was deleted too.  Now the line is
    cut at the first '#' and the code before it is preserved; lines that end
    up blank (pure comments or empty lines) are dropped.

    NOTE(review): a '#' inside a quoted string is still treated as a comment
    start; that matches the simple line-based approach of the original tool.

    Returns:
        True on success.
    """
    saved_file = filename.replace('.txt', 'Copy.txt')
    os.rename(filename, saved_file)
    with open(saved_file) as old_file, open(filename, 'w') as new_file:
        for line in old_file:
            code = line.split('#', 1)[0]
            if code.strip():
                new_file.write(code.rstrip() + '\n')
    return True
def removeComments(filename):
savedFile = filename.replace('.txt', 'Copy.txt')
os.rename(filename, savedFile)
with open(filename, 'w') as new_file:
with open(savedFile) as old_file:
for line in old_file:
if '#' not in line and line != '\n':
new_file.write(line)
return(True) | true | true |
f71df73df05965520b9d035dfb93c6e1edbf198b | 18,641 | py | Python | utils.py | HuangHaoyu1997/NRI | e0cd1ef5e168db19cd904eabfd369a65238b5d07 | [
"MIT"
] | null | null | null | utils.py | HuangHaoyu1997/NRI | e0cd1ef5e168db19cd904eabfd369a65238b5d07 | [
"MIT"
] | null | null | null | utils.py | HuangHaoyu1997/NRI | e0cd1ef5e168db19cd904eabfd369a65238b5d07 | [
"MIT"
] | null | null | null | import numpy as np
import torch
from torch.utils.data.dataset import TensorDataset
from torch.utils.data import DataLoader
import torch.nn.functional as F
from torch.autograd import Variable
def my_softmax(input, axis=1):
    """Softmax over an arbitrary axis of `input`.

    The tensor is transposed so `axis` becomes dim 0, softmaxed there, and
    transposed back.  The softmax dimension is now explicit: relying on
    F.softmax's implicit dim is deprecated and, for 2-D inputs, picks dim 1
    of the transposed tensor — i.e. the wrong axis.
    """
    trans_input = input.transpose(axis, 0).contiguous()
    soft_max_1d = F.softmax(trans_input, dim=0)  # bug fix: explicit dim
    return soft_max_1d.transpose(axis, 0)
def binary_concrete(logits, tau=1, hard=False, eps=1e-10):
    """Sample from the binary Concrete (relaxed Bernoulli) distribution.

    With ``hard=True`` the sample is thresholded at 0.5, but the gradient of
    the soft sample is kept (straight-through estimator).
    """
    y_soft = binary_concrete_sample(logits, tau=tau, eps=eps)
    if not hard:
        return y_soft
    y_hard = (y_soft > 0.5).float()
    # Straight-through: forward uses y_hard, backward flows through y_soft.
    return Variable(y_hard.data - y_soft.data) + y_soft
def binary_concrete_sample(logits, tau=1, eps=1e-10):
    """Draw a relaxed binary sample: sigmoid((logits + logistic noise) / tau)."""
    noise = sample_logistic(logits.size(), eps=eps)
    if logits.is_cuda:
        noise = noise.cuda()
    perturbed = logits + Variable(noise)
    return F.sigmoid(perturbed / tau)
def sample_logistic(shape, eps=1e-10):
    """Sample standard-logistic noise via log(u) - log(1 - u), u ~ Uniform(0, 1).

    ``eps`` guards both logs against u == 0 and u == 1.
    """
    u = torch.rand(shape).float()
    return torch.log(u + eps) - torch.log(1 - u + eps)
def sample_gumbel(shape, eps=1e-10):
    """Sample Gumbel(0, 1) noise of the given shape.

    Uses the inverse-CDF trick g = -log(-log(u)), with ``eps`` guarding both
    logs against zero.  (Adapted from the Gumbel-Softmax reference
    implementation, MIT license.)
    """
    u = torch.rand(shape).float()
    return -torch.log(eps - torch.log(u + eps))
def gumbel_softmax_sample(logits, tau=1, eps=1e-10):
    """Draw a soft Gumbel-Softmax sample over the last axis of ``logits``.

    (Based on the Gumbel-Softmax reference implementation, MIT license.)
    """
    noise = sample_gumbel(logits.size(), eps=eps)
    if logits.is_cuda:
        noise = noise.cuda()
    perturbed = logits + Variable(noise)
    return my_softmax(perturbed / tau, axis=-1)
def gumbel_softmax(logits, tau=1, hard=False, eps=1e-10):
    """Sample from the Gumbel-Softmax distribution, optionally discretized.

    Args:
        logits: [batch_size, n_class] unnormalized log-probs.
        tau: non-negative scalar temperature.
        hard: if True, return a one-hot sample but keep the gradient of the
            soft sample (straight-through estimator).

    Returns:
        [batch_size, n_class] sample; one-hot when ``hard=True``, otherwise a
        probability distribution summing to 1 over the last axis.

    (Based on the Gumbel-Softmax reference implementation, MIT license.)
    """
    y_soft = gumbel_softmax_sample(logits, tau=tau, eps=eps)
    if not hard:
        return y_soft
    shape = logits.size()
    # Index of the winning class along the last axis.
    winner = y_soft.data.max(-1)[1]
    one_hot = torch.zeros(*shape)
    if y_soft.is_cuda:
        one_hot = one_hot.cuda()
    one_hot = one_hot.zero_().scatter_(-1, winner.view(shape[:-1] + (1,)), 1.0)
    # Straight-through trick: the forward value is exactly one-hot (add then
    # subtract y_soft), while the gradient equals that of y_soft.
    return Variable(one_hot - y_soft.data) + y_soft
def binary_accuracy(output, labels):
    """Fraction of thresholded predictions (output > 0.5) that match binary ``labels``."""
    hard_preds = (output > 0.5).type_as(labels)
    n_correct = hard_preds.eq(labels).double().sum()
    return n_correct / len(labels)
def load_data(batch_size=1, suffix=''):
    """Load the particle-simulation data and wrap it in DataLoaders.

    Reads ``data/{loc,vel,edges}_{train,valid,test}<suffix>.npy``, normalizes
    locations and velocities to [-1, 1] using the *training* extrema, stacks
    them into per-node feature vectors, and strips the diagonal (self-edge)
    entries from the flattened adjacency matrices.

    Returns:
        (train_loader, valid_loader, test_loader,
         loc_max, loc_min, vel_max, vel_min) — the extrema are returned so
        predictions can be un-normalized later.
    """
    loc_train = np.load('data/loc_train' + suffix + '.npy')
    vel_train = np.load('data/vel_train' + suffix + '.npy')
    edges_train = np.load('data/edges_train' + suffix + '.npy')
    loc_valid = np.load('data/loc_valid' + suffix + '.npy')
    vel_valid = np.load('data/vel_valid' + suffix + '.npy')
    edges_valid = np.load('data/edges_valid' + suffix + '.npy')
    loc_test = np.load('data/loc_test' + suffix + '.npy')
    vel_test = np.load('data/vel_test' + suffix + '.npy')
    edges_test = np.load('data/edges_test' + suffix + '.npy')
    # [num_samples, num_timesteps, num_dims, num_atoms]
    num_atoms = loc_train.shape[3]  # number of particles
    loc_max = loc_train.max()
    loc_min = loc_train.min()
    vel_max = vel_train.max()
    vel_min = vel_train.min()
    # Normalize to [-1, 1]
    loc_train = (loc_train - loc_min) * 2 / (loc_max - loc_min) - 1
    vel_train = (vel_train - vel_min) * 2 / (vel_max - vel_min) - 1
    loc_valid = (loc_valid - loc_min) * 2 / (loc_max - loc_min) - 1
    vel_valid = (vel_valid - vel_min) * 2 / (vel_max - vel_min) - 1
    loc_test = (loc_test - loc_min) * 2 / (loc_max - loc_min) - 1
    vel_test = (vel_test - vel_min) * 2 / (vel_max - vel_min) - 1
    # Reshape to: [num_sims, num_atoms, num_timesteps, num_dims], e.g. [50000, 5, 49, 2]
    loc_train = np.transpose(loc_train, [0, 3, 1, 2])
    vel_train = np.transpose(vel_train, [0, 3, 1, 2])
    feat_train = np.concatenate([loc_train, vel_train], axis=3)  # [50000, 5, 49, 4]
    edges_train = np.reshape(edges_train, [-1, num_atoms ** 2])  # [50000, 25]
    edges_train = np.array((edges_train + 1) / 2, dtype=np.int64)  # float -> long
    loc_valid = np.transpose(loc_valid, [0, 3, 1, 2])
    vel_valid = np.transpose(vel_valid, [0, 3, 1, 2])
    feat_valid = np.concatenate([loc_valid, vel_valid], axis=3)
    edges_valid = np.reshape(edges_valid, [-1, num_atoms ** 2])
    edges_valid = np.array((edges_valid + 1) / 2, dtype=np.int64)
    loc_test = np.transpose(loc_test, [0, 3, 1, 2])
    vel_test = np.transpose(vel_test, [0, 3, 1, 2])
    feat_test = np.concatenate([loc_test, vel_test], axis=3)
    edges_test = np.reshape(edges_test, [-1, num_atoms ** 2])
    edges_test = np.array((edges_test + 1) / 2, dtype=np.int64)
    feat_train = torch.FloatTensor(feat_train)  # feature = location and velocity vectors concatenated
    edges_train = torch.LongTensor(edges_train)
    feat_valid = torch.FloatTensor(feat_valid)
    edges_valid = torch.LongTensor(edges_valid)
    feat_test = torch.FloatTensor(feat_test)
    edges_test = torch.LongTensor(edges_test)
    # Exclude self edges
    off_diag_idx = np.ravel_multi_index(
        np.where(np.ones((num_atoms, num_atoms)) - np.eye(num_atoms)),  # zeros on the diagonal, ones elsewhere; np.where yields the nonzero coordinates
        [num_atoms, num_atoms])  # flat indices of every off-diagonal entry
    edges_train = edges_train[:, off_diag_idx]  # drop all diagonal entries of the flattened adjacency matrix
    edges_valid = edges_valid[:, off_diag_idx]
    edges_test = edges_test[:, off_diag_idx]
    train_data = TensorDataset(feat_train, edges_train)
    valid_data = TensorDataset(feat_valid, edges_valid)
    test_data = TensorDataset(feat_test, edges_test)
    train_data_loader = DataLoader(train_data, batch_size=batch_size)
    valid_data_loader = DataLoader(valid_data, batch_size=batch_size)
    test_data_loader = DataLoader(test_data, batch_size=batch_size)
    return train_data_loader, valid_data_loader, test_data_loader, loc_max, loc_min, vel_max, vel_min
def load_kuramoto_data(batch_size=1, suffix=''):
    """Load Kuramoto-oscillator data, normalize per feature dim, and build DataLoaders.

    Reads ``data/{feat,edges}_{train,valid,test}<suffix>.npy``; features are
    scaled to [-1, 1] using the *training* extrema of each feature dimension,
    and self-edges are stripped from the flattened adjacency matrices.
    """
    feat_train = np.load('data/feat_train' + suffix + '.npy')
    edges_train = np.load('data/edges_train' + suffix + '.npy')
    feat_valid = np.load('data/feat_valid' + suffix + '.npy')
    edges_valid = np.load('data/edges_valid' + suffix + '.npy')
    feat_test = np.load('data/feat_test' + suffix + '.npy')
    edges_test = np.load('data/edges_test' + suffix + '.npy')
    # [num_sims, num_atoms, num_timesteps, num_dims]
    num_atoms = feat_train.shape[1]
    # Normalize each feature dim. individually
    feat_max = feat_train.max(0).max(0).max(0)
    feat_min = feat_train.min(0).min(0).min(0)
    feat_max = np.expand_dims(np.expand_dims(np.expand_dims(feat_max, 0), 0), 0)
    feat_min = np.expand_dims(np.expand_dims(np.expand_dims(feat_min, 0), 0), 0)
    # Normalize to [-1, 1]
    feat_train = (feat_train - feat_min) * 2 / (feat_max - feat_min) - 1
    feat_valid = (feat_valid - feat_min) * 2 / (feat_max - feat_min) - 1
    feat_test = (feat_test - feat_min) * 2 / (feat_max - feat_min) - 1
    # Reshape to: [num_sims, num_atoms, num_timesteps, num_dims]
    edges_train = np.reshape(edges_train, [-1, num_atoms ** 2])
    edges_valid = np.reshape(edges_valid, [-1, num_atoms ** 2])
    edges_test = np.reshape(edges_test, [-1, num_atoms ** 2])
    feat_train = torch.FloatTensor(feat_train)
    edges_train = torch.LongTensor(edges_train)
    feat_valid = torch.FloatTensor(feat_valid)
    edges_valid = torch.LongTensor(edges_valid)
    feat_test = torch.FloatTensor(feat_test)
    edges_test = torch.LongTensor(edges_test)
    # Exclude self edges
    off_diag_idx = np.ravel_multi_index(
        np.where(np.ones((num_atoms, num_atoms)) - np.eye(num_atoms)),
        [num_atoms, num_atoms])
    edges_train = edges_train[:, off_diag_idx]
    edges_valid = edges_valid[:, off_diag_idx]
    edges_test = edges_test[:, off_diag_idx]
    train_data = TensorDataset(feat_train, edges_train)
    valid_data = TensorDataset(feat_valid, edges_valid)
    test_data = TensorDataset(feat_test, edges_test)
    train_data_loader = DataLoader(train_data, batch_size=batch_size)
    valid_data_loader = DataLoader(valid_data, batch_size=batch_size)
    test_data_loader = DataLoader(test_data, batch_size=batch_size)
    return train_data_loader, valid_data_loader, test_data_loader
def load_kuramoto_data_old(batch_size=1, suffix=''):
    """Load the legacy Kuramoto data from ``data/old_kuramoto/`` into DataLoaders.

    Same pipeline as ``load_kuramoto_data`` except that NO normalization is
    applied (the old dumps are used as-is); self-edges are still stripped.
    """
    feat_train = np.load('data/old_kuramoto/feat_train' + suffix + '.npy')
    edges_train = np.load('data/old_kuramoto/edges_train' + suffix + '.npy')
    feat_valid = np.load('data/old_kuramoto/feat_valid' + suffix + '.npy')
    edges_valid = np.load('data/old_kuramoto/edges_valid' + suffix + '.npy')
    feat_test = np.load('data/old_kuramoto/feat_test' + suffix + '.npy')
    edges_test = np.load('data/old_kuramoto/edges_test' + suffix + '.npy')
    # [num_sims, num_atoms, num_timesteps, num_dims]
    num_atoms = feat_train.shape[1]
    # Reshape to: [num_sims, num_atoms, num_timesteps, num_dims]
    edges_train = np.reshape(edges_train, [-1, num_atoms ** 2])
    edges_valid = np.reshape(edges_valid, [-1, num_atoms ** 2])
    edges_test = np.reshape(edges_test, [-1, num_atoms ** 2])
    feat_train = torch.FloatTensor(feat_train)
    edges_train = torch.LongTensor(edges_train)
    feat_valid = torch.FloatTensor(feat_valid)
    edges_valid = torch.LongTensor(edges_valid)
    feat_test = torch.FloatTensor(feat_test)
    edges_test = torch.LongTensor(edges_test)
    # Exclude self edges
    off_diag_idx = np.ravel_multi_index(
        np.where(np.ones((num_atoms, num_atoms)) - np.eye(num_atoms)),
        [num_atoms, num_atoms])
    edges_train = edges_train[:, off_diag_idx]
    edges_valid = edges_valid[:, off_diag_idx]
    edges_test = edges_test[:, off_diag_idx]
    train_data = TensorDataset(feat_train, edges_train)
    valid_data = TensorDataset(feat_valid, edges_valid)
    test_data = TensorDataset(feat_test, edges_test)
    train_data_loader = DataLoader(train_data, batch_size=batch_size)
    valid_data_loader = DataLoader(valid_data, batch_size=batch_size)
    test_data_loader = DataLoader(test_data, batch_size=batch_size)
    return train_data_loader, valid_data_loader, test_data_loader
def load_motion_data(batch_size=1, suffix=''):
    """Load motion-capture data with a single shared adjacency into DataLoaders.

    Reads ``data/motion_{train,valid,test}<suffix>.npy`` plus one adjacency
    matrix ``data/motion_adj<suffix>.npy`` that is replicated across all
    samples of each split; self-edges are stripped.
    """
    feat_train = np.load('data/motion_train' + suffix + '.npy')
    feat_valid = np.load('data/motion_valid' + suffix + '.npy')
    feat_test = np.load('data/motion_test' + suffix + '.npy')
    adj = np.load('data/motion_adj' + suffix + '.npy')
    # NOTE: Already normalized
    # [num_samples, num_nodes, num_timesteps, num_dims]
    num_nodes = feat_train.shape[1]
    # Replicate the single flattened adjacency once per sample.
    edges_train = np.repeat(np.expand_dims(adj.flatten(), 0),
                            feat_train.shape[0], axis=0)
    edges_valid = np.repeat(np.expand_dims(adj.flatten(), 0),
                            feat_valid.shape[0], axis=0)
    edges_test = np.repeat(np.expand_dims(adj.flatten(), 0),
                           feat_test.shape[0], axis=0)
    feat_train = torch.FloatTensor(feat_train)
    edges_train = torch.LongTensor(np.array(edges_train, dtype=np.int64))
    feat_valid = torch.FloatTensor(feat_valid)
    edges_valid = torch.LongTensor(np.array(edges_valid, dtype=np.int64))
    feat_test = torch.FloatTensor(feat_test)
    edges_test = torch.LongTensor(np.array(edges_test, dtype=np.int64))
    # Exclude self edges
    off_diag_idx = np.ravel_multi_index(
        np.where(np.ones((num_nodes, num_nodes)) - np.eye(num_nodes)),
        [num_nodes, num_nodes])
    edges_train = edges_train[:, off_diag_idx]
    edges_valid = edges_valid[:, off_diag_idx]
    edges_test = edges_test[:, off_diag_idx]
    train_data = TensorDataset(feat_train, edges_train)
    valid_data = TensorDataset(feat_valid, edges_valid)
    test_data = TensorDataset(feat_test, edges_test)
    train_data_loader = DataLoader(train_data, batch_size=batch_size)
    valid_data_loader = DataLoader(valid_data, batch_size=batch_size)
    test_data_loader = DataLoader(test_data, batch_size=batch_size)
    return train_data_loader, valid_data_loader, test_data_loader
def to_2d_idx(idx, num_cols):
    """Convert flat row-major indices into (column, row) pairs for a grid with ``num_cols`` columns."""
    flat = np.array(idx, dtype=np.int64)
    row_idx = np.array(np.floor(flat / float(num_cols)), dtype=np.int64)
    col_idx = flat % num_cols
    return col_idx, row_idx
def encode_onehot(labels):
    """One-hot encode ``labels``.

    Bug fix: classes are sorted, so the label -> column mapping is
    deterministic across runs (plain ``set`` iteration order is an
    implementation detail; for the typical contiguous 0..k-1 labels this
    produces the same mapping as before).

    Returns:
        int32 array of shape (len(labels), num_classes); row i is the
        one-hot vector of labels[i].
    """
    classes = sorted(set(labels))  # e.g. [0, 1, 2, 3, 4]
    classes_dict = {c: np.identity(len(classes))[i, :]
                    for i, c in enumerate(classes)}
    labels_onehot = np.array(list(map(classes_dict.get, labels)),
                             dtype=np.int32)
    return labels_onehot
def get_triu_indices(num_nodes):
    """Flat (row * num_nodes + col) indices of the strict upper triangle."""
    mask = torch.ones(num_nodes, num_nodes).triu() - torch.eye(num_nodes, num_nodes)
    rows, cols = mask.nonzero().t()
    return rows * num_nodes + cols
def get_tril_indices(num_nodes):
    """Flat (row * num_nodes + col) indices of the strict lower triangle."""
    mask = torch.ones(num_nodes, num_nodes).tril() - torch.eye(num_nodes, num_nodes)
    rows, cols = mask.nonzero().t()
    return rows * num_nodes + cols
def get_offdiag_indices(num_nodes):
    """Flat indices of every off-diagonal entry, in row-major order."""
    mask = torch.ones(num_nodes, num_nodes) - torch.eye(num_nodes, num_nodes)
    rows, cols = mask.nonzero().t()
    return rows * num_nodes + cols
def get_triu_offdiag_indices(num_nodes):
    """Positions of the strict-upper-triangular entries within the off-diagonal vector."""
    is_triu = torch.zeros(num_nodes * num_nodes)
    is_triu[get_triu_indices(num_nodes)] = 1.
    is_triu = is_triu[get_offdiag_indices(num_nodes)]
    return is_triu.nonzero()
def get_tril_offdiag_indices(num_nodes):
    """Positions of the strict-lower-triangular entries within the off-diagonal vector."""
    is_tril = torch.zeros(num_nodes * num_nodes)
    is_tril[get_tril_indices(num_nodes)] = 1.
    is_tril = is_tril[get_offdiag_indices(num_nodes)]
    return is_tril.nonzero()
def get_minimum_distance(data):
    """Per-sample minimum (over time) pairwise squared distance between atoms.

    ``data`` is [sims, atoms, steps, dims]; only the first two feature dims
    (x, y position) are used.  Returns [sims, atoms * atoms] of squared
    distances (diagonal entries are zero).
    """
    xy = data[:, :, :, :2].transpose(1, 2)
    sq_norm = (xy ** 2).sum(-1, keepdim=True)
    # ||a - b||^2 = ||a||^2 + ||b||^2 - 2 a.b, computed per timestep
    dist = sq_norm + \
        sq_norm.transpose(2, 3) - \
        2 * torch.matmul(xy, xy.transpose(2, 3))
    min_dist, _ = dist.min(1)  # minimum over the timestep dimension
    return min_dist.view(min_dist.size(0), -1)
def get_buckets(dist, num_buckets):
    """Split ``dist`` (a tensor of values) into ``num_buckets`` equal-width buckets.

    Returns (list of index arrays, bucket lower thresholds).  Lower bounds
    are exclusive, so values equal to the minimum fall into no bucket.
    NOTE(review): thresholds start at 0 and are not offset by the minimum
    value, so the binning only lines up when the values start near 0 —
    preserved from the original.
    """
    values = dist.cpu().data.numpy()
    lo = np.min(values)
    hi = np.max(values)
    width = (hi - lo) / num_buckets
    thresholds = width * np.arange(num_buckets)
    bucket_idx = []
    for i in range(num_buckets):
        if i < num_buckets - 1:
            in_bucket = np.all(np.vstack((values > thresholds[i],
                                          values <= thresholds[i + 1])), 0)
            idx = np.where(in_bucket)[0]
        else:
            idx = np.where(values > thresholds[i])[0]
        bucket_idx.append(idx)
    return bucket_idx, thresholds
def get_correct_per_bucket(bucket_idx, pred, target):
    """Count correct predictions per bucket; ``pred`` is a column tensor [N, 1]."""
    pred_np = pred.cpu().numpy()[:, 0]
    target_np = target.cpu().data.numpy()
    correct_per_bucket = []
    for indices in bucket_idx:
        n_correct = np.sum(pred_np[indices] == target_np[indices])
        correct_per_bucket.append(n_correct)
    return correct_per_bucket
def get_correct_per_bucket_(bucket_idx, pred, target):
    """Count correct predictions per bucket; ``pred`` is a flat tensor [N]."""
    pred_np = pred.cpu().numpy()
    target_np = target.cpu().data.numpy()
    correct_per_bucket = []
    for indices in bucket_idx:
        n_correct = np.sum(pred_np[indices] == target_np[indices])
        correct_per_bucket.append(n_correct)
    return correct_per_bucket
def kl_categorical(preds, log_prior, num_atoms, eps=1e-16):
    """KL(preds || prior), summed and averaged over batch and atoms."""
    log_ratio = torch.log(preds + eps) - log_prior
    kl_div = preds * log_ratio
    return kl_div.sum() / (num_atoms * preds.size(0))
def kl_categorical_uniform(preds, num_atoms, num_edge_types, add_const=False,
                           eps=1e-16):
    """Negative entropy of ``preds`` per atom/batch; ``add_const`` adds
    log(num_edge_types) per entry, turning it into KL against the uniform prior."""
    kl_div = preds * torch.log(preds + eps)
    if add_const:
        kl_div = kl_div + np.log(num_edge_types)
    return kl_div.sum() / (num_atoms * preds.size(0))
def nll_gaussian(preds, target, variance, add_const=False):
    """Gaussian negative log-likelihood with fixed ``variance``, averaged
    over the first two dimensions of ``target``."""
    neg_log_p = (preds - target) ** 2 / (2 * variance)
    if add_const:
        # log-partition term of the Gaussian density
        neg_log_p = neg_log_p + 0.5 * np.log(2 * np.pi * variance)
    return neg_log_p.sum() / (target.size(0) * target.size(1))
def edge_accuracy(preds, target):
    """Fraction of edges whose argmax class matches ``target``.

    ``preds`` is [..., num_edge_types] scores; ``target`` holds integer class
    labels with matching leading shape.
    """
    _, preds = preds.max(-1)
    correct = preds.float().data.eq(
        target.float().data.view_as(preds)).cpu().sum()
    # Bug fix: np.float was removed in NumPy 1.24; use the builtin float.
    return float(correct) / (target.size(0) * target.size(1))
if __name__=="__main__":
    # Smoke test: flat positions of the strict upper triangle within the
    # off-diagonal vector of a 5-node graph.
    triu_indices = get_triu_offdiag_indices(5)
    print(triu_indices)
| 38.674274 | 166 | 0.680006 | import numpy as np
import torch
from torch.utils.data.dataset import TensorDataset
from torch.utils.data import DataLoader
import torch.nn.functional as F
from torch.autograd import Variable
def my_softmax(input, axis=1):
trans_input = input.transpose(axis, 0).contiguous()
soft_max_1d = F.softmax(trans_input)
return soft_max_1d.transpose(axis, 0)
def binary_concrete(logits, tau=1, hard=False, eps=1e-10):
y_soft = binary_concrete_sample(logits, tau=tau, eps=eps)
if hard:
y_hard = (y_soft > 0.5).float()
y = Variable(y_hard.data - y_soft.data) + y_soft
else:
y = y_soft
return y
def binary_concrete_sample(logits, tau=1, eps=1e-10):
logistic_noise = sample_logistic(logits.size(), eps=eps)
if logits.is_cuda:
logistic_noise = logistic_noise.cuda()
y = logits + Variable(logistic_noise)
return F.sigmoid(y / tau)
def sample_logistic(shape, eps=1e-10):
uniform = torch.rand(shape).float()
return torch.log(uniform + eps) - torch.log(1 - uniform + eps)
def sample_gumbel(shape, eps=1e-10):
U = torch.rand(shape).float()
return - torch.log(eps - torch.log(U + eps))
def gumbel_softmax_sample(logits, tau=1, eps=1e-10):
gumbel_noise = sample_gumbel(logits.size(), eps=eps)
if logits.is_cuda:
gumbel_noise = gumbel_noise.cuda()
y = logits + Variable(gumbel_noise)
return my_softmax(y / tau, axis=-1)
def gumbel_softmax(logits, tau=1, hard=False, eps=1e-10):
y_soft = gumbel_softmax_sample(logits, tau=tau, eps=eps)
if hard:
shape = logits.size()
_, k = y_soft.data.max(-1)
y_hard = torch.zeros(*shape)
if y_soft.is_cuda:
y_hard = y_hard.cuda()
y_hard = y_hard.zero_().scatter_(-1, k.view(shape[:-1] + (1,)), 1.0)
y = Variable(y_hard - y_soft.data) + y_soft
else:
y = y_soft
return y
def binary_accuracy(output, labels):
preds = output > 0.5
correct = preds.type_as(labels).eq(labels).double()
correct = correct.sum()
return correct / len(labels)
def load_data(batch_size=1, suffix=''):
loc_train = np.load('data/loc_train' + suffix + '.npy')
vel_train = np.load('data/vel_train' + suffix + '.npy')
edges_train = np.load('data/edges_train' + suffix + '.npy')
loc_valid = np.load('data/loc_valid' + suffix + '.npy')
vel_valid = np.load('data/vel_valid' + suffix + '.npy')
edges_valid = np.load('data/edges_valid' + suffix + '.npy')
loc_test = np.load('data/loc_test' + suffix + '.npy')
vel_test = np.load('data/vel_test' + suffix + '.npy')
edges_test = np.load('data/edges_test' + suffix + '.npy')
num_atoms = loc_train.shape[3]
loc_max = loc_train.max()
loc_min = loc_train.min()
vel_max = vel_train.max()
vel_min = vel_train.min()
loc_train = (loc_train - loc_min) * 2 / (loc_max - loc_min) - 1
vel_train = (vel_train - vel_min) * 2 / (vel_max - vel_min) - 1
loc_valid = (loc_valid - loc_min) * 2 / (loc_max - loc_min) - 1
vel_valid = (vel_valid - vel_min) * 2 / (vel_max - vel_min) - 1
loc_test = (loc_test - loc_min) * 2 / (loc_max - loc_min) - 1
vel_test = (vel_test - vel_min) * 2 / (vel_max - vel_min) - 1
loc_train = np.transpose(loc_train, [0, 3, 1, 2])
vel_train = np.transpose(vel_train, [0, 3, 1, 2])
feat_train = np.concatenate([loc_train, vel_train], axis=3)
edges_train = np.reshape(edges_train, [-1, num_atoms ** 2])
edges_train = np.array((edges_train + 1) / 2, dtype=np.int64)
loc_valid = np.transpose(loc_valid, [0, 3, 1, 2])
vel_valid = np.transpose(vel_valid, [0, 3, 1, 2])
feat_valid = np.concatenate([loc_valid, vel_valid], axis=3)
edges_valid = np.reshape(edges_valid, [-1, num_atoms ** 2])
edges_valid = np.array((edges_valid + 1) / 2, dtype=np.int64)
loc_test = np.transpose(loc_test, [0, 3, 1, 2])
vel_test = np.transpose(vel_test, [0, 3, 1, 2])
feat_test = np.concatenate([loc_test, vel_test], axis=3)
edges_test = np.reshape(edges_test, [-1, num_atoms ** 2])
edges_test = np.array((edges_test + 1) / 2, dtype=np.int64)
feat_train = torch.FloatTensor(feat_train)
edges_train = torch.LongTensor(edges_train)
feat_valid = torch.FloatTensor(feat_valid)
edges_valid = torch.LongTensor(edges_valid)
feat_test = torch.FloatTensor(feat_test)
edges_test = torch.LongTensor(edges_test)
off_diag_idx = np.ravel_multi_index(
np.where(np.ones((num_atoms, num_atoms)) - np.eye(num_atoms)),
[num_atoms, num_atoms])
edges_train = edges_train[:, off_diag_idx]
edges_valid = edges_valid[:, off_diag_idx]
edges_test = edges_test[:, off_diag_idx]
train_data = TensorDataset(feat_train, edges_train)
valid_data = TensorDataset(feat_valid, edges_valid)
test_data = TensorDataset(feat_test, edges_test)
train_data_loader = DataLoader(train_data, batch_size=batch_size)
valid_data_loader = DataLoader(valid_data, batch_size=batch_size)
test_data_loader = DataLoader(test_data, batch_size=batch_size)
return train_data_loader, valid_data_loader, test_data_loader, loc_max, loc_min, vel_max, vel_min
def load_kuramoto_data(batch_size=1, suffix=''):
    """Load Kuramoto-oscillator trajectories and edge labels into DataLoaders.

    Reads ``data/feat_*<suffix>.npy`` / ``data/edges_*<suffix>.npy`` for the
    train/valid/test splits, rescales features to [-1, 1] using the *training*
    split's extrema, flattens the edge matrices, drops self-edges, and wraps
    everything in batched ``DataLoader``s.

    Args:
        batch_size: mini-batch size for all three loaders.
        suffix: filename suffix selecting a dataset variant.

    Returns:
        (train_data_loader, valid_data_loader, test_data_loader)
    """
    feat_train = np.load('data/feat_train' + suffix + '.npy')
    edges_train = np.load('data/edges_train' + suffix + '.npy')
    feat_valid = np.load('data/feat_valid' + suffix + '.npy')
    edges_valid = np.load('data/edges_valid' + suffix + '.npy')
    feat_test = np.load('data/feat_test' + suffix + '.npy')
    edges_test = np.load('data/edges_test' + suffix + '.npy')
    # assumes features are 4-D with atoms on axis 1 -- TODO confirm upstream
    num_atoms = feat_train.shape[1]
    # Per-feature extrema over the first three axes of the TRAINING split only;
    # the same statistics normalize valid/test (no leakage of their stats).
    feat_max = feat_train.max(0).max(0).max(0)
    feat_min = feat_train.min(0).min(0).min(0)
    # Re-add three leading singleton axes so the min/max broadcast elementwise.
    feat_max = np.expand_dims(np.expand_dims(np.expand_dims(feat_max, 0), 0), 0)
    feat_min = np.expand_dims(np.expand_dims(np.expand_dims(feat_min, 0), 0), 0)
    # Linear rescale to [-1, 1].
    feat_train = (feat_train - feat_min) * 2 / (feat_max - feat_min) - 1
    feat_valid = (feat_valid - feat_min) * 2 / (feat_max - feat_min) - 1
    feat_test = (feat_test - feat_min) * 2 / (feat_max - feat_min) - 1
    # Flatten each num_atoms x num_atoms adjacency into one row per sample.
    edges_train = np.reshape(edges_train, [-1, num_atoms ** 2])
    edges_valid = np.reshape(edges_valid, [-1, num_atoms ** 2])
    edges_test = np.reshape(edges_test, [-1, num_atoms ** 2])
    feat_train = torch.FloatTensor(feat_train)
    edges_train = torch.LongTensor(edges_train)
    feat_valid = torch.FloatTensor(feat_valid)
    edges_valid = torch.LongTensor(edges_valid)
    feat_test = torch.FloatTensor(feat_test)
    edges_test = torch.LongTensor(edges_test)
    # Flat indices of every off-diagonal cell: keeps only real (non-self) edges.
    off_diag_idx = np.ravel_multi_index(
        np.where(np.ones((num_atoms, num_atoms)) - np.eye(num_atoms)),
        [num_atoms, num_atoms])
    edges_train = edges_train[:, off_diag_idx]
    edges_valid = edges_valid[:, off_diag_idx]
    edges_test = edges_test[:, off_diag_idx]
    train_data = TensorDataset(feat_train, edges_train)
    valid_data = TensorDataset(feat_valid, edges_valid)
    test_data = TensorDataset(feat_test, edges_test)
    train_data_loader = DataLoader(train_data, batch_size=batch_size)
    valid_data_loader = DataLoader(valid_data, batch_size=batch_size)
    test_data_loader = DataLoader(test_data, batch_size=batch_size)
    return train_data_loader, valid_data_loader, test_data_loader
def load_kuramoto_data_old(batch_size=1, suffix=''):
    """Load the legacy Kuramoto dataset (no feature normalization).

    Reads feature/edge arrays from ``data/old_kuramoto/``, flattens each
    adjacency matrix, removes the self-edge (diagonal) columns, and returns
    one batched ``DataLoader`` per split.

    Args:
        batch_size: mini-batch size for all three loaders.
        suffix: filename suffix selecting a dataset variant.

    Returns:
        (train_data_loader, valid_data_loader, test_data_loader)
    """
    arrays = {}
    for split in ('train', 'valid', 'test'):
        arrays[split] = (
            np.load('data/old_kuramoto/feat_' + split + suffix + '.npy'),
            np.load('data/old_kuramoto/edges_' + split + suffix + '.npy'),
        )

    num_atoms = arrays['train'][0].shape[1]
    # Flat indices of the off-diagonal cells of a num_atoms x num_atoms grid.
    off_diag_idx = np.ravel_multi_index(
        np.where(np.ones((num_atoms, num_atoms)) - np.eye(num_atoms)),
        [num_atoms, num_atoms])

    loaders = []
    for split in ('train', 'valid', 'test'):
        feats, edges = arrays[split]
        flat_edges = np.reshape(edges, [-1, num_atoms ** 2])
        feat_tensor = torch.FloatTensor(feats)
        edge_tensor = torch.LongTensor(flat_edges)[:, off_diag_idx]
        dataset = TensorDataset(feat_tensor, edge_tensor)
        loaders.append(DataLoader(dataset, batch_size=batch_size))

    return loaders[0], loaders[1], loaders[2]
def load_motion_data(batch_size=1, suffix=''):
    """Load motion-capture features with a single shared adjacency matrix.

    Unlike the Kuramoto loaders, every sample shares the same graph: the
    flattened ``adj`` matrix is tiled once per sample to form the edge labels.

    Args:
        batch_size: mini-batch size for all three loaders.
        suffix: filename suffix selecting a dataset variant.

    Returns:
        (train_data_loader, valid_data_loader, test_data_loader)
    """
    feat_train = np.load('data/motion_train' + suffix + '.npy')
    feat_valid = np.load('data/motion_valid' + suffix + '.npy')
    feat_test = np.load('data/motion_test' + suffix + '.npy')
    adj = np.load('data/motion_adj' + suffix + '.npy')
    # assumes nodes are on axis 1 of the feature arrays -- TODO confirm upstream
    num_nodes = feat_train.shape[1]
    # Tile the single flattened adjacency row once per sample in each split.
    edges_train = np.repeat(np.expand_dims(adj.flatten(), 0),
                            feat_train.shape[0], axis=0)
    edges_valid = np.repeat(np.expand_dims(adj.flatten(), 0),
                            feat_valid.shape[0], axis=0)
    edges_test = np.repeat(np.expand_dims(adj.flatten(), 0),
                           feat_test.shape[0], axis=0)
    feat_train = torch.FloatTensor(feat_train)
    edges_train = torch.LongTensor(np.array(edges_train, dtype=np.int64))
    feat_valid = torch.FloatTensor(feat_valid)
    edges_valid = torch.LongTensor(np.array(edges_valid, dtype=np.int64))
    feat_test = torch.FloatTensor(feat_test)
    edges_test = torch.LongTensor(np.array(edges_test, dtype=np.int64))
    # Keep only off-diagonal (non-self) edge columns.
    off_diag_idx = np.ravel_multi_index(
        np.where(np.ones((num_nodes, num_nodes)) - np.eye(num_nodes)),
        [num_nodes, num_nodes])
    edges_train = edges_train[:, off_diag_idx]
    edges_valid = edges_valid[:, off_diag_idx]
    edges_test = edges_test[:, off_diag_idx]
    train_data = TensorDataset(feat_train, edges_train)
    valid_data = TensorDataset(feat_valid, edges_valid)
    test_data = TensorDataset(feat_test, edges_test)
    train_data_loader = DataLoader(train_data, batch_size=batch_size)
    valid_data_loader = DataLoader(valid_data, batch_size=batch_size)
    test_data_loader = DataLoader(test_data, batch_size=batch_size)
    return train_data_loader, valid_data_loader, test_data_loader
def to_2d_idx(idx, num_cols):
    """Convert flat row-major indices into (column, row) grid coordinates."""
    flat = np.array(idx, dtype=np.int64)
    row_idx = np.array(np.floor(flat / float(num_cols)), dtype=np.int64)
    col_idx = flat % num_cols
    return col_idx, row_idx
def encode_onehot(labels):
    """One-hot encode a label sequence.

    Column order follows the set-iteration order of the distinct labels
    (deterministic within a run, but not guaranteed across runs for
    arbitrary label types).
    """
    distinct = set(labels)
    eye = np.identity(len(distinct))
    column_of = {label: eye[i, :] for i, label in enumerate(distinct)}
    rows = [column_of[label] for label in labels]
    return np.array(rows, dtype=np.int32)
def get_triu_indices(num_nodes):
    """Flat (row * n + col) indices of the strict upper triangle of an n x n grid."""
    mask = torch.ones(num_nodes, num_nodes).triu() - torch.eye(num_nodes, num_nodes)
    rows, cols = mask.nonzero().t()
    return rows * num_nodes + cols
def get_tril_indices(num_nodes):
    """Flat (row * n + col) indices of the strict lower triangle of an n x n grid."""
    mask = torch.ones(num_nodes, num_nodes).tril() - torch.eye(num_nodes, num_nodes)
    rows, cols = mask.nonzero().t()
    return rows * num_nodes + cols
def get_offdiag_indices(num_nodes):
    """Flat (row * n + col) indices of every off-diagonal cell of an n x n grid."""
    mask = torch.ones(num_nodes, num_nodes) - torch.eye(num_nodes, num_nodes)
    rows, cols = mask.nonzero().t()
    return rows * num_nodes + cols
def get_triu_offdiag_indices(num_nodes):
    """Positions of upper-triangle entries within the off-diagonal index list.

    Returns a (K, 1) LongTensor, matching ``Tensor.nonzero()`` output shape.
    """
    marker = torch.zeros(num_nodes * num_nodes)
    marker[get_triu_indices(num_nodes)] = 1.
    return marker[get_offdiag_indices(num_nodes)].nonzero()
def get_tril_offdiag_indices(num_nodes):
    """Positions of lower-triangle entries within the off-diagonal index list.

    Returns a (K, 1) LongTensor, matching ``Tensor.nonzero()`` output shape.
    """
    marker = torch.zeros(num_nodes * num_nodes)
    marker[get_tril_indices(num_nodes)] = 1.
    return marker[get_offdiag_indices(num_nodes)].nonzero()
def get_minimum_distance(data):
    """Minimum squared pairwise distance over axis 1, flattened per sample.

    Uses the ||a||^2 + ||b||^2 - 2<a, b> expansion on the first two feature
    channels. Assumes ``data`` is 4-D with coordinates in the last axis --
    TODO confirm axis meaning with the caller.
    """
    pos = data[:, :, :, :2].transpose(1, 2)
    sq_norm = (pos ** 2).sum(-1, keepdim=True)
    pairwise = (sq_norm + sq_norm.transpose(2, 3)
                - 2 * torch.matmul(pos, pos.transpose(2, 3)))
    closest, _ = pairwise.min(1)
    return closest.view(closest.size(0), -1)
def get_buckets(dist, num_buckets):
    """Partition distances into `num_buckets` half-open ranges (lo, hi].

    NOTE(review): thresholds start at 0, not at min(dist), so values <= 0
    land in no bucket; behavior preserved from the original.

    Returns:
        (bucket_idx, thresholds): list of index arrays, and the lower
        threshold of each bucket.
    """
    values = dist.cpu().data.numpy()
    step = (np.max(values) - np.min(values)) / num_buckets
    thresholds = step * np.arange(num_buckets)
    bucket_idx = []
    for b in range(num_buckets - 1):
        in_bucket = (values > thresholds[b]) & (values <= thresholds[b + 1])
        bucket_idx.append(np.where(in_bucket)[0])
    if num_buckets > 0:
        # Last bucket is open-ended above its lower threshold.
        bucket_idx.append(np.where(values > thresholds[-1])[0])
    return bucket_idx, thresholds
def get_correct_per_bucket(bucket_idx, pred, target):
    """Count correct predictions per distance bucket.

    ``pred`` is expected 2-D; column 0 holds the hard prediction.
    """
    pred_arr = pred.cpu().numpy()[:, 0]
    target_arr = target.cpu().data.numpy()
    return [np.sum(pred_arr[idx] == target_arr[idx]) for idx in bucket_idx]
def get_correct_per_bucket_(bucket_idx, pred, target):
    """Count correct predictions per bucket for 1-D prediction tensors.

    Variant of get_correct_per_bucket without the column-0 selection.
    """
    pred_arr = pred.cpu().numpy()
    target_arr = target.cpu().data.numpy()
    return [np.sum(pred_arr[idx] == target_arr[idx]) for idx in bucket_idx]
def kl_categorical(preds, log_prior, num_atoms, eps=1e-16):
    """KL divergence of ``preds`` from a given log-prior, averaged per atom and sample."""
    divergence = preds * (torch.log(preds + eps) - log_prior)
    normalizer = num_atoms * preds.size(0)
    return divergence.sum() / normalizer
def kl_categorical_uniform(preds, num_atoms, num_edge_types, add_const=False,
                           eps=1e-16):
    """KL divergence of ``preds`` from a uniform prior over edge types.

    With ``add_const`` the log(num_edge_types) uniform-prior constant is added
    to every element before summing (preserved from the original formulation).
    """
    entropy_term = preds * torch.log(preds + eps)
    if add_const:
        entropy_term = entropy_term + np.log(num_edge_types)
    return entropy_term.sum() / (num_atoms * preds.size(0))
def nll_gaussian(preds, target, variance, add_const=False):
    """Gaussian negative log-likelihood with fixed variance.

    Summed over all elements, divided by batch size * target.size(1); the
    normalization constant is only included when ``add_const`` is set.
    """
    residual = (preds - target) ** 2 / (2 * variance)
    if add_const:
        residual = residual + 0.5 * np.log(2 * np.pi * variance)
    return residual.sum() / (target.size(0) * target.size(1))
def edge_accuracy(preds, target):
    """Fraction of edges whose argmax predicted class matches the target.

    Args:
        preds: tensor of per-edge class scores; argmax is taken over the
            last dimension.
        target: integer class labels, broadcastable to the argmax result.

    Returns:
        Python float accuracy in [0, 1].
    """
    _, preds = preds.max(-1)
    correct = preds.float().data.eq(
        target.float().data.view_as(preds)).cpu().sum()
    # np.float was removed in NumPy 1.24; builtin float() converts the
    # 0-d count tensor identically.
    return float(correct) / (target.size(0) * target.size(1))
# Smoke test: print the positions of upper-triangle entries within the
# off-diagonal index list for a 5-node graph when run directly.
if __name__=="__main__":
    triu_indices = get_triu_offdiag_indices(5)
    print(triu_indices)
| true | true |
f71df7c676dc8d0d33a3040c13da04d04f4cf920 | 3,150 | py | Python | app/users/forms.py | Naomi-sigu/Job-Search | 71170bd444e666d590c856de5913aea22ae9874e | [
"MIT"
] | 1 | 2019-09-28T07:07:47.000Z | 2019-09-28T07:07:47.000Z | app/users/forms.py | Naomi-sigu/Job-Search | 71170bd444e666d590c856de5913aea22ae9874e | [
"MIT"
] | 3 | 2021-06-08T20:25:15.000Z | 2022-03-12T00:00:03.000Z | app/users/forms.py | Naomi-sigu/Job-Search | 71170bd444e666d590c856de5913aea22ae9874e | [
"MIT"
] | 3 | 2019-09-27T05:28:26.000Z | 2019-09-27T05:47:02.000Z | from flask_wtf import FlaskForm
from flask_wtf.file import FileField, FileAllowed
from wtforms import StringField, PasswordField, SubmitField, BooleanField, TextAreaField
from wtforms.validators import DataRequired, Length, Email, EqualTo, ValidationError, Optional, URL
from flask_login import current_user
from app.models import User
class RegistrationForm(FlaskForm):
    """Sign-up form with uniqueness checks against the User table."""
    username = StringField('Username', validators=[DataRequired(), Length(min=2, max=20)])
    fullname = StringField('Full Name', validators=[DataRequired()])
    email = StringField('Email', validators=[DataRequired(), Email()])
    password = PasswordField('Password', validators=[DataRequired()])
    confirm_password = PasswordField('Confirm Password', validators=[DataRequired(), EqualTo('password')])
    submit = SubmitField('Sign Up')

    # BUG FIX: both methods were named ``validate_field`` -- the second def
    # shadowed the first, and WTForms only auto-invokes inline validators
    # named ``validate_<fieldname>``, so NEITHER uniqueness check ever ran.
    def validate_username(self, username):
        """Reject usernames that already exist in the database."""
        user = User.query.filter_by(username=username.data).first()
        if user:
            raise ValidationError('That username is taken. Please choose a different one')

    def validate_email(self, email):
        """Reject email addresses that already exist in the database."""
        user = User.query.filter_by(email=email.data).first()
        if user:
            raise ValidationError('That email is taken. Please choose a different one')
class LoginForm(FlaskForm):
    """Credentials form for signing in an existing user."""
    # Email + password are the credentials; ``remember`` toggles the
    # persistent session cookie (handled by Flask-Login in the view).
    email = StringField('Email', validators=[DataRequired(), Email()])
    password = PasswordField('Password', validators=[DataRequired()])
    remember = BooleanField('Remember Me')
    submit = SubmitField('Login')
class UpdateAccountForm(FlaskForm):
    """Profile-edit form: identity, bio, avatar, and social links.

    The ``validate_username`` / ``validate_email`` methods follow the WTForms
    ``validate_<fieldname>`` convention and are invoked automatically on
    ``form.validate()``; they only query the database when the value actually
    changed from the logged-in user's current value.
    """
    username = StringField('Username', validators=[DataRequired(), Length(min=2, max=20)])
    fullname = StringField('Full Name', validators=[DataRequired()])
    email = StringField('Email', validators=[DataRequired(), Email()])
    bio = TextAreaField('Tell us about you.')
    # Avatar upload restricted to common image extensions.
    picture = FileField('Update Profile Picture', validators=[FileAllowed(['jpg', 'png', 'jpeg'])])
    # Social links are optional but must be well-formed URLs when present.
    facebook = StringField('Facebook URL', validators=[Optional(), URL()])
    twitter = StringField('Twitter URL', validators=[Optional(), URL()])
    github = StringField('Github URL', validators=[Optional(), URL()])
    linkedin = StringField('LinkedIn URL', validators=[Optional(), URL()])
    submit = SubmitField('Update')

    def validate_username(self, username):
        if username.data != current_user.username:
            user = User.query.filter_by(username=username.data).first()
            if user:
                raise ValidationError('That username is taken. Please choose a different one.')

    def validate_email(self, email):
        if email.data != current_user.email:
            user = User.query.filter_by(email=email.data).first()
            if user:
                raise ValidationError('That email is taken. Please choose a different one.')
# class SocialMedia(FlaskForm):
# facebook = StringField('Facebook', validators=[DataRequired()])
# twitter = StringField('Twitter', validators=[DataRequired()])
# Github = StringField('Github', validators=[DataRequired()])
# linkedin = StringField('LinkedIn', validators=[DataRequired()])
# submit = SubmitField('Login') | 47.727273 | 106 | 0.691746 | from flask_wtf import FlaskForm
from flask_wtf.file import FileField, FileAllowed
from wtforms import StringField, PasswordField, SubmitField, BooleanField, TextAreaField
from wtforms.validators import DataRequired, Length, Email, EqualTo, ValidationError, Optional, URL
from flask_login import current_user
from app.models import User
class RegistrationForm(FlaskForm):
username = StringField('Username', validators=[DataRequired(), Length(min=2, max=20)])
fullname = StringField('Full Name', validators=[DataRequired()])
email = StringField('Email', validators=[DataRequired(), Email()])
password = PasswordField('Password', validators=[DataRequired()])
confirm_password = PasswordField('Confirm Password', validators=[DataRequired(), EqualTo('password')])
submit = SubmitField('Sign Up')
def validate_field(self, username):
user = User.query.filter_by(username=username.data).first()
if user:
raise ValidationError('That username is taken. Please choose a different one')
def validate_field(self, email):
user = User.query.filter_by(email=email.data).first()
if user:
raise ValidationError('That email is taken. Please choose a different one')
class LoginForm(FlaskForm):
email = StringField('Email', validators=[DataRequired(), Email()])
password = PasswordField('Password', validators=[DataRequired()])
remember = BooleanField('Remember Me')
submit = SubmitField('Login')
class UpdateAccountForm(FlaskForm):
username = StringField('Username', validators=[DataRequired(), Length(min=2, max=20)])
fullname = StringField('Full Name', validators=[DataRequired()])
email = StringField('Email', validators=[DataRequired(), Email()])
bio = TextAreaField('Tell us about you.')
picture = FileField('Update Profile Picture', validators=[FileAllowed(['jpg', 'png', 'jpeg'])])
facebook = StringField('Facebook URL', validators=[Optional(), URL()])
twitter = StringField('Twitter URL', validators=[Optional(), URL()])
github = StringField('Github URL', validators=[Optional(), URL()])
linkedin = StringField('LinkedIn URL', validators=[Optional(), URL()])
submit = SubmitField('Update')
def validate_username(self, username):
if username.data != current_user.username:
user = User.query.filter_by(username=username.data).first()
if user:
raise ValidationError('That username is taken. Please choose a different one.')
def validate_email(self, email):
if email.data != current_user.email:
user = User.query.filter_by(email=email.data).first()
if user:
raise ValidationError('That email is taken. Please choose a different one.')
| true | true |
f71df83dc8a96664ad6db8eee0fc2a0388ad93d9 | 3,614 | py | Python | pdbdy.py | IMULMUL/shadow | 910af255030274e65fae16ca164f5eb4c0103a8c | [
"BSD-3-Clause"
] | 430 | 2015-06-15T16:22:45.000Z | 2022-02-23T09:54:56.000Z | pdbdy.py | IMULMUL/shadow | 910af255030274e65fae16ca164f5eb4c0103a8c | [
"BSD-3-Clause"
] | 18 | 2016-11-26T21:49:20.000Z | 2021-11-12T03:06:11.000Z | pdbdy.py | IMULMUL/shadow | 910af255030274e65fae16ca164f5eb4c0103a8c | [
"BSD-3-Clause"
] | 79 | 2015-06-24T19:42:13.000Z | 2021-08-07T17:15:58.000Z | # shadow - De Mysteriis Dom jemalloc
import os
import sys
import argparse
import pickle
import comtypes
import comtypes.client
import symbol
# this has to be before the import that follows
msdia = comtypes.client.GetModule('msdia\\msdia90.dll')
from comtypes.gen.Dia2Lib import *
# https://msdn.microsoft.com/en-us/library/wcstk66t.aspx
udtEnumToStr = ('struct', 'class', 'union', 'interface')
# get a handle to the DIA COM object
def getDIAObj():
    """Create and return the DIA (Debug Interface Access) COM data source.

    Exits the process with status 1 if the COM class cannot be created
    (typically because msdia90.dll is not registered).
    NOTE: Python 2 source (print statement).
    """
    global msdia
    try:
        dia = comtypes.client.CreateObject(msdia.DiaSource)
    except Exception as exc:
        print "Exception creating DIA object: %s\nTry to regsrv32.dll msdia90.dll" % (str(exc))
        sys.exit(1)
    return dia
# parse the PDB
def loadPDB(dia, pdbFile):
try:
dia.loadDataFromPdb(pdbFile)
return dia.openSession()
except Exception as exc:
print('[!] loadDataFromPdb() error %s' % (str(exc)))
sys.exit(1)
# convert a cygwin path to a windows path if needed
def convertPath(path):
    """Translate a cygwin-style path into a Windows path.

    Paths without forward slashes pass through unchanged; ``/cygdrive/X/...``
    becomes ``X:\\...``; any other slash-separated path just has its
    separators flipped.
    """
    if "/" not in path:
        return path
    if path.startswith("/cygdrive/"):
        parts = path.split("/")
        drive = parts[2]
        return drive + ":\\" + "\\".join(parts[3:])
    return path.replace('/', '\\')
# load up the symbol list
def loadPickle(pfile):
    """Deserialize and return the symbol list stored in a pickle file."""
    with open(pfile, 'rb') as handle:
        return pickle.load(handle)
# store the symbol list
def storePickle(syms, pdbFileName):
    """Serialize the symbol list to ``<pdbFileName>.pkl`` in the CWD.

    NOTE: Python 2 source (print statement).
    """
    outFile = '%s.pkl' % (pdbFileName)
    pfd = open(outFile, 'wb')
    pickle.dump(syms, pfd)
    pfd.close()
    print "Created pickle file %s" % (outFile)
# parse the input PDB
def parsePDB(pdbObj):
    """Extract all UDT symbols from an open DIA session.

    First collects the names of every class that has a vftable (from the
    public symbols), then builds a ``symbol.symbol`` for every UDT, flagging
    the ones with a vftable.

    Returns:
        list of unique ``symbol.symbol`` objects.
    """
    syms = set()
    vfts = set()
    # iterate the public syms to find all vtables
    for symb in pdbObj.globalScope.findChildren(SymTagPublicSymbol, None, 0):
        symbol_data = symb.QueryInterface(IDiaSymbol)
        full_name = symbol_data.undecoratedName
        vft_idx = full_name.find("::`vftable'")
        if vft_idx == -1:
            continue
        # [6:] presumably strips a fixed "const " prefix of the undecorated
        # name -- TODO confirm against actual DIA output
        symbol_name = full_name[6:vft_idx]
        vfts.add(symbol_name)
    # iterate all UDT/private? symbols
    for symb in pdbObj.globalScope.findChildren(SymTagUDT, None, 0):
        symbol_data = symb.QueryInterface(IDiaSymbol)
        hasVFT = symbol_data.name in vfts
        symbol_obj = symbol.symbol(udtEnumToStr[symbol_data.udtKind], symbol_data.name,
                                   symbol_data.length, hasVFT)
        syms.add(symbol_obj)
    return list(syms)
# main
def do_main():
    """CLI entry point: load types from a PDB (or a cached pickle) and print them.

    NOTE: Python 2 source (print statements).
    """
    parser = argparse.ArgumentParser(description="PDB type search")
    parser.add_argument('-f', '--pdbFile', required=True, help="The target PDB file.")
    parser.add_argument('-p', '--pickleFile', required=False, help="The target pickle file.")
    parser.add_argument('-v', '--verbose', default=False, action='store_true', help='Verbose output')
    args = parser.parse_args()
    pdbFile = convertPath(args.pdbFile)
    # parse/load the PDB
    dia = getDIAObj()
    pdbObj = loadPDB(dia, pdbFile)
    # if a pickle is provided, use that for the initial type list
    # if not, parse the types from the PDB (and cache them to a pickle)
    if args.pickleFile:
        print "Loading type list from pickle."
        syms = loadPickle(convertPath(args.pickleFile))
    else:
        print "Loading type list from PDB %s." % (pdbFile)
        syms = parsePDB(pdbObj)
        storePickle(syms, os.path.basename(pdbFile))
    # we have the types, now what.
    print "Found %d total types." % (len(syms))
    for sym in syms:
        print('%s' % (sym))
if __name__ == '__main__':
do_main()
# EOF
| 28.234375 | 101 | 0.641671 |
import os
import sys
import argparse
import pickle
import comtypes
import comtypes.client
import symbol
msdia = comtypes.client.GetModule('msdia\\msdia90.dll')
from comtypes.gen.Dia2Lib import *
udtEnumToStr = ('struct', 'class', 'union', 'interface')
def getDIAObj():
global msdia
try:
dia = comtypes.client.CreateObject(msdia.DiaSource)
except Exception as exc:
print "Exception creating DIA object: %s\nTry to regsrv32.dll msdia90.dll" % (str(exc))
sys.exit(1)
return dia
def loadPDB(dia, pdbFile):
try:
dia.loadDataFromPdb(pdbFile)
return dia.openSession()
except Exception as exc:
print('[!] loadDataFromPdb() error %s' % (str(exc)))
sys.exit(1)
def convertPath(path):
if path.find("/") == -1:
return path
elif path.startswith("/cygdrive/"):
pieces = path.split("/")
return pieces[2] + ":\\" + "\\".join(pieces[3:])
else:
return path.replace('/', '\\')
def loadPickle(pfile):
pfd = open(pfile, 'rb')
syms = pickle.load(pfd)
pfd.close()
return syms
def storePickle(syms, pdbFileName):
outFile = '%s.pkl' % (pdbFileName)
pfd = open(outFile, 'wb')
pickle.dump(syms, pfd)
pfd.close()
print "Created pickle file %s" % (outFile)
def parsePDB(pdbObj):
syms = set()
vfts = set()
for symb in pdbObj.globalScope.findChildren(SymTagPublicSymbol, None, 0):
symbol_data = symb.QueryInterface(IDiaSymbol)
full_name = symbol_data.undecoratedName
vft_idx = full_name.find("::`vftable'")
if vft_idx == -1:
continue
symbol_name = full_name[6:vft_idx]
vfts.add(symbol_name)
# iterate all UDT/private? symbols
for symb in pdbObj.globalScope.findChildren(SymTagUDT, None, 0):
symbol_data = symb.QueryInterface(IDiaSymbol)
hasVFT = symbol_data.name in vfts
symbol_obj = symbol.symbol(udtEnumToStr[symbol_data.udtKind], symbol_data.name,
symbol_data.length, hasVFT)
syms.add(symbol_obj)
return list(syms)
# main
def do_main():
parser = argparse.ArgumentParser(description="PDB type search")
parser.add_argument('-f', '--pdbFile', required=True, help="The target PDB file.")
parser.add_argument('-p', '--pickleFile', required=False, help="The target pickle file.")
parser.add_argument('-v', '--verbose', default=False, action='store_true', help='Verbose output')
args = parser.parse_args()
pdbFile = convertPath(args.pdbFile)
# parse/load the PDB
dia = getDIAObj()
pdbObj = loadPDB(dia, pdbFile)
# if a pickle is provided, use that for the initial type list
# if not, parse the types from the PDB
if args.pickleFile:
print "Loading type list from pickle."
syms = loadPickle(convertPath(args.pickleFile))
else:
print "Loading type list from PDB %s." % (pdbFile)
syms = parsePDB(pdbObj)
storePickle(syms, os.path.basename(pdbFile))
# we have the types, now what.
print "Found %d total types." % (len(syms))
for sym in syms:
print('%s' % (sym))
if __name__ == '__main__':
do_main()
# EOF
| false | true |
f71df863772cbaeea64ea15afb3a451be08330d9 | 6,151 | py | Python | tensorflow/python/compat/compat.py | craymichael/tensorflow | b5de565c9c57fa7ca02d42bcfe6f470ecf117ba5 | [
"Apache-2.0"
] | 1 | 2016-10-05T18:58:20.000Z | 2016-10-05T18:58:20.000Z | tensorflow/python/compat/compat.py | craymichael/tensorflow | b5de565c9c57fa7ca02d42bcfe6f470ecf117ba5 | [
"Apache-2.0"
] | null | null | null | tensorflow/python/compat/compat.py | craymichael/tensorflow | b5de565c9c57fa7ca02d42bcfe6f470ecf117ba5 | [
"Apache-2.0"
] | null | null | null | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utilities for API compatibility between TensorFlow release versions.
See [Version
Compatibility](https://tensorflow.org/guide/version_compat#backward_forward)
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import datetime
import os
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util import tf_contextlib
from tensorflow.python.util.tf_export import tf_export
# This value changes every day with an automatic CL. It can be modified in code
# via `forward_compatibility_horizon()` or with the environment variable
# TF_FORWARD_COMPATIBILITY_DELTA_DAYS, which is added to the compatibility date.
_FORWARD_COMPATIBILITY_HORIZON = datetime.date(2021, 6, 17)
_FORWARD_COMPATIBILITY_DELTA_DAYS_VAR_NAME = "TF_FORWARD_COMPATIBILITY_DELTA_DAYS"
_FORWARD_COMPATIBILITY_DATE_NUMBER = None
def _date_to_date_number(year, month, day):
return (year << 9) | (month << 5) | day
def _update_forward_compatibility_date_number(date_to_override=None):
  """Recompute the packed date number that forward_compatible() compares against.

  With no argument, uses the compiled-in horizon plus the optional
  TF_FORWARD_COMPATIBILITY_DELTA_DAYS environment offset. With
  `date_to_override`, uses that date instead. A date earlier than the
  compiled-in horizon is rejected with a warning and the global is left
  unchanged.
  """
  global _FORWARD_COMPATIBILITY_DATE_NUMBER

  if date_to_override:
    effective = date_to_override
  else:
    effective = _FORWARD_COMPATIBILITY_HORIZON
    delta = os.getenv(_FORWARD_COMPATIBILITY_DELTA_DAYS_VAR_NAME)
    if delta:
      effective += datetime.timedelta(days=int(delta))

  if effective < _FORWARD_COMPATIBILITY_HORIZON:
    logging.warning("Trying to set the forward compatibility date to the past"
                    " date %s. This will be ignored by TensorFlow." % (effective))
    return
  _FORWARD_COMPATIBILITY_DATE_NUMBER = _date_to_date_number(
      effective.year, effective.month, effective.day)
_update_forward_compatibility_date_number()
@tf_export("compat.forward_compatible")
def forward_compatible(year, month, day):
  """Return True once the forward compatibility window past (year, month, day) applies.

  See [Version
  compatibility](https://tensorflow.org/guide/version_compat#backward_forward).

  A graph *producer* (a Python program building a GraphDef/SavedModel) can
  guard newly added ops with this check so that serialized graphs remain
  loadable by *consumer* binaries built from source before the given date.
  TensorFlow maintains a 3-week forward-compatibility window for builds from
  HEAD, so the gating date is typically >= 3 weeks after the new op lands:

  ```python
  from tensorflow.python.compat import compat

  def add(inputs, name=None):
    if compat.forward_compatible(year, month, day):
      return gen_math_ops.my_new_awesome_add(inputs, name)
    # To maintain forward compatibility, use the old implementation.
    return gen_math_ops.add(inputs, name)
  ```

  Args:
    year: A year (e.g., 2018). Must be an `int`.
    month: A month (1 <= month <= 12) in year. Must be an `int`.
    day: A day (1 <= day <= 31, or 30, or 29, or 28) in month. Must be an
      `int`.

  Returns:
    True if serialized graphs produced by the caller can be assumed
    consumable by binaries built from source after (year, month, day).
  """
  requested = _date_to_date_number(year, month, day)
  return _FORWARD_COMPATIBILITY_DATE_NUMBER > requested
@tf_export("compat.forward_compatibility_horizon")
@tf_contextlib.contextmanager
def forward_compatibility_horizon(year, month, day):
  """Context manager that temporarily moves the forward-compatibility horizon.

  See [Version
  compatibility](https://tensorflow.org/guide/version_compat#backward_forward).

  Lets unit tests exercise code gated on `forward_compatible(...)` before the
  real window expires:

  ```python
  from tensorflow.python.compat import compat

  def testMyNewFeature(self):
    with compat.forward_compatibility_horizon(2018, 08, 02):
      # Test that generate_graph_with_new_features() has an effect
  ```

  On exit the horizon is restored to its default (including any
  TF_FORWARD_COMPATIBILITY_DELTA_DAYS adjustment).

  Args:
    year: A year (e.g., 2018). Must be an `int`.
    month: A month (1 <= month <= 12) in year. Must be an `int`.
    day: A day (1 <= day <= 31, or 30, or 29, or 28) in month. Must be an
      `int`.

  Yields:
    Nothing.
  """
  override = datetime.date(year, month, day)
  try:
    # Kept inside the try so the finally-restore runs even if the override
    # itself is rejected.
    _update_forward_compatibility_date_number(override)
    yield
  finally:
    _update_forward_compatibility_date_number()
| 35.554913 | 82 | 0.745895 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import datetime
import os
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util import tf_contextlib
from tensorflow.python.util.tf_export import tf_export
_FORWARD_COMPATIBILITY_HORIZON = datetime.date(2021, 6, 17)
_FORWARD_COMPATIBILITY_DELTA_DAYS_VAR_NAME = "TF_FORWARD_COMPATIBILITY_DELTA_DAYS"
_FORWARD_COMPATIBILITY_DATE_NUMBER = None
def _date_to_date_number(year, month, day):
return (year << 9) | (month << 5) | day
def _update_forward_compatibility_date_number(date_to_override=None):
global _FORWARD_COMPATIBILITY_DATE_NUMBER
if date_to_override:
date = date_to_override
else:
date = _FORWARD_COMPATIBILITY_HORIZON
delta_days = os.getenv(_FORWARD_COMPATIBILITY_DELTA_DAYS_VAR_NAME)
if delta_days:
date += datetime.timedelta(days=int(delta_days))
if date < _FORWARD_COMPATIBILITY_HORIZON:
logging.warning("Trying to set the forward compatibility date to the past"
" date %s. This will be ignored by TensorFlow." % (date))
return
_FORWARD_COMPATIBILITY_DATE_NUMBER = _date_to_date_number(
date.year, date.month, date.day)
_update_forward_compatibility_date_number()
@tf_export("compat.forward_compatible")
def forward_compatible(year, month, day):
return _FORWARD_COMPATIBILITY_DATE_NUMBER > _date_to_date_number(
year, month, day)
@tf_export("compat.forward_compatibility_horizon")
@tf_contextlib.contextmanager
def forward_compatibility_horizon(year, month, day):
try:
_update_forward_compatibility_date_number(datetime.date(year, month, day))
yield
finally:
_update_forward_compatibility_date_number()
| true | true |
f71dfacd1da249a951dc3ac3e95f73b12974cef7 | 806 | py | Python | algorithms/897. Increasing Order Search Tree.py | woozway/py3-leetcode | e51a9ce7a6bb3e35c0fcb8c8f4f6cd5763708dbf | [
"MIT"
] | 1 | 2020-12-02T13:54:30.000Z | 2020-12-02T13:54:30.000Z | algorithms/897. Increasing Order Search Tree.py | woozway/py3-leetcode | e51a9ce7a6bb3e35c0fcb8c8f4f6cd5763708dbf | [
"MIT"
] | null | null | null | algorithms/897. Increasing Order Search Tree.py | woozway/py3-leetcode | e51a9ce7a6bb3e35c0fcb8c8f4f6cd5763708dbf | [
"MIT"
] | null | null | null | """
1. Clarification
2. Possible solutions
- In-Order Traversal
3. Coding
4. Tests
"""
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
# T=O(n), S=O(n)
class Solution:
    def increasingBST(self, root: TreeNode) -> TreeNode:
        """Rebuild the BST as a right-skewed chain of new nodes in in-order sequence.

        Iterative in-order traversal with an explicit stack; each visited
        value is appended to the tail of the result chain. O(n) time and
        O(n) space.
        """
        if not root:
            return None
        dummy = TreeNode(0)   # sentinel head; answer is dummy.right
        tail = dummy
        stack, node = [], root
        while stack or node:
            while node:
                stack.append(node)
                node = node.left
            node = stack.pop()
            tail.right = TreeNode(node.val)
            tail = tail.right
            node = node.right
        return dummy.right
| 23.028571 | 56 | 0.565757 |
class Solution:
def increasingBST(self, root: TreeNode) -> TreeNode:
if not root: return None
self.ans, self.cur = None, None
self.dfs(root)
return self.ans
def dfs(self, node):
if not node: return
self.dfs(node.left)
if not self.cur:
self.ans = TreeNode(node.val)
self.cur = self.ans
else:
self.cur.right = TreeNode(node.val)
self.cur = self.cur.right
self.dfs(node.right)
| true | true |
f71dfb1af77b347178a2e8e81e8a59a5a5eb3591 | 5,296 | py | Python | pegaflow/service/server.py | polyactis/pegaflow | d30d0bbb501ccfbe0ead6c950946a14d4b6c4708 | [
"Apache-2.0"
] | null | null | null | pegaflow/service/server.py | polyactis/pegaflow | d30d0bbb501ccfbe0ead6c950946a14d4b6c4708 | [
"Apache-2.0"
] | null | null | null | pegaflow/service/server.py | polyactis/pegaflow | d30d0bbb501ccfbe0ead6c950946a14d4b6c4708 | [
"Apache-2.0"
] | null | null | null | import logging
import os
import random
import click
import flask
from OpenSSL import crypto
from pegaflow.service import cache
from pegaflow.service._encoder import PegasusJsonEncoder
from pegaflow.service.base import BooleanConverter
from pegaflow.service.filters import register_jinja2_filters
from pegaflow.service.lifecycle import register_lifecycle_handlers
log = logging.getLogger(__name__)
# Services
services = ["dashboard", "monitoring"]
def generate_self_signed_certificate(certfile, pkeyfile):
    """Create a self-signed certificate/key pair unless both already exist.

    Generates a 2048-bit RSA key and an X.509 certificate valid for ten
    years, then writes both to disk in PEM format.

    :param certfile: path the PEM certificate is written to
    :param pkeyfile: path the PEM private key is written to
    :return: None
    """
    if os.path.isfile(certfile) and os.path.isfile(pkeyfile):
        return
    logging.info("Generating self-signed certificate")
    pkey = crypto.PKey()
    pkey.generate_key(crypto.TYPE_RSA, 2048)
    cert = crypto.X509()
    sub = cert.get_subject()
    sub.C = "US"
    sub.ST = "California"
    sub.L = "Marina Del Rey"
    sub.O = "University of Southern California"
    sub.OU = "Information Sciences Institute"
    sub.CN = "Pegasus Service"
    cert.set_version(1)
    cert.set_serial_number(random.randint(0, 2 ** 32))
    cert.gmtime_adj_notBefore(0)
    cert.gmtime_adj_notAfter(10 * 365 * 24 * 60 * 60)  # 10 years
    cert.set_issuer(sub)
    cert.set_pubkey(pkey)
    cert.sign(pkey, "sha1")
    # Close the files deterministically; the original leaked the file
    # objects via open(...).write(...) and relied on GC to flush/close.
    with open(certfile, "wb") as cert_out:
        cert_out.write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert))
    with open(pkeyfile, "wb") as pkey_out:
        pkey_out.write(crypto.dump_privatekey(crypto.FILETYPE_PEM, pkey))
def run(host="localhost", port=5000, debug=True, verbose=logging.INFO, **kwargs):
    """Create the Flask app and serve it over HTTPS on host:port.

    NOTE(review): ``verbose`` and ``**kwargs`` are accepted but never used in
    this body -- confirm whether the root logger level should honour them.
    """
    app = create_app(env=os.getenv("FLASK_ENV", "development"))
    if debug:
        app.config.update(DEBUG=True)
        logging.getLogger().setLevel(logging.DEBUG)
    # Ensure ~/.pegasus exists for the (possibly generated) TLS material.
    pegasusdir = os.path.expanduser("~/.pegasus")
    if not os.path.isdir(pegasusdir):
        os.makedirs(pegasusdir, mode=0o744)
    cert = app.config.get("CERTIFICATE", None)
    pkey = app.config.get("PRIVATE_KEY", None)
    if cert is None or pkey is None:
        # No TLS configured: fall back to a self-signed pair under ~/.pegasus.
        log.warning("SSL is not configured: Using self-signed certificate")
        cert = os.path.expanduser("~/.pegasus/selfcert.pem")
        pkey = os.path.expanduser("~/.pegasus/selfkey.pem")
        generate_self_signed_certificate(cert, pkey)
    ssl_context = (cert, pkey)
    if os.getuid() != 0:
        log.warning("Service not running as root: Will not be able to switch users")
    # Blocks until the development server shuts down.
    app.run(
        host=host, port=port, threaded=True, ssl_context=ssl_context,
    )
    log.info("Exiting")
def _load_user_config(app):
    """Overlay settings from ~/.pegasus/service.py onto *app*, if present."""
    user_conf = os.path.expanduser("~/.pegasus/service.py")
    if not os.path.isfile(user_conf):
        return
    app.config.from_pyfile(user_conf)
def create_app(config=None, env="development"):
    """Configure app.

    :param config: optional mapping merged over the loaded configuration
    :param env: Flask environment name, exported as FLASK_ENV
    :return: the configured Flask application
    """
    # Environment
    os.environ["FLASK_ENV"] = env
    app = flask.Flask(__name__)
    # Flask Configuration
    # NOTE(review): the defaults module path says "Pegasus.service.defaults"
    # while this package is "pegaflow" -- confirm it resolves at runtime.
    app.config.from_object("Pegasus.service.defaults")
    # app.config.from_object("Pegasus.service.config.%sConfig" % env.capitalize())
    _load_user_config(app)
    app.config.update(config or {})
    if "PEGASUS_ENV" in os.environ:
        app.config.from_envvar("PEGASUS_ENV")
    # Initialize Extensions
    cache.init_app(app)
    # db.init_app(app)
    # socketio.init_app(app, json=flask.json)
    configure_app(app)
    # Service Configuration: dispatch to configure_<service>() when defined
    # at module level (see the `services` list above).
    for service in services:
        config_method = "configure_%s" % service
        if config_method in globals():
            globals()["configure_%s" % service](app)
    return app
def configure_app(app):
    """Apply Flask settings shared by every Pegasus service."""
    # Flask URL maps ship only int, float and path converters; add a
    # boolean converter for routes that take true/false segments.
    app.url_map.converters["boolean"] = BooleanConverter
    # Treat /foo and /foo/ as the same endpoint.
    app.url_map.strict_slashes = False
    # Serialize responses with the Pegasus-aware JSON encoder.
    app.json_encoder = PegasusJsonEncoder
    # Request lifecycle hooks and template filters.
    register_lifecycle_handlers(app)
    register_jinja2_filters(app)
def configure_dashboard(app):
    """Mount the dashboard blueprint at the application root."""
    from pegaflow.service.dashboard import blueprint
    app.register_blueprint(blueprint)
def configure_monitoring(app):
    """Mount the per-user monitoring API under /api/v1/user/<username>."""
    from pegaflow.service.monitoring import monitoring
    app.register_blueprint(monitoring, url_prefix="/api/v1/user/<string:username>")
@click.command(name="pegasus-service")
@click.option(
    "--host",
    default="localhost",
    metavar="<hostname>",
    show_default=True,
    help="Hostname",
)
@click.option(
    "-p",
    "--port",
    type=int,
    default=5000,
    metavar="<port-number>",
    show_default=True,
    help="Port no. on which to listen for requests",
)
@click.option(
    "-d/-nd",
    "--debug/--no-debug",
    default=True,
    metavar="<debug-mode>",
    help="Start server in development mode",
)
# NOTE(review): count=True makes -v an occurrence counter, yet the default is
# logging.DEBUG (10) -- confirm the intended verbosity semantics.
@click.option(
    "-v",
    "--verbose",
    default=logging.DEBUG,
    count=True,
    metavar="<verbosity>",
    help="Logging verbosity",
)
def main(host: str, port: int, debug: bool, verbose: int):
    """Run the Pegasus Service server."""
    run(host=host, port=port, debug=debug, verbose=verbose)
if __name__ == "__main__":
    main()
| 25.708738 | 84 | 0.679569 | import logging
import os
import random
import click
import flask
from OpenSSL import crypto
from pegaflow.service import cache
from pegaflow.service._encoder import PegasusJsonEncoder
from pegaflow.service.base import BooleanConverter
from pegaflow.service.filters import register_jinja2_filters
from pegaflow.service.lifecycle import register_lifecycle_handlers
log = logging.getLogger(__name__)
services = ["dashboard", "monitoring"]
def generate_self_signed_certificate(certfile, pkeyfile):
    """Write a self-signed RSA cert/key pair in PEM format unless both exist."""
    if os.path.isfile(certfile) and os.path.isfile(pkeyfile):
        return
    logging.info("Generating self-signed certificate")
    pkey = crypto.PKey()
    pkey.generate_key(crypto.TYPE_RSA, 2048)
    cert = crypto.X509()
    sub = cert.get_subject()
    sub.C = "US"
    sub.ST = "California"
    sub.L = "Marina Del Rey"
    sub.O = "University of Southern California"
    sub.OU = "Information Sciences Institute"
    sub.CN = "Pegasus Service"
    cert.set_version(1)
    cert.set_serial_number(random.randint(0, 2 ** 32))
    cert.gmtime_adj_notBefore(0)
    cert.gmtime_adj_notAfter(10 * 365 * 24 * 60 * 60)  # 10 years
    cert.set_issuer(sub)
    cert.set_pubkey(pkey)
    cert.sign(pkey, "sha1")
    # Use context managers so the PEM files are flushed and closed
    # deterministically (original leaked the handles).
    with open(certfile, "wb") as cert_out:
        cert_out.write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert))
    with open(pkeyfile, "wb") as pkey_out:
        pkey_out.write(crypto.dump_privatekey(crypto.FILETYPE_PEM, pkey))
def run(host="localhost", port=5000, debug=True, verbose=logging.INFO, **kwargs):
    """Build the Flask app and serve it over HTTPS on host:port.

    NOTE(review): ``verbose`` and ``**kwargs`` are unused here -- confirm.
    """
    app = create_app(env=os.getenv("FLASK_ENV", "development"))
    if debug:
        app.config.update(DEBUG=True)
        logging.getLogger().setLevel(logging.DEBUG)
    # Make sure ~/.pegasus exists for generated TLS material.
    pegasusdir = os.path.expanduser("~/.pegasus")
    if not os.path.isdir(pegasusdir):
        os.makedirs(pegasusdir, mode=0o744)
    cert = app.config.get("CERTIFICATE", None)
    pkey = app.config.get("PRIVATE_KEY", None)
    if cert is None or pkey is None:
        # No TLS configured: fall back to a self-signed pair.
        log.warning("SSL is not configured: Using self-signed certificate")
        cert = os.path.expanduser("~/.pegasus/selfcert.pem")
        pkey = os.path.expanduser("~/.pegasus/selfkey.pem")
        generate_self_signed_certificate(cert, pkey)
    ssl_context = (cert, pkey)
    if os.getuid() != 0:
        log.warning("Service not running as root: Will not be able to switch users")
    app.run(
        host=host, port=port, threaded=True, ssl_context=ssl_context,
    )
    log.info("Exiting")
def _load_user_config(app):
    """Overlay ~/.pegasus/service.py settings onto the app, if present."""
    conf = os.path.expanduser("~/.pegasus/service.py")
    if os.path.isfile(conf):
        app.config.from_pyfile(conf)
def create_app(config=None, env="development"):
    """Create and configure the Flask application for *env*."""
    os.environ["FLASK_ENV"] = env
    app = flask.Flask(__name__)
    # NOTE(review): module path says "Pegasus..." but the package is
    # "pegaflow" -- confirm this resolves at runtime.
    app.config.from_object("Pegasus.service.defaults")
    _load_user_config(app)
    app.config.update(config or {})
    if "PEGASUS_ENV" in os.environ:
        app.config.from_envvar("PEGASUS_ENV")
    cache.init_app(app)
    configure_app(app)
    # Dispatch to configure_<service>() helpers defined at module level.
    for service in services:
        config_method = "configure_%s" % service
        if config_method in globals():
            globals()["configure_%s" % service](app)
    return app
def configure_app(app):
    """Apply Flask settings shared by every Pegasus service."""
    app.url_map.converters["boolean"] = BooleanConverter
    app.url_map.strict_slashes = False
    app.json_encoder = PegasusJsonEncoder
    register_lifecycle_handlers(app)
    register_jinja2_filters(app)
# The original text was corrupted here ("oard(app):" -- the line
# "def configure_dashb" was truncated); restored from the intact copy.
def configure_dashboard(app):
    """Mount the dashboard blueprint at the application root."""
    from pegaflow.service.dashboard import blueprint
    app.register_blueprint(blueprint)
def configure_monitoring(app):
    """Mount the per-user monitoring API under /api/v1/user/<username>."""
    from pegaflow.service.monitoring import monitoring
    app.register_blueprint(monitoring, url_prefix="/api/v1/user/<string:username>")
@click.command(name="pegasus-service")
@click.option(
    "--host",
    default="localhost",
    metavar="<hostname>",
    show_default=True,
    help="Hostname",
)
@click.option(
    "-p",
    "--port",
    type=int,
    default=5000,
    metavar="<port-number>",
    show_default=True,
    help="Port no. on which to listen for requests",
)
@click.option(
    "-d/-nd",
    "--debug/--no-debug",
    default=True,
    metavar="<debug-mode>",
    help="Start server in development mode",
)
# NOTE(review): count=True counts -v occurrences yet default=logging.DEBUG
# (10) -- confirm intended semantics.
@click.option(
    "-v",
    "--verbose",
    default=logging.DEBUG,
    count=True,
    metavar="<verbosity>",
    help="Logging verbosity",
)
def main(host: str, port: int, debug: bool, verbose: int):
    """CLI entry point: run the Pegasus Service server."""
    run(host=host, port=port, debug=debug, verbose=verbose)
if __name__ == "__main__":
    main()
| true | true |
f71dfbaeac1fbc882ae16f2bdf6a603281da54bd | 498 | py | Python | Florence/Base/FlorenceExceptions.py | jdlaubrie/florence | 830dca4a34be00d6e53cbec3007c10d438b27f57 | [
"MIT"
] | 65 | 2017-08-04T10:21:13.000Z | 2022-02-21T21:45:09.000Z | Florence/Base/FlorenceExceptions.py | jdlaubrie/florence | 830dca4a34be00d6e53cbec3007c10d438b27f57 | [
"MIT"
] | 6 | 2018-06-03T02:29:20.000Z | 2022-01-18T02:30:22.000Z | Florence/Base/FlorenceExceptions.py | jdlaubrie/florence | 830dca4a34be00d6e53cbec3007c10d438b27f57 | [
"MIT"
] | 10 | 2018-05-30T09:44:10.000Z | 2021-05-18T08:06:51.000Z |
class JacobianError(ArithmeticError):
    """Raised when the Jacobian of a mapping is (numerically) singular.

    :param value: optional custom message; a standard one is used if omitted.
    """

    def __init__(self, value=None):
        # Resolve the default message eagerly instead of mutating state
        # inside __str__ as the previous implementation did.
        self.value = 'Jacobian of mapping is close to zero' if value is None else value

    def __str__(self):
        return repr(self.value)


class IllConditionedError(ArithmeticError):
    """Raised when a matrix is too ill conditioned for a stable solve.

    :param value: optional custom message; a standard one is used if omitted.
    """

    def __init__(self, value=None):
        self.value = 'Matrix is ill conditioned' if value is None else value

    def __str__(self):
        return repr(self.value)
class JacobianError(ArithmeticError):
    """Signals that the Jacobian of a mapping degenerated to (near) zero."""

    def __init__(self, value=None):
        self.value = value

    def __str__(self):
        message = self.value
        if message is None:
            # Lazily install the default message, mirroring construction
            # with no argument.
            message = 'Jacobian of mapping is close to zero'
            self.value = message
        return repr(message)


class IllConditionedError(ArithmeticError):
    """Signals a matrix whose condition number is unusably large."""

    def __init__(self, value=None):
        self.value = value

    def __str__(self):
        message = self.value
        if message is None:
            message = 'Matrix is ill conditioned'
            self.value = message
        return repr(message)
f71dfbd89edc1b8d4a1b4250915832240e109b26 | 308 | py | Python | dian/account/urls.py | deggs7/dian-server | 7157831b50cb246a36fc7cd971e346e21747aafc | [
"MIT"
] | null | null | null | dian/account/urls.py | deggs7/dian-server | 7157831b50cb246a36fc7cd971e346e21747aafc | [
"MIT"
] | 3 | 2016-10-04T09:57:44.000Z | 2016-10-04T09:58:22.000Z | dian/account/urls.py | diankuai/dian-server | 7157831b50cb246a36fc7cd971e346e21747aafc | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, url
from account import views
# Django 1.x-style URLconf for the account app; view callables are resolved
# from the 'account.views' string prefix.
# NOTE(review): `views` is imported above but unused here, and `patterns()`
# was removed in later Django releases -- confirm the targeted version.
urlpatterns = patterns(
    'account.views',
    url(r'^my-account/$', 'get_my_account'),
    url(r'^password/$', 'change_passwd'),
    url(r'^create-seed-user/$', 'create_seed_user'),
)
| 18.117647 | 52 | 0.646104 |
from django.conf.urls import patterns, url
from account import views
# Account app URLconf (legacy Django patterns() API with string view names).
urlpatterns = patterns(
    'account.views',
    url(r'^my-account/$', 'get_my_account'),
    url(r'^password/$', 'change_passwd'),
    url(r'^create-seed-user/$', 'create_seed_user'),
)
| true | true |
f71dfc096f23a3013d8dd1da61ec8cc371c0943e | 509 | py | Python | wsgi.py | zackmdavis/Finetooth | 2bccd96454da3e968620a5f00704352b0bafc423 | [
"MIT"
] | null | null | null | wsgi.py | zackmdavis/Finetooth | 2bccd96454da3e968620a5f00704352b0bafc423 | [
"MIT"
] | 29 | 2015-02-21T17:58:31.000Z | 2021-06-10T17:26:52.000Z | wsgi.py | zackmdavis/Finetooth | 2bccd96454da3e968620a5f00704352b0bafc423 | [
"MIT"
] | null | null | null | """
WSGI config for Finetooth project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
from django.core.wsgi import get_wsgi_application
# In development (marked by a ".development" file in the working directory)
# expose the bare WSGI app; otherwise wrap it in dj-static's Cling so the
# production process can serve static assets itself.
if os.path.exists(".development"):
    application = get_wsgi_application()
else:
    from dj_static import Cling
    application = Cling(get_wsgi_application())
| 25.45 | 78 | 0.768173 |
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
from django.core.wsgi import get_wsgi_application
# ".development" marker selects the bare WSGI app; production wraps it in
# dj-static's Cling to serve static files.
if os.path.exists(".development"):
    application = get_wsgi_application()
else:
    from dj_static import Cling
    application = Cling(get_wsgi_application())
| true | true |
f71dfc4303c4b86898c4c47fba2c4bb6f72c3a23 | 98 | py | Python | som_keras/__init__.py | dmg99/som_keras | f6a1e60025976969100f041392b2c015dee49867 | [
"MIT"
] | 2 | 2020-11-04T16:33:40.000Z | 2022-01-21T16:37:25.000Z | som_keras/__init__.py | dmg99/som_keras | f6a1e60025976969100f041392b2c015dee49867 | [
"MIT"
] | null | null | null | som_keras/__init__.py | dmg99/som_keras | f6a1e60025976969100f041392b2c015dee49867 | [
"MIT"
] | null | null | null | from . import SOM
from . import classification
from . import visualizations
from . import metrics
| 19.6 | 28 | 0.795918 | from . import SOM
from . import classification
from . import visualizations
from . import metrics
| true | true |
f71dfde9813a6fb70d9eab236e2ed86bc5c3ea30 | 850 | py | Python | 04_lists/18_sorting.py | r7asmu7s/art_of_doing_python | 62a03bcca084046c319976fc308bf3de3a2d412d | [
"Unlicense"
] | null | null | null | 04_lists/18_sorting.py | r7asmu7s/art_of_doing_python | 62a03bcca084046c319976fc308bf3de3a2d412d | [
"Unlicense"
] | null | null | null | 04_lists/18_sorting.py | r7asmu7s/art_of_doing_python | 62a03bcca084046c319976fc308bf3de3a2d412d | [
"Unlicense"
] | null | null | null | sports = ['baseball', 'golf', 'soccer', 'football']
print(sports)
# sorted() function does only make changes temporarily
print(sorted(sports))
print(sports)
print(sorted(sports, reverse=True))
print(sports)
grades = [88, 74, 95, 100, 92]
print(grades)
print(sorted(grades))
print(sorted(grades, reverse=True))
print(grades)
# sorted_grades = sorted(grades)
# print(sorted_grades)
grade_length = len(grades)
print(grade_length)
print(type(grade_length))
removed_grade = grades.pop()
print('Removing grade: ' + str(removed_grade))
print(len(grades))
# .sort() and .reverse() method change the list permanently and are not temporary
print(sports)
sports.sort()
print(sports)
print(grades)
grades.sort(reverse=True)
print(grades)
print(sports)
sports.reverse()
print(sports)
# sorting is different based on lower or upper case of first letters. | 20.731707 | 81 | 0.749412 | sports = ['baseball', 'golf', 'soccer', 'football']
print(sports)
# sorted() is non-destructive: it returns a new list.
print(sorted(sports))
print(sports)
print(sorted(sports, reverse=True))
print(sports)
grades = [88, 74, 95, 100, 92]
print(grades)
print(sorted(grades))
print(sorted(grades, reverse=True))
print(grades)
grade_length = len(grades)
print(grade_length)
print(type(grade_length))
# pop() removes and returns the last element.
removed_grade = grades.pop()
print('Removing grade: ' + str(removed_grade))
print(len(grades))
# .sort()/.reverse() mutate the list in place.
print(sports)
sports.sort()
print(sports)
print(grades)
grades.sort(reverse=True)
print(grades)
print(sports)
sports.reverse()
print(sports)
| true | true |
f71dff6822d33fc781ff7e65fb3b058730088c2a | 1,444 | py | Python | modules/quiz/forms.py | Maurilearn/learnings | 0af03e5646c9053b3cfc27465983ce466ad2f3cb | [
"MIT"
] | null | null | null | modules/quiz/forms.py | Maurilearn/learnings | 0af03e5646c9053b3cfc27465983ce466ad2f3cb | [
"MIT"
] | null | null | null | modules/quiz/forms.py | Maurilearn/learnings | 0af03e5646c9053b3cfc27465983ce466ad2f3cb | [
"MIT"
] | null | null | null | from flask_wtf import FlaskForm
from wtforms import StringField
from wtforms import TextField
from wtforms.fields.html5 import EmailField
from wtforms import SubmitField
from wtforms import PasswordField
from wtforms.validators import DataRequired
from wtforms.validators import Length
from wtforms.validators import Email
from wtforms import TextAreaField
from flask_wtf.file import FileField
from flask_wtf.file import FileAllowed
from flask_wtf.file import FileRequired
from shopyoapi.init import photos
class AddTrackForm(FlaskForm):
    """Form for creating a quiz track: a required name plus an award image."""
    # Track display name; required.
    name = StringField('Quizz Track Name', [
        DataRequired()
        ],
        render_kw={
            'class':'form-control',
            'autocomplete':'off',
        }
    )
    # Image shown as the track's award; restricted to the `photos` upload set
    # (png/jpg/jpeg) and required.
    award_image = FileField('Award Image', validators=[
        FileAllowed(photos, 'Photo must be a png, jpg, or jpeg!'),
        FileRequired('File was empty!')
        ],
        render_kw={
            'class':'form-control',
            'autocomplete':'off',
        }
    )
    submit = SubmitField('Submit',
        render_kw={
            'class':'btn btn-info'
        }
    )
# def __init__(self, *args, **kwargs):
# """Create instance."""
# super(AddTrackForm, self).__init__(*args, **kwargs)
# def validate(self):
# """Validate the form."""
# initial_validation = super(AddTrackForm, self).validate()
# return not initial_validation | 29.469388 | 67 | 0.639197 | from flask_wtf import FlaskForm
from wtforms import StringField
from wtforms import TextField
from wtforms.fields.html5 import EmailField
from wtforms import SubmitField
from wtforms import PasswordField
from wtforms.validators import DataRequired
from wtforms.validators import Length
from wtforms.validators import Email
from wtforms import TextAreaField
from flask_wtf.file import FileField
from flask_wtf.file import FileAllowed
from flask_wtf.file import FileRequired
from shopyoapi.init import photos
class AddTrackForm(FlaskForm):
    """Quiz-track creation form: required name and a required award image."""
    name = StringField('Quizz Track Name', [
        DataRequired()
        ],
        render_kw={
            'class':'form-control',
            'autocomplete':'off',
        }
    )
    # Upload restricted to the `photos` set (png/jpg/jpeg).
    award_image = FileField('Award Image', validators=[
        FileAllowed(photos, 'Photo must be a png, jpg, or jpeg!'),
        FileRequired('File was empty!')
        ],
        render_kw={
            'class':'form-control',
            'autocomplete':'off',
        }
    )
    submit = SubmitField('Submit',
        render_kw={
            'class':'btn btn-info'
        }
    )
| true | true |
f71dffa6529171c4e2470ba732c219ee2f6670fd | 35,903 | py | Python | tests/checkpoint/test_simple_checkpoint.py | cbonilla20/great_expectations | e4f8c70ce1b137133e19eb73589fb1b1f422a380 | [
"Apache-2.0"
] | 6,451 | 2017-09-11T16:32:53.000Z | 2022-03-31T23:27:49.000Z | tests/checkpoint/test_simple_checkpoint.py | chsigjan/great_expectations | c5a587a3b1bc5d72d433950aaceb4d09f199690a | [
"Apache-2.0"
] | 3,892 | 2017-09-08T18:57:50.000Z | 2022-03-31T23:15:20.000Z | tests/checkpoint/test_simple_checkpoint.py | chsigjan/great_expectations | c5a587a3b1bc5d72d433950aaceb4d09f199690a | [
"Apache-2.0"
] | 1,023 | 2017-09-08T15:22:05.000Z | 2022-03-31T21:17:08.000Z | from unittest.mock import patch
import pytest
import great_expectations.exceptions as ge_exceptions
from great_expectations import DataContext
from great_expectations.checkpoint import SimpleCheckpointConfigurator
from great_expectations.checkpoint.checkpoint import (
Checkpoint,
CheckpointResult,
SimpleCheckpoint,
)
from great_expectations.core.batch import RuntimeBatchRequest
from great_expectations.data_context.types.base import CheckpointConfig
from great_expectations.util import filter_properties_dict
@pytest.fixture
def update_data_docs_action():
    """Expected UpdateDataDocsAction entry (no site restriction).

    Returned as a fresh dict per test (default function scope), so tests may
    mutate it to build their expected values.
    """
    return {
        "name": "update_data_docs",
        "action": {"class_name": "UpdateDataDocsAction", "site_names": []},
    }
@pytest.fixture
def store_eval_parameter_action():
    """Expected StoreEvaluationParametersAction entry."""
    return {
        "name": "store_evaluation_params",
        "action": {"class_name": "StoreEvaluationParametersAction"},
    }
@pytest.fixture
def store_validation_result_action():
    """Expected StoreValidationResultAction entry."""
    return {
        "name": "store_validation_result",
        "action": {"class_name": "StoreValidationResultAction"},
    }
@pytest.fixture
def webhook() -> str:
    """Dummy Slack webhook URL used across the Slack-related tests."""
    return "https://hooks.slack.com/foo/bar"
@pytest.fixture
def slack_notification_action(webhook):
    """Expected SlackNotificationAction entry built around `webhook`."""
    return {
        "name": "send_slack_notification",
        "action": {
            "class_name": "SlackNotificationAction",
            "slack_webhook": webhook,
            "notify_on": "all",
            "notify_with": None,
            "renderer": {
                "module_name": "great_expectations.render.renderer.slack_renderer",
                "class_name": "SlackRenderer",
            },
        },
    }
@pytest.fixture
def context_with_data_source_and_empty_suite(
    titanic_pandas_data_context_with_v013_datasource_with_checkpoints_v1_with_empty_store_stats_enabled,
):
    """Titanic DataContext with one Datasource and an empty suite named "one"."""
    context: DataContext = titanic_pandas_data_context_with_v013_datasource_with_checkpoints_v1_with_empty_store_stats_enabled
    # Sanity-check the upstream fixture before tests rely on it.
    datasources = context.list_datasources()
    assert datasources[0]["class_name"] == "Datasource"
    assert "my_special_data_connector" in datasources[0]["data_connectors"].keys()
    context.create_expectation_suite("one", overwrite_existing=True)
    assert context.list_expectation_suite_names() == ["one"]
    return context
@pytest.fixture
def context_with_data_source_and_empty_suite_with_templates(
    titanic_pandas_data_context_with_v013_datasource_stats_enabled_with_checkpoints_v1_with_templates,
):
    """Same as above but backed by the fixture that also ships checkpoint templates."""
    context: DataContext = titanic_pandas_data_context_with_v013_datasource_stats_enabled_with_checkpoints_v1_with_templates
    datasources = context.list_datasources()
    assert datasources[0]["class_name"] == "Datasource"
    assert "my_special_data_connector" in datasources[0]["data_connectors"].keys()
    context.create_expectation_suite("one", overwrite_existing=True)
    assert context.list_expectation_suite_names() == ["one"]
    return context
@pytest.fixture
def simple_checkpoint_defaults(context_with_data_source_and_empty_suite):
    """A SimpleCheckpoint named "foo" built with all defaults."""
    return SimpleCheckpoint(
        name="foo", data_context=context_with_data_source_and_empty_suite
    )
@pytest.fixture
def two_validations(one_validation):
    """Pair of validation dicts; `one_validation` is defined elsewhere in the suite."""
    return [
        one_validation,
        {
            "batch_request": {
                "datasource_name": "my_datasource",
                "data_connector_name": "my_special_data_connector",
                "data_asset_name": "users",
            },
            "expectation_suite_name": "two",
        },
    ]
def test_simple_checkpoint_default_properties_with_no_optional_arguments(
    empty_data_context,
    store_validation_result_action,
    store_eval_parameter_action,
    update_data_docs_action,
    titanic_pandas_data_context_with_v013_datasource_stats_enabled_with_checkpoints_v1_with_templates,
):
    """This demonstrates the simplest possible usage: a freshly built config
    and the equivalent checkpoint loaded from the store both carry the three
    default actions and empty parameters."""
    checkpoint_config = SimpleCheckpointConfigurator(
        "my_minimal_simple_checkpoint", empty_data_context
    ).build()
    assert isinstance(checkpoint_config, CheckpointConfig)
    assert checkpoint_config.name == "my_minimal_simple_checkpoint"
    assert checkpoint_config.action_list == [
        store_validation_result_action,
        store_eval_parameter_action,
        update_data_docs_action,
    ]
    assert checkpoint_config.config_version == 1.0
    assert checkpoint_config.class_name == "Checkpoint"
    assert checkpoint_config.evaluation_parameters == {}
    assert checkpoint_config.runtime_configuration == {}
    assert checkpoint_config.validations == []
    # The same defaults must hold for the checkpoint persisted in the store.
    checkpoint_from_store = titanic_pandas_data_context_with_v013_datasource_stats_enabled_with_checkpoints_v1_with_templates.get_checkpoint(
        "my_minimal_simple_checkpoint"
    )
    checkpoint_config = checkpoint_from_store.config
    assert checkpoint_config.name == "my_minimal_simple_checkpoint"
    assert checkpoint_config.action_list == [
        store_validation_result_action,
        store_eval_parameter_action,
        update_data_docs_action,
    ]
    assert checkpoint_config.config_version == 1.0
    assert checkpoint_config.class_name == "Checkpoint"
    assert checkpoint_config.evaluation_parameters == {}
    assert checkpoint_config.runtime_configuration == {}
    assert checkpoint_config.validations == []
def test_simple_checkpoint_raises_error_on_invalid_slack_webhook(
    empty_data_context,
):
    """A malformed slack_webhook value is rejected at build time."""
    with pytest.raises(ValueError):
        SimpleCheckpointConfigurator(
            "foo", empty_data_context, slack_webhook="bad"
        ).build()
def test_simple_checkpoint_has_slack_action_with_defaults_when_slack_webhook_is_present(
    empty_data_context,
    store_validation_result_action,
    store_eval_parameter_action,
    update_data_docs_action,
    slack_notification_action,
    webhook,
    titanic_pandas_data_context_with_v013_datasource_stats_enabled_with_checkpoints_v1_with_templates,
):
    """Providing slack_webhook appends a SlackNotificationAction with defaults."""
    checkpoint_config = SimpleCheckpointConfigurator(
        "foo", empty_data_context, slack_webhook=webhook
    ).build()
    expected = [
        store_validation_result_action,
        store_eval_parameter_action,
        update_data_docs_action,
        slack_notification_action,
    ]
    assert checkpoint_config.action_list == expected
    # Same action list expected for the stored checkpoint fixture.
    checkpoint_from_store = titanic_pandas_data_context_with_v013_datasource_stats_enabled_with_checkpoints_v1_with_templates.get_checkpoint(
        "my_simple_checkpoint_with_slack"
    )
    checkpoint_config = checkpoint_from_store.config
    assert checkpoint_config.name == "my_simple_checkpoint_with_slack"
    assert checkpoint_config.action_list == expected
def test_simple_checkpoint_raises_error_on_invalid_notify_on(
    empty_data_context,
):
    """notify_on must be one of the accepted string values."""
    for bad in [1, "bar", None, []]:
        with pytest.raises(ValueError):
            SimpleCheckpointConfigurator(
                "foo", empty_data_context, notify_on=bad
            ).build()
def test_simple_checkpoint_raises_error_on_missing_slack_webhook_when_notify_on_is_list(
    empty_data_context, slack_notification_action, webhook
):
    """notify_with without a slack_webhook is rejected."""
    with pytest.raises(ValueError):
        SimpleCheckpointConfigurator(
            "foo", empty_data_context, notify_with=["prod", "dev"]
        ).build()
def test_simple_checkpoint_raises_error_on_missing_slack_webhook_when_notify_on_is_not_default(
    empty_data_context, slack_notification_action, webhook
):
    """A non-default notify_on without a slack_webhook is rejected."""
    for condition in ["failure", "success"]:
        with pytest.raises(ValueError):
            SimpleCheckpointConfigurator(
                "foo", empty_data_context, notify_on=condition
            ).build()
def test_simple_checkpoint_raises_error_on_invalid_notify_with(
    empty_data_context,
):
    """notify_with entries must all be site-name strings."""
    for bad in [1, "bar", ["local_site", 3]]:
        with pytest.raises(ValueError):
            SimpleCheckpointConfigurator(
                "foo", empty_data_context, notify_with=bad
            ).build()
def test_simple_checkpoint_notify_with_all_has_data_docs_action_with_none_specified(
    empty_data_context,
    slack_notification_action,
    webhook,
    titanic_pandas_data_context_with_v013_datasource_stats_enabled_with_checkpoints_v1_with_templates,
):
    """
    The underlying SlackNotificationAction and SlackRenderer default to
    including links to all sites if the key notify_with is not present. We are
    intentionally hiding this from users of SimpleCheckpoint by having a default
    of "all" that sets the configuration appropriately.
    """
    checkpoint_config = SimpleCheckpointConfigurator(
        "foo", empty_data_context, slack_webhook=webhook, notify_with="all"
    ).build()
    # set the config to include all sites (None means "all" downstream)
    slack_notification_action["action"]["notify_with"] = None
    assert slack_notification_action in checkpoint_config.action_list
    checkpoint_from_store = titanic_pandas_data_context_with_v013_datasource_stats_enabled_with_checkpoints_v1_with_templates.get_checkpoint(
        "my_simple_checkpoint_with_slack_and_notify_with_all"
    )
    checkpoint_config = checkpoint_from_store.config
    assert slack_notification_action in checkpoint_config.action_list
def test_simple_checkpoint_has_slack_action_with_notify_adjustments_slack_webhook_is_present(
    empty_data_context,
    store_validation_result_action,
    store_eval_parameter_action,
    update_data_docs_action,
    slack_notification_action,
    webhook,
):
    """notify_on / notify_with overrides flow through to the Slack action."""
    checkpoint_config = SimpleCheckpointConfigurator(
        "foo",
        empty_data_context,
        slack_webhook=webhook,
        notify_on="failure",
        notify_with=["local_site", "s3_prod"],
    ).build()
    # Mutate the fixture dict to form the expected Slack action.
    slack_notification_action["action"]["notify_on"] = "failure"
    slack_notification_action["action"]["notify_with"] = ["local_site", "s3_prod"]
    expected = [
        store_validation_result_action,
        store_eval_parameter_action,
        update_data_docs_action,
        slack_notification_action,
    ]
    assert checkpoint_config.action_list == expected
def test_simple_checkpoint_has_no_slack_action_when_no_slack_webhook_is_present(
    empty_data_context,
    store_validation_result_action,
    store_eval_parameter_action,
    update_data_docs_action,
):
    """Without a slack_webhook only the three default actions are present."""
    checkpoint_config = SimpleCheckpointConfigurator("foo", empty_data_context).build()
    assert checkpoint_config.action_list == [
        store_validation_result_action,
        store_eval_parameter_action,
        update_data_docs_action,
    ]
def test_simple_checkpoint_has_update_data_docs_action_that_should_update_all_sites_when_site_names_is_all(
    empty_data_context,
    store_validation_result_action,
    store_eval_parameter_action,
    update_data_docs_action,
):
    """site_names="all" maps to an UpdateDataDocsAction with no site filter."""
    checkpoint_config = SimpleCheckpointConfigurator(
        "foo", empty_data_context, site_names="all"
    ).build()
    # This is confusing: the UpdateDataDocsAction default behavior is to update
    # all sites if site_names=None
    update_data_docs_action["action"]["site_names"] = []
    assert checkpoint_config.action_list == [
        store_validation_result_action,
        store_eval_parameter_action,
        update_data_docs_action,
    ]
def test_simple_checkpoint_raises_errors_on_invalid_site_name_types(
    empty_data_context,
):
    """site_names must be a list of strings (or the accepted sentinels)."""
    for junk_input in [[1, "local"], 1, ["local", None]]:
        with pytest.raises(TypeError):
            SimpleCheckpointConfigurator(
                "foo", empty_data_context, site_names=junk_input
            ).build()
def test_simple_checkpoint_raises_errors_on_site_name_that_does_not_exist_on_data_context(
    empty_data_context,
):
    """A site name unknown to the data context is rejected."""
    # assert the fixture is adequate
    assert "prod" not in empty_data_context.get_site_names()
    with pytest.raises(TypeError):
        SimpleCheckpointConfigurator(
            "foo", empty_data_context, site_names=["prod"]
        ).build()
def test_simple_checkpoint_has_update_data_docs_action_that_should_update_selected_sites_when_sites_are_selected(
    empty_data_context,
    store_validation_result_action,
    store_eval_parameter_action,
    update_data_docs_action,
    titanic_pandas_data_context_with_v013_datasource_stats_enabled_with_checkpoints_v1_with_templates,
):
    """Selecting site names restricts the UpdateDataDocsAction to those sites."""
    # assert the fixture is adequate
    assert "local_site" in empty_data_context.get_site_names()
    checkpoint_config = SimpleCheckpointConfigurator(
        "foo", empty_data_context, site_names=["local_site"]
    ).build()
    # This is confusing: the UpdateDataDocsAction default behavior is to update
    # all sites if site_names=None
    update_data_docs_action["action"]["site_names"] = ["local_site"]
    assert checkpoint_config.action_list == [
        store_validation_result_action,
        store_eval_parameter_action,
        update_data_docs_action,
    ]
    # assert the fixture is adequate
    assert (
        "local_site"
        in titanic_pandas_data_context_with_v013_datasource_stats_enabled_with_checkpoints_v1_with_templates.get_site_names()
    )
    checkpoint_from_store = titanic_pandas_data_context_with_v013_datasource_stats_enabled_with_checkpoints_v1_with_templates.get_checkpoint(
        "my_simple_checkpoint_with_site_names"
    )
    checkpoint_config = checkpoint_from_store.config
    assert checkpoint_config.action_list == [
        store_validation_result_action,
        store_eval_parameter_action,
        update_data_docs_action,
    ]
def test_simple_checkpoint_has_no_update_data_docs_action_when_site_names_is_none(
    empty_data_context,
    store_validation_result_action,
    store_eval_parameter_action,
    update_data_docs_action,
):
    """site_names=None drops the UpdateDataDocsAction entirely."""
    # assert the fixture is adequate
    assert "local_site" in empty_data_context.get_site_names()
    checkpoint_config = SimpleCheckpointConfigurator(
        "foo", empty_data_context, site_names=None
    ).build()
    assert checkpoint_config.action_list == [
        store_validation_result_action,
        store_eval_parameter_action,
    ]
def test_simple_checkpoint_persisted_to_store(
context_with_data_source_and_empty_suite, webhook, one_validation
):
assert context_with_data_source_and_empty_suite.list_checkpoints() == []
initial_checkpoint_config = SimpleCheckpointConfigurator(
"foo",
context_with_data_source_and_empty_suite,
site_names=None,
).build()
# TODO this add_checkpoint will be user facing and it could be more
# ergonomic by accepting a Checkpoint maybe .add_checkpoint() should take a
# Checkpoint and there should be a .create_checkpoint() that accepts all
# the current parameters
context_with_data_source_and_empty_suite.add_checkpoint(
**initial_checkpoint_config.to_json_dict()
)
assert context_with_data_source_and_empty_suite.list_checkpoints() == ["foo"]
checkpoint = context_with_data_source_and_empty_suite.get_checkpoint("foo")
assert isinstance(checkpoint, Checkpoint)
assert isinstance(checkpoint.config, CheckpointConfig)
assert checkpoint.config.to_json_dict() == {
"action_list": [
{
"action": {"class_name": "StoreValidationResultAction"},
"name": "store_validation_result",
},
{
"action": {"class_name": "StoreEvaluationParametersAction"},
"name": "store_evaluation_params",
},
],
"batch_request": None,
"class_name": "Checkpoint",
"config_version": 1.0,
"evaluation_parameters": {},
"expectation_suite_ge_cloud_id": None,
"expectation_suite_name": None,
"ge_cloud_id": None,
"module_name": "great_expectations.checkpoint",
"name": "foo",
"profilers": [],
"run_name_template": None,
"runtime_configuration": {},
"template_name": None,
"validations": [],
}
results = checkpoint.run(validations=[one_validation])
assert results.success
def test_simple_checkpoint_defaults_run_and_no_run_params_raises_checkpoint_error(
context_with_data_source_and_empty_suite, simple_checkpoint_defaults
):
with pytest.raises(ge_exceptions.CheckpointError) as cpe:
# noinspection PyUnusedLocal
result: CheckpointResult = simple_checkpoint_defaults.run()
assert 'Checkpoint "foo" does not contain any validations.' in str(cpe.value)
def test_simple_checkpoint_defaults_run_and_basic_run_params_without_persisting_checkpoint(
    context_with_data_source_and_empty_suite, simple_checkpoint_defaults, one_validation
):
    """An un-persisted SimpleCheckpoint runs when validations are supplied at call time."""
    context = context_with_data_source_and_empty_suite
    # the checkpoint was never added to the context's store
    assert context.list_checkpoints() == []

    outcome = simple_checkpoint_defaults.run(run_name="bar", validations=[one_validation])

    assert isinstance(outcome, CheckpointResult)
    assert outcome.run_id.run_name == "bar"
    assert outcome.list_expectation_suite_names() == ["one"]
    assert len(outcome.list_validation_results()) == 1
    assert outcome.success
def test_simple_checkpoint_runtime_kwargs_processing_site_names_only_without_persisting_checkpoint(
    context_with_data_source_and_empty_suite, simple_checkpoint_defaults, one_validation
):
    """Runtime ``site_names`` injects a scoped UpdateDataDocsAction.

    Runs the default checkpoint with ``site_names=["local_site"]`` and checks
    that the substituted config matches ``expected_runtime_kwargs`` after
    falsy-value filtering.
    """
    # verify Checkpoint is not persisted in the data context
    assert context_with_data_source_and_empty_suite.list_checkpoints() == []
    expected_runtime_kwargs: dict = {
        "name": "foo",
        "config_version": 1.0,
        "module_name": "great_expectations.checkpoint",
        "class_name": "Checkpoint",
        "template_name": None,
        "run_name_template": None,
        "expectation_suite_name": None,
        "batch_request": None,
        "action_list": [
            {
                "name": "store_validation_result",
                "action": {"class_name": "StoreValidationResultAction"},
            },
            {
                "name": "store_evaluation_params",
                "action": {"class_name": "StoreEvaluationParametersAction"},
            },
            {
                "name": "update_data_docs",
                "action": {
                    "class_name": "UpdateDataDocsAction",
                    "site_names": ["local_site"],
                },
            },
        ],
        "evaluation_parameters": None,
        "runtime_configuration": {},
        "validations": [
            {
                "batch_request": {
                    "datasource_name": "my_datasource",
                    "data_connector_name": "my_special_data_connector",
                    "data_asset_name": "users",
                },
                "expectation_suite_name": "one",
            },
        ],
        "profilers": None,
    }
    result: CheckpointResult = simple_checkpoint_defaults.run(
        run_name="bar",
        validations=[one_validation],
        site_names=["local_site"],
    )
    assert isinstance(result, CheckpointResult)
    assert result.run_id.run_name == "bar"
    assert result.list_expectation_suite_names() == ["one"]
    assert len(result.list_validation_results()) == 1
    assert result.success
    # substituting the same runtime kwargs must reproduce the expected config
    substituted_runtime_config: CheckpointConfig = (
        simple_checkpoint_defaults.get_substituted_config(
            runtime_kwargs=expected_runtime_kwargs
        )
    )
    assert filter_properties_dict(
        properties=substituted_runtime_config.to_json_dict(), clean_falsy=True
    ) == filter_properties_dict(properties=expected_runtime_kwargs, clean_falsy=True)
def test_simple_checkpoint_runtime_kwargs_processing_slack_webhook_only_without_persisting_checkpoint(
    context_with_data_source_and_empty_suite, simple_checkpoint_defaults, one_validation
):
    """Runtime ``slack_webhook`` appends a default-configured Slack action.

    The expected action list keeps the three defaults (update_data_docs with
    empty site_names) and adds a SlackNotificationAction with
    ``notify_on="all"`` / ``notify_with=None``.
    """
    # verify Checkpoint is not persisted in the data context
    assert context_with_data_source_and_empty_suite.list_checkpoints() == []
    expected_runtime_kwargs: dict = {
        "name": "foo",
        "config_version": 1.0,
        "module_name": "great_expectations.checkpoint",
        "class_name": "Checkpoint",
        "template_name": None,
        "run_name_template": None,
        "expectation_suite_name": None,
        "batch_request": None,
        "action_list": [
            {
                "name": "store_validation_result",
                "action": {"class_name": "StoreValidationResultAction"},
            },
            {
                "name": "store_evaluation_params",
                "action": {"class_name": "StoreEvaluationParametersAction"},
            },
            {
                "name": "update_data_docs",
                "action": {"class_name": "UpdateDataDocsAction", "site_names": []},
            },
            {
                "name": "send_slack_notification",
                "action": {
                    "class_name": "SlackNotificationAction",
                    "slack_webhook": "https://hooks.slack.com/my_slack_webhook.geocities",
                    "notify_on": "all",
                    "notify_with": None,
                    "renderer": {
                        "module_name": "great_expectations.render.renderer.slack_renderer",
                        "class_name": "SlackRenderer",
                    },
                },
            },
        ],
        "evaluation_parameters": None,
        "runtime_configuration": {},
        "validations": [
            {
                "batch_request": {
                    "datasource_name": "my_datasource",
                    "data_connector_name": "my_special_data_connector",
                    "data_asset_name": "users",
                },
                "expectation_suite_name": "one",
            }
        ],
        "profilers": None,
    }
    result: CheckpointResult = simple_checkpoint_defaults.run(
        run_name="bar",
        validations=[one_validation],
        slack_webhook="https://hooks.slack.com/my_slack_webhook.geocities",
    )
    assert isinstance(result, CheckpointResult)
    assert result.run_id.run_name == "bar"
    assert result.list_expectation_suite_names() == ["one"]
    assert len(result.list_validation_results()) == 1
    assert result.success
    # substituting the same runtime kwargs must reproduce the expected config
    substituted_runtime_config: CheckpointConfig = (
        simple_checkpoint_defaults.get_substituted_config(
            runtime_kwargs=expected_runtime_kwargs
        )
    )
    assert filter_properties_dict(
        properties=substituted_runtime_config.to_json_dict(), clean_falsy=True
    ) == filter_properties_dict(properties=expected_runtime_kwargs, clean_falsy=True)
def test_simple_checkpoint_runtime_kwargs_processing_all_special_kwargs_without_persisting_checkpoint(
    context_with_data_source_and_empty_suite, simple_checkpoint_defaults, one_validation
):
    """All SimpleCheckpoint-specific runtime kwargs are processed together.

    ``site_names``, ``notify_with``, ``notify_on`` and ``slack_webhook`` are
    all passed to ``run()``; the expected config scopes update_data_docs to
    "local_site" and configures the Slack action with failure-only
    notifications.
    """
    # verify Checkpoint is not persisted in the data context
    assert context_with_data_source_and_empty_suite.list_checkpoints() == []
    expected_runtime_kwargs: dict = {
        "name": "foo",
        "config_version": 1.0,
        "module_name": "great_expectations.checkpoint",
        "class_name": "Checkpoint",
        "template_name": None,
        "run_name_template": None,
        "expectation_suite_name": None,
        "batch_request": None,
        "action_list": [
            {
                "name": "store_validation_result",
                "action": {"class_name": "StoreValidationResultAction"},
            },
            {
                "name": "store_evaluation_params",
                "action": {"class_name": "StoreEvaluationParametersAction"},
            },
            {
                "name": "update_data_docs",
                "action": {
                    "class_name": "UpdateDataDocsAction",
                    "site_names": ["local_site"],
                },
            },
            {
                "name": "send_slack_notification",
                "action": {
                    "class_name": "SlackNotificationAction",
                    "slack_webhook": "https://hooks.slack.com/my_slack_webhook.geocities",
                    "notify_on": "failure",
                    "notify_with": ["local_site"],
                    "renderer": {
                        "module_name": "great_expectations.render.renderer.slack_renderer",
                        "class_name": "SlackRenderer",
                    },
                },
            },
        ],
        "evaluation_parameters": None,
        "runtime_configuration": {},
        "validations": [
            {
                "batch_request": {
                    "datasource_name": "my_datasource",
                    "data_connector_name": "my_special_data_connector",
                    "data_asset_name": "users",
                },
                "expectation_suite_name": "one",
            }
        ],
        "profilers": None,
    }
    result: CheckpointResult = simple_checkpoint_defaults.run(
        run_name="bar",
        validations=[one_validation],
        site_names=["local_site"],
        notify_with=["local_site"],
        notify_on="failure",
        slack_webhook="https://hooks.slack.com/my_slack_webhook.geocities",
    )
    assert isinstance(result, CheckpointResult)
    assert result.run_id.run_name == "bar"
    assert result.list_expectation_suite_names() == ["one"]
    assert len(result.list_validation_results()) == 1
    assert result.success
    # substituting the same runtime kwargs must reproduce the expected config
    substituted_runtime_config: CheckpointConfig = (
        simple_checkpoint_defaults.get_substituted_config(
            runtime_kwargs=expected_runtime_kwargs
        )
    )
    assert filter_properties_dict(
        properties=substituted_runtime_config.to_json_dict(), clean_falsy=True
    ) == filter_properties_dict(properties=expected_runtime_kwargs, clean_falsy=True)
def test_simple_checkpoint_runtime_kwargs_processing_all_kwargs(
    titanic_pandas_data_context_with_v013_datasource_stats_enabled_with_checkpoints_v1_with_templates,
    simple_checkpoint_defaults,
    one_validation,
    monkeypatch,
):
    """Every runtime kwarg (template, suite, batch_request, evaluation
    parameters, runtime configuration, and the Slack/site kwargs) is
    processed in a single ``run()`` call.

    Environment variables are monkeypatched so the template's
    ``$GE_ENVIRONMENT`` / ``$MY_PARAM`` substitutions resolve to the values
    in ``expected_runtime_kwargs["evaluation_parameters"]``.
    """
    monkeypatch.setenv("GE_ENVIRONMENT", "my_ge_environment")
    monkeypatch.setenv("MY_PARAM", "1")
    expected_runtime_kwargs: dict = {
        "name": "foo",
        "config_version": 1.0,
        "module_name": "great_expectations.checkpoint",
        "class_name": "Checkpoint",
        "template_name": "my_simple_template_checkpoint",
        "run_name_template": "my_runtime_run_name_template",
        "expectation_suite_name": "my_runtime_suite",
        "batch_request": {
            "data_connector_query": {
                "index": -1,
            },
        },
        "action_list": [
            {
                "name": "store_validation_result",
                "action": {"class_name": "StoreValidationResultAction"},
            },
            {
                "name": "store_evaluation_params",
                "action": {"class_name": "StoreEvaluationParametersAction"},
            },
            {
                "name": "update_data_docs",
                "action": {
                    "class_name": "UpdateDataDocsAction",
                    "site_names": ["local_site"],
                },
            },
            {
                "name": "send_slack_notification",
                "action": {
                    "class_name": "SlackNotificationAction",
                    "slack_webhook": "https://hooks.slack.com/my_slack_webhook.geocities",
                    "notify_on": "failure",
                    "notify_with": ["local_site"],
                    "renderer": {
                        "module_name": "great_expectations.render.renderer.slack_renderer",
                        "class_name": "SlackRenderer",
                    },
                },
            },
        ],
        "evaluation_parameters": {
            "aux_param_0": "1",
            "aux_param_1": "1 + 1",
            "environment": "my_ge_environment",
            "my_runtime_key": "my_runtime_value",
            "tolerance": 0.01,
        },
        "runtime_configuration": {
            "my_runtime_key": "my_runtime_value",
            "result_format": {
                "result_format": "BASIC",
                "partial_unexpected_count": 20,
            },
        },
        "validations": [
            {
                "batch_request": {
                    "datasource_name": "my_datasource",
                    "data_connector_name": "my_special_data_connector",
                    "data_asset_name": "users",
                },
                "expectation_suite_name": "one",
            }
        ],
        "profilers": None,
    }
    result: CheckpointResult = simple_checkpoint_defaults.run(
        run_name="bar",
        template_name="my_simple_template_checkpoint",
        run_name_template="my_runtime_run_name_template",
        expectation_suite_name="my_runtime_suite",
        batch_request={
            "data_connector_query": {
                "index": -1,
            },
        },
        validations=[one_validation],
        evaluation_parameters={"my_runtime_key": "my_runtime_value"},
        runtime_configuration={"my_runtime_key": "my_runtime_value"},
        site_names=["local_site"],
        notify_with=["local_site"],
        notify_on="failure",
        slack_webhook="https://hooks.slack.com/my_slack_webhook.geocities",
    )
    assert isinstance(result, CheckpointResult)
    assert result.run_id.run_name == "bar"
    assert result.list_expectation_suite_names() == ["one"]
    assert len(result.list_validation_results()) == 1
    assert result.success
    substituted_runtime_config: CheckpointConfig = (
        simple_checkpoint_defaults.get_substituted_config(
            runtime_kwargs=expected_runtime_kwargs
        )
    )
    # template_name is consumed during substitution, so it is removed from
    # the expected dict before comparing the filtered configs
    expected_runtime_kwargs.pop("template_name")
    assert filter_properties_dict(
        properties=substituted_runtime_config.to_json_dict(), clean_falsy=True
    ) == filter_properties_dict(properties=expected_runtime_kwargs, clean_falsy=True)
def test_simple_checkpoint_defaults_run_and_basic_run_params_with_persisted_checkpoint_loaded_from_store(
    context_with_data_source_and_empty_suite,
    simple_checkpoint_defaults,
    webhook,
    one_validation,
):
    """Persist a SimpleCheckpoint config, reload it from the store, and run it."""
    context: DataContext = context_with_data_source_and_empty_suite
    built_config = SimpleCheckpointConfigurator(
        "foo", context_with_data_source_and_empty_suite, slack_webhook=webhook
    ).build()
    context.add_checkpoint(**built_config.to_json_dict())
    name = built_config.name
    assert context.list_checkpoints() == [name]
    # drop the local config so the retrieval below can only come from the store
    del built_config

    reloaded = context.get_checkpoint(name)
    assert isinstance(reloaded, Checkpoint)

    outcome = reloaded.run(run_name="bar", validations=[one_validation])
    assert isinstance(outcome, CheckpointResult)
    assert outcome.run_id.run_name == "bar"
    assert outcome.list_expectation_suite_names() == ["one"]
    assert len(outcome.list_validation_results()) == 1
    assert outcome.success
@pytest.fixture
def one_validation():
    """A single validation entry targeting suite "one" on the special connector."""
    batch_request = {
        "datasource_name": "my_datasource",
        "data_connector_name": "my_special_data_connector",
        "data_asset_name": "users",
    }
    return {"batch_request": batch_request, "expectation_suite_name": "one"}
def test_simple_checkpoint_defaults_run_with_top_level_batch_request_and_suite(
    context_with_data_source_and_empty_suite, simple_checkpoint_defaults
):
    """A top-level batch_request and suite combine with a per-validation suite entry."""
    top_level_batch_request = {
        "datasource_name": "my_datasource",
        "data_connector_name": "my_special_data_connector",
        "data_asset_name": "users",
    }
    outcome = simple_checkpoint_defaults.run(
        run_name="bar",
        batch_request=top_level_batch_request,
        expectation_suite_name="one",
        validations=[{"expectation_suite_name": "one"}],
    )
    assert isinstance(outcome, CheckpointResult)
    assert outcome.success
    assert len(outcome.run_results) == 1
def test_simple_checkpoint_error_with_invalid_top_level_batch_request(
    simple_checkpoint_defaults,
):
    """A top-level batch_request missing ``data_asset_name`` is rejected.

    The TypeError is raised by _validate_init_parameters() in
    BatchRequest.__init__(). The previous version bound the (never produced)
    return value to an unused local ``result``; that dead store is removed.
    """
    with pytest.raises(TypeError):
        simple_checkpoint_defaults.run(
            run_name="bar",
            batch_request={
                # "data_asset_name" intentionally omitted to trigger the error
                "datasource_name": "my_datasource",
                "data_connector_name": "my_special_data_connector",
            },
            expectation_suite_name="one",
            validations=[{"expectation_suite_name": "one"}],
        )
def test_simple_checkpoint_defaults_run_multiple_validations_without_persistence(
    context_with_data_source_and_empty_suite,
    simple_checkpoint_defaults,
    two_validations,
):
    """Two validations against two different suites run in one checkpoint call."""
    context = context_with_data_source_and_empty_suite
    context.create_expectation_suite("two")
    assert len(context.list_expectation_suites()) == 2

    outcome = simple_checkpoint_defaults.run(run_name="bar", validations=two_validations)

    assert isinstance(outcome, CheckpointResult)
    assert outcome.run_id.run_name == "bar"
    assert sorted(outcome.list_expectation_suite_names()) == ["one", "two"]
    assert len(outcome.list_validation_results()) == 2
    assert outcome.success
def test_simple_checkpoint_defaults_run_multiple_validations_with_persisted_checkpoint_loaded_from_store(
    context_with_data_source_and_empty_suite,
    simple_checkpoint_defaults,
    two_validations,
):
    """A persisted checkpoint, reloaded from the store, runs two validations."""
    context: DataContext = context_with_data_source_and_empty_suite
    context.create_expectation_suite("two")
    assert len(context.list_expectation_suites()) == 2

    # persist the default checkpoint's config to the store
    context.add_checkpoint(**simple_checkpoint_defaults.config.to_json_dict())
    name = simple_checkpoint_defaults.name
    assert context.list_checkpoints() == [name]

    # drop the in-memory object and reload purely from the store
    del simple_checkpoint_defaults
    reloaded = context.get_checkpoint(name)

    outcome = reloaded.run(run_name="bar", validations=two_validations)
    assert isinstance(outcome, CheckpointResult)
    assert outcome.run_id.run_name == "bar"
    assert sorted(outcome.list_expectation_suite_names()) == ["one", "two"]
    assert len(outcome.list_validation_results()) == 2
    assert outcome.success
def test_simple_checkpoint_with_runtime_batch_request_and_runtime_data_connector_creates_config(
    context_with_data_source_and_empty_suite,
    store_validation_result_action,
    store_eval_parameter_action,
    update_data_docs_action,
):
    """A RuntimeBatchRequest passed at construction is serialized into the
    SimpleCheckpoint's config (the batch is never actually executed here).
    """
    context: DataContext = context_with_data_source_and_empty_suite
    runtime_batch_request = RuntimeBatchRequest(
        datasource_name="my_datasource",
        data_connector_name="my_runtime_data_connector",
        data_asset_name="users",
        batch_identifiers={"pipeline_stage_name": "first"},  # defined in fixture
        runtime_parameters={
            "query": "SELECT * FROM taxi_data"
        },  # not actually run, but used to test configuration
    )
    checkpoint = SimpleCheckpoint(
        name="my_checkpoint", data_context=context, batch_request=runtime_batch_request
    )
    checkpoint_config = checkpoint.config
    assert isinstance(checkpoint_config, CheckpointConfig)
    assert checkpoint_config.name == "my_checkpoint"
    assert checkpoint_config.action_list == [
        store_validation_result_action,
        store_eval_parameter_action,
        update_data_docs_action,
    ]
    # the RuntimeBatchRequest round-trips into a plain dict on the config
    assert checkpoint_config.batch_request == {
        "batch_identifiers": {"pipeline_stage_name": "first"},
        "data_asset_name": "users",
        "data_connector_name": "my_runtime_data_connector",
        "datasource_name": "my_datasource",
        "runtime_parameters": {"query": "SELECT * FROM taxi_data"},
    }
    assert checkpoint_config.config_version == 1.0
    assert checkpoint_config.class_name == "Checkpoint"
    assert checkpoint_config.evaluation_parameters == {}
    assert checkpoint_config.runtime_configuration == {}
    assert checkpoint_config.validations == []
from unittest.mock import patch
import pytest
import great_expectations.exceptions as ge_exceptions
from great_expectations import DataContext
from great_expectations.checkpoint import SimpleCheckpointConfigurator
from great_expectations.checkpoint.checkpoint import (
Checkpoint,
CheckpointResult,
SimpleCheckpoint,
)
from great_expectations.core.batch import RuntimeBatchRequest
from great_expectations.data_context.types.base import CheckpointConfig
from great_expectations.util import filter_properties_dict
@pytest.fixture
def update_data_docs_action():
    """Action-list entry for an UpdateDataDocsAction with no site restriction."""
    action = {"class_name": "UpdateDataDocsAction", "site_names": []}
    return {"name": "update_data_docs", "action": action}
@pytest.fixture
def store_eval_parameter_action():
    """Action-list entry for a StoreEvaluationParametersAction."""
    action = {"class_name": "StoreEvaluationParametersAction"}
    return {"name": "store_evaluation_params", "action": action}
@pytest.fixture
def store_validation_result_action():
    """Action-list entry for a StoreValidationResultAction."""
    action = {"class_name": "StoreValidationResultAction"}
    return {"name": "store_validation_result", "action": action}
@pytest.fixture
def webhook() -> str:
    """A syntactically valid Slack webhook URL."""
    url = "https://hooks.slack.com/foo/bar"
    return url
@pytest.fixture
def slack_notification_action(webhook):
    """Default SlackNotificationAction entry wired to the webhook fixture."""
    renderer = {
        "module_name": "great_expectations.render.renderer.slack_renderer",
        "class_name": "SlackRenderer",
    }
    action = {
        "class_name": "SlackNotificationAction",
        "slack_webhook": webhook,
        "notify_on": "all",
        "notify_with": None,
        "renderer": renderer,
    }
    return {"name": "send_slack_notification", "action": action}
@pytest.fixture
def context_with_data_source_and_empty_suite(
    titanic_pandas_data_context_with_v013_datasource_with_checkpoints_v1_with_empty_store_stats_enabled,
):
    """Titanic context with one Datasource and a single empty suite named "one"."""
    context: DataContext = titanic_pandas_data_context_with_v013_datasource_with_checkpoints_v1_with_empty_store_stats_enabled
    first_datasource = context.list_datasources()[0]
    assert first_datasource["class_name"] == "Datasource"
    assert "my_special_data_connector" in first_datasource["data_connectors"]
    context.create_expectation_suite("one", overwrite_existing=True)
    assert context.list_expectation_suite_names() == ["one"]
    return context
@pytest.fixture
def context_with_data_source_and_empty_suite_with_templates(
    titanic_pandas_data_context_with_v013_datasource_stats_enabled_with_checkpoints_v1_with_templates,
):
    """Template-enabled Titanic context with an empty suite named "one"."""
    context: DataContext = titanic_pandas_data_context_with_v013_datasource_stats_enabled_with_checkpoints_v1_with_templates
    first_datasource = context.list_datasources()[0]
    assert first_datasource["class_name"] == "Datasource"
    assert "my_special_data_connector" in first_datasource["data_connectors"]
    context.create_expectation_suite("one", overwrite_existing=True)
    assert context.list_expectation_suite_names() == ["one"]
    return context
@pytest.fixture
def simple_checkpoint_defaults(context_with_data_source_and_empty_suite):
    """A SimpleCheckpoint named "foo" built entirely from defaults."""
    return SimpleCheckpoint(
        name="foo",
        data_context=context_with_data_source_and_empty_suite,
    )
@pytest.fixture
def two_validations(one_validation):
    """The one_validation entry plus a second validation against suite "two"."""
    second_validation = {
        "batch_request": {
            "datasource_name": "my_datasource",
            "data_connector_name": "my_special_data_connector",
            "data_asset_name": "users",
        },
        "expectation_suite_name": "two",
    }
    return [one_validation, second_validation]
def test_simple_checkpoint_default_properties_with_no_optional_arguments(
    empty_data_context,
    store_validation_result_action,
    store_eval_parameter_action,
    update_data_docs_action,
    titanic_pandas_data_context_with_v013_datasource_stats_enabled_with_checkpoints_v1_with_templates,
):
    """A bare SimpleCheckpointConfigurator builds the three default actions
    and empty/default properties; an equivalent pre-stored checkpoint has
    the same defaults.
    """
    checkpoint_config = SimpleCheckpointConfigurator(
        "my_minimal_simple_checkpoint", empty_data_context
    ).build()
    assert isinstance(checkpoint_config, CheckpointConfig)
    assert checkpoint_config.name == "my_minimal_simple_checkpoint"
    assert checkpoint_config.action_list == [
        store_validation_result_action,
        store_eval_parameter_action,
        update_data_docs_action,
    ]
    assert checkpoint_config.config_version == 1.0
    assert checkpoint_config.class_name == "Checkpoint"
    assert checkpoint_config.evaluation_parameters == {}
    assert checkpoint_config.runtime_configuration == {}
    assert checkpoint_config.validations == []
    # the equivalent checkpoint persisted in the fixture context must match
    checkpoint_from_store = titanic_pandas_data_context_with_v013_datasource_stats_enabled_with_checkpoints_v1_with_templates.get_checkpoint(
        "my_minimal_simple_checkpoint"
    )
    checkpoint_config = checkpoint_from_store.config
    assert checkpoint_config.name == "my_minimal_simple_checkpoint"
    assert checkpoint_config.action_list == [
        store_validation_result_action,
        store_eval_parameter_action,
        update_data_docs_action,
    ]
    assert checkpoint_config.config_version == 1.0
    assert checkpoint_config.class_name == "Checkpoint"
    assert checkpoint_config.evaluation_parameters == {}
    assert checkpoint_config.runtime_configuration == {}
    assert checkpoint_config.validations == []
def test_simple_checkpoint_raises_error_on_invalid_slack_webhook(
    empty_data_context,
):
    """A malformed slack_webhook value is rejected when the config is built."""
    with pytest.raises(ValueError):
        SimpleCheckpointConfigurator(
            "foo",
            empty_data_context,
            slack_webhook="bad",
        ).build()
def test_simple_checkpoint_has_slack_action_with_defaults_when_slack_webhook_is_present(
    empty_data_context,
    store_validation_result_action,
    store_eval_parameter_action,
    update_data_docs_action,
    slack_notification_action,
    webhook,
    titanic_pandas_data_context_with_v013_datasource_stats_enabled_with_checkpoints_v1_with_templates,
):
    """Supplying a slack webhook appends the default Slack action to the list."""
    expected_actions = [
        store_validation_result_action,
        store_eval_parameter_action,
        update_data_docs_action,
        slack_notification_action,
    ]

    built = SimpleCheckpointConfigurator(
        "foo", empty_data_context, slack_webhook=webhook
    ).build()
    assert built.action_list == expected_actions

    # the equivalent checkpoint stored in the fixture context has the same actions
    stored = titanic_pandas_data_context_with_v013_datasource_stats_enabled_with_checkpoints_v1_with_templates.get_checkpoint(
        "my_simple_checkpoint_with_slack"
    )
    assert stored.config.name == "my_simple_checkpoint_with_slack"
    assert stored.config.action_list == expected_actions
def test_simple_checkpoint_raises_error_on_invalid_notify_on(
    empty_data_context,
):
    """Every invalid notify_on value is rejected at build time."""
    invalid_values = (1, "bar", None, [])
    for invalid in invalid_values:
        with pytest.raises(ValueError):
            SimpleCheckpointConfigurator(
                "foo", empty_data_context, notify_on=invalid
            ).build()
def test_simple_checkpoint_raises_error_on_missing_slack_webhook_when_notify_on_is_list(
    empty_data_context, slack_notification_action, webhook
):
    """Passing notify_with without a slack_webhook is rejected at build time."""
    with pytest.raises(ValueError):
        SimpleCheckpointConfigurator(
            "foo",
            empty_data_context,
            notify_with=["prod", "dev"],
        ).build()
def test_simple_checkpoint_raises_error_on_missing_slack_webhook_when_notify_on_is_not_default(
    empty_data_context, slack_notification_action, webhook
):
    """Non-default notify_on without a slack_webhook is rejected for both values."""
    for notify_condition in ("failure", "success"):
        with pytest.raises(ValueError):
            SimpleCheckpointConfigurator(
                "foo", empty_data_context, notify_on=notify_condition
            ).build()
def test_simple_checkpoint_raises_error_on_invalid_notify_with(
    empty_data_context,
):
    """Every invalid notify_with value is rejected at build time."""
    invalid_values = (1, "bar", ["local_site", 3])
    for invalid in invalid_values:
        with pytest.raises(ValueError):
            SimpleCheckpointConfigurator(
                "foo", empty_data_context, notify_with=invalid
            ).build()
def test_simple_checkpoint_notify_with_all_has_data_docs_action_with_none_specified(
    empty_data_context,
    slack_notification_action,
    webhook,
    titanic_pandas_data_context_with_v013_datasource_stats_enabled_with_checkpoints_v1_with_templates,
):
    """notify_with="all" is stored as notify_with=None on the Slack action."""
    # adjust the fixture to the expected post-build shape before comparing
    slack_notification_action["action"]["notify_with"] = None

    built = SimpleCheckpointConfigurator(
        "foo", empty_data_context, slack_webhook=webhook, notify_with="all"
    ).build()
    assert slack_notification_action in built.action_list

    stored = titanic_pandas_data_context_with_v013_datasource_stats_enabled_with_checkpoints_v1_with_templates.get_checkpoint(
        "my_simple_checkpoint_with_slack_and_notify_with_all"
    )
    assert slack_notification_action in stored.config.action_list
def test_simple_checkpoint_has_slack_action_with_notify_adjustments_slack_webhook_is_present(
    empty_data_context,
    store_validation_result_action,
    store_eval_parameter_action,
    update_data_docs_action,
    slack_notification_action,
    webhook,
):
    """notify_on/notify_with overrides are carried onto the Slack action."""
    checkpoint_config = SimpleCheckpointConfigurator(
        "foo",
        empty_data_context,
        slack_webhook=webhook,
        notify_on="failure",
        notify_with=["local_site", "s3_prod"],
    ).build()
    # mutate the fixture in place to reflect the overrides before comparing
    slack_notification_action["action"]["notify_on"] = "failure"
    slack_notification_action["action"]["notify_with"] = ["local_site", "s3_prod"]
    expected = [
        store_validation_result_action,
        store_eval_parameter_action,
        update_data_docs_action,
        slack_notification_action,
    ]
    assert checkpoint_config.action_list == expected
def test_simple_checkpoint_has_no_slack_action_when_no_slack_webhook_is_present(
    empty_data_context,
    store_validation_result_action,
    store_eval_parameter_action,
    update_data_docs_action,
):
    """Without a slack_webhook only the three default actions are configured."""
    built = SimpleCheckpointConfigurator("foo", empty_data_context).build()
    expected = [
        store_validation_result_action,
        store_eval_parameter_action,
        update_data_docs_action,
    ]
    assert built.action_list == expected
def test_simple_checkpoint_has_update_data_docs_action_that_should_update_all_sites_when_site_names_is_all(
    empty_data_context,
    store_validation_result_action,
    store_eval_parameter_action,
    update_data_docs_action,
):
    """site_names="all" yields an UpdateDataDocsAction with empty site_names."""
    built = SimpleCheckpointConfigurator(
        "foo", empty_data_context, site_names="all"
    ).build()
    # empty site_names on the action means "no restriction"
    update_data_docs_action["action"]["site_names"] = []
    expected = [
        store_validation_result_action,
        store_eval_parameter_action,
        update_data_docs_action,
    ]
    assert built.action_list == expected
def test_simple_checkpoint_raises_errors_on_invalid_site_name_types(
    empty_data_context,
):
    """Non-string entries and non-list values for site_names raise TypeError."""
    invalid_inputs = ([1, "local"], 1, ["local", None])
    for invalid in invalid_inputs:
        with pytest.raises(TypeError):
            SimpleCheckpointConfigurator(
                "foo", empty_data_context, site_names=invalid
            ).build()
def test_simple_checkpoint_raises_errors_on_site_name_that_does_not_exist_on_data_context(
    empty_data_context,
):
    """A site_names entry unknown to the data context raises TypeError."""
    assert "prod" not in empty_data_context.get_site_names()
    with pytest.raises(TypeError):
        SimpleCheckpointConfigurator(
            "foo",
            empty_data_context,
            site_names=["prod"],
        ).build()
def test_simple_checkpoint_has_update_data_docs_action_that_should_update_selected_sites_when_sites_are_selected(
    empty_data_context,
    store_validation_result_action,
    store_eval_parameter_action,
    update_data_docs_action,
    titanic_pandas_data_context_with_v013_datasource_stats_enabled_with_checkpoints_v1_with_templates,
):
    """site_names=["local_site"] scopes the UpdateDataDocsAction to that site,
    both when built in memory and when loaded from the checkpoint store.
    """
    assert "local_site" in empty_data_context.get_site_names()
    checkpoint_config = SimpleCheckpointConfigurator(
        "foo", empty_data_context, site_names=["local_site"]
    ).build()
    # mutate the fixture to the expected scoped shape before comparing
    update_data_docs_action["action"]["site_names"] = ["local_site"]
    assert checkpoint_config.action_list == [
        store_validation_result_action,
        store_eval_parameter_action,
        update_data_docs_action,
    ]
    assert (
        "local_site"
        in titanic_pandas_data_context_with_v013_datasource_stats_enabled_with_checkpoints_v1_with_templates.get_site_names()
    )
    checkpoint_from_store = titanic_pandas_data_context_with_v013_datasource_stats_enabled_with_checkpoints_v1_with_templates.get_checkpoint(
        "my_simple_checkpoint_with_site_names"
    )
    checkpoint_config = checkpoint_from_store.config
    assert checkpoint_config.action_list == [
        store_validation_result_action,
        store_eval_parameter_action,
        update_data_docs_action,
    ]
def test_simple_checkpoint_has_no_update_data_docs_action_when_site_names_is_none(
    empty_data_context,
    store_validation_result_action,
    store_eval_parameter_action,
    update_data_docs_action,
):
    """site_names=None drops the UpdateDataDocsAction entirely."""
    assert "local_site" in empty_data_context.get_site_names()
    built = SimpleCheckpointConfigurator(
        "foo", empty_data_context, site_names=None
    ).build()
    expected = [
        store_validation_result_action,
        store_eval_parameter_action,
    ]
    assert built.action_list == expected
def test_simple_checkpoint_persisted_to_store(
    context_with_data_source_and_empty_suite, webhook, one_validation
):
    """A SimpleCheckpoint config round-trips through the checkpoint store.

    site_names=None drops the update_data_docs action, so the serialized
    config contains only the two default store actions.
    """
    assert context_with_data_source_and_empty_suite.list_checkpoints() == []
    initial_checkpoint_config = SimpleCheckpointConfigurator(
        "foo",
        context_with_data_source_and_empty_suite,
        site_names=None,
    ).build()
    context_with_data_source_and_empty_suite.add_checkpoint(
        **initial_checkpoint_config.to_json_dict()
    )
    assert context_with_data_source_and_empty_suite.list_checkpoints() == ["foo"]
    checkpoint = context_with_data_source_and_empty_suite.get_checkpoint("foo")
    assert isinstance(checkpoint, Checkpoint)
    assert isinstance(checkpoint.config, CheckpointConfig)
    assert checkpoint.config.to_json_dict() == {
        "action_list": [
            {
                "action": {"class_name": "StoreValidationResultAction"},
                "name": "store_validation_result",
            },
            {
                "action": {"class_name": "StoreEvaluationParametersAction"},
                "name": "store_evaluation_params",
            },
        ],
        "batch_request": None,
        "class_name": "Checkpoint",
        "config_version": 1.0,
        "evaluation_parameters": {},
        "expectation_suite_ge_cloud_id": None,
        "expectation_suite_name": None,
        "ge_cloud_id": None,
        "module_name": "great_expectations.checkpoint",
        "name": "foo",
        "profilers": [],
        "run_name_template": None,
        "runtime_configuration": {},
        "template_name": None,
        "validations": [],
    }
    results = checkpoint.run(validations=[one_validation])
    assert results.success
def test_simple_checkpoint_defaults_run_and_no_run_params_raises_checkpoint_error(
    context_with_data_source_and_empty_suite, simple_checkpoint_defaults
):
    """Running a default SimpleCheckpoint with no validations at all raises CheckpointError."""
    with pytest.raises(ge_exceptions.CheckpointError) as exc_info:
        simple_checkpoint_defaults.run()
    assert 'Checkpoint "foo" does not contain any validations.' in str(exc_info.value)
def test_simple_checkpoint_defaults_run_and_basic_run_params_without_persisting_checkpoint(
    context_with_data_source_and_empty_suite, simple_checkpoint_defaults, one_validation
):
    """An un-persisted SimpleCheckpoint runs when validations are supplied at call time."""
    context = context_with_data_source_and_empty_suite
    assert context.list_checkpoints() == []

    outcome = simple_checkpoint_defaults.run(run_name="bar", validations=[one_validation])

    assert isinstance(outcome, CheckpointResult)
    assert outcome.run_id.run_name == "bar"
    assert outcome.list_expectation_suite_names() == ["one"]
    assert len(outcome.list_validation_results()) == 1
    assert outcome.success
def test_simple_checkpoint_runtime_kwargs_processing_site_names_only_without_persisting_checkpoint(
    context_with_data_source_and_empty_suite, simple_checkpoint_defaults, one_validation
):
    """Runtime ``site_names`` injects a scoped UpdateDataDocsAction and the
    substituted config matches the expected runtime kwargs after
    falsy-value filtering.
    """
    assert context_with_data_source_and_empty_suite.list_checkpoints() == []
    expected_runtime_kwargs: dict = {
        "name": "foo",
        "config_version": 1.0,
        "module_name": "great_expectations.checkpoint",
        "class_name": "Checkpoint",
        "template_name": None,
        "run_name_template": None,
        "expectation_suite_name": None,
        "batch_request": None,
        "action_list": [
            {
                "name": "store_validation_result",
                "action": {"class_name": "StoreValidationResultAction"},
            },
            {
                "name": "store_evaluation_params",
                "action": {"class_name": "StoreEvaluationParametersAction"},
            },
            {
                "name": "update_data_docs",
                "action": {
                    "class_name": "UpdateDataDocsAction",
                    "site_names": ["local_site"],
                },
            },
        ],
        "evaluation_parameters": None,
        "runtime_configuration": {},
        "validations": [
            {
                "batch_request": {
                    "datasource_name": "my_datasource",
                    "data_connector_name": "my_special_data_connector",
                    "data_asset_name": "users",
                },
                "expectation_suite_name": "one",
            },
        ],
        "profilers": None,
    }
    result: CheckpointResult = simple_checkpoint_defaults.run(
        run_name="bar",
        validations=[one_validation],
        site_names=["local_site"],
    )
    assert isinstance(result, CheckpointResult)
    assert result.run_id.run_name == "bar"
    assert result.list_expectation_suite_names() == ["one"]
    assert len(result.list_validation_results()) == 1
    assert result.success
    # substituting the same runtime kwargs must reproduce the expected config
    substituted_runtime_config: CheckpointConfig = (
        simple_checkpoint_defaults.get_substituted_config(
            runtime_kwargs=expected_runtime_kwargs
        )
    )
    assert filter_properties_dict(
        properties=substituted_runtime_config.to_json_dict(), clean_falsy=True
    ) == filter_properties_dict(properties=expected_runtime_kwargs, clean_falsy=True)
def test_simple_checkpoint_runtime_kwargs_processing_slack_webhook_only_without_persisting_checkpoint(
    context_with_data_source_and_empty_suite, simple_checkpoint_defaults, one_validation
):
    """Passing only ``slack_webhook`` at run time should append a
    SlackNotificationAction (notify_on="all") to the action list, without
    persisting the checkpoint."""
    # Nothing has been persisted yet.
    assert context_with_data_source_and_empty_suite.list_checkpoints() == []
    # The full config we expect runtime-kwarg substitution to produce.
    expected_runtime_kwargs: dict = {
        "name": "foo",
        "config_version": 1.0,
        "module_name": "great_expectations.checkpoint",
        "class_name": "Checkpoint",
        "template_name": None,
        "run_name_template": None,
        "expectation_suite_name": None,
        "batch_request": None,
        "action_list": [
            {
                "name": "store_validation_result",
                "action": {"class_name": "StoreValidationResultAction"},
            },
            {
                "name": "store_evaluation_params",
                "action": {"class_name": "StoreEvaluationParametersAction"},
            },
            {
                "name": "update_data_docs",
                "action": {"class_name": "UpdateDataDocsAction", "site_names": []},
            },
            {
                "name": "send_slack_notification",
                "action": {
                    "class_name": "SlackNotificationAction",
                    "slack_webhook": "https://hooks.slack.com/my_slack_webhook.geocities",
                    "notify_on": "all",
                    "notify_with": None,
                    "renderer": {
                        "module_name": "great_expectations.render.renderer.slack_renderer",
                        "class_name": "SlackRenderer",
                    },
                },
            },
        ],
        "evaluation_parameters": None,
        "runtime_configuration": {},
        "validations": [
            {
                "batch_request": {
                    "datasource_name": "my_datasource",
                    "data_connector_name": "my_special_data_connector",
                    "data_asset_name": "users",
                },
                "expectation_suite_name": "one",
            }
        ],
        "profilers": None,
    }
    result: CheckpointResult = simple_checkpoint_defaults.run(
        run_name="bar",
        validations=[one_validation],
        slack_webhook="https://hooks.slack.com/my_slack_webhook.geocities",
    )
    assert isinstance(result, CheckpointResult)
    assert result.run_id.run_name == "bar"
    assert result.list_expectation_suite_names() == ["one"]
    assert len(result.list_validation_results()) == 1
    assert result.success
    # Substitute the expected kwargs and compare, ignoring falsy entries.
    substituted_runtime_config: CheckpointConfig = (
        simple_checkpoint_defaults.get_substituted_config(
            runtime_kwargs=expected_runtime_kwargs
        )
    )
    assert filter_properties_dict(
        properties=substituted_runtime_config.to_json_dict(), clean_falsy=True
    ) == filter_properties_dict(properties=expected_runtime_kwargs, clean_falsy=True)
def test_simple_checkpoint_runtime_kwargs_processing_all_special_kwargs_without_persisting_checkpoint(
    context_with_data_source_and_empty_suite, simple_checkpoint_defaults, one_validation
):
    """Passing all SimpleCheckpoint "special" kwargs (site_names, notify_with,
    notify_on, slack_webhook) at run time should expand into both a scoped
    UpdateDataDocsAction and a configured SlackNotificationAction, without
    persisting the checkpoint."""
    # Nothing has been persisted yet.
    assert context_with_data_source_and_empty_suite.list_checkpoints() == []
    # The full config we expect runtime-kwarg substitution to produce.
    expected_runtime_kwargs: dict = {
        "name": "foo",
        "config_version": 1.0,
        "module_name": "great_expectations.checkpoint",
        "class_name": "Checkpoint",
        "template_name": None,
        "run_name_template": None,
        "expectation_suite_name": None,
        "batch_request": None,
        "action_list": [
            {
                "name": "store_validation_result",
                "action": {"class_name": "StoreValidationResultAction"},
            },
            {
                "name": "store_evaluation_params",
                "action": {"class_name": "StoreEvaluationParametersAction"},
            },
            {
                "name": "update_data_docs",
                "action": {
                    "class_name": "UpdateDataDocsAction",
                    "site_names": ["local_site"],
                },
            },
            {
                "name": "send_slack_notification",
                "action": {
                    "class_name": "SlackNotificationAction",
                    "slack_webhook": "https://hooks.slack.com/my_slack_webhook.geocities",
                    "notify_on": "failure",
                    "notify_with": ["local_site"],
                    "renderer": {
                        "module_name": "great_expectations.render.renderer.slack_renderer",
                        "class_name": "SlackRenderer",
                    },
                },
            },
        ],
        "evaluation_parameters": None,
        "runtime_configuration": {},
        "validations": [
            {
                "batch_request": {
                    "datasource_name": "my_datasource",
                    "data_connector_name": "my_special_data_connector",
                    "data_asset_name": "users",
                },
                "expectation_suite_name": "one",
            }
        ],
        "profilers": None,
    }
    result: CheckpointResult = simple_checkpoint_defaults.run(
        run_name="bar",
        validations=[one_validation],
        site_names=["local_site"],
        notify_with=["local_site"],
        notify_on="failure",
        slack_webhook="https://hooks.slack.com/my_slack_webhook.geocities",
    )
    assert isinstance(result, CheckpointResult)
    assert result.run_id.run_name == "bar"
    assert result.list_expectation_suite_names() == ["one"]
    assert len(result.list_validation_results()) == 1
    assert result.success
    # Substitute the expected kwargs and compare, ignoring falsy entries.
    substituted_runtime_config: CheckpointConfig = (
        simple_checkpoint_defaults.get_substituted_config(
            runtime_kwargs=expected_runtime_kwargs
        )
    )
    assert filter_properties_dict(
        properties=substituted_runtime_config.to_json_dict(), clean_falsy=True
    ) == filter_properties_dict(properties=expected_runtime_kwargs, clean_falsy=True)
def test_simple_checkpoint_runtime_kwargs_processing_all_kwargs(
    titanic_pandas_data_context_with_v013_datasource_stats_enabled_with_checkpoints_v1_with_templates,
    simple_checkpoint_defaults,
    one_validation,
    monkeypatch,
):
    """Exercise run() with every supported runtime kwarg at once (template,
    run-name template, suite, batch request, validations, evaluation
    parameters, runtime configuration, and the Slack/site special kwargs),
    then verify the substituted config matches expectations."""
    # Environment variables referenced by the template's evaluation parameters.
    monkeypatch.setenv("GE_ENVIRONMENT", "my_ge_environment")
    monkeypatch.setenv("MY_PARAM", "1")
    # The full config we expect runtime-kwarg substitution to produce.
    expected_runtime_kwargs: dict = {
        "name": "foo",
        "config_version": 1.0,
        "module_name": "great_expectations.checkpoint",
        "class_name": "Checkpoint",
        "template_name": "my_simple_template_checkpoint",
        "run_name_template": "my_runtime_run_name_template",
        "expectation_suite_name": "my_runtime_suite",
        "batch_request": {
            "data_connector_query": {
                "index": -1,
            },
        },
        "action_list": [
            {
                "name": "store_validation_result",
                "action": {"class_name": "StoreValidationResultAction"},
            },
            {
                "name": "store_evaluation_params",
                "action": {"class_name": "StoreEvaluationParametersAction"},
            },
            {
                "name": "update_data_docs",
                "action": {
                    "class_name": "UpdateDataDocsAction",
                    "site_names": ["local_site"],
                },
            },
            {
                "name": "send_slack_notification",
                "action": {
                    "class_name": "SlackNotificationAction",
                    "slack_webhook": "https://hooks.slack.com/my_slack_webhook.geocities",
                    "notify_on": "failure",
                    "notify_with": ["local_site"],
                    "renderer": {
                        "module_name": "great_expectations.render.renderer.slack_renderer",
                        "class_name": "SlackRenderer",
                    },
                },
            },
        ],
        "evaluation_parameters": {
            "aux_param_0": "1",
            "aux_param_1": "1 + 1",
            "environment": "my_ge_environment",
            "my_runtime_key": "my_runtime_value",
            "tolerance": 0.01,
        },
        "runtime_configuration": {
            "my_runtime_key": "my_runtime_value",
            "result_format": {
                "result_format": "BASIC",
                "partial_unexpected_count": 20,
            },
        },
        "validations": [
            {
                "batch_request": {
                    "datasource_name": "my_datasource",
                    "data_connector_name": "my_special_data_connector",
                    "data_asset_name": "users",
                },
                "expectation_suite_name": "one",
            }
        ],
        "profilers": None,
    }
    result: CheckpointResult = simple_checkpoint_defaults.run(
        run_name="bar",
        template_name="my_simple_template_checkpoint",
        run_name_template="my_runtime_run_name_template",
        expectation_suite_name="my_runtime_suite",
        batch_request={
            "data_connector_query": {
                "index": -1,
            },
        },
        validations=[one_validation],
        evaluation_parameters={"my_runtime_key": "my_runtime_value"},
        runtime_configuration={"my_runtime_key": "my_runtime_value"},
        site_names=["local_site"],
        notify_with=["local_site"],
        notify_on="failure",
        slack_webhook="https://hooks.slack.com/my_slack_webhook.geocities",
    )
    assert isinstance(result, CheckpointResult)
    assert result.run_id.run_name == "bar"
    assert result.list_expectation_suite_names() == ["one"]
    assert len(result.list_validation_results()) == 1
    assert result.success
    substituted_runtime_config: CheckpointConfig = (
        simple_checkpoint_defaults.get_substituted_config(
            runtime_kwargs=expected_runtime_kwargs
        )
    )
    # template_name is dropped from the expectation before comparing (it is
    # not expected to appear in the substituted config).
    expected_runtime_kwargs.pop("template_name")
    assert filter_properties_dict(
        properties=substituted_runtime_config.to_json_dict(), clean_falsy=True
    ) == filter_properties_dict(properties=expected_runtime_kwargs, clean_falsy=True)
def test_simple_checkpoint_defaults_run_and_basic_run_params_with_persisted_checkpoint_loaded_from_store(
    context_with_data_source_and_empty_suite,
    simple_checkpoint_defaults,
    webhook,
    one_validation,
):
    """Build a SimpleCheckpoint config, persist it, reload it from the store
    as a full Checkpoint, and confirm a basic run succeeds."""
    data_context: DataContext = context_with_data_source_and_empty_suite
    built_config = SimpleCheckpointConfigurator(
        "foo", data_context, slack_webhook=webhook
    ).build()
    data_context.add_checkpoint(**built_config.to_json_dict())
    stored_name = built_config.name
    assert data_context.list_checkpoints() == [stored_name]
    # Drop the local config object so the run below can only rely on the store.
    del built_config
    reloaded = data_context.get_checkpoint(stored_name)
    assert isinstance(reloaded, Checkpoint)
    run_result = reloaded.run(
        run_name="bar",
        validations=[one_validation],
    )
    assert isinstance(run_result, CheckpointResult)
    assert run_result.run_id.run_name == "bar"
    assert run_result.list_expectation_suite_names() == ["one"]
    assert len(run_result.list_validation_results()) == 1
    assert run_result.success
@pytest.fixture
def one_validation():
    """A single validation entry targeting the ``users`` asset and the
    expectation suite named ``one``."""
    batch_request = {
        "datasource_name": "my_datasource",
        "data_connector_name": "my_special_data_connector",
        "data_asset_name": "users",
    }
    return {"batch_request": batch_request, "expectation_suite_name": "one"}
def test_simple_checkpoint_defaults_run_with_top_level_batch_request_and_suite(
    context_with_data_source_and_empty_suite, simple_checkpoint_defaults
):
    """A top-level batch_request and expectation_suite_name supplied at run
    time should be applied to the per-validation entries."""
    top_level_batch_request = {
        "datasource_name": "my_datasource",
        "data_connector_name": "my_special_data_connector",
        "data_asset_name": "users",
    }
    checkpoint_result = simple_checkpoint_defaults.run(
        run_name="bar",
        batch_request=top_level_batch_request,
        expectation_suite_name="one",
        validations=[{"expectation_suite_name": "one"}],
    )
    assert isinstance(checkpoint_result, CheckpointResult)
    assert checkpoint_result.success
    assert len(checkpoint_result.run_results) == 1
def test_simple_checkpoint_error_with_invalid_top_level_batch_request(
    simple_checkpoint_defaults,
):
    """A top-level batch_request with no data_asset_name must raise TypeError."""
    incomplete_batch_request = {
        "datasource_name": "my_datasource",
        "data_connector_name": "my_special_data_connector",
    }
    with pytest.raises(TypeError):
        simple_checkpoint_defaults.run(
            run_name="bar",
            batch_request=incomplete_batch_request,
            expectation_suite_name="one",
            validations=[{"expectation_suite_name": "one"}],
        )
def test_simple_checkpoint_defaults_run_multiple_validations_without_persistence(
    context_with_data_source_and_empty_suite,
    simple_checkpoint_defaults,
    two_validations,
):
    """Running a checkpoint over two validations (suites "one" and "two")
    yields one validation result per suite, without persisting anything."""
    data_context = context_with_data_source_and_empty_suite
    data_context.create_expectation_suite("two")
    assert len(data_context.list_expectation_suites()) == 2
    checkpoint_result = simple_checkpoint_defaults.run(
        run_name="bar",
        validations=two_validations,
    )
    assert isinstance(checkpoint_result, CheckpointResult)
    assert checkpoint_result.run_id.run_name == "bar"
    # Result ordering is not asserted; compare sorted suite names.
    assert sorted(checkpoint_result.list_expectation_suite_names()) == sorted(
        ["one", "two"]
    )
    assert len(checkpoint_result.list_validation_results()) == 2
    assert checkpoint_result.success
def test_simple_checkpoint_defaults_run_multiple_validations_with_persisted_checkpoint_loaded_from_store(
    context_with_data_source_and_empty_suite,
    simple_checkpoint_defaults,
    two_validations,
):
    """Persist the default checkpoint, reload it from the store, and verify a
    two-validation run behaves the same as the unpersisted case."""
    data_context: DataContext = context_with_data_source_and_empty_suite
    data_context.create_expectation_suite("two")
    assert len(data_context.list_expectation_suites()) == 2
    data_context.add_checkpoint(**simple_checkpoint_defaults.config.to_json_dict())
    stored_name = simple_checkpoint_defaults.name
    assert data_context.list_checkpoints() == [stored_name]
    # Drop the in-memory checkpoint so only the stored copy can be used.
    del simple_checkpoint_defaults
    reloaded = data_context.get_checkpoint(stored_name)
    checkpoint_result = reloaded.run(
        run_name="bar",
        validations=two_validations,
    )
    assert isinstance(checkpoint_result, CheckpointResult)
    assert checkpoint_result.run_id.run_name == "bar"
    # Result ordering is not asserted; compare sorted suite names.
    assert sorted(checkpoint_result.list_expectation_suite_names()) == sorted(
        ["one", "two"]
    )
    assert len(checkpoint_result.list_validation_results()) == 2
    assert checkpoint_result.success
def test_simple_checkpoint_with_runtime_batch_request_and_runtime_data_connector_creates_config(
    context_with_data_source_and_empty_suite,
    store_validation_result_action,
    store_eval_parameter_action,
    update_data_docs_action,
):
    """Constructing a SimpleCheckpoint around a RuntimeBatchRequest should
    fold the request — runtime parameters included — into the generated
    CheckpointConfig alongside the three default actions."""
    data_context: DataContext = context_with_data_source_and_empty_suite
    rt_batch_request = RuntimeBatchRequest(
        datasource_name="my_datasource",
        data_connector_name="my_runtime_data_connector",
        data_asset_name="users",
        batch_identifiers={"pipeline_stage_name": "first"},
        runtime_parameters={"query": "SELECT * FROM taxi_data"},
    )
    checkpoint = SimpleCheckpoint(
        name="my_checkpoint", data_context=data_context, batch_request=rt_batch_request
    )
    generated_config = checkpoint.config
    assert isinstance(generated_config, CheckpointConfig)
    assert generated_config.name == "my_checkpoint"
    # The three default actions come through unchanged.
    assert generated_config.action_list == [
        store_validation_result_action,
        store_eval_parameter_action,
        update_data_docs_action,
    ]
    # The runtime request is serialized into the config verbatim.
    assert generated_config.batch_request == {
        "batch_identifiers": {"pipeline_stage_name": "first"},
        "data_asset_name": "users",
        "data_connector_name": "my_runtime_data_connector",
        "datasource_name": "my_datasource",
        "runtime_parameters": {"query": "SELECT * FROM taxi_data"},
    }
    assert generated_config.config_version == 1.0
    assert generated_config.class_name == "Checkpoint"
    assert generated_config.evaluation_parameters == {}
    assert generated_config.runtime_configuration == {}
    assert generated_config.validations == []
| true | true |
f71dffb01da5be689dd5a395fa5f3d98ff914e8b | 536 | py | Python | leetCode/algorithms/medium/print_foobar_alternately.py | ferhatelmas/algo | a7149c7a605708bc01a5cd30bf5455644cefd04d | [
"WTFPL"
] | 25 | 2015-01-21T16:39:18.000Z | 2021-05-24T07:01:24.000Z | leetCode/algorithms/medium/print_foobar_alternately.py | gauravsingh58/algo | 397859a53429e7a585e5f6964ad24146c6261326 | [
"WTFPL"
] | 2 | 2020-09-30T19:39:36.000Z | 2020-10-01T17:15:16.000Z | leetCode/algorithms/medium/print_foobar_alternately.py | ferhatelmas/algo | a7149c7a605708bc01a5cd30bf5455644cefd04d | [
"WTFPL"
] | 15 | 2015-01-21T16:39:27.000Z | 2020-10-01T17:00:22.000Z | from threading import Lock
class FooBar:
    """Coordinate two threads so their output alternates foo, bar, foo, bar...

    ``lock1`` gates the next "foo" turn and ``lock2`` gates the next "bar"
    turn; each printer releases the other side's gate after printing.
    """

    def __init__(self, n):
        self.n = n
        self.lock1 = Lock()
        self.lock2 = Lock()
        # Close the "bar" gate up front so foo() must print first.
        self.lock2.acquire()

    def foo(self, printFoo: "Callable[[], None]") -> None:
        """Invoke printFoo n times, handing the turn to bar() after each."""
        remaining = self.n
        while remaining > 0:
            self.lock1.acquire()   # wait for our turn
            printFoo()
            self.lock2.release()   # let bar() run once
            remaining -= 1

    def bar(self, printBar: "Callable[[], None]") -> None:
        """Invoke printBar n times, handing the turn back to foo() after each."""
        remaining = self.n
        while remaining > 0:
            self.lock2.acquire()   # wait until foo() has printed
            printBar()
            self.lock1.release()   # let foo() run again
            remaining -= 1
| 24.363636 | 58 | 0.518657 | from threading import Lock
class FooBar:
def __init__(self, n):
self.n = n
self.lock1 = Lock()
self.lock2 = Lock()
self.lock2.acquire()
def foo(self, printFoo: "Callable[[], None]") -> None:
for _ in range(self.n):
self.lock1.acquire()
printFoo()
self.lock2.release()
def bar(self, printBar: "Callable[[], None]") -> None:
for _ in range(self.n):
self.lock2.acquire()
printBar()
self.lock1.release()
| true | true |
f71dffe042d519807bb94e8a66e0077a2ed3f78f | 2,415 | py | Python | GraphDegeneracy.py | skytreader/pads | 3b5011b6725810ff73ad376b1589d7510c156b1c | [
"MIT"
] | 3 | 2021-09-14T02:05:28.000Z | 2021-11-14T17:05:44.000Z | GraphDegeneracy.py | skytreader/pads | 3b5011b6725810ff73ad376b1589d7510c156b1c | [
"MIT"
] | null | null | null | GraphDegeneracy.py | skytreader/pads | 3b5011b6725810ff73ad376b1589d7510c156b1c | [
"MIT"
] | null | null | null | """GraphDegeneracy.py
Compute the degeneracy of graphs, and degeneracy orderings of graphs.
D. Eppstein, July 2016.
"""
import unittest
from Graphs import isUndirected
from BucketQueue import BucketQueue
def degeneracySequence(G):
    """Yield (vertex, count of not-yet-output neighbors) in degeneracy order.

    Vertices come out lowest-remaining-degree first; as each vertex is
    emitted, the remaining degree of its still-queued neighbors drops by one.

    Raises TypeError if G is not undirected.
    """
    if not isUndirected(G):
        raise TypeError("Graph must be undirected")
    queue = BucketQueue()
    # Seed the priority queue with every vertex keyed by its degree.
    for vertex in G:
        queue[vertex] = len(G[vertex])
    for vertex, remaining in queue.items():
        yield vertex, remaining
        # Emitting this vertex removes it as a "later" neighbor of the rest.
        for neighbor in G[vertex]:
            if neighbor in queue:
                queue[neighbor] -= 1
def degeneracy(G):
    """Return the degeneracy of G: the largest remaining-neighbor count seen
    anywhere along a degeneracy ordering."""
    counts = (count for _, count in degeneracySequence(G))
    return max(counts)
def degeneracyOrientation(G):
    """Orient G so every vertex has at most degeneracy(G) out-neighbors.

    Each edge points from the earlier vertex (in degeneracy order) to the
    later one; returns a dict mapping vertex -> set of out-neighbors.
    """
    oriented = {}
    for vertex, _ in degeneracySequence(G):
        later_neighbors = set()
        for neighbor in G[vertex]:
            if neighbor not in oriented:  # not yet output => comes later
                later_neighbors.add(neighbor)
        oriented[vertex] = later_neighbors
    return oriented
def core(G, k=None):
    """Return the vertex set of the k-core of G, or of the deepest core when
    *k* is not given.

    The return value is a set of vertices; use Graphs.InducedSubgraph
    if the edges are also needed.
    """
    level = 0
    coreset = set()
    for v, d in degeneracySequence(G):
        if d > level:  # the remaining-degree watermark just rose
            # Restart the collected core unless depth k was already reached.
            # (PEP 8: identity comparison with None, not equality.)
            if k is None or level < k:
                coreset = set()
            level = d
        coreset.add(v)
    return coreset
def triangles(G):
    """Return a generator over the triangles of G, each as a tuple (u, v, w).

    The degeneracy orientation keeps out-degrees small, so scanning pairs of
    out-neighbors of u and testing for the closing edge v -> w is efficient.
    """
    oriented = degeneracyOrientation(G)  # computed eagerly, as before
    return (
        (u, v, w)
        for u in oriented
        for v in oriented[u]
        for w in oriented[u]
        if w in oriented[v]
    )
# ============================================================
# If run from command line, perform unit tests
# ============================================================
class DegeneracyTest(unittest.TestCase):
    """Unit tests against the six-vertex example graph (File:6n-graf.svg)."""

    # Adjacency-list fixture: an undirected graph on vertices 1..6.
    G = {1: [2, 5], 2: [1, 3, 5], 3: [2, 4], 4: [3, 5, 6], 5: [1, 2, 4], 6: [4]}

    def testDegeneracy(self):
        """The example graph is 2-degenerate."""
        self.assertEqual(degeneracy(self.G), 2)

    def testCore(self):
        """Its deepest core omits only the degree-1 vertex 6."""
        self.assertEqual(core(self.G), {1, 2, 3, 4, 5})

    def testTriangles(self):
        """Exactly one triangle exists, on vertices {1, 2, 5}."""
        found = list(triangles(self.G))
        self.assertEqual(len(found), 1)
        self.assertEqual(set(found[0]), {1, 2, 5})
# Run the unit tests when this module is executed as a script.
if __name__ == "__main__":
    unittest.main()
| 31.776316 | 80 | 0.578882 |
import unittest
from Graphs import isUndirected
from BucketQueue import BucketQueue
def degeneracySequence(G):
if not isUndirected(G):
raise TypeError("Graph must be undirected")
Q = BucketQueue()
for v in G:
Q[v] = len(G[v])
for v,d in Q.items():
yield v,d
for w in G[v]:
if w in Q:
Q[w] -= 1
def degeneracy(G):
return max(d for v,d in degeneracySequence(G))
def degeneracyOrientation(G):
D = {}
for v,d in degeneracySequence(G):
D[v] = {w for w in G[v] if w not in D}
return D
def core(G,k=None):
level = 0
coreset = set()
for v,d in degeneracySequence(G):
if d > level:
if k == None or level < k:
coreset = set()
level = d
coreset.add(v)
return coreset
def triangles(G):
G = degeneracyOrientation(G)
return ((u,v,w) for u in G for v in G[u] for w in G[u] if w in G[v])
class DegeneracyTest(unittest.TestCase):
G = {1:[2,5],2:[1,3,5],3:[2,4],4:[3,5,6],5:[1,2,4],6:[4]}
def testDegeneracy(self):
self.assertEqual(degeneracy(DegeneracyTest.G),2)
def testCore(self):
self.assertEqual(core(DegeneracyTest.G),{1,2,3,4,5})
def testTriangles(self):
T = list(triangles(DegeneracyTest.G))
self.assertEqual(len(T),1)
self.assertEqual(set(T[0]),{1,2,5})
if __name__ == "__main__":
unittest.main()
| true | true |
f71e00f3db6a72d74385d12d183fbf8fd6b0a6d2 | 816 | py | Python | spark_auto_mapper_fhir/value_sets/v2_0178.py | imranq2/SparkAutoMapper.FHIR | dd23b218fb0097d1edc2f3e688e8d6d4d7278bd2 | [
"Apache-2.0"
] | 1 | 2020-10-31T23:25:07.000Z | 2020-10-31T23:25:07.000Z | spark_auto_mapper_fhir/value_sets/v2_0178.py | icanbwell/SparkAutoMapper.FHIR | 98f368e781b46523142c7cb513c670d659a93c9b | [
"Apache-2.0"
] | null | null | null | spark_auto_mapper_fhir/value_sets/v2_0178.py | icanbwell/SparkAutoMapper.FHIR | 98f368e781b46523142c7cb513c670d659a93c9b | [
"Apache-2.0"
] | null | null | null | from __future__ import annotations
from spark_auto_mapper_fhir.fhir_types.uri import FhirUri
from spark_auto_mapper_fhir.value_sets.generic_type import GenericTypeCode
from spark_auto_mapper.type_definitions.defined_types import AutoMapperTextInputType
# This file is auto-generated by generate_classes so do not edit manually
# noinspection PyPep8Naming
class V2_0178(GenericTypeCode):
    """
    v2.0178
    From: http://terminology.hl7.org/ValueSet/v2-0178 in v2-tables.xml
    FHIR Value set/code system definition for HL7 v2 table 0178 ( FILE-LEVEL EVENT
    CODE)
    """
    def __init__(self, value: AutoMapperTextInputType):
        """Wrap *value* as a code from HL7 v2 table 0178."""
        super().__init__(value=value)
    """
    http://terminology.hl7.org/ValueSet/v2-0178
    """
    # Canonical URI of the value set this code type belongs to.
    codeset: FhirUri = "http://terminology.hl7.org/ValueSet/v2-0178"
| 31.384615 | 86 | 0.756127 | from __future__ import annotations
from spark_auto_mapper_fhir.fhir_types.uri import FhirUri
from spark_auto_mapper_fhir.value_sets.generic_type import GenericTypeCode
from spark_auto_mapper.type_definitions.defined_types import AutoMapperTextInputType
class V2_0178(GenericTypeCode):
def __init__(self, value: AutoMapperTextInputType):
super().__init__(value=value)
codeset: FhirUri = "http://terminology.hl7.org/ValueSet/v2-0178"
| true | true |
f71e010012f044722955c0170d63a67b154875fd | 4,401 | py | Python | hummingbot/client/config/config_validators.py | withshubh/hummingbot | 8af7b6b6a4d482c8418f293f1ebd4d0d6a86eff5 | [
"Apache-2.0"
] | 37 | 2020-07-08T03:44:26.000Z | 2022-01-16T12:35:26.000Z | hummingbot/client/config/config_validators.py | svamol/hummingbot | 03c6ab488cca620f622c432a5a09a40cd854e475 | [
"Apache-2.0"
] | 3 | 2021-04-13T10:40:05.000Z | 2021-05-09T16:15:42.000Z | hummingbot/client/config/config_validators.py | svamol/hummingbot | 03c6ab488cca620f622c432a5a09a40cd854e475 | [
"Apache-2.0"
] | 17 | 2021-04-07T21:29:46.000Z | 2022-02-03T02:01:04.000Z | from decimal import Decimal
from typing import Optional
# Validators
def validate_exchange(value: str) -> Optional[str]:
    """Return an error message when *value* is not a known exchange, else None."""
    from hummingbot.client.settings import EXCHANGES
    if value in EXCHANGES:
        return None
    return f"Invalid exchange, please choose value from {EXCHANGES}"
def validate_derivative(value: str) -> Optional[str]:
    """Return an error message when *value* is not a known derivative, else None."""
    from hummingbot.client.settings import DERIVATIVES
    if value in DERIVATIVES:
        return None
    return f"Invalid derivative, please choose value from {DERIVATIVES}"
def validate_connector(value: str) -> Optional[str]:
    """Return an error message when *value* is not a known connector, else None."""
    from hummingbot.client.settings import CONNECTOR_SETTINGS
    if value in CONNECTOR_SETTINGS:
        return None
    return f"Invalid connector, please choose value from {CONNECTOR_SETTINGS.keys()}"
def validate_strategy(value: str) -> Optional[str]:
    """Return an error message when *value* is not a known strategy, else None."""
    from hummingbot.client.settings import STRATEGIES
    if value in STRATEGIES:
        return None
    return f"Invalid strategy, please choose value from {STRATEGIES}"
def validate_decimal(value: str, min_value: Decimal = None, max_value: Decimal = None, inclusive=True) -> Optional[str]:
    """Validate that *value* parses as a Decimal and lies within the optional
    min/max bounds (inclusive by default).

    Returns an error-message string on failure, None when the value is valid.
    """
    try:
        parsed = Decimal(value)
    except Exception:
        return f"{value} is not in decimal format."
    # Normalize the bounds to Decimal so mixed-type comparisons are exact.
    lower = None if min_value is None else Decimal(str(min_value))
    upper = None if max_value is None else Decimal(str(max_value))
    if inclusive:
        if lower is not None and upper is not None:
            if not (lower <= parsed <= upper):
                return f"Value must be between {min_value} and {max_value}."
        elif lower is not None and not parsed >= lower:
            return f"Value cannot be less than {min_value}."
        elif upper is not None and not parsed <= upper:
            return f"Value cannot be more than {max_value}."
    else:
        if lower is not None and upper is not None:
            if not (lower < parsed < upper):
                return f"Value must be between {min_value} and {max_value} (exclusive)."
        elif lower is not None and not parsed > lower:
            return f"Value must be more than {min_value}."
        elif upper is not None and not parsed < upper:
            return f"Value must be less than {max_value}."
    return None
def validate_market_trading_pair(market: str, value: str) -> Optional[str]:
    """Return an error message when *value* is not an active trading pair on
    *market*, else None.

    Trading-pair validation/autocomplete is a UI optimization only: when the
    fetcher is not ready (network issues, slow wifi) nothing is validated so
    the user is never blocked from proceeding.
    """
    from hummingbot.core.utils.trading_pair_fetcher import TradingPairFetcher
    fetcher: TradingPairFetcher = TradingPairFetcher.get_instance()
    if not fetcher.ready:
        return None  # pair list unavailable; accept anything
    known_pairs = fetcher.trading_pairs.get(market, [])
    if known_pairs and value not in known_pairs:
        return f"{value} is not an active market on {market}."
    return None
def validate_bool(value: str) -> Optional[str]:
    """Return an error message unless *value* (case-insensitively) is one of
    the accepted yes/no spellings, in which case return None."""
    valid_values = ('true', 'yes', 'y', 'false', 'no', 'n')
    if value.lower() in valid_values:
        return None
    return f"Invalid value, please choose value from {valid_values}"
def validate_int(value: str, min_value: int = None, max_value: int = None, inclusive=True) -> Optional[str]:
    """Validate that *value* parses as an int and lies within the optional
    min/max bounds (inclusive by default).

    Returns an error-message string on failure, None when the value is valid.
    """
    try:
        parsed = int(value)
    except Exception:
        return f"{value} is not in integer format."
    if inclusive:
        if min_value is not None and max_value is not None:
            if not (min_value <= parsed <= max_value):
                return f"Value must be between {min_value} and {max_value}."
        elif min_value is not None and parsed < min_value:
            return f"Value cannot be less than {min_value}."
        elif max_value is not None and parsed > max_value:
            return f"Value cannot be more than {max_value}."
    else:
        if min_value is not None and max_value is not None:
            if not (min_value < parsed < max_value):
                return f"Value must be between {min_value} and {max_value} (exclusive)."
        elif min_value is not None and parsed <= min_value:
            return f"Value must be more than {min_value}."
        elif max_value is not None and parsed >= max_value:
            return f"Value must be less than {max_value}."
    return None
| 47.322581 | 120 | 0.684163 | from decimal import Decimal
from typing import Optional
def validate_exchange(value: str) -> Optional[str]:
from hummingbot.client.settings import EXCHANGES
if value not in EXCHANGES:
return f"Invalid exchange, please choose value from {EXCHANGES}"
def validate_derivative(value: str) -> Optional[str]:
from hummingbot.client.settings import DERIVATIVES
if value not in DERIVATIVES:
return f"Invalid derivative, please choose value from {DERIVATIVES}"
def validate_connector(value: str) -> Optional[str]:
from hummingbot.client.settings import CONNECTOR_SETTINGS
if value not in CONNECTOR_SETTINGS:
return f"Invalid connector, please choose value from {CONNECTOR_SETTINGS.keys()}"
def validate_strategy(value: str) -> Optional[str]:
from hummingbot.client.settings import STRATEGIES
if value not in STRATEGIES:
return f"Invalid strategy, please choose value from {STRATEGIES}"
def validate_decimal(value: str, min_value: Decimal = None, max_value: Decimal = None, inclusive=True) -> Optional[str]:
try:
decimal_value = Decimal(value)
except Exception:
return f"{value} is not in decimal format."
if inclusive:
if min_value is not None and max_value is not None:
if not (Decimal(str(min_value)) <= decimal_value <= Decimal(str(max_value))):
return f"Value must be between {min_value} and {max_value}."
elif min_value is not None and not decimal_value >= Decimal(str(min_value)):
return f"Value cannot be less than {min_value}."
elif max_value is not None and not decimal_value <= Decimal(str(max_value)):
return f"Value cannot be more than {max_value}."
else:
if min_value is not None and max_value is not None:
if not (Decimal(str(min_value)) < decimal_value < Decimal(str(max_value))):
return f"Value must be between {min_value} and {max_value} (exclusive)."
elif min_value is not None and not decimal_value > Decimal(str(min_value)):
return f"Value must be more than {min_value}."
elif max_value is not None and not decimal_value < Decimal(str(max_value)):
return f"Value must be less than {max_value}."
def validate_market_trading_pair(market: str, value: str) -> Optional[str]:
from hummingbot.core.utils.trading_pair_fetcher import TradingPairFetcher
trading_pair_fetcher: TradingPairFetcher = TradingPairFetcher.get_instance()
if trading_pair_fetcher.ready:
trading_pairs = trading_pair_fetcher.trading_pairs.get(market, [])
if len(trading_pairs) == 0:
return None
elif value not in trading_pairs:
return f"{value} is not an active market on {market}."
def validate_bool(value: str) -> Optional[str]:
valid_values = ('true', 'yes', 'y', 'false', 'no', 'n')
if value.lower() not in valid_values:
return f"Invalid value, please choose value from {valid_values}"
def validate_int(value: str, min_value: int = None, max_value: int = None, inclusive=True) -> Optional[str]:
try:
int_value = int(value)
except Exception:
return f"{value} is not in integer format."
if inclusive:
if min_value is not None and max_value is not None:
if not (min_value <= int_value <= max_value):
return f"Value must be between {min_value} and {max_value}."
elif min_value is not None and not int_value >= min_value:
return f"Value cannot be less than {min_value}."
elif max_value is not None and not int_value <= max_value:
return f"Value cannot be more than {max_value}."
else:
if min_value is not None and max_value is not None:
if not (min_value < int_value < max_value):
return f"Value must be between {min_value} and {max_value} (exclusive)."
elif min_value is not None and not int_value > min_value:
return f"Value must be more than {min_value}."
elif max_value is not None and not int_value < max_value:
return f"Value must be less than {max_value}."
| true | true |
f71e014b970b1ad7cd48e6ece7ed389083c5749d | 1,568 | py | Python | crypt0/base/hash.py | Crypto0/Crypt0Lib-V1 | 9ea367cd4467222542a88c45c0a92be78158cceb | [
"MIT"
] | 2 | 2018-10-09T18:10:23.000Z | 2018-10-09T18:56:21.000Z | crypt0/base/hash.py | Crypto0/Crypt0Lib-V1 | 9ea367cd4467222542a88c45c0a92be78158cceb | [
"MIT"
] | null | null | null | crypt0/base/hash.py | Crypto0/Crypt0Lib-V1 | 9ea367cd4467222542a88c45c0a92be78158cceb | [
"MIT"
] | null | null | null | import hashlib,time,sys
def hash(hashType,userHash,wordlist):
if hashType == "md5" or hashType == "MD5":
h = hashlib.md5
elif hashType == "sha1" or hashType == "SHA1":
h = hashlib.sha1
elif hashType == "sha224" or hashType == "SHA224":
h = hashlib.sha224
elif hashType == "sha256" or hashType == "SHA256":
h = hashlib.sha256
elif hashType == "sha384" or hashType == "SHA384":
h = hashlib.sha384
elif hashType == "sha512" or hashType == "SHA512":
h = hashlib.sha512
else:
return "Sorry!, We cant BruteForce %s , Please make sure you entered HashType correctly." % hashType
exit()
verbose = True
start = time.time()
with open(wordlist, "rU") as infile:
for line in infile:
line = line.strip()
lineHash = h(line).hexdigest()
if (verbose == True):
sys.stdout.write('\r' + str(line) + ' ' * 20)
sys.stdout.flush()
if (str(lineHash) == str(userHash.lower())):
end = time.time()
return "\n\n[+] HASH IS CRACKED SUCCESSFUL : [ %s ]" % line
return "[*] Time Taken: %s seconds" % round((end - start), 2)
exit()
end = time.time()
return "\n\n[-]Cracking Failed"
return "[*]Reached end of wordlist"
return "[*] Time Taken: %s seconds" % round((end - start), 2)
exit()
| 42.378378 | 112 | 0.49426 | import hashlib,time,sys
def hash(hashType,userHash,wordlist):
if hashType == "md5" or hashType == "MD5":
h = hashlib.md5
elif hashType == "sha1" or hashType == "SHA1":
h = hashlib.sha1
elif hashType == "sha224" or hashType == "SHA224":
h = hashlib.sha224
elif hashType == "sha256" or hashType == "SHA256":
h = hashlib.sha256
elif hashType == "sha384" or hashType == "SHA384":
h = hashlib.sha384
elif hashType == "sha512" or hashType == "SHA512":
h = hashlib.sha512
else:
return "Sorry!, We cant BruteForce %s , Please make sure you entered HashType correctly." % hashType
exit()
verbose = True
start = time.time()
with open(wordlist, "rU") as infile:
for line in infile:
line = line.strip()
lineHash = h(line).hexdigest()
if (verbose == True):
sys.stdout.write('\r' + str(line) + ' ' * 20)
sys.stdout.flush()
if (str(lineHash) == str(userHash.lower())):
end = time.time()
return "\n\n[+] HASH IS CRACKED SUCCESSFUL : [ %s ]" % line
return "[*] Time Taken: %s seconds" % round((end - start), 2)
exit()
end = time.time()
return "\n\n[-]Cracking Failed"
return "[*]Reached end of wordlist"
return "[*] Time Taken: %s seconds" % round((end - start), 2)
exit()
| true | true |
f71e0156583dd0c3994a0e97c1ded6e7fadec7e7 | 6,964 | py | Python | anytask/courses/pythontask.py | hariton27sy/anytask | b734985ad7e8e884edbea3fab1a5cf5c86210b68 | [
"MIT"
] | null | null | null | anytask/courses/pythontask.py | hariton27sy/anytask | b734985ad7e8e884edbea3fab1a5cf5c86210b68 | [
"MIT"
] | null | null | null | anytask/courses/pythontask.py | hariton27sy/anytask | b734985ad7e8e884edbea3fab1a5cf5c86210b68 | [
"MIT"
] | 1 | 2020-05-07T15:51:21.000Z | 2020-05-07T15:51:21.000Z | from tasks.models import Task, TaskTaken
from issues.models import Issue
from django.conf import settings
from django.db.models import Q
from django.db import transaction
from django.shortcuts import render_to_response, get_object_or_404, redirect
from django.contrib.auth.decorators import login_required
from django.template import RequestContext
from django.utils.translation import ugettext_lazy as _
import datetime
class PythonTaskStat(object):
    """Accumulate per-group and course-wide score statistics for a set of
    course tasks.

    NOTE: this is legacy Python 2 code (``dict.iteritems``); the per-group
    ordering of the aggregate lists follows dict iteration order.
    """
    def __init__(self, course_tasks):
        # course_tasks: iterable/queryset of Task rows the stats cover.
        self.tasks = course_tasks
        # group -> {'total', 'active_students', 'avg_score', 'student_stat'}
        self.group_stat = {}
        # Course-wide running totals across all updated groups.
        self.course_stat = {
            'total': 0.0,
            'active_students': 0,
            'avg_score': 0.0,
        }
    def update(self, group):
        """Fold one group's data into both the group and course statistics."""
        self._group_update(group)
        self._course_update(group)
    def get_group_stat(self):
        """Return [(group, per-student stats list), ...] for every group seen."""
        return [(group, stat['student_stat']) for (group, stat) in self.group_stat.iteritems()]
    def get_course_stat(self):
        """Return per-group aggregate rows plus a final course-total row.

        Each row is (group, total, active_students, avg_score); the course
        total row uses None in place of the group.
        """
        stat = [
            (group, stat['total'], stat['active_students'], stat['avg_score'])
            for (group, stat) in self.group_stat.iteritems()
        ]
        stat.append(
            (None, self.course_stat['total'], self.course_stat['active_students'], self.course_stat['avg_score'])
        )
        return stat
    def _student_stat(self, tasks):
        """Return (total score, [(task, score), ...]) for one student's TaskTaken rows."""
        total = 0.0
        tasks_list = []
        for task in tasks:
            total += task.score
            tasks_list.append((task.task, task.score))
        return (total, tasks_list)
    def _group_update(self, group):
        """Recompute and store this group's statistics from the database."""
        stat = {
            'total': 0.0,
            'active_students': 0,
            'avg_score': 0.0,
            'student_stat': [],
        }
        group_students = []
        for student in group.students.filter(is_active=True).order_by('last_name', 'first_name'):
            # Only tasks the student has taken or already had scored count.
            tasks = TaskTaken.objects.filter(user=student).filter(task__in=self.tasks) \
                .filter(Q(Q(status=TaskTaken.STATUS_TAKEN) | Q(status=TaskTaken.STATUS_SCORED)))
            if tasks.count() > 0:
                stat['active_students'] += 1
                scores, student_tasks = self._student_stat(tasks)
                group_students.append((student, scores, student_tasks))
                stat['total'] += scores
        stat['student_stat'] = group_students
        if stat['active_students'] > 0:
            stat['avg_score'] = stat['total'] / stat['active_students']
        self.group_stat[group] = stat
    def _course_update(self, group):
        """Add the (already computed) group statistics into the course totals."""
        stat = self.group_stat[group]
        self.course_stat['total'] += stat['total']
        self.course_stat['active_students'] += stat['active_students']
        if self.course_stat['active_students'] > 0:
            self.course_stat['avg_score'] = self.course_stat['total'] / self.course_stat['active_students']
        else:
            self.course_stat['avg_score'] = 0.0
def tasks_list(request, course):
    """Render the course page listing every top-level task (and its subtasks)
    together with the TaskTaken rows of students who took them.

    For takens still in STATUS_TAKEN, a ``cancel_date`` attribute is stamped on
    the in-memory object (taken_time + PYTHONTASK_MAX_DAYS_WITHOUT_SCORES days)
    so the template can show when the taking expires.
    """
    user = request.user
    course.can_edit = course.user_can_edit_course(user)
    # Grace period a student has before an unscored taking is considered stale.
    delta = datetime.timedelta(days=settings.PYTHONTASK_MAX_DAYS_WITHOUT_SCORES)
    task_and_task_taken = []
    # Only top-level tasks (parent_task=None); subtasks are handled per-parent below.
    for task in Task.objects.filter(course=course).filter(parent_task=None).order_by('title'):
        task.add_user_properties(user)
        if task.task_text is None:
            task.task_text = ''
        task_taken_list = []
        for task_taken in TaskTaken.objects.filter(task=task).exclude(task__is_hidden=True).filter(
                Q(Q(status=TaskTaken.STATUS_TAKEN) | Q(status=TaskTaken.STATUS_SCORED))):
            # cancel_date is a transient display attribute, never persisted.
            if settings.PYTHONTASK_MAX_DAYS_WITHOUT_SCORES and task_taken.status == TaskTaken.STATUS_TAKEN:
                task_taken.cancel_date = task_taken.taken_time + delta
            task_taken_list.append(task_taken)
        if task.has_subtasks():
            # A parent with subtasks lists (subtask, takens) pairs instead of
            # its own takens.
            subtask_and_task_takens = []
            for subtask in Task.objects.filter(parent_task=task).order_by('title'):
                subtask.add_user_properties(user)
                if subtask.task_text is None:
                    subtask.task_text = ''
                subtask_takens = list(TaskTaken.objects.filter(task=subtask).exclude(task__is_hidden=True).exclude(
                    task__parent_task__is_hidden=True).filter(
                    Q(Q(status=TaskTaken.STATUS_TAKEN) | Q(status=TaskTaken.STATUS_SCORED))))
                if settings.PYTHONTASK_MAX_DAYS_WITHOUT_SCORES:
                    for subtask_taken in filter(lambda x: x.status == TaskTaken.STATUS_TAKEN, subtask_takens):
                        subtask_taken.cancel_date = subtask_taken.taken_time + delta
                subtask_and_task_takens.append((subtask, subtask_takens))
            task_and_task_taken.append((task, subtask_and_task_takens))
        else:
            task_and_task_taken.append((task, task_taken_list))
    context = {
        'course': course,
        'user': user,
        'tasks_taken': task_and_task_taken,
        'user_is_teacher': course.user_is_teacher(user),
        'STATUS_TAKEN': TaskTaken.STATUS_TAKEN,
        'STATUS_SCORED': TaskTaken.STATUS_SCORED,
    }
    return render_to_response('course_tasks_potok.html', context, context_instance=RequestContext(request))
def python_stat(request, course):
    """Render the statistics page: per-group and course-wide score summaries."""
    course_tasks = Task.objects.filter(course=course)
    collector = PythonTaskStat(course_tasks)
    # Process groups in name order so rows render deterministically.
    for group in course.groups.all().order_by('name'):
        collector.update(group)
    return render_to_response(
        'statistics.html',
        {
            'course': course,
            'group_stat': collector.get_group_stat(),
            'course_stat': collector.get_course_stat()
        },
        context_instance=RequestContext(request))
@login_required
@transaction.commit_on_success
def get_task(request, course_id, task_id):
    """Sign the current user up for a task, then redirect to the course page.

    If the user may not take the task, the redirect happens silently.
    """
    student = request.user
    task = get_object_or_404(Task, id=task_id)
    can_take, _reason = task.user_can_take_task(student)
    if can_take:
        taken, _created = TaskTaken.objects.get_or_create(user=student, task=task)
        taken.take()
        # Lazily attach an Issue so the sign-up comment has somewhere to live.
        if not taken.issue:
            taken.issue = Issue.objects.get_or_create(task=task, student=student)[0]
            taken.save()
        taken.issue.add_comment(unicode(_("zapisalsya_na_task")))
    return redirect('courses.views.course_page', course_id=course_id)
@login_required
def cancel_task(request, course_id, task_id):
    """Let the current user drop a taken task; log the refusal on its issue."""
    student = request.user
    task = get_object_or_404(Task, id=task_id)
    if task.user_can_cancel_task(student):
        taken = get_object_or_404(TaskTaken, user=student, task=task)
        taken.cancel()
        # Lazily attach an Issue so the cancellation comment has somewhere to live.
        if not taken.issue:
            taken.issue = Issue.objects.get_or_create(task=task, student=student)[0]
            taken.save()
        taken.issue.add_comment(u"{} {} {}".format(student.first_name, student.last_name, _("otkazalsya_ot_taska")))
    return redirect('courses.views.course_page', course_id=course_id)
| 34.994975 | 115 | 0.652068 | from tasks.models import Task, TaskTaken
from issues.models import Issue
from django.conf import settings
from django.db.models import Q
from django.db import transaction
from django.shortcuts import render_to_response, get_object_or_404, redirect
from django.contrib.auth.decorators import login_required
from django.template import RequestContext
from django.utils.translation import ugettext_lazy as _
import datetime
class PythonTaskStat(object):
def __init__(self, course_tasks):
self.tasks = course_tasks
self.group_stat = {}
self.course_stat = {
'total': 0.0,
'active_students': 0,
'avg_score': 0.0,
}
def update(self, group):
self._group_update(group)
self._course_update(group)
def get_group_stat(self):
return [(group, stat['student_stat']) for (group, stat) in self.group_stat.iteritems()]
def get_course_stat(self):
stat = [
(group, stat['total'], stat['active_students'], stat['avg_score'])
for (group, stat) in self.group_stat.iteritems()
]
stat.append(
(None, self.course_stat['total'], self.course_stat['active_students'], self.course_stat['avg_score'])
)
return stat
def _student_stat(self, tasks):
total = 0.0
tasks_list = []
for task in tasks:
total += task.score
tasks_list.append((task.task, task.score))
return (total, tasks_list)
def _group_update(self, group):
stat = {
'total': 0.0,
'active_students': 0,
'avg_score': 0.0,
'student_stat': [],
}
group_students = []
for student in group.students.filter(is_active=True).order_by('last_name', 'first_name'):
tasks = TaskTaken.objects.filter(user=student).filter(task__in=self.tasks) \
.filter(Q(Q(status=TaskTaken.STATUS_TAKEN) | Q(status=TaskTaken.STATUS_SCORED)))
if tasks.count() > 0:
stat['active_students'] += 1
scores, student_tasks = self._student_stat(tasks)
group_students.append((student, scores, student_tasks))
stat['total'] += scores
stat['student_stat'] = group_students
if stat['active_students'] > 0:
stat['avg_score'] = stat['total'] / stat['active_students']
self.group_stat[group] = stat
def _course_update(self, group):
stat = self.group_stat[group]
self.course_stat['total'] += stat['total']
self.course_stat['active_students'] += stat['active_students']
if self.course_stat['active_students'] > 0:
self.course_stat['avg_score'] = self.course_stat['total'] / self.course_stat['active_students']
else:
self.course_stat['avg_score'] = 0.0
def tasks_list(request, course):
user = request.user
course.can_edit = course.user_can_edit_course(user)
delta = datetime.timedelta(days=settings.PYTHONTASK_MAX_DAYS_WITHOUT_SCORES)
task_and_task_taken = []
for task in Task.objects.filter(course=course).filter(parent_task=None).order_by('title'):
task.add_user_properties(user)
if task.task_text is None:
task.task_text = ''
task_taken_list = []
for task_taken in TaskTaken.objects.filter(task=task).exclude(task__is_hidden=True).filter(
Q(Q(status=TaskTaken.STATUS_TAKEN) | Q(status=TaskTaken.STATUS_SCORED))):
if settings.PYTHONTASK_MAX_DAYS_WITHOUT_SCORES and task_taken.status == TaskTaken.STATUS_TAKEN:
task_taken.cancel_date = task_taken.taken_time + delta
task_taken_list.append(task_taken)
if task.has_subtasks():
subtask_and_task_takens = []
for subtask in Task.objects.filter(parent_task=task).order_by('title'):
subtask.add_user_properties(user)
if subtask.task_text is None:
subtask.task_text = ''
subtask_takens = list(TaskTaken.objects.filter(task=subtask).exclude(task__is_hidden=True).exclude(
task__parent_task__is_hidden=True).filter(
Q(Q(status=TaskTaken.STATUS_TAKEN) | Q(status=TaskTaken.STATUS_SCORED))))
if settings.PYTHONTASK_MAX_DAYS_WITHOUT_SCORES:
for subtask_taken in filter(lambda x: x.status == TaskTaken.STATUS_TAKEN, subtask_takens):
subtask_taken.cancel_date = subtask_taken.taken_time + delta
subtask_and_task_takens.append((subtask, subtask_takens))
task_and_task_taken.append((task, subtask_and_task_takens))
else:
task_and_task_taken.append((task, task_taken_list))
context = {
'course': course,
'user': user,
'tasks_taken': task_and_task_taken,
'user_is_teacher': course.user_is_teacher(user),
'STATUS_TAKEN': TaskTaken.STATUS_TAKEN,
'STATUS_SCORED': TaskTaken.STATUS_SCORED,
}
return render_to_response('course_tasks_potok.html', context, context_instance=RequestContext(request))
def python_stat(request, course):
tasks = Task.objects.filter(course=course)
stat = PythonTaskStat(tasks)
for group in course.groups.all().order_by('name'):
stat.update(group)
context = {
'course': course,
'group_stat': stat.get_group_stat(),
'course_stat': stat.get_course_stat()
}
return render_to_response('statistics.html', context, context_instance=RequestContext(request))
@login_required
@transaction.commit_on_success
def get_task(request, course_id, task_id):
user = request.user
task = get_object_or_404(Task, id=task_id)
user_can_take_task, reason = task.user_can_take_task(user)
if user_can_take_task:
task_taken, created = TaskTaken.objects.get_or_create(user=user, task=task)
task_taken.take()
if not task_taken.issue:
issue, created = Issue.objects.get_or_create(task=task, student=user)
task_taken.issue = issue
task_taken.save()
task_taken.issue.add_comment(unicode(_("zapisalsya_na_task")))
return redirect('courses.views.course_page', course_id=course_id)
@login_required
def cancel_task(request, course_id, task_id):
user = request.user
task = get_object_or_404(Task, id=task_id)
if task.user_can_cancel_task(user):
task_taken = get_object_or_404(TaskTaken, user=user, task=task)
task_taken.cancel()
if not task_taken.issue:
issue, created = Issue.objects.get_or_create(task=task, student=user)
task_taken.issue = issue
task_taken.save()
task_taken.issue.add_comment(u"{} {} {}".format(user.first_name, user.last_name, _("otkazalsya_ot_taska")))
return redirect('courses.views.course_page', course_id=course_id)
| true | true |
f71e028eb65130eb4b7081cc53d9c71b60ee649c | 1,342 | py | Python | app/core/tests/test_models.py | adelvanL/recipe-app-api | 3977f961786a4b677259bdf90bbb37281cbc43c0 | [
"MIT"
] | null | null | null | app/core/tests/test_models.py | adelvanL/recipe-app-api | 3977f961786a4b677259bdf90bbb37281cbc43c0 | [
"MIT"
] | null | null | null | app/core/tests/test_models.py | adelvanL/recipe-app-api | 3977f961786a4b677259bdf90bbb37281cbc43c0 | [
"MIT"
] | null | null | null | from django.test import TestCase
from django.contrib.auth import get_user_model
class ModelTests(TestCase):
    """Tests for the custom user model's manager (create_user / create_superuser)."""
    def test_create_user_with_email_successful(self):
        """Test creating a new user with an email is successful"""
        email = "test@respposta.com"
        password = "Test@123"
        user = get_user_model().objects.create_user(
            email=email,
            password=password
        )
        # The e-mail must be stored exactly as given.
        self.assertEqual(user.email, email)
        # The password must be hashed yet still validate.
        self.assertTrue(user.check_password(password))
    def test_new_user_email_normalized(self):
        """Test the email for a new user is normalized"""
        email = "test@RESPPOSTA.COM"
        user = get_user_model().objects.create_user(email, "Test@123")
        # NOTE(review): create_user presumably lowercases only the domain part;
        # the local part here is already lowercase, so comparing against the
        # fully-lowered string still works. Confirm if the fixture changes.
        self.assertEqual(user.email, email.lower())
    def test_new_user_invalid_email(self):
        """Test creating user with no email raises error"""
        with self.assertRaises(ValueError):
            get_user_model().objects.create_user(None, "Test@123")
    def test_create_new_superuser(self):
        """Test creating a new superuser"""
        user = get_user_model().objects.create_superuser(
            "test@respposta.com",
            "Test@123"
        )
        self.assertTrue(user.is_superuser)
        self.assertTrue(user.is_staff)
| 30.5 | 70 | 0.64456 | from django.test import TestCase
from django.contrib.auth import get_user_model
class ModelTests(TestCase):
def test_create_user_with_email_successful(self):
email = "test@respposta.com"
password = "Test@123"
user = get_user_model().objects.create_user(
email=email,
password=password
)
self.assertEqual(user.email, email)
self.assertTrue(user.check_password(password))
def test_new_user_email_normalized(self):
email = "test@RESPPOSTA.COM"
user = get_user_model().objects.create_user(email, "Test@123")
self.assertEqual(user.email, email.lower())
def test_new_user_invalid_email(self):
with self.assertRaises(ValueError):
get_user_model().objects.create_user(None, "Test@123")
def test_create_new_superuser(self):
user = get_user_model().objects.create_superuser(
"test@respposta.com",
"Test@123"
)
self.assertTrue(user.is_superuser)
self.assertTrue(user.is_staff)
| true | true |
f71e032f516a353dced53594bb69eb1b3b7aaa53 | 8,685 | py | Python | chemdataextractor/scrape/clean.py | materialsintelligence/deprecated-cde-selfcontained-nodawg | f899d64c7e09be1b56cd1d90ba34b86b4270e78a | [
"MIT"
] | 199 | 2016-10-07T06:55:23.000Z | 2022-03-29T09:50:03.000Z | chemdataextractor/scrape/clean.py | qingtong00/ChemDataExtractor | 349a3bea965f2073141d62043b89319222e46af1 | [
"MIT"
] | 29 | 2016-10-04T08:56:05.000Z | 2022-03-06T19:36:55.000Z | chemdataextractor/scrape/clean.py | qingtong00/ChemDataExtractor | 349a3bea965f2073141d62043b89319222e46af1 | [
"MIT"
] | 95 | 2016-10-10T14:24:27.000Z | 2022-03-16T18:30:00.000Z | # -*- coding: utf-8 -*-
"""
chemdataextractor.scrape.clean
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Tools for cleaning up XML/HTML by removing tags entirely or replacing with their contents.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import copy
import logging
import re
from lxml.etree import fromstring, tostring
from lxml.html import fromstring as html_fromstring
import six
from . import BLOCK_ELEMENTS
log = logging.getLogger(__name__)
class Cleaner(object):
    """Clean HTML or XML by removing tags completely or replacing with their contents.
    A Cleaner instance provides a ``clean_markup`` method::
        cleaner = Cleaner()
        htmlstring = '<html><body><script>alert("test")</script><p>Some text</p></body></html>'
        print(cleaner.clean_markup(htmlstring))
    A Cleaner instance is also a callable that can be applied to lxml document trees::
        tree = lxml.etree.fromstring(htmlstring)
        cleaner(tree)
        print(lxml.etree.tostring(tree))
    Elements that are matched by ``kill_xpath`` are removed entirely, along with their contents. By default,
    ``kill_xpath`` matches all script and style tags, as well as comments and processing instructions.
    Elements that are matched by ``strip_xpath`` are replaced with their contents. By default, no elements are stripped.
    A common use-case is to set ``strip_xpath`` to ``.//*``, which specifies that all elements should be stripped.
    Elements that are matched by ``allow_xpath`` are excepted from stripping, even if they are also matched by
    ``strip_xpath``. This is useful when setting ``strip_xpath`` to strip all tags, allowing a few exceptions to be
    specified by ``allow_xpath``.
    """
    #: XPath for elements to remove entirely, contents included.
    kill_xpath = './/script | .//style | .//comment() | .//processing-instruction() | .//*[@style="display:none;"]'
    #: XPath for elements to replace with their contents (None = strip nothing).
    strip_xpath = None
    #: XPath for elements exempt from ``strip_xpath``.
    allow_xpath = None
    #: Collapse whitespace runs and ensure newlines around block elements.
    fix_whitespace = True
    #: Namespace prefixes available in the XPath expressions above.
    namespaces = {
        're': 'http://exslt.org/regular-expressions',
        'set': 'http://exslt.org/sets',
        'dc': 'http://purl.org/dc/elements/1.1/',
        'prism': 'http://prismstandard.org/namespaces/basic/2.0/',
        'xml': 'http://www.w3.org/XML/1998/namespace',
        # NOTE(review): the RDF namespace URI is conventionally
        # 'http://www.w3.org/1999/02/22-rdf-syntax-ns#' (trailing '#');
        # confirm the '#'-less form here is intentional.
        'rdf': 'http://www.w3.org/1999/02/22-rdf-syntax-ns',
    }
    def __init__(self, **kwargs):
        """Behaviour can be customized by overriding attributes in a subclass or setting them in the constructor.
        :param string kill_xpath: XPath expression for tags to remove along with their contents.
        :param string strip_xpath: XPath expression for tags to replace with their contents.
        :param string allow_xpath: XPath expression for tags to except from strip_xpath.
        :param bool fix_whitespace: Normalize whitespace to a single space and ensure newlines around block elements.
        :param dict namespaces: Namespace prefixes to register for the XPaths.
        """
        # TODO: This is weird. Why don't we change to proper individual keyword arguments with class attribs as default
        for name, value in kwargs.items():
            if not hasattr(self, name):
                raise TypeError('Unknown parameter: %s=%r' % (name, value))
            setattr(self, name, value)
    def __call__(self, doc):
        """Clean the document (element tree) in place."""
        if hasattr(doc, 'getroot'):
            doc = doc.getroot()
        if self.fix_whitespace:
            # Ensure newlines around block elements
            for el in doc.iterdescendants():
                if el.tag in BLOCK_ELEMENTS:
                    el.tail = (el.tail or '') + '\n'
                    previous = el.getprevious()
                    parent = el.getparent()
                    if previous is None:
                        parent.text = (parent.text or '') + '\n'
                    else:
                        previous.tail = (previous.tail or '') + '\n'
        # Remove elements that match kill_xpath
        if self.kill_xpath:
            for el in doc.xpath(self.kill_xpath, namespaces=self.namespaces):
                #log.debug('Killing: %s' % tostring(el))
                parent = el.getparent()
                # We can't kill the root element!
                if parent is None:
                    continue
                # Preserve the killed element's tail text by reattaching it
                # to the previous sibling's tail (or the parent's text).
                if el.tail:
                    previous = el.getprevious()
                    if previous is None:
                        parent.text = (parent.text or '') + el.tail
                    else:
                        previous.tail = (previous.tail or '') + el.tail
                parent.remove(el)
        # Collect all the allowed elements
        to_keep = [el for el in doc.xpath(self.allow_xpath, namespaces=self.namespaces)] if self.allow_xpath else []
        # Replace elements that match strip_xpath with their contents
        if self.strip_xpath:
            for el in doc.xpath(self.strip_xpath, namespaces=self.namespaces):
                # Skip if allowed by allow_xpath
                if el in to_keep:
                    continue
                parent = el.getparent()
                previous = el.getprevious()
                # We can't strip the root element!
                if parent is None:
                    continue
                # Append the text to previous tail (or parent text if no previous), ensuring newline if block level
                if el.text and isinstance(el.tag, six.string_types):
                    if previous is None:
                        parent.text = (parent.text or '') + el.text
                    else:
                        previous.tail = (previous.tail or '') + el.text
                # Append the tail to last child tail, or previous tail, or parent text, ensuring newline if block level
                if el.tail:
                    if len(el):
                        last = el[-1]
                        last.tail = (last.tail or '') + el.tail
                    elif previous is None:
                        parent.text = (parent.text or '') + el.tail
                    else:
                        previous.tail = (previous.tail or '') + el.tail
                index = parent.index(el)
                # Splice the element's children into its former position.
                parent[index:index+1] = el[:]
        # Collapse whitespace down to a single space or a single newline
        if self.fix_whitespace:
            for el in doc.iter():
                if el.text is not None:
                    el.text = re.sub(r'\s*\n\s*', '\n', el.text)
                    el.text = re.sub(r'[ \t]+', ' ', el.text)
                # el.text = re.sub(r'\s+', ' ', el.text)
                if el.tail is not None:
                    el.tail = re.sub(r'\s*\n\s*', '\n', el.tail)
                    el.tail = re.sub(r'[ \t]+', ' ', el.tail)
                # el.tail = re.sub(r'\s+', ' ', el.tail)
    def clean_html(self, html):
        """Apply ``Cleaner`` to HTML string or document and return a cleaned string or document."""
        # Preserve the caller's input type: bytes in -> bytes out, etc.
        result_type = type(html)
        if isinstance(html, six.string_types):
            doc = html_fromstring(html)
        else:
            # Work on a copy so the caller's tree is left untouched.
            doc = copy.deepcopy(html)
        self(doc)
        if issubclass(result_type, six.binary_type):
            return tostring(doc, encoding='utf-8')
        elif issubclass(result_type, six.text_type):
            return tostring(doc, encoding='unicode')
        else:
            return doc
    def clean_markup(self, markup, parser=None):
        """Apply ``Cleaner`` to markup string or document and return a cleaned string or document."""
        # Preserve the caller's input type: bytes in -> bytes out, etc.
        result_type = type(markup)
        if isinstance(markup, six.string_types):
            doc = fromstring(markup, parser=parser)
        else:
            # Work on a copy so the caller's tree is left untouched.
            doc = copy.deepcopy(markup)
        self(doc)
        if issubclass(result_type, six.binary_type):
            return tostring(doc, encoding='utf-8')
        elif issubclass(result_type, six.text_type):
            return tostring(doc, encoding='unicode')
        else:
            return doc
#: A default Cleaner instance, which kills comments, processing instructions, script tags, style tags.
clean = Cleaner()
#: Convenience function for applying ``clean`` to a string.
clean_markup = clean.clean_markup
#: Convenience function for applying ``clean`` to a HTML string.
clean_html = clean.clean_html
#: A Cleaner instance that is configured to strip all tags, replacing them with their text contents.
strip = Cleaner(strip_xpath='.//*')
#: Convenience function for applying ``strip`` to a string.
strip_markup = strip.clean_markup
#: Convenience function for applying ``strip`` to a HTML string.
strip_html = strip.clean_html
| 41.956522 | 120 | 0.591019 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import copy
import logging
import re
from lxml.etree import fromstring, tostring
from lxml.html import fromstring as html_fromstring
import six
from . import BLOCK_ELEMENTS
log = logging.getLogger(__name__)
class Cleaner(object):
kill_xpath = './/script | .//style | .//comment() | .//processing-instruction() | .//*[@style="display:none;"]'
strip_xpath = None
allow_xpath = None
fix_whitespace = True
namespaces = {
're': 'http://exslt.org/regular-expressions',
'set': 'http://exslt.org/sets',
'dc': 'http://purl.org/dc/elements/1.1/',
'prism': 'http://prismstandard.org/namespaces/basic/2.0/',
'xml': 'http://www.w3.org/XML/1998/namespace',
'rdf': 'http://www.w3.org/1999/02/22-rdf-syntax-ns',
}
def __init__(self, **kwargs):
for name, value in kwargs.items():
if not hasattr(self, name):
raise TypeError('Unknown parameter: %s=%r' % (name, value))
setattr(self, name, value)
def __call__(self, doc):
if hasattr(doc, 'getroot'):
doc = doc.getroot()
if self.fix_whitespace:
# Ensure newlines around block elements
for el in doc.iterdescendants():
if el.tag in BLOCK_ELEMENTS:
el.tail = (el.tail or '') + '\n'
previous = el.getprevious()
parent = el.getparent()
if previous is None:
parent.text = (parent.text or '') + '\n'
else:
previous.tail = (previous.tail or '') + '\n'
# Remove elements that match kill_xpath
if self.kill_xpath:
for el in doc.xpath(self.kill_xpath, namespaces=self.namespaces):
#log.debug('Killing: %s' % tostring(el))
parent = el.getparent()
# We can't kill the root element!
if parent is None:
continue
if el.tail:
previous = el.getprevious()
if previous is None:
parent.text = (parent.text or '') + el.tail
else:
previous.tail = (previous.tail or '') + el.tail
parent.remove(el)
to_keep = [el for el in doc.xpath(self.allow_xpath, namespaces=self.namespaces)] if self.allow_xpath else []
if self.strip_xpath:
for el in doc.xpath(self.strip_xpath, namespaces=self.namespaces):
if el in to_keep:
continue
parent = el.getparent()
previous = el.getprevious()
if parent is None:
continue
# Append the text to previous tail (or parent text if no previous), ensuring newline if block level
if el.text and isinstance(el.tag, six.string_types):
if previous is None:
parent.text = (parent.text or '') + el.text
else:
previous.tail = (previous.tail or '') + el.text
# Append the tail to last child tail, or previous tail, or parent text, ensuring newline if block level
if el.tail:
if len(el):
last = el[-1]
last.tail = (last.tail or '') + el.tail
elif previous is None:
parent.text = (parent.text or '') + el.tail
else:
previous.tail = (previous.tail or '') + el.tail
index = parent.index(el)
parent[index:index+1] = el[:]
# Collapse whitespace down to a single space or a single newline
if self.fix_whitespace:
for el in doc.iter():
if el.text is not None:
el.text = re.sub(r'\s*\n\s*', '\n', el.text)
el.text = re.sub(r'[ \t]+', ' ', el.text)
# el.text = re.sub(r'\s+', ' ', el.text)
if el.tail is not None:
el.tail = re.sub(r'\s*\n\s*', '\n', el.tail)
el.tail = re.sub(r'[ \t]+', ' ', el.tail)
# el.tail = re.sub(r'\s+', ' ', el.tail)
def clean_html(self, html):
result_type = type(html)
if isinstance(html, six.string_types):
doc = html_fromstring(html)
else:
doc = copy.deepcopy(html)
self(doc)
if issubclass(result_type, six.binary_type):
return tostring(doc, encoding='utf-8')
elif issubclass(result_type, six.text_type):
return tostring(doc, encoding='unicode')
else:
return doc
def clean_markup(self, markup, parser=None):
result_type = type(markup)
if isinstance(markup, six.string_types):
doc = fromstring(markup, parser=parser)
else:
doc = copy.deepcopy(markup)
self(doc)
if issubclass(result_type, six.binary_type):
return tostring(doc, encoding='utf-8')
elif issubclass(result_type, six.text_type):
return tostring(doc, encoding='unicode')
else:
return doc
#: A default Cleaner instance, which kills comments, processing instructions, script tags, style tags.
clean = Cleaner()
#: Convenience function for applying ``clean`` to a string.
clean_markup = clean.clean_markup
#: Convenience function for applying ``clean`` to a HTML string.
clean_html = clean.clean_html
#: A Cleaner instance that is configured to strip all tags, replacing them with their text contents.
strip = Cleaner(strip_xpath='.//*')
#: Convenience function for applying ``strip`` to a string.
strip_markup = strip.clean_markup
#: Convenience function for applying ``strip`` to a HTML string.
strip_html = strip.clean_html
| true | true |
f71e041b4adf8e3e90f5927ee502d16b93e75667 | 2,197 | py | Python | private/scripts/extras/parallel.py | earora97/stopstalk-deployment | 777c3e622bf1efdc1ba3b4b43f70008d48ec71aa | [
"MIT"
] | 342 | 2016-01-05T21:22:15.000Z | 2022-03-28T20:05:45.000Z | private/scripts/extras/parallel.py | earora97/stopstalk-deployment | 777c3e622bf1efdc1ba3b4b43f70008d48ec71aa | [
"MIT"
] | 374 | 2015-12-25T05:38:28.000Z | 2022-03-03T05:03:36.000Z | private/scripts/extras/parallel.py | earora97/stopstalk-deployment | 777c3e622bf1efdc1ba3b4b43f70008d48ec71aa | [
"MIT"
] | 131 | 2016-03-30T09:13:35.000Z | 2022-01-24T10:30:18.000Z | """
Copyright (c) 2015-2020 Raj Patel(raj454raj@gmail.com), StopStalk
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
PROXY = {"http": "http://proxy.iiit.ac.in:8080/",
"https": "http://proxy.iiit.ac.in:8080/"}
user_agent = "Mozilla/5.0 (iPhone; U; CPU iPhone OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5"
import requests
import gevent
from gevent import queue, monkey
from gevent.pool import Group
monkey.patch_all(thread=False)
count = 1
def fetch(pid):
    """Fetch the CodeChef recent-user page, retrying until HTTP 200.

    ``pid`` is unused; it only exists so the function can be spawned once
    per job id. Each attempt increments the module-level ``count``.
    """
    global count
    while True:
        response = requests.get("https://www.codechef.com/recent/user?user_handle=tryingtocode&page=4",
                                headers={"User-Agent": user_agent},
                                proxies=PROXY)
        count += 1
        if response.status_code == 200:
            break
        # NOTE(review): non-200 responses retry immediately and forever;
        # consider a retry cap and/or backoff.
    # Parenthesized print for consistency with print('Synchronous:') below
    # (identical output for a single argument under Python 2).
    print(response)
def synchronous():
    """Run the fetches one after another, sequentially."""
    for request_id in range(1, 100):
        fetch(request_id)
def asynchronous():
    """Spawn all 100 fetches as gevent greenlets and wait for them to finish."""
    greenlets = [gevent.spawn(fetch, job_id) for job_id in xrange(100)]
    gevent.joinall(greenlets)
# Benchmark driver: run the sequential pass, then the gevent-concurrent
# pass, then report the total number of HTTP attempts made across both.
print('Synchronous:')
synchronous()
print('Asynchronous:')
asynchronous()
print count
| 34.873016 | 163 | 0.695949 | """
Copyright (c) 2015-2020 Raj Patel(raj454raj@gmail.com), StopStalk
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
PROXY = {"http": "http://proxy.iiit.ac.in:8080/",
"https": "http://proxy.iiit.ac.in:8080/"}
user_agent = "Mozilla/5.0 (iPhone; U; CPU iPhone OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5"
import requests
import gevent
from gevent import queue, monkey
from gevent.pool import Group
monkey.patch_all(thread=False)
count = 1
def fetch(pid):
global count
while True:
tmp = requests.get("https://www.codechef.com/recent/user?user_handle=tryingtocode&page=4",
headers={"User-Agent": user_agent},
proxies=PROXY)
count += 1
if tmp.status_code == 200:
break
print tmp
def synchronous():
for i in range(1,100):
fetch(i)
def asynchronous():
threads = []
for i in xrange(100):
threads.append(gevent.spawn(fetch, i))
gevent.joinall(threads)
print('Synchronous:')
synchronous()
print('Asynchronous:')
asynchronous()
print count
| false | true |
f71e045da49e0516e6f92e87647ff790d1d8f98a | 4,525 | py | Python | tasks/tests/test_views.py | kanz84/issue-manager | d31233b63a4d7acedef43619a71d78053ff93156 | [
"MIT"
] | null | null | null | tasks/tests/test_views.py | kanz84/issue-manager | d31233b63a4d7acedef43619a71d78053ff93156 | [
"MIT"
] | null | null | null | tasks/tests/test_views.py | kanz84/issue-manager | d31233b63a4d7acedef43619a71d78053ff93156 | [
"MIT"
] | null | null | null | from django.urls import reverse
from rest_framework import status
from rest_framework.test import APITestCase
from tasks.enums import TaskStatusEnum
from tasks.models import Task
from tasks.tests.factories import UserFactory, TaskFactory
class TaskViewSetTestCase(APITestCase):
@classmethod
def setUpTestData(cls):
cls.user = user = UserFactory()
cls.user_other = user_other = UserFactory()
cls.task = TaskFactory(owner=user)
TaskFactory(owner=user)
TaskFactory(owner=user)
cls.task_other = TaskFactory(owner=user_other)
    def setUp(self):
        # Every request in these tests runs as the main user.
        self.client.force_authenticate(user=self.user)
def test_list(self):
res = self.client.get(reverse("tasks:task-list"))
self.assertEqual(status.HTTP_200_OK, res.status_code)
res_data = {task["id"]: task for task in res.data}
self.assertEqual(3, len(res_data))
self.assertEqual(res_data.keys(), {task.id for task in Task.objects.filter(owner=self.user).all()})
# validate schema ...
res_data1 = res_data[self.task.id]
self.assertEqual(self.task.title, res_data1["title"])
self.assertEqual(self.task.status, res_data1["status"])
self.assertEqual(self.task.description, res_data1["description"])
self.assertEqual(self.task.status_str(), res_data1["status_str"])
self.assertEqual(self.task.owner.id, res_data1["owner_id"])
def test_retrieve(self):
res = self.client.get(reverse("tasks:task-detail", kwargs={"pk": self.task.id}))
self.assertEqual(status.HTTP_200_OK, res.status_code)
self.assertEqual(self.task.id, res.data["id"])
self.assertEqual(self.task.title, res.data["title"])
self.assertEqual(self.task.status, res.data["status"])
self.assertEqual(self.task.description, res.data["description"])
self.assertEqual(self.task.status_str(), res.data["status_str"])
self.assertEqual(self.task.owner.id, res.data["owner_id"])
def test_retrieve_wrong_owner(self):
res = self.client.get(reverse("tasks:task-detail", kwargs={"pk": self.task_other.id}))
self.assertEqual(status.HTTP_404_NOT_FOUND, res.status_code)
def test_update(self):
task = TaskFactory(owner=self.user)
data = {"title": "New title", "description": "New description"}
res = self.client.put(reverse("tasks:task-detail", kwargs={"pk": task.id}), data=data)
self.assertEqual(status.HTTP_200_OK, res.status_code)
self.assertEqual(data["title"], res.data["title"])
self.assertEqual(data["description"], res.data["description"])
task_ = Task.objects.get(pk=task.id)
self.assertEqual(data["title"], task_.title)
self.assertEqual(data["description"], task_.description)
def test_update_wrong_owner(self):
res = self.client.put(reverse("tasks:task-detail", kwargs={"pk": self.task_other.id}), data={})
self.assertEqual(status.HTTP_404_NOT_FOUND, res.status_code)
def test_create(self):
data = {
"title": "New title",
"status": TaskStatusEnum.NOT_STARTED,
"description": "New description",
}
res = self.client.post(reverse("tasks:task-list"), data=data)
self.assertEqual(status.HTTP_201_CREATED, res.status_code)
self.assertEqual(data["title"], res.data["title"])
self.assertEqual(data["description"], res.data["description"])
task_ = Task.objects.get(pk=res.data["id"])
self.assertEqual(data["title"], task_.title)
self.assertEqual(data["description"], task_.description)
self.assertEqual(self.user.id, task_.owner.id)
def test_create_required_fields(self):
res = self.client.post(reverse("tasks:task-list"), data={})
self.assertEqual(status.HTTP_400_BAD_REQUEST, res.status_code)
self.assertEqual("required", res.data["title"][0].code)
self.assertEqual("required", res.data["description"][0].code)
def test_delete(self):
task = TaskFactory(owner=self.user)
res = self.client.delete(reverse("tasks:task-detail", kwargs={"pk": task.id}))
self.assertEqual(status.HTTP_204_NO_CONTENT, res.status_code)
self.assertFalse(Task.objects.filter(pk=task.id).exists())
def test_delete_wrong_owner(self):
res = self.client.delete(reverse("tasks:task-detail", kwargs={"pk": self.task_other.id}))
self.assertEqual(status.HTTP_404_NOT_FOUND, res.status_code)
| 41.136364 | 107 | 0.673591 | from django.urls import reverse
from rest_framework import status
from rest_framework.test import APITestCase
from tasks.enums import TaskStatusEnum
from tasks.models import Task
from tasks.tests.factories import UserFactory, TaskFactory
class TaskViewSetTestCase(APITestCase):
    """Exercises the task CRUD API as an authenticated user; other
    users' tasks must answer 404 on every detail operation."""

    @classmethod
    def setUpTestData(cls):
        # Three tasks for the primary user, a fourth for somebody else.
        cls.user = UserFactory()
        cls.user_other = UserFactory()
        cls.task = TaskFactory(owner=cls.user)
        TaskFactory(owner=cls.user)
        TaskFactory(owner=cls.user)
        cls.task_other = TaskFactory(owner=cls.user_other)

    def setUp(self):
        self.client.force_authenticate(user=self.user)

    @staticmethod
    def _list_url():
        # Single place to build the collection URL.
        return reverse("tasks:task-list")

    @staticmethod
    def _detail_url(pk):
        # Single place to build a task detail URL.
        return reverse("tasks:task-detail", kwargs={"pk": pk})

    def test_list(self):
        """Only the authenticated user's tasks are listed."""
        response = self.client.get(self._list_url())
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        by_id = {item["id"]: item for item in response.data}
        self.assertEqual(len(by_id), 3)
        owned = {task.id for task in Task.objects.filter(owner=self.user).all()}
        self.assertEqual(by_id.keys(), owned)
        listed = by_id[self.task.id]
        self.assertEqual(listed["title"], self.task.title)
        self.assertEqual(listed["status"], self.task.status)
        self.assertEqual(listed["description"], self.task.description)
        self.assertEqual(listed["status_str"], self.task.status_str())
        self.assertEqual(listed["owner_id"], self.task.owner.id)

    def test_retrieve(self):
        """An owned task can be fetched by primary key."""
        response = self.client.get(self._detail_url(self.task.id))
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data["id"], self.task.id)
        self.assertEqual(response.data["title"], self.task.title)
        self.assertEqual(response.data["status"], self.task.status)
        self.assertEqual(response.data["description"], self.task.description)
        self.assertEqual(response.data["status_str"], self.task.status_str())
        self.assertEqual(response.data["owner_id"], self.task.owner.id)

    def test_retrieve_wrong_owner(self):
        """Fetching another user's task responds with 404."""
        response = self.client.get(self._detail_url(self.task_other.id))
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    def test_update(self):
        """A PUT updates both the response payload and the stored row."""
        task = TaskFactory(owner=self.user)
        payload = {"title": "New title", "description": "New description"}
        response = self.client.put(self._detail_url(task.id), data=payload)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data["title"], payload["title"])
        self.assertEqual(response.data["description"], payload["description"])
        stored = Task.objects.get(pk=task.id)
        self.assertEqual(stored.title, payload["title"])
        self.assertEqual(stored.description, payload["description"])

    def test_update_wrong_owner(self):
        """Updating another user's task responds with 404."""
        response = self.client.put(self._detail_url(self.task_other.id), data={})
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    def test_create(self):
        """A POST creates the task and records the caller as its owner."""
        payload = {
            "title": "New title",
            "status": TaskStatusEnum.NOT_STARTED,
            "description": "New description",
        }
        response = self.client.post(self._list_url(), data=payload)
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        self.assertEqual(response.data["title"], payload["title"])
        self.assertEqual(response.data["description"], payload["description"])
        stored = Task.objects.get(pk=response.data["id"])
        self.assertEqual(stored.title, payload["title"])
        self.assertEqual(stored.description, payload["description"])
        self.assertEqual(stored.owner.id, self.user.id)

    def test_create_required_fields(self):
        """An empty POST body fails validation on title and description."""
        response = self.client.post(self._list_url(), data={})
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertEqual(response.data["title"][0].code, "required")
        self.assertEqual(response.data["description"][0].code, "required")

    def test_delete(self):
        """A DELETE removes the row entirely."""
        task = TaskFactory(owner=self.user)
        response = self.client.delete(self._detail_url(task.id))
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
        self.assertFalse(Task.objects.filter(pk=task.id).exists())

    def test_delete_wrong_owner(self):
        """Deleting another user's task responds with 404."""
        response = self.client.delete(self._detail_url(self.task_other.id))
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
| true | true |
f71e053c757c7e039e6b515037e8138a52eb19ad | 887 | py | Python | utils/device.py | atranitell/TensorGate | 855ae0c69a706c179c26ba4a75a8067a514285fe | [
"Apache-2.0"
] | null | null | null | utils/device.py | atranitell/TensorGate | 855ae0c69a706c179c26ba4a75a8067a514285fe | [
"Apache-2.0"
] | null | null | null | utils/device.py | atranitell/TensorGate | 855ae0c69a706c179c26ba4a75a8067a514285fe | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 The KaiJIN Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""performance tools"""
from tensorflow.python.client import device_lib
def showing_avaliable_device():
    """Print every local device visible to TensorFlow and return them.

    The function name keeps its historical misspelling ("avaliable")
    because renaming would break existing callers; prefer the correctly
    spelled alias defined below for new code.

    Returns:
      list: the device descriptions reported by
        ``device_lib.list_local_devices()``, so callers can inspect the
        devices programmatically instead of only reading stdout.
    """
    devices = device_lib.list_local_devices()
    for device in devices:
        print(device)
    return devices


# Correctly spelled alias; the original name above is kept for compatibility.
showing_available_device = showing_avaliable_device
| 36.958333 | 80 | 0.689966 |
from tensorflow.python.client import device_lib
def showing_avaliable_device():
    """Print every local device reported by TensorFlow's ``device_lib``.

    NOTE(review): the name's misspelling ("avaliable") is kept as-is;
    renaming would break existing callers.
    """
    for x in device_lib.list_local_devices():
        print(x)
| true | true |
f71e06acfa22e39e7bfe97135b5bd5714a6b2302 | 2,324 | py | Python | networkx/algorithms/tests/test_bridges.py | jmmcd/networkx | 207ff7d1e9bfaff013ac77c8d6bb79619892c994 | [
"BSD-3-Clause"
] | 1 | 2020-08-08T21:52:34.000Z | 2020-08-08T21:52:34.000Z | networkx/algorithms/tests/test_bridges.py | jmmcd/networkx | 207ff7d1e9bfaff013ac77c8d6bb79619892c994 | [
"BSD-3-Clause"
] | 2 | 2019-11-13T03:48:53.000Z | 2021-02-15T16:52:09.000Z | networkx/algorithms/tests/test_bridges.py | jmmcd/networkx | 207ff7d1e9bfaff013ac77c8d6bb79619892c994 | [
"BSD-3-Clause"
] | null | null | null | # test_bridges.py - unit tests for bridge-finding algorithms
#
# Copyright 2004-2019 NetworkX developers.
#
# This file is part of NetworkX.
#
# NetworkX is distributed under a BSD license; see LICENSE.txt for more
# information.
"""Unit tests for bridge-finding algorithms."""
import networkx as nx
class TestBridges:
    """Unit tests for the bridge-finding function."""

    def test_single_bridge(self):
        """A hand-built graph whose only bridge is the edge (5, 6)."""
        edges = [
            # DFS tree edges.
            (1, 2), (2, 3), (3, 4), (3, 5), (5, 6), (6, 7), (7, 8), (5, 9),
            (9, 10),
            # Nontree edges.
            (1, 3), (1, 4), (2, 5), (5, 10), (6, 8)
        ]
        G = nx.Graph(edges)
        source = 1
        bridges = list(nx.bridges(G, source))
        assert bridges == [(5, 6)]

    def test_barbell_graph(self):
        """The joining edge of a barbell graph is its only bridge."""
        # The (3, 0) barbell graph has two triangles joined by a single edge.
        G = nx.barbell_graph(3, 0)
        source = 0
        bridges = list(nx.bridges(G, source))
        assert bridges == [(2, 3)]
class TestLocalBridges:
    """Unit tests for the local_bridge function."""

    @classmethod
    def setup_class(cls):
        # BB: two K4 cliques joined by one edge; square: 4-cycle; tri: triangle.
        cls.BB = nx.barbell_graph(4, 0)
        cls.square = nx.cycle_graph(4)
        cls.tri = nx.cycle_graph(3)

    def test_nospan(self):
        """Without spans: the barbell's joining edge and every C4 edge are local bridges."""
        expected = {(3, 4), (4, 3)}
        assert next(nx.local_bridges(self.BB, with_span=False)) in expected
        assert set(nx.local_bridges(self.square, with_span=False)) == self.square.edges
        assert list(nx.local_bridges(self.tri, with_span=False)) == []

    def test_no_weight(self):
        """Default spans: infinite for the barbell bridge, 3 for each C4 edge."""
        inf = float('inf')
        expected = {(3, 4, inf), (4, 3, inf)}
        assert next(nx.local_bridges(self.BB)) in expected
        # Fixed a stray trailing comma (``for u, v, in``); unpacking is unchanged.
        expected = {(u, v, 3) for u, v in self.square.edges}
        assert set(nx.local_bridges(self.square)) == expected
        assert list(nx.local_bridges(self.tri)) == []

    def test_weight(self):
        """Spans honour an edge-attribute name or a callable ``weight``."""
        # (Removed an unused ``inf = float('inf')`` local left over from copy-paste.)
        G = self.square.copy()
        G.edges[1, 2]['weight'] = 2
        expected = {(u, v, 5 - wt) for u, v, wt in G.edges(data='weight', default=1)}
        assert set(nx.local_bridges(G, weight='weight')) == expected
        expected = {(u, v, 6) for u, v in G.edges}
        lb = nx.local_bridges(G, weight=lambda u, v, d: 2)
        assert set(lb) == expected
| 32.277778 | 87 | 0.575731 |
import networkx as nx
class TestBridges:
    """Tests for the bridge-finding function."""

    def test_single_bridge(self):
        """Only the edge (5, 6) is a bridge in this hand-built graph."""
        tree_edges = [
            (1, 2), (2, 3), (3, 4), (3, 5), (5, 6), (6, 7), (7, 8), (5, 9),
            (9, 10),
        ]
        nontree_edges = [(1, 3), (1, 4), (2, 5), (5, 10), (6, 8)]
        graph = nx.Graph(tree_edges + nontree_edges)
        assert list(nx.bridges(graph, 1)) == [(5, 6)]

    def test_barbell_graph(self):
        """The joining edge of a (3, 0) barbell graph is its only bridge."""
        graph = nx.barbell_graph(3, 0)
        assert list(nx.bridges(graph, 0)) == [(2, 3)]
class TestLocalBridges:
    """Tests for the local_bridge function."""

    @classmethod
    def setup_class(cls):
        # Shared fixtures: a barbell of two K4s, a 4-cycle and a triangle.
        cls.BB = nx.barbell_graph(4, 0)
        cls.square = nx.cycle_graph(4)
        cls.tri = nx.cycle_graph(3)

    def test_nospan(self):
        first = next(nx.local_bridges(self.BB, with_span=False))
        assert first in {(3, 4), (4, 3)}
        assert set(nx.local_bridges(self.square, with_span=False)) == self.square.edges
        assert list(nx.local_bridges(self.tri, with_span=False)) == []

    def test_no_weight(self):
        inf = float('inf')
        assert next(nx.local_bridges(self.BB)) in {(3, 4, inf), (4, 3, inf)}
        default_spans = {(u, v, 3) for u, v in self.square.edges}
        assert set(nx.local_bridges(self.square)) == default_spans
        assert list(nx.local_bridges(self.tri)) == []

    def test_weight(self):
        graph = self.square.copy()
        graph.edges[1, 2]['weight'] = 2
        by_attribute = {(u, v, 5 - wt) for u, v, wt in graph.edges(data='weight', default=1)}
        assert set(nx.local_bridges(graph, weight='weight')) == by_attribute
        by_callable = {(u, v, 6) for u, v in graph.edges}
        assert set(nx.local_bridges(graph, weight=lambda u, v, d: 2)) == by_callable
| true | true |
f71e075eaaed40df7b85f6e5a476d987e2992291 | 99,049 | py | Python | src/transformers/__init__.py | reichang182/Transformer | 301536b15f1e757c51411800c25876617e9f1191 | [
"Apache-2.0"
] | 1 | 2021-08-02T14:22:01.000Z | 2021-08-02T14:22:01.000Z | src/transformers/__init__.py | slavetothebiologicalforce/transformers | 6f90c29eaaba898919b7689ab7e2cfce1604cdb8 | [
"Apache-2.0"
] | null | null | null | src/transformers/__init__.py | slavetothebiologicalforce/transformers | 6f90c29eaaba898919b7689ab7e2cfce1604cdb8 | [
"Apache-2.0"
] | null | null | null | # flake8: noqa
# There's no way to ignore "F401 '...' imported but unused" warnings in this
# module, but to preserve other warnings. So, don't check this module at all.
# Copyright 2020 The HuggingFace Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# When adding a new object to this init, remember to add it twice: once inside the `_import_structure` dictionary and
# once inside the `if TYPE_CHECKING` branch. The `TYPE_CHECKING` should have import statements as usual, but they are
# only there for type checking. The `_import_structure` is a dictionary submodule to list of object names, and is used
# to defer the actual importing for when the objects are requested. This way `import transformers` provides the names
# in the namespace without actually importing anything (and especially none of the backends).
__version__ = "4.6.0.dev0"
# Work around to update TensorFlow's absl.logging threshold which alters the
# default Python logging output behavior when present.
# see: https://github.com/abseil/abseil-py/issues/99
# and: https://github.com/tensorflow/tensorflow/issues/26691#issuecomment-500369493
try:
    import absl.logging
except ImportError:
    # absl is optional (it ships with TensorFlow); nothing to patch if absent.
    pass
else:
    # Lower absl's thresholds so its handler does not override the default
    # Python logging output behavior (see the linked issues above).
    absl.logging.set_verbosity("info")
    absl.logging.set_stderrthreshold("info")
    # Private absl flag: suppresses the pre-initialization warning on stderr.
    absl.logging._warn_preinit_stderr = False
from typing import TYPE_CHECKING
# Check the dependencies satisfy the minimal versions required.
from . import dependency_versions_check
from .file_utils import (
_BaseLazyModule,
is_flax_available,
is_sentencepiece_available,
is_speech_available,
is_tf_available,
is_tokenizers_available,
is_torch_available,
is_vision_available,
)
from .utils import logging
logger = logging.get_logger(__name__) # pylint: disable=invalid-name
# Base objects, independent of any specific backend
_import_structure = {
"configuration_utils": ["PretrainedConfig"],
"data": [
"DataProcessor",
"InputExample",
"InputFeatures",
"SingleSentenceClassificationProcessor",
"SquadExample",
"SquadFeatures",
"SquadV1Processor",
"SquadV2Processor",
"glue_compute_metrics",
"glue_convert_examples_to_features",
"glue_output_modes",
"glue_processors",
"glue_tasks_num_labels",
"squad_convert_examples_to_features",
"xnli_compute_metrics",
"xnli_output_modes",
"xnli_processors",
"xnli_tasks_num_labels",
],
"feature_extraction_sequence_utils": ["BatchFeature", "SequenceFeatureExtractor"],
"file_utils": [
"CONFIG_NAME",
"MODEL_CARD_NAME",
"PYTORCH_PRETRAINED_BERT_CACHE",
"PYTORCH_TRANSFORMERS_CACHE",
"SPIECE_UNDERLINE",
"TF2_WEIGHTS_NAME",
"TF_WEIGHTS_NAME",
"TRANSFORMERS_CACHE",
"WEIGHTS_NAME",
"TensorType",
"add_end_docstrings",
"add_start_docstrings",
"cached_path",
"is_apex_available",
"is_datasets_available",
"is_faiss_available",
"is_flax_available",
"is_psutil_available",
"is_py3nvml_available",
"is_sentencepiece_available",
"is_sklearn_available",
"is_speech_available",
"is_tf_available",
"is_tokenizers_available",
"is_torch_available",
"is_torch_tpu_available",
"is_vision_available",
],
"hf_argparser": ["HfArgumentParser"],
"integrations": [
"is_comet_available",
"is_optuna_available",
"is_ray_available",
"is_ray_tune_available",
"is_tensorboard_available",
"is_wandb_available",
],
"modelcard": ["ModelCard"],
"modeling_tf_pytorch_utils": [
"convert_tf_weight_name_to_pt_weight_name",
"load_pytorch_checkpoint_in_tf2_model",
"load_pytorch_model_in_tf2_model",
"load_pytorch_weights_in_tf2_model",
"load_tf2_checkpoint_in_pytorch_model",
"load_tf2_model_in_pytorch_model",
"load_tf2_weights_in_pytorch_model",
],
# Models
"models": [],
"models.albert": ["ALBERT_PRETRAINED_CONFIG_ARCHIVE_MAP", "AlbertConfig"],
"models.auto": [
"ALL_PRETRAINED_CONFIG_ARCHIVE_MAP",
"CONFIG_MAPPING",
"FEATURE_EXTRACTOR_MAPPING",
"MODEL_NAMES_MAPPING",
"TOKENIZER_MAPPING",
"AutoConfig",
"AutoFeatureExtractor",
"AutoTokenizer",
],
"models.bart": ["BartConfig", "BartTokenizer"],
"models.barthez": [],
"models.bert": [
"BERT_PRETRAINED_CONFIG_ARCHIVE_MAP",
"BasicTokenizer",
"BertConfig",
"BertTokenizer",
"WordpieceTokenizer",
],
"models.bert_generation": ["BertGenerationConfig"],
"models.bert_japanese": ["BertJapaneseTokenizer", "CharacterTokenizer", "MecabTokenizer"],
"models.bertweet": ["BertweetTokenizer"],
"models.big_bird": ["BIG_BIRD_PRETRAINED_CONFIG_ARCHIVE_MAP", "BigBirdConfig", "BigBirdTokenizer"],
"models.blenderbot": ["BLENDERBOT_PRETRAINED_CONFIG_ARCHIVE_MAP", "BlenderbotConfig", "BlenderbotTokenizer"],
"models.blenderbot_small": [
"BLENDERBOT_SMALL_PRETRAINED_CONFIG_ARCHIVE_MAP",
"BlenderbotSmallConfig",
"BlenderbotSmallTokenizer",
],
"models.camembert": ["CAMEMBERT_PRETRAINED_CONFIG_ARCHIVE_MAP", "CamembertConfig"],
"models.convbert": ["CONVBERT_PRETRAINED_CONFIG_ARCHIVE_MAP", "ConvBertConfig", "ConvBertTokenizer"],
"models.cpm": ["CpmTokenizer"],
"models.ctrl": ["CTRL_PRETRAINED_CONFIG_ARCHIVE_MAP", "CTRLConfig", "CTRLTokenizer"],
"models.deberta": ["DEBERTA_PRETRAINED_CONFIG_ARCHIVE_MAP", "DebertaConfig", "DebertaTokenizer"],
"models.deberta_v2": ["DEBERTA_V2_PRETRAINED_CONFIG_ARCHIVE_MAP", "DebertaV2Config"],
"models.distilbert": ["DISTILBERT_PRETRAINED_CONFIG_ARCHIVE_MAP", "DistilBertConfig", "DistilBertTokenizer"],
"models.dpr": [
"DPR_PRETRAINED_CONFIG_ARCHIVE_MAP",
"DPRConfig",
"DPRContextEncoderTokenizer",
"DPRQuestionEncoderTokenizer",
"DPRReaderOutput",
"DPRReaderTokenizer",
],
"models.electra": ["ELECTRA_PRETRAINED_CONFIG_ARCHIVE_MAP", "ElectraConfig", "ElectraTokenizer"],
"models.encoder_decoder": ["EncoderDecoderConfig"],
"models.flaubert": ["FLAUBERT_PRETRAINED_CONFIG_ARCHIVE_MAP", "FlaubertConfig", "FlaubertTokenizer"],
"models.fsmt": ["FSMT_PRETRAINED_CONFIG_ARCHIVE_MAP", "FSMTConfig", "FSMTTokenizer"],
"models.funnel": ["FUNNEL_PRETRAINED_CONFIG_ARCHIVE_MAP", "FunnelConfig", "FunnelTokenizer"],
"models.gpt2": ["GPT2_PRETRAINED_CONFIG_ARCHIVE_MAP", "GPT2Config", "GPT2Tokenizer"],
"models.gpt_neo": ["GPT_NEO_PRETRAINED_CONFIG_ARCHIVE_MAP", "GPTNeoConfig"],
"models.herbert": ["HerbertTokenizer"],
"models.ibert": ["IBERT_PRETRAINED_CONFIG_ARCHIVE_MAP", "IBertConfig"],
"models.layoutlm": ["LAYOUTLM_PRETRAINED_CONFIG_ARCHIVE_MAP", "LayoutLMConfig", "LayoutLMTokenizer"],
"models.led": ["LED_PRETRAINED_CONFIG_ARCHIVE_MAP", "LEDConfig", "LEDTokenizer"],
"models.longformer": ["LONGFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP", "LongformerConfig", "LongformerTokenizer"],
"models.lxmert": ["LXMERT_PRETRAINED_CONFIG_ARCHIVE_MAP", "LxmertConfig", "LxmertTokenizer"],
"models.m2m_100": ["M2M_100_PRETRAINED_CONFIG_ARCHIVE_MAP", "M2M100Config"],
"models.marian": ["MarianConfig"],
"models.mbart": ["MBartConfig"],
"models.megatron_bert": ["MEGATRON_BERT_PRETRAINED_CONFIG_ARCHIVE_MAP", "MegatronBertConfig"],
"models.mmbt": ["MMBTConfig"],
"models.mobilebert": ["MOBILEBERT_PRETRAINED_CONFIG_ARCHIVE_MAP", "MobileBertConfig", "MobileBertTokenizer"],
"models.mpnet": ["MPNET_PRETRAINED_CONFIG_ARCHIVE_MAP", "MPNetConfig", "MPNetTokenizer"],
"models.mt5": ["MT5Config"],
"models.openai": ["OPENAI_GPT_PRETRAINED_CONFIG_ARCHIVE_MAP", "OpenAIGPTConfig", "OpenAIGPTTokenizer"],
"models.pegasus": ["PegasusConfig"],
"models.phobert": ["PhobertTokenizer"],
"models.prophetnet": ["PROPHETNET_PRETRAINED_CONFIG_ARCHIVE_MAP", "ProphetNetConfig", "ProphetNetTokenizer"],
"models.rag": ["RagConfig", "RagRetriever", "RagTokenizer"],
"models.reformer": ["REFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP", "ReformerConfig"],
"models.retribert": ["RETRIBERT_PRETRAINED_CONFIG_ARCHIVE_MAP", "RetriBertConfig", "RetriBertTokenizer"],
"models.roberta": ["ROBERTA_PRETRAINED_CONFIG_ARCHIVE_MAP", "RobertaConfig", "RobertaTokenizer"],
"models.speech_to_text": [
"SPEECH_TO_TEXT_PRETRAINED_CONFIG_ARCHIVE_MAP",
"Speech2TextConfig",
],
"models.squeezebert": ["SQUEEZEBERT_PRETRAINED_CONFIG_ARCHIVE_MAP", "SqueezeBertConfig", "SqueezeBertTokenizer"],
"models.t5": ["T5_PRETRAINED_CONFIG_ARCHIVE_MAP", "T5Config"],
"models.tapas": ["TAPAS_PRETRAINED_CONFIG_ARCHIVE_MAP", "TapasConfig", "TapasTokenizer"],
"models.transfo_xl": [
"TRANSFO_XL_PRETRAINED_CONFIG_ARCHIVE_MAP",
"TransfoXLConfig",
"TransfoXLCorpus",
"TransfoXLTokenizer",
],
"models.vit": ["VIT_PRETRAINED_CONFIG_ARCHIVE_MAP", "ViTConfig"],
"models.wav2vec2": [
"WAV_2_VEC_2_PRETRAINED_CONFIG_ARCHIVE_MAP",
"Wav2Vec2Config",
"Wav2Vec2CTCTokenizer",
"Wav2Vec2FeatureExtractor",
"Wav2Vec2Processor",
"Wav2Vec2Tokenizer",
],
"models.xlm": ["XLM_PRETRAINED_CONFIG_ARCHIVE_MAP", "XLMConfig", "XLMTokenizer"],
"models.xlm_prophetnet": ["XLM_PROPHETNET_PRETRAINED_CONFIG_ARCHIVE_MAP", "XLMProphetNetConfig"],
"models.xlm_roberta": ["XLM_ROBERTA_PRETRAINED_CONFIG_ARCHIVE_MAP", "XLMRobertaConfig"],
"models.xlnet": ["XLNET_PRETRAINED_CONFIG_ARCHIVE_MAP", "XLNetConfig"],
"pipelines": [
"Conversation",
"ConversationalPipeline",
"CsvPipelineDataFormat",
"FeatureExtractionPipeline",
"FillMaskPipeline",
"JsonPipelineDataFormat",
"NerPipeline",
"PipedPipelineDataFormat",
"Pipeline",
"PipelineDataFormat",
"QuestionAnsweringPipeline",
"SummarizationPipeline",
"TableQuestionAnsweringPipeline",
"Text2TextGenerationPipeline",
"TextClassificationPipeline",
"TextGenerationPipeline",
"TokenClassificationPipeline",
"TranslationPipeline",
"ZeroShotClassificationPipeline",
"pipeline",
],
"tokenization_utils": ["PreTrainedTokenizer"],
"tokenization_utils_base": [
"AddedToken",
"BatchEncoding",
"CharSpan",
"PreTrainedTokenizerBase",
"SpecialTokensMixin",
"TokenSpan",
],
"trainer_callback": [
"DefaultFlowCallback",
"EarlyStoppingCallback",
"PrinterCallback",
"ProgressCallback",
"TrainerCallback",
"TrainerControl",
"TrainerState",
],
"trainer_utils": ["EvalPrediction", "IntervalStrategy", "SchedulerType", "set_seed"],
"training_args": ["TrainingArguments"],
"training_args_seq2seq": ["Seq2SeqTrainingArguments"],
"training_args_tf": ["TFTrainingArguments"],
"utils": ["logging"],
}
# sentencepiece-backed objects
if is_sentencepiece_available():
_import_structure["models.albert"].append("AlbertTokenizer")
_import_structure["models.barthez"].append("BarthezTokenizer")
_import_structure["models.bert_generation"].append("BertGenerationTokenizer")
_import_structure["models.camembert"].append("CamembertTokenizer")
_import_structure["models.deberta_v2"].append("DebertaV2Tokenizer")
_import_structure["models.m2m_100"].append("M2M100Tokenizer")
_import_structure["models.marian"].append("MarianTokenizer")
_import_structure["models.mbart"].append("MBartTokenizer")
_import_structure["models.mbart"].append("MBart50Tokenizer")
_import_structure["models.mt5"].append("MT5Tokenizer")
_import_structure["models.pegasus"].append("PegasusTokenizer")
_import_structure["models.reformer"].append("ReformerTokenizer")
_import_structure["models.speech_to_text"].append("Speech2TextTokenizer")
_import_structure["models.t5"].append("T5Tokenizer")
_import_structure["models.xlm_prophetnet"].append("XLMProphetNetTokenizer")
_import_structure["models.xlm_roberta"].append("XLMRobertaTokenizer")
_import_structure["models.xlnet"].append("XLNetTokenizer")
else:
from .utils import dummy_sentencepiece_objects
_import_structure["utils.dummy_sentencepiece_objects"] = [
name for name in dir(dummy_sentencepiece_objects) if not name.startswith("_")
]
# tokenizers-backed objects
if is_tokenizers_available():
# Fast tokenizers
_import_structure["models.convbert"].append("ConvBertTokenizerFast")
_import_structure["models.albert"].append("AlbertTokenizerFast")
_import_structure["models.bart"].append("BartTokenizerFast")
_import_structure["models.barthez"].append("BarthezTokenizerFast")
_import_structure["models.bert"].append("BertTokenizerFast")
_import_structure["models.camembert"].append("CamembertTokenizerFast")
_import_structure["models.distilbert"].append("DistilBertTokenizerFast")
_import_structure["models.dpr"].extend(
["DPRContextEncoderTokenizerFast", "DPRQuestionEncoderTokenizerFast", "DPRReaderTokenizerFast"]
)
_import_structure["models.electra"].append("ElectraTokenizerFast")
_import_structure["models.funnel"].append("FunnelTokenizerFast")
_import_structure["models.gpt2"].append("GPT2TokenizerFast")
_import_structure["models.herbert"].append("HerbertTokenizerFast")
_import_structure["models.layoutlm"].append("LayoutLMTokenizerFast")
_import_structure["models.led"].append("LEDTokenizerFast")
_import_structure["models.longformer"].append("LongformerTokenizerFast")
_import_structure["models.lxmert"].append("LxmertTokenizerFast")
_import_structure["models.mbart"].append("MBartTokenizerFast")
_import_structure["models.mbart"].append("MBart50TokenizerFast")
_import_structure["models.mobilebert"].append("MobileBertTokenizerFast")
_import_structure["models.mpnet"].append("MPNetTokenizerFast")
_import_structure["models.mt5"].append("MT5TokenizerFast")
_import_structure["models.openai"].append("OpenAIGPTTokenizerFast")
_import_structure["models.pegasus"].append("PegasusTokenizerFast")
_import_structure["models.reformer"].append("ReformerTokenizerFast")
_import_structure["models.retribert"].append("RetriBertTokenizerFast")
_import_structure["models.roberta"].append("RobertaTokenizerFast")
_import_structure["models.squeezebert"].append("SqueezeBertTokenizerFast")
_import_structure["models.t5"].append("T5TokenizerFast")
_import_structure["models.xlm_roberta"].append("XLMRobertaTokenizerFast")
_import_structure["models.xlnet"].append("XLNetTokenizerFast")
_import_structure["tokenization_utils_fast"] = ["PreTrainedTokenizerFast"]
else:
from .utils import dummy_tokenizers_objects
_import_structure["utils.dummy_tokenizers_objects"] = [
name for name in dir(dummy_tokenizers_objects) if not name.startswith("_")
]
if is_sentencepiece_available() and is_tokenizers_available():
_import_structure["convert_slow_tokenizer"] = ["SLOW_TO_FAST_CONVERTERS", "convert_slow_tokenizer"]
else:
from .utils import dummy_sentencepiece_and_tokenizers_objects
_import_structure["utils.dummy_sentencepiece_and_tokenizers_objects"] = [
name for name in dir(dummy_sentencepiece_and_tokenizers_objects) if not name.startswith("_")
]
# Speech-specific objects
if is_speech_available():
_import_structure["models.speech_to_text"].append("Speech2TextFeatureExtractor")
else:
from .utils import dummy_speech_objects
_import_structure["utils.dummy_speech_objects"] = [
name for name in dir(dummy_speech_objects) if not name.startswith("_")
]
if is_sentencepiece_available() and is_speech_available():
_import_structure["models.speech_to_text"].append("Speech2TextProcessor")
else:
from .utils import dummy_sentencepiece_and_speech_objects
_import_structure["utils.dummy_sentencepiece_and_speech_objects"] = [
name for name in dir(dummy_sentencepiece_and_speech_objects) if not name.startswith("_")
]
# Vision-specific objects
if is_vision_available():
_import_structure["image_utils"] = ["ImageFeatureExtractionMixin"]
_import_structure["models.vit"].append("ViTFeatureExtractor")
else:
from .utils import dummy_vision_objects
_import_structure["utils.dummy_vision_objects"] = [
name for name in dir(dummy_vision_objects) if not name.startswith("_")
]
# PyTorch-backed objects
if is_torch_available():
_import_structure["benchmark.benchmark"] = ["PyTorchBenchmark"]
_import_structure["benchmark.benchmark_args"] = ["PyTorchBenchmarkArguments"]
_import_structure["data.data_collator"] = [
"DataCollator",
"DataCollatorForLanguageModeling",
"DataCollatorForPermutationLanguageModeling",
"DataCollatorForSeq2Seq",
"DataCollatorForSOP",
"DataCollatorForTokenClassification",
"DataCollatorForWholeWordMask",
"DataCollatorWithPadding",
"default_data_collator",
]
_import_structure["data.datasets"] = [
"GlueDataset",
"GlueDataTrainingArguments",
"LineByLineTextDataset",
"LineByLineWithRefDataset",
"LineByLineWithSOPTextDataset",
"SquadDataset",
"SquadDataTrainingArguments",
"TextDataset",
"TextDatasetForNextSentencePrediction",
]
_import_structure["generation_beam_search"] = ["BeamScorer", "BeamSearchScorer"]
_import_structure["generation_logits_process"] = [
"ForcedBOSTokenLogitsProcessor",
"ForcedEOSTokenLogitsProcessor",
"HammingDiversityLogitsProcessor",
"InfNanRemoveLogitsProcessor",
"LogitsProcessor",
"LogitsProcessorList",
"LogitsWarper",
"MinLengthLogitsProcessor",
"NoBadWordsLogitsProcessor",
"NoRepeatNGramLogitsProcessor",
"PrefixConstrainedLogitsProcessor",
"RepetitionPenaltyLogitsProcessor",
"TemperatureLogitsWarper",
"TopKLogitsWarper",
"TopPLogitsWarper",
]
_import_structure["generation_stopping_criteria"] = [
"MaxLengthCriteria",
"MaxTimeCriteria",
"StoppingCriteria",
"StoppingCriteriaList",
]
_import_structure["generation_utils"] = ["top_k_top_p_filtering"]
_import_structure["modeling_utils"] = ["Conv1D", "PreTrainedModel", "apply_chunking_to_forward", "prune_layer"]
# PyTorch models structure
_import_structure["models.albert"].extend(
[
"ALBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
"AlbertForMaskedLM",
"AlbertForMultipleChoice",
"AlbertForPreTraining",
"AlbertForQuestionAnswering",
"AlbertForSequenceClassification",
"AlbertForTokenClassification",
"AlbertModel",
"AlbertPreTrainedModel",
"load_tf_weights_in_albert",
]
)
_import_structure["models.auto"].extend(
[
"MODEL_FOR_CAUSAL_LM_MAPPING",
"MODEL_FOR_IMAGE_CLASSIFICATION_MAPPING",
"MODEL_FOR_MASKED_LM_MAPPING",
"MODEL_FOR_MULTIPLE_CHOICE_MAPPING",
"MODEL_FOR_NEXT_SENTENCE_PREDICTION_MAPPING",
"MODEL_FOR_PRETRAINING_MAPPING",
"MODEL_FOR_QUESTION_ANSWERING_MAPPING",
"MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING",
"MODEL_FOR_SEQUENCE_CLASSIFICATION_MAPPING",
"MODEL_FOR_TABLE_QUESTION_ANSWERING_MAPPING",
"MODEL_FOR_TOKEN_CLASSIFICATION_MAPPING",
"MODEL_MAPPING",
"MODEL_WITH_LM_HEAD_MAPPING",
"AutoModel",
"AutoModelForCausalLM",
"AutoModelForMaskedLM",
"AutoModelForMultipleChoice",
"AutoModelForNextSentencePrediction",
"AutoModelForPreTraining",
"AutoModelForQuestionAnswering",
"AutoModelForSeq2SeqLM",
"AutoModelForSequenceClassification",
"AutoModelForTableQuestionAnswering",
"AutoModelForTokenClassification",
"AutoModelWithLMHead",
]
)
_import_structure["models.bart"].extend(
[
"BART_PRETRAINED_MODEL_ARCHIVE_LIST",
"BartForCausalLM",
"BartForConditionalGeneration",
"BartForQuestionAnswering",
"BartForSequenceClassification",
"BartModel",
"BartPretrainedModel",
"PretrainedBartModel",
]
)
_import_structure["models.bert"].extend(
[
"BERT_PRETRAINED_MODEL_ARCHIVE_LIST",
"BertForMaskedLM",
"BertForMultipleChoice",
"BertForNextSentencePrediction",
"BertForPreTraining",
"BertForQuestionAnswering",
"BertForSequenceClassification",
"BertForTokenClassification",
"BertLayer",
"BertLMHeadModel",
"BertModel",
"BertPreTrainedModel",
"load_tf_weights_in_bert",
]
)
_import_structure["models.bert_generation"].extend(
[
"BertGenerationDecoder",
"BertGenerationEncoder",
"load_tf_weights_in_bert_generation",
]
)
_import_structure["models.big_bird"].extend(
[
"BIG_BIRD_PRETRAINED_MODEL_ARCHIVE_LIST",
"BigBirdForCausalLM",
"BigBirdForMaskedLM",
"BigBirdForMultipleChoice",
"BigBirdForPreTraining",
"BigBirdForQuestionAnswering",
"BigBirdForSequenceClassification",
"BigBirdForTokenClassification",
"BigBirdLayer",
"BigBirdModel",
"BigBirdPreTrainedModel",
"load_tf_weights_in_big_bird",
]
)
_import_structure["models.blenderbot"].extend(
[
"BLENDERBOT_PRETRAINED_MODEL_ARCHIVE_LIST",
"BlenderbotForCausalLM",
"BlenderbotForConditionalGeneration",
"BlenderbotModel",
]
)
_import_structure["models.blenderbot_small"].extend(
[
"BLENDERBOT_SMALL_PRETRAINED_MODEL_ARCHIVE_LIST",
"BlenderbotSmallForCausalLM",
"BlenderbotSmallForConditionalGeneration",
"BlenderbotSmallModel",
]
)
_import_structure["models.camembert"].extend(
[
"CAMEMBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
"CamembertForCausalLM",
"CamembertForMaskedLM",
"CamembertForMultipleChoice",
"CamembertForQuestionAnswering",
"CamembertForSequenceClassification",
"CamembertForTokenClassification",
"CamembertModel",
]
)
_import_structure["models.convbert"].extend(
[
"CONVBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
"ConvBertForMaskedLM",
"ConvBertForMultipleChoice",
"ConvBertForQuestionAnswering",
"ConvBertForSequenceClassification",
"ConvBertForTokenClassification",
"ConvBertLayer",
"ConvBertModel",
"ConvBertPreTrainedModel",
"load_tf_weights_in_convbert",
]
)
_import_structure["models.ctrl"].extend(
[
"CTRL_PRETRAINED_MODEL_ARCHIVE_LIST",
"CTRLForSequenceClassification",
"CTRLLMHeadModel",
"CTRLModel",
"CTRLPreTrainedModel",
]
)
_import_structure["models.deberta"].extend(
[
"DEBERTA_PRETRAINED_MODEL_ARCHIVE_LIST",
"DebertaForMaskedLM",
"DebertaForQuestionAnswering",
"DebertaForSequenceClassification",
"DebertaForTokenClassification",
"DebertaModel",
"DebertaPreTrainedModel",
]
)
_import_structure["models.deberta_v2"].extend(
[
"DEBERTA_V2_PRETRAINED_MODEL_ARCHIVE_LIST",
"DebertaV2ForMaskedLM",
"DebertaV2ForQuestionAnswering",
"DebertaV2ForSequenceClassification",
"DebertaV2ForTokenClassification",
"DebertaV2Model",
"DebertaV2PreTrainedModel",
]
)
_import_structure["models.distilbert"].extend(
[
"DISTILBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
"DistilBertForMaskedLM",
"DistilBertForMultipleChoice",
"DistilBertForQuestionAnswering",
"DistilBertForSequenceClassification",
"DistilBertForTokenClassification",
"DistilBertModel",
"DistilBertPreTrainedModel",
]
)
_import_structure["models.dpr"].extend(
[
"DPR_CONTEXT_ENCODER_PRETRAINED_MODEL_ARCHIVE_LIST",
"DPR_QUESTION_ENCODER_PRETRAINED_MODEL_ARCHIVE_LIST",
"DPR_READER_PRETRAINED_MODEL_ARCHIVE_LIST",
"DPRContextEncoder",
"DPRPretrainedContextEncoder",
"DPRPretrainedQuestionEncoder",
"DPRPretrainedReader",
"DPRQuestionEncoder",
"DPRReader",
]
)
_import_structure["models.electra"].extend(
[
"ELECTRA_PRETRAINED_MODEL_ARCHIVE_LIST",
"ElectraForMaskedLM",
"ElectraForMultipleChoice",
"ElectraForPreTraining",
"ElectraForQuestionAnswering",
"ElectraForSequenceClassification",
"ElectraForTokenClassification",
"ElectraModel",
"ElectraPreTrainedModel",
"load_tf_weights_in_electra",
]
)
_import_structure["models.encoder_decoder"].append("EncoderDecoderModel")
_import_structure["models.flaubert"].extend(
[
"FLAUBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
"FlaubertForMultipleChoice",
"FlaubertForQuestionAnswering",
"FlaubertForQuestionAnsweringSimple",
"FlaubertForSequenceClassification",
"FlaubertForTokenClassification",
"FlaubertModel",
"FlaubertWithLMHeadModel",
]
)
_import_structure["models.fsmt"].extend(["FSMTForConditionalGeneration", "FSMTModel", "PretrainedFSMTModel"])
_import_structure["models.funnel"].extend(
[
"FUNNEL_PRETRAINED_MODEL_ARCHIVE_LIST",
"FunnelBaseModel",
"FunnelForMaskedLM",
"FunnelForMultipleChoice",
"FunnelForPreTraining",
"FunnelForQuestionAnswering",
"FunnelForSequenceClassification",
"FunnelForTokenClassification",
"FunnelModel",
"load_tf_weights_in_funnel",
]
)
_import_structure["models.gpt2"].extend(
[
"GPT2_PRETRAINED_MODEL_ARCHIVE_LIST",
"GPT2DoubleHeadsModel",
"GPT2ForSequenceClassification",
"GPT2LMHeadModel",
"GPT2Model",
"GPT2PreTrainedModel",
"load_tf_weights_in_gpt2",
]
)
_import_structure["models.gpt_neo"].extend(
[
"GPT_NEO_PRETRAINED_MODEL_ARCHIVE_LIST",
"GPTNeoForCausalLM",
"GPTNeoModel",
"GPTNeoPreTrainedModel",
"load_tf_weights_in_gpt_neo",
]
)
_import_structure["models.ibert"].extend(
[
"IBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
"IBertForMaskedLM",
"IBertForMultipleChoice",
"IBertForQuestionAnswering",
"IBertForSequenceClassification",
"IBertForTokenClassification",
"IBertModel",
"IBertPreTrainedModel",
]
)
_import_structure["models.layoutlm"].extend(
[
"LAYOUTLM_PRETRAINED_MODEL_ARCHIVE_LIST",
"LayoutLMForMaskedLM",
"LayoutLMForSequenceClassification",
"LayoutLMForTokenClassification",
"LayoutLMModel",
]
)
_import_structure["models.led"].extend(
[
"LED_PRETRAINED_MODEL_ARCHIVE_LIST",
"LEDForConditionalGeneration",
"LEDForQuestionAnswering",
"LEDForSequenceClassification",
"LEDModel",
]
)
_import_structure["models.longformer"].extend(
[
"LONGFORMER_PRETRAINED_MODEL_ARCHIVE_LIST",
"LongformerForMaskedLM",
"LongformerForMultipleChoice",
"LongformerForQuestionAnswering",
"LongformerForSequenceClassification",
"LongformerForTokenClassification",
"LongformerModel",
"LongformerSelfAttention",
]
)
_import_structure["models.lxmert"].extend(
[
"LxmertEncoder",
"LxmertForPreTraining",
"LxmertForQuestionAnswering",
"LxmertModel",
"LxmertPreTrainedModel",
"LxmertVisualFeatureEncoder",
"LxmertXLayer",
]
)
_import_structure["models.m2m_100"].extend(
[
"M2M_100_PRETRAINED_MODEL_ARCHIVE_LIST",
"M2M100ForConditionalGeneration",
"M2M100Model",
]
)
_import_structure["models.marian"].extend(["MarianForCausalLM", "MarianModel", "MarianMTModel"])
_import_structure["models.mbart"].extend(
[
"MBartForCausalLM",
"MBartForConditionalGeneration",
"MBartForQuestionAnswering",
"MBartForSequenceClassification",
"MBartModel",
]
)
_import_structure["models.megatron_bert"].extend(
[
"MEGATRON_BERT_PRETRAINED_MODEL_ARCHIVE_LIST",
"MegatronBertForCausalLM",
"MegatronBertForMaskedLM",
"MegatronBertForMultipleChoice",
"MegatronBertForNextSentencePrediction",
"MegatronBertForPreTraining",
"MegatronBertForQuestionAnswering",
"MegatronBertForSequenceClassification",
"MegatronBertForTokenClassification",
"MegatronBertModel",
]
)
_import_structure["models.mmbt"].extend(["MMBTForClassification", "MMBTModel", "ModalEmbeddings"])
_import_structure["models.mobilebert"].extend(
[
"MOBILEBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
"MobileBertForMaskedLM",
"MobileBertForMultipleChoice",
"MobileBertForNextSentencePrediction",
"MobileBertForPreTraining",
"MobileBertForQuestionAnswering",
"MobileBertForSequenceClassification",
"MobileBertForTokenClassification",
"MobileBertLayer",
"MobileBertModel",
"MobileBertPreTrainedModel",
"load_tf_weights_in_mobilebert",
]
)
_import_structure["models.mpnet"].extend(
[
"MPNET_PRETRAINED_MODEL_ARCHIVE_LIST",
"MPNetForMaskedLM",
"MPNetForMultipleChoice",
"MPNetForQuestionAnswering",
"MPNetForSequenceClassification",
"MPNetForTokenClassification",
"MPNetLayer",
"MPNetModel",
"MPNetPreTrainedModel",
]
)
_import_structure["models.mt5"].extend(["MT5EncoderModel", "MT5ForConditionalGeneration", "MT5Model"])
_import_structure["models.openai"].extend(
[
"OPENAI_GPT_PRETRAINED_MODEL_ARCHIVE_LIST",
"OpenAIGPTDoubleHeadsModel",
"OpenAIGPTForSequenceClassification",
"OpenAIGPTLMHeadModel",
"OpenAIGPTModel",
"OpenAIGPTPreTrainedModel",
"load_tf_weights_in_openai_gpt",
]
)
_import_structure["models.pegasus"].extend(
["PegasusForCausalLM", "PegasusForConditionalGeneration", "PegasusModel"]
)
_import_structure["models.prophetnet"].extend(
[
"PROPHETNET_PRETRAINED_MODEL_ARCHIVE_LIST",
"ProphetNetDecoder",
"ProphetNetEncoder",
"ProphetNetForCausalLM",
"ProphetNetForConditionalGeneration",
"ProphetNetModel",
"ProphetNetPreTrainedModel",
]
)
_import_structure["models.rag"].extend(["RagModel", "RagSequenceForGeneration", "RagTokenForGeneration"])
_import_structure["models.reformer"].extend(
[
"REFORMER_PRETRAINED_MODEL_ARCHIVE_LIST",
"ReformerAttention",
"ReformerForMaskedLM",
"ReformerForQuestionAnswering",
"ReformerForSequenceClassification",
"ReformerLayer",
"ReformerModel",
"ReformerModelWithLMHead",
]
)
_import_structure["models.retribert"].extend(
["RETRIBERT_PRETRAINED_MODEL_ARCHIVE_LIST", "RetriBertModel", "RetriBertPreTrainedModel"]
)
_import_structure["models.roberta"].extend(
[
"ROBERTA_PRETRAINED_MODEL_ARCHIVE_LIST",
"RobertaForCausalLM",
"RobertaForMaskedLM",
"RobertaForMultipleChoice",
"RobertaForQuestionAnswering",
"RobertaForSequenceClassification",
"RobertaForTokenClassification",
"RobertaModel",
]
)
_import_structure["models.speech_to_text"].extend(
[
"SPEECH_TO_TEXT_PRETRAINED_MODEL_ARCHIVE_LIST",
"Speech2TextForConditionalGeneration",
"Speech2TextModel",
]
)
_import_structure["models.squeezebert"].extend(
[
"SQUEEZEBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
"SqueezeBertForMaskedLM",
"SqueezeBertForMultipleChoice",
"SqueezeBertForQuestionAnswering",
"SqueezeBertForSequenceClassification",
"SqueezeBertForTokenClassification",
"SqueezeBertModel",
"SqueezeBertModule",
"SqueezeBertPreTrainedModel",
]
)
_import_structure["models.t5"].extend(
[
"T5_PRETRAINED_MODEL_ARCHIVE_LIST",
"T5EncoderModel",
"T5ForConditionalGeneration",
"T5Model",
"T5PreTrainedModel",
"load_tf_weights_in_t5",
]
)
_import_structure["models.tapas"].extend(
[
"TAPAS_PRETRAINED_MODEL_ARCHIVE_LIST",
"TapasForMaskedLM",
"TapasForQuestionAnswering",
"TapasForSequenceClassification",
"TapasModel",
]
)
_import_structure["models.transfo_xl"].extend(
[
"TRANSFO_XL_PRETRAINED_MODEL_ARCHIVE_LIST",
"AdaptiveEmbedding",
"TransfoXLForSequenceClassification",
"TransfoXLLMHeadModel",
"TransfoXLModel",
"TransfoXLPreTrainedModel",
"load_tf_weights_in_transfo_xl",
]
)
_import_structure["models.vit"].extend(
[
"VIT_PRETRAINED_MODEL_ARCHIVE_LIST",
"ViTForImageClassification",
"ViTModel",
"ViTPreTrainedModel",
]
)
_import_structure["models.wav2vec2"].extend(
[
"WAV_2_VEC_2_PRETRAINED_MODEL_ARCHIVE_LIST",
"Wav2Vec2ForCTC",
"Wav2Vec2ForMaskedLM",
"Wav2Vec2Model",
"Wav2Vec2PreTrainedModel",
]
)
_import_structure["models.xlm"].extend(
[
"XLM_PRETRAINED_MODEL_ARCHIVE_LIST",
"XLMForMultipleChoice",
"XLMForQuestionAnswering",
"XLMForQuestionAnsweringSimple",
"XLMForSequenceClassification",
"XLMForTokenClassification",
"XLMModel",
"XLMPreTrainedModel",
"XLMWithLMHeadModel",
]
)
_import_structure["models.xlm_prophetnet"].extend(
[
"XLM_PROPHETNET_PRETRAINED_MODEL_ARCHIVE_LIST",
"XLMProphetNetDecoder",
"XLMProphetNetEncoder",
"XLMProphetNetForCausalLM",
"XLMProphetNetForConditionalGeneration",
"XLMProphetNetModel",
]
)
_import_structure["models.xlm_roberta"].extend(
[
"XLM_ROBERTA_PRETRAINED_MODEL_ARCHIVE_LIST",
"XLMRobertaForCausalLM",
"XLMRobertaForMaskedLM",
"XLMRobertaForMultipleChoice",
"XLMRobertaForQuestionAnswering",
"XLMRobertaForSequenceClassification",
"XLMRobertaForTokenClassification",
"XLMRobertaModel",
]
)
_import_structure["models.xlnet"].extend(
[
"XLNET_PRETRAINED_MODEL_ARCHIVE_LIST",
"XLNetForMultipleChoice",
"XLNetForQuestionAnswering",
"XLNetForQuestionAnsweringSimple",
"XLNetForSequenceClassification",
"XLNetForTokenClassification",
"XLNetLMHeadModel",
"XLNetModel",
"XLNetPreTrainedModel",
"load_tf_weights_in_xlnet",
]
)
_import_structure["optimization"] = [
"Adafactor",
"AdamW",
"get_constant_schedule",
"get_constant_schedule_with_warmup",
"get_cosine_schedule_with_warmup",
"get_cosine_with_hard_restarts_schedule_with_warmup",
"get_linear_schedule_with_warmup",
"get_polynomial_decay_schedule_with_warmup",
"get_scheduler",
]
_import_structure["trainer"] = ["Trainer"]
_import_structure["trainer_pt_utils"] = ["torch_distributed_zero_first"]
_import_structure["trainer_seq2seq"] = ["Seq2SeqTrainer"]
else:
from .utils import dummy_pt_objects
_import_structure["utils.dummy_pt_objects"] = [name for name in dir(dummy_pt_objects) if not name.startswith("_")]
# TensorFlow-backed objects
if is_tf_available():
    # Benchmark helpers for the TensorFlow backend.
    _import_structure["benchmark.benchmark_args_tf"] = ["TensorFlowBenchmarkArguments"]
    _import_structure["benchmark.benchmark_tf"] = ["TensorFlowBenchmark"]
    _import_structure["generation_tf_utils"] = ["tf_top_k_top_p_filtering"]
    # Base classes and helpers shared by the TF models below.
    _import_structure["modeling_tf_utils"] = [
        "TFPreTrainedModel",
        "TFSequenceSummary",
        "TFSharedEmbeddings",
        "shape_list",
    ]
    # TensorFlow models structure
    _import_structure["models.albert"].extend(
        [
            "TF_ALBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
            "TFAlbertForMaskedLM",
            "TFAlbertForMultipleChoice",
            "TFAlbertForPreTraining",
            "TFAlbertForQuestionAnswering",
            "TFAlbertForSequenceClassification",
            "TFAlbertForTokenClassification",
            "TFAlbertMainLayer",
            "TFAlbertModel",
            "TFAlbertPreTrainedModel",
        ]
    )
    _import_structure["models.auto"].extend(
        [
            "TF_MODEL_FOR_CAUSAL_LM_MAPPING",
            "TF_MODEL_FOR_MASKED_LM_MAPPING",
            "TF_MODEL_FOR_MULTIPLE_CHOICE_MAPPING",
            "TF_MODEL_FOR_NEXT_SENTENCE_PREDICTION_MAPPING",
            "TF_MODEL_FOR_PRETRAINING_MAPPING",
            "TF_MODEL_FOR_QUESTION_ANSWERING_MAPPING",
            "TF_MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING",
            "TF_MODEL_FOR_SEQUENCE_CLASSIFICATION_MAPPING",
            "TF_MODEL_FOR_TOKEN_CLASSIFICATION_MAPPING",
            "TF_MODEL_MAPPING",
            "TF_MODEL_WITH_LM_HEAD_MAPPING",
            "TFAutoModel",
            "TFAutoModelForCausalLM",
            "TFAutoModelForMaskedLM",
            "TFAutoModelForMultipleChoice",
            "TFAutoModelForPreTraining",
            "TFAutoModelForQuestionAnswering",
            "TFAutoModelForSeq2SeqLM",
            "TFAutoModelForSequenceClassification",
            "TFAutoModelForTokenClassification",
            "TFAutoModelWithLMHead",
        ]
    )
    _import_structure["models.bart"].extend(["TFBartForConditionalGeneration", "TFBartModel", "TFBartPretrainedModel"])
    _import_structure["models.bert"].extend(
        [
            "TF_BERT_PRETRAINED_MODEL_ARCHIVE_LIST",
            "TFBertEmbeddings",
            "TFBertForMaskedLM",
            "TFBertForMultipleChoice",
            "TFBertForNextSentencePrediction",
            "TFBertForPreTraining",
            "TFBertForQuestionAnswering",
            "TFBertForSequenceClassification",
            "TFBertForTokenClassification",
            "TFBertLMHeadModel",
            "TFBertMainLayer",
            "TFBertModel",
            "TFBertPreTrainedModel",
        ]
    )
    _import_structure["models.blenderbot"].extend(["TFBlenderbotForConditionalGeneration", "TFBlenderbotModel"])
    _import_structure["models.blenderbot_small"].extend(
        ["TFBlenderbotSmallForConditionalGeneration", "TFBlenderbotSmallModel"]
    )
    _import_structure["models.camembert"].extend(
        [
            "TF_CAMEMBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
            "TFCamembertForMaskedLM",
            "TFCamembertForMultipleChoice",
            "TFCamembertForQuestionAnswering",
            "TFCamembertForSequenceClassification",
            "TFCamembertForTokenClassification",
            "TFCamembertModel",
        ]
    )
    _import_structure["models.convbert"].extend(
        [
            "TF_CONVBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
            "TFConvBertForMaskedLM",
            "TFConvBertForMultipleChoice",
            "TFConvBertForQuestionAnswering",
            "TFConvBertForSequenceClassification",
            "TFConvBertForTokenClassification",
            "TFConvBertLayer",
            "TFConvBertModel",
            "TFConvBertPreTrainedModel",
        ]
    )
    _import_structure["models.ctrl"].extend(
        [
            "TF_CTRL_PRETRAINED_MODEL_ARCHIVE_LIST",
            "TFCTRLForSequenceClassification",
            "TFCTRLLMHeadModel",
            "TFCTRLModel",
            "TFCTRLPreTrainedModel",
        ]
    )
    _import_structure["models.distilbert"].extend(
        [
            "TF_DISTILBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
            "TFDistilBertForMaskedLM",
            "TFDistilBertForMultipleChoice",
            "TFDistilBertForQuestionAnswering",
            "TFDistilBertForSequenceClassification",
            "TFDistilBertForTokenClassification",
            "TFDistilBertMainLayer",
            "TFDistilBertModel",
            "TFDistilBertPreTrainedModel",
        ]
    )
    _import_structure["models.dpr"].extend(
        [
            "TF_DPR_CONTEXT_ENCODER_PRETRAINED_MODEL_ARCHIVE_LIST",
            "TF_DPR_QUESTION_ENCODER_PRETRAINED_MODEL_ARCHIVE_LIST",
            "TF_DPR_READER_PRETRAINED_MODEL_ARCHIVE_LIST",
            "TFDPRContextEncoder",
            "TFDPRPretrainedContextEncoder",
            "TFDPRPretrainedQuestionEncoder",
            "TFDPRPretrainedReader",
            "TFDPRQuestionEncoder",
            "TFDPRReader",
        ]
    )
    _import_structure["models.electra"].extend(
        [
            "TF_ELECTRA_PRETRAINED_MODEL_ARCHIVE_LIST",
            "TFElectraForMaskedLM",
            "TFElectraForMultipleChoice",
            "TFElectraForPreTraining",
            "TFElectraForQuestionAnswering",
            "TFElectraForSequenceClassification",
            "TFElectraForTokenClassification",
            "TFElectraModel",
            "TFElectraPreTrainedModel",
        ]
    )
    _import_structure["models.flaubert"].extend(
        [
            "TF_FLAUBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
            "TFFlaubertForMultipleChoice",
            "TFFlaubertForQuestionAnsweringSimple",
            "TFFlaubertForSequenceClassification",
            "TFFlaubertForTokenClassification",
            "TFFlaubertModel",
            "TFFlaubertWithLMHeadModel",
        ]
    )
    _import_structure["models.funnel"].extend(
        [
            "TF_FUNNEL_PRETRAINED_MODEL_ARCHIVE_LIST",
            "TFFunnelBaseModel",
            "TFFunnelForMaskedLM",
            "TFFunnelForMultipleChoice",
            "TFFunnelForPreTraining",
            "TFFunnelForQuestionAnswering",
            "TFFunnelForSequenceClassification",
            "TFFunnelForTokenClassification",
            "TFFunnelModel",
        ]
    )
    _import_structure["models.gpt2"].extend(
        [
            "TF_GPT2_PRETRAINED_MODEL_ARCHIVE_LIST",
            "TFGPT2DoubleHeadsModel",
            "TFGPT2ForSequenceClassification",
            "TFGPT2LMHeadModel",
            "TFGPT2MainLayer",
            "TFGPT2Model",
            "TFGPT2PreTrainedModel",
        ]
    )
    _import_structure["models.layoutlm"].extend(
        [
            "TF_LAYOUTLM_PRETRAINED_MODEL_ARCHIVE_LIST",
            "TFLayoutLMForMaskedLM",
            "TFLayoutLMForSequenceClassification",
            "TFLayoutLMForTokenClassification",
            "TFLayoutLMMainLayer",
            "TFLayoutLMModel",
            "TFLayoutLMPreTrainedModel",
        ]
    )
    _import_structure["models.led"].extend(["TFLEDForConditionalGeneration", "TFLEDModel", "TFLEDPreTrainedModel"])
    _import_structure["models.longformer"].extend(
        [
            "TF_LONGFORMER_PRETRAINED_MODEL_ARCHIVE_LIST",
            "TFLongformerForMaskedLM",
            "TFLongformerForMultipleChoice",
            "TFLongformerForQuestionAnswering",
            "TFLongformerForSequenceClassification",
            "TFLongformerForTokenClassification",
            "TFLongformerModel",
            "TFLongformerSelfAttention",
        ]
    )
    _import_structure["models.lxmert"].extend(
        [
            "TF_LXMERT_PRETRAINED_MODEL_ARCHIVE_LIST",
            "TFLxmertForPreTraining",
            "TFLxmertMainLayer",
            "TFLxmertModel",
            "TFLxmertPreTrainedModel",
            "TFLxmertVisualFeatureEncoder",
        ]
    )
    _import_structure["models.marian"].extend(["TFMarianModel", "TFMarianMTModel"])
    _import_structure["models.mbart"].extend(["TFMBartForConditionalGeneration", "TFMBartModel"])
    _import_structure["models.mobilebert"].extend(
        [
            "TF_MOBILEBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
            "TFMobileBertForMaskedLM",
            "TFMobileBertForMultipleChoice",
            "TFMobileBertForNextSentencePrediction",
            "TFMobileBertForPreTraining",
            "TFMobileBertForQuestionAnswering",
            "TFMobileBertForSequenceClassification",
            "TFMobileBertForTokenClassification",
            "TFMobileBertMainLayer",
            "TFMobileBertModel",
            "TFMobileBertPreTrainedModel",
        ]
    )
    _import_structure["models.mpnet"].extend(
        [
            "TF_MPNET_PRETRAINED_MODEL_ARCHIVE_LIST",
            "TFMPNetForMaskedLM",
            "TFMPNetForMultipleChoice",
            "TFMPNetForQuestionAnswering",
            "TFMPNetForSequenceClassification",
            "TFMPNetForTokenClassification",
            "TFMPNetMainLayer",
            "TFMPNetModel",
            "TFMPNetPreTrainedModel",
        ]
    )
    _import_structure["models.mt5"].extend(["TFMT5EncoderModel", "TFMT5ForConditionalGeneration", "TFMT5Model"])
    _import_structure["models.openai"].extend(
        [
            "TF_OPENAI_GPT_PRETRAINED_MODEL_ARCHIVE_LIST",
            "TFOpenAIGPTDoubleHeadsModel",
            "TFOpenAIGPTForSequenceClassification",
            "TFOpenAIGPTLMHeadModel",
            "TFOpenAIGPTMainLayer",
            "TFOpenAIGPTModel",
            "TFOpenAIGPTPreTrainedModel",
        ]
    )
    _import_structure["models.pegasus"].extend(["TFPegasusForConditionalGeneration", "TFPegasusModel"])
    _import_structure["models.rag"].extend(
        [
            "TFRagModel",
            "TFRagSequenceForGeneration",
            "TFRagTokenForGeneration",
        ]
    )
    _import_structure["models.roberta"].extend(
        [
            "TF_ROBERTA_PRETRAINED_MODEL_ARCHIVE_LIST",
            "TFRobertaForMaskedLM",
            "TFRobertaForMultipleChoice",
            "TFRobertaForQuestionAnswering",
            "TFRobertaForSequenceClassification",
            "TFRobertaForTokenClassification",
            "TFRobertaMainLayer",
            "TFRobertaModel",
            "TFRobertaPreTrainedModel",
        ]
    )
    _import_structure["models.t5"].extend(
        [
            "TF_T5_PRETRAINED_MODEL_ARCHIVE_LIST",
            "TFT5EncoderModel",
            "TFT5ForConditionalGeneration",
            "TFT5Model",
            "TFT5PreTrainedModel",
        ]
    )
    _import_structure["models.transfo_xl"].extend(
        [
            "TF_TRANSFO_XL_PRETRAINED_MODEL_ARCHIVE_LIST",
            "TFAdaptiveEmbedding",
            "TFTransfoXLForSequenceClassification",
            "TFTransfoXLLMHeadModel",
            "TFTransfoXLMainLayer",
            "TFTransfoXLModel",
            "TFTransfoXLPreTrainedModel",
        ]
    )
    _import_structure["models.xlm"].extend(
        [
            "TF_XLM_PRETRAINED_MODEL_ARCHIVE_LIST",
            "TFXLMForMultipleChoice",
            "TFXLMForQuestionAnsweringSimple",
            "TFXLMForSequenceClassification",
            "TFXLMForTokenClassification",
            "TFXLMMainLayer",
            "TFXLMModel",
            "TFXLMPreTrainedModel",
            "TFXLMWithLMHeadModel",
        ]
    )
    _import_structure["models.xlm_roberta"].extend(
        [
            "TF_XLM_ROBERTA_PRETRAINED_MODEL_ARCHIVE_LIST",
            "TFXLMRobertaForMaskedLM",
            "TFXLMRobertaForMultipleChoice",
            "TFXLMRobertaForQuestionAnswering",
            "TFXLMRobertaForSequenceClassification",
            "TFXLMRobertaForTokenClassification",
            "TFXLMRobertaModel",
        ]
    )
    _import_structure["models.xlnet"].extend(
        [
            "TF_XLNET_PRETRAINED_MODEL_ARCHIVE_LIST",
            "TFXLNetForMultipleChoice",
            "TFXLNetForQuestionAnsweringSimple",
            "TFXLNetForSequenceClassification",
            "TFXLNetForTokenClassification",
            "TFXLNetLMHeadModel",
            "TFXLNetMainLayer",
            "TFXLNetModel",
            "TFXLNetPreTrainedModel",
        ]
    )
    # TF optimization utilities and the TF trainer.
    _import_structure["optimization_tf"] = ["AdamWeightDecay", "GradientAccumulator", "WarmUp", "create_optimizer"]
    _import_structure["trainer_tf"] = ["TFTrainer"]
else:
    # TensorFlow is not installed: register the public names of the dummy
    # module under the same lazy-import structure in place of the real objects.
    from .utils import dummy_tf_objects
    _import_structure["utils.dummy_tf_objects"] = [name for name in dir(dummy_tf_objects) if not name.startswith("_")]
# FLAX-backed objects
if is_flax_available():
    # Flax base model class.
    _import_structure["modeling_flax_utils"] = ["FlaxPreTrainedModel"]
    # Flax models structure
    _import_structure["models.auto"].extend(
        [
            "FLAX_MODEL_FOR_MASKED_LM_MAPPING",
            "FLAX_MODEL_FOR_MULTIPLE_CHOICE_MAPPING",
            "FLAX_MODEL_FOR_NEXT_SENTENCE_PREDICTION_MAPPING",
            "FLAX_MODEL_FOR_PRETRAINING_MAPPING",
            "FLAX_MODEL_FOR_QUESTION_ANSWERING_MAPPING",
            "FLAX_MODEL_FOR_SEQUENCE_CLASSIFICATION_MAPPING",
            "FLAX_MODEL_FOR_TOKEN_CLASSIFICATION_MAPPING",
            "FLAX_MODEL_MAPPING",
            "FlaxAutoModel",
            "FlaxAutoModelForMaskedLM",
            "FlaxAutoModelForMultipleChoice",
            "FlaxAutoModelForNextSentencePrediction",
            "FlaxAutoModelForPreTraining",
            "FlaxAutoModelForQuestionAnswering",
            "FlaxAutoModelForSequenceClassification",
            "FlaxAutoModelForTokenClassification",
        ]
    )
    _import_structure["models.bert"].extend(
        [
            "FlaxBertForMaskedLM",
            "FlaxBertForMultipleChoice",
            "FlaxBertForNextSentencePrediction",
            "FlaxBertForPreTraining",
            "FlaxBertForQuestionAnswering",
            "FlaxBertForSequenceClassification",
            "FlaxBertForTokenClassification",
            "FlaxBertModel",
            "FlaxBertPreTrainedModel",
        ]
    )
    _import_structure["models.roberta"].append("FlaxRobertaModel")
else:
    # Flax is not installed: register the public names of the dummy module
    # under the same lazy-import structure in place of the real objects.
    from .utils import dummy_flax_objects
    _import_structure["utils.dummy_flax_objects"] = [
        name for name in dir(dummy_flax_objects) if not name.startswith("_")
    ]
# Direct imports for type-checking
if TYPE_CHECKING:
# Configuration
from .configuration_utils import PretrainedConfig
# Data
from .data import (
DataProcessor,
InputExample,
InputFeatures,
SingleSentenceClassificationProcessor,
SquadExample,
SquadFeatures,
SquadV1Processor,
SquadV2Processor,
glue_compute_metrics,
glue_convert_examples_to_features,
glue_output_modes,
glue_processors,
glue_tasks_num_labels,
squad_convert_examples_to_features,
xnli_compute_metrics,
xnli_output_modes,
xnli_processors,
xnli_tasks_num_labels,
)
# Feature Extractor
from .feature_extraction_utils import BatchFeature, SequenceFeatureExtractor
# Files and general utilities
from .file_utils import (
CONFIG_NAME,
MODEL_CARD_NAME,
PYTORCH_PRETRAINED_BERT_CACHE,
PYTORCH_TRANSFORMERS_CACHE,
SPIECE_UNDERLINE,
TF2_WEIGHTS_NAME,
TF_WEIGHTS_NAME,
TRANSFORMERS_CACHE,
WEIGHTS_NAME,
TensorType,
add_end_docstrings,
add_start_docstrings,
cached_path,
is_apex_available,
is_datasets_available,
is_faiss_available,
is_flax_available,
is_psutil_available,
is_py3nvml_available,
is_sentencepiece_available,
is_sklearn_available,
is_speech_available,
is_tf_available,
is_tokenizers_available,
is_torch_available,
is_torch_tpu_available,
is_vision_available,
)
from .hf_argparser import HfArgumentParser
# Integrations
from .integrations import (
is_comet_available,
is_optuna_available,
is_ray_available,
is_ray_tune_available,
is_tensorboard_available,
is_wandb_available,
)
# Model Cards
from .modelcard import ModelCard
# TF 2.0 <=> PyTorch conversion utilities
from .modeling_tf_pytorch_utils import (
convert_tf_weight_name_to_pt_weight_name,
load_pytorch_checkpoint_in_tf2_model,
load_pytorch_model_in_tf2_model,
load_pytorch_weights_in_tf2_model,
load_tf2_checkpoint_in_pytorch_model,
load_tf2_model_in_pytorch_model,
load_tf2_weights_in_pytorch_model,
)
from .models.albert import ALBERT_PRETRAINED_CONFIG_ARCHIVE_MAP, AlbertConfig
from .models.auto import (
ALL_PRETRAINED_CONFIG_ARCHIVE_MAP,
CONFIG_MAPPING,
FEATURE_EXTRACTOR_MAPPING,
MODEL_NAMES_MAPPING,
TOKENIZER_MAPPING,
AutoConfig,
AutoFeatureExtractor,
AutoTokenizer,
)
from .models.bart import BartConfig, BartTokenizer
from .models.bert import (
BERT_PRETRAINED_CONFIG_ARCHIVE_MAP,
BasicTokenizer,
BertConfig,
BertTokenizer,
WordpieceTokenizer,
)
from .models.bert_generation import BertGenerationConfig
from .models.bert_japanese import BertJapaneseTokenizer, CharacterTokenizer, MecabTokenizer
from .models.bertweet import BertweetTokenizer
from .models.big_bird import BIG_BIRD_PRETRAINED_CONFIG_ARCHIVE_MAP, BigBirdConfig, BigBirdTokenizer
from .models.blenderbot import BLENDERBOT_PRETRAINED_CONFIG_ARCHIVE_MAP, BlenderbotConfig, BlenderbotTokenizer
from .models.blenderbot_small import (
BLENDERBOT_SMALL_PRETRAINED_CONFIG_ARCHIVE_MAP,
BlenderbotSmallConfig,
BlenderbotSmallTokenizer,
)
from .models.camembert import CAMEMBERT_PRETRAINED_CONFIG_ARCHIVE_MAP, CamembertConfig
from .models.convbert import CONVBERT_PRETRAINED_CONFIG_ARCHIVE_MAP, ConvBertConfig, ConvBertTokenizer
from .models.cpm import CpmTokenizer
from .models.ctrl import CTRL_PRETRAINED_CONFIG_ARCHIVE_MAP, CTRLConfig, CTRLTokenizer
from .models.deberta import DEBERTA_PRETRAINED_CONFIG_ARCHIVE_MAP, DebertaConfig, DebertaTokenizer
from .models.deberta_v2 import DEBERTA_V2_PRETRAINED_CONFIG_ARCHIVE_MAP, DebertaV2Config
from .models.distilbert import DISTILBERT_PRETRAINED_CONFIG_ARCHIVE_MAP, DistilBertConfig, DistilBertTokenizer
from .models.dpr import (
DPR_PRETRAINED_CONFIG_ARCHIVE_MAP,
DPRConfig,
DPRContextEncoderTokenizer,
DPRQuestionEncoderTokenizer,
DPRReaderOutput,
DPRReaderTokenizer,
)
from .models.electra import ELECTRA_PRETRAINED_CONFIG_ARCHIVE_MAP, ElectraConfig, ElectraTokenizer
from .models.encoder_decoder import EncoderDecoderConfig
from .models.flaubert import FLAUBERT_PRETRAINED_CONFIG_ARCHIVE_MAP, FlaubertConfig, FlaubertTokenizer
from .models.fsmt import FSMT_PRETRAINED_CONFIG_ARCHIVE_MAP, FSMTConfig, FSMTTokenizer
from .models.funnel import FUNNEL_PRETRAINED_CONFIG_ARCHIVE_MAP, FunnelConfig, FunnelTokenizer
from .models.gpt2 import GPT2_PRETRAINED_CONFIG_ARCHIVE_MAP, GPT2Config, GPT2Tokenizer
from .models.gpt_neo import GPT_NEO_PRETRAINED_CONFIG_ARCHIVE_MAP, GPTNeoConfig
from .models.herbert import HerbertTokenizer
from .models.ibert import IBERT_PRETRAINED_CONFIG_ARCHIVE_MAP, IBertConfig
from .models.layoutlm import LAYOUTLM_PRETRAINED_CONFIG_ARCHIVE_MAP, LayoutLMConfig, LayoutLMTokenizer
from .models.led import LED_PRETRAINED_CONFIG_ARCHIVE_MAP, LEDConfig, LEDTokenizer
from .models.longformer import LONGFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP, LongformerConfig, LongformerTokenizer
from .models.lxmert import LXMERT_PRETRAINED_CONFIG_ARCHIVE_MAP, LxmertConfig, LxmertTokenizer
from .models.m2m_100 import M2M_100_PRETRAINED_CONFIG_ARCHIVE_MAP, M2M100Config
from .models.marian import MarianConfig
from .models.mbart import MBartConfig
from .models.megatron_bert import MEGATRON_BERT_PRETRAINED_CONFIG_ARCHIVE_MAP, MegatronBertConfig
from .models.mmbt import MMBTConfig
from .models.mobilebert import MOBILEBERT_PRETRAINED_CONFIG_ARCHIVE_MAP, MobileBertConfig, MobileBertTokenizer
from .models.mpnet import MPNET_PRETRAINED_CONFIG_ARCHIVE_MAP, MPNetConfig, MPNetTokenizer
from .models.mt5 import MT5Config
from .models.openai import OPENAI_GPT_PRETRAINED_CONFIG_ARCHIVE_MAP, OpenAIGPTConfig, OpenAIGPTTokenizer
from .models.pegasus import PegasusConfig
from .models.phobert import PhobertTokenizer
from .models.prophetnet import PROPHETNET_PRETRAINED_CONFIG_ARCHIVE_MAP, ProphetNetConfig, ProphetNetTokenizer
from .models.rag import RagConfig, RagRetriever, RagTokenizer
from .models.reformer import REFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP, ReformerConfig
from .models.retribert import RETRIBERT_PRETRAINED_CONFIG_ARCHIVE_MAP, RetriBertConfig, RetriBertTokenizer
from .models.roberta import ROBERTA_PRETRAINED_CONFIG_ARCHIVE_MAP, RobertaConfig, RobertaTokenizer
from .models.speech_to_text import SPEECH_TO_TEXT_PRETRAINED_CONFIG_ARCHIVE_MAP, Speech2TextConfig
from .models.squeezebert import SQUEEZEBERT_PRETRAINED_CONFIG_ARCHIVE_MAP, SqueezeBertConfig, SqueezeBertTokenizer
from .models.t5 import T5_PRETRAINED_CONFIG_ARCHIVE_MAP, T5Config
from .models.tapas import TAPAS_PRETRAINED_CONFIG_ARCHIVE_MAP, TapasConfig, TapasTokenizer
from .models.transfo_xl import (
TRANSFO_XL_PRETRAINED_CONFIG_ARCHIVE_MAP,
TransfoXLConfig,
TransfoXLCorpus,
TransfoXLTokenizer,
)
from .models.vit import VIT_PRETRAINED_CONFIG_ARCHIVE_MAP, ViTConfig
from .models.wav2vec2 import (
WAV_2_VEC_2_PRETRAINED_CONFIG_ARCHIVE_MAP,
Wav2Vec2Config,
Wav2Vec2CTCTokenizer,
Wav2Vec2FeatureExtractor,
Wav2Vec2Processor,
Wav2Vec2Tokenizer,
)
from .models.xlm import XLM_PRETRAINED_CONFIG_ARCHIVE_MAP, XLMConfig, XLMTokenizer
from .models.xlm_prophetnet import XLM_PROPHETNET_PRETRAINED_CONFIG_ARCHIVE_MAP, XLMProphetNetConfig
from .models.xlm_roberta import XLM_ROBERTA_PRETRAINED_CONFIG_ARCHIVE_MAP, XLMRobertaConfig
from .models.xlnet import XLNET_PRETRAINED_CONFIG_ARCHIVE_MAP, XLNetConfig
# Pipelines
from .pipelines import (
Conversation,
ConversationalPipeline,
CsvPipelineDataFormat,
FeatureExtractionPipeline,
FillMaskPipeline,
JsonPipelineDataFormat,
NerPipeline,
PipedPipelineDataFormat,
Pipeline,
PipelineDataFormat,
QuestionAnsweringPipeline,
SummarizationPipeline,
TableQuestionAnsweringPipeline,
Text2TextGenerationPipeline,
TextClassificationPipeline,
TextGenerationPipeline,
TokenClassificationPipeline,
TranslationPipeline,
ZeroShotClassificationPipeline,
pipeline,
)
# Tokenization
from .tokenization_utils import PreTrainedTokenizer
from .tokenization_utils_base import (
AddedToken,
BatchEncoding,
CharSpan,
PreTrainedTokenizerBase,
SpecialTokensMixin,
TokenSpan,
)
# Trainer
from .trainer_callback import (
DefaultFlowCallback,
EarlyStoppingCallback,
PrinterCallback,
ProgressCallback,
TrainerCallback,
TrainerControl,
TrainerState,
)
from .trainer_utils import EvalPrediction, IntervalStrategy, SchedulerType, set_seed
from .training_args import TrainingArguments
from .training_args_seq2seq import Seq2SeqTrainingArguments
from .training_args_tf import TFTrainingArguments
from .utils import logging
if is_sentencepiece_available():
from .models.albert import AlbertTokenizer
from .models.barthez import BarthezTokenizer
from .models.bert_generation import BertGenerationTokenizer
from .models.camembert import CamembertTokenizer
from .models.deberta_v2 import DebertaV2Tokenizer
from .models.m2m_100 import M2M100Tokenizer
from .models.marian import MarianTokenizer
from .models.mbart import MBart50Tokenizer, MBartTokenizer
from .models.mt5 import MT5Tokenizer
from .models.pegasus import PegasusTokenizer
from .models.reformer import ReformerTokenizer
from .models.speech_to_text import Speech2TextTokenizer
from .models.t5 import T5Tokenizer
from .models.xlm_prophetnet import XLMProphetNetTokenizer
from .models.xlm_roberta import XLMRobertaTokenizer
from .models.xlnet import XLNetTokenizer
else:
from .utils.dummy_sentencepiece_objects import *
if is_tokenizers_available():
from .models.albert import AlbertTokenizerFast
from .models.bart import BartTokenizerFast
from .models.barthez import BarthezTokenizerFast
from .models.bert import BertTokenizerFast
from .models.camembert import CamembertTokenizerFast
from .models.convbert import ConvBertTokenizerFast
from .models.distilbert import DistilBertTokenizerFast
from .models.dpr import DPRContextEncoderTokenizerFast, DPRQuestionEncoderTokenizerFast, DPRReaderTokenizerFast
from .models.electra import ElectraTokenizerFast
from .models.funnel import FunnelTokenizerFast
from .models.gpt2 import GPT2TokenizerFast
from .models.herbert import HerbertTokenizerFast
from .models.layoutlm import LayoutLMTokenizerFast
from .models.led import LEDTokenizerFast
from .models.longformer import LongformerTokenizerFast
from .models.lxmert import LxmertTokenizerFast
from .models.mbart import MBart50TokenizerFast, MBartTokenizerFast
from .models.mobilebert import MobileBertTokenizerFast
from .models.mpnet import MPNetTokenizerFast
from .models.mt5 import MT5TokenizerFast
from .models.openai import OpenAIGPTTokenizerFast
from .models.pegasus import PegasusTokenizerFast
from .models.reformer import ReformerTokenizerFast
from .models.retribert import RetriBertTokenizerFast
from .models.roberta import RobertaTokenizerFast
from .models.squeezebert import SqueezeBertTokenizerFast
from .models.t5 import T5TokenizerFast
from .models.xlm_roberta import XLMRobertaTokenizerFast
from .models.xlnet import XLNetTokenizerFast
from .tokenization_utils_fast import PreTrainedTokenizerFast
else:
from .utils.dummy_tokenizers_objects import *
if is_sentencepiece_available() and is_tokenizers_available():
from .convert_slow_tokenizer import SLOW_TO_FAST_CONVERTERS, convert_slow_tokenizer
else:
from .utils.dummies_sentencepiece_and_tokenizers_objects import *
if is_speech_available():
from .models.speech_to_text import Speech2TextFeatureExtractor
else:
from .utils.dummy_speech_objects import *
if is_speech_available() and is_sentencepiece_available():
from .models.speech_to_text import Speech2TextProcessor
else:
from .utils.dummy_sentencepiece_and_speech_objects import *
if is_vision_available():
from .image_utils import ImageFeatureExtractionMixin
from .models.vit import ViTFeatureExtractor
else:
from .utils.dummy_vision_objects import *
# Modeling
if is_torch_available():
# Benchmarks
from .benchmark.benchmark import PyTorchBenchmark
from .benchmark.benchmark_args import PyTorchBenchmarkArguments
from .data.data_collator import (
DataCollator,
DataCollatorForLanguageModeling,
DataCollatorForPermutationLanguageModeling,
DataCollatorForSeq2Seq,
DataCollatorForSOP,
DataCollatorForTokenClassification,
DataCollatorForWholeWordMask,
DataCollatorWithPadding,
default_data_collator,
)
from .data.datasets import (
GlueDataset,
GlueDataTrainingArguments,
LineByLineTextDataset,
LineByLineWithRefDataset,
LineByLineWithSOPTextDataset,
SquadDataset,
SquadDataTrainingArguments,
TextDataset,
TextDatasetForNextSentencePrediction,
)
from .generation_beam_search import BeamScorer, BeamSearchScorer
from .generation_logits_process import (
ForcedBOSTokenLogitsProcessor,
ForcedEOSTokenLogitsProcessor,
HammingDiversityLogitsProcessor,
InfNanRemoveLogitsProcessor,
LogitsProcessor,
LogitsProcessorList,
LogitsWarper,
MinLengthLogitsProcessor,
NoBadWordsLogitsProcessor,
NoRepeatNGramLogitsProcessor,
PrefixConstrainedLogitsProcessor,
RepetitionPenaltyLogitsProcessor,
TemperatureLogitsWarper,
TopKLogitsWarper,
TopPLogitsWarper,
)
from .generation_stopping_criteria import (
MaxLengthCriteria,
MaxTimeCriteria,
StoppingCriteria,
StoppingCriteriaList,
)
from .generation_utils import top_k_top_p_filtering
from .modeling_utils import Conv1D, PreTrainedModel, apply_chunking_to_forward, prune_layer
from .models.albert import (
ALBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
AlbertForMaskedLM,
AlbertForMultipleChoice,
AlbertForPreTraining,
AlbertForQuestionAnswering,
AlbertForSequenceClassification,
AlbertForTokenClassification,
AlbertModel,
AlbertPreTrainedModel,
load_tf_weights_in_albert,
)
from .models.auto import (
MODEL_FOR_CAUSAL_LM_MAPPING,
MODEL_FOR_IMAGE_CLASSIFICATION_MAPPING,
MODEL_FOR_MASKED_LM_MAPPING,
MODEL_FOR_MULTIPLE_CHOICE_MAPPING,
MODEL_FOR_NEXT_SENTENCE_PREDICTION_MAPPING,
MODEL_FOR_PRETRAINING_MAPPING,
MODEL_FOR_QUESTION_ANSWERING_MAPPING,
MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING,
MODEL_FOR_SEQUENCE_CLASSIFICATION_MAPPING,
MODEL_FOR_TABLE_QUESTION_ANSWERING_MAPPING,
MODEL_FOR_TOKEN_CLASSIFICATION_MAPPING,
MODEL_MAPPING,
MODEL_WITH_LM_HEAD_MAPPING,
AutoModel,
AutoModelForCausalLM,
AutoModelForMaskedLM,
AutoModelForMultipleChoice,
AutoModelForNextSentencePrediction,
AutoModelForPreTraining,
AutoModelForQuestionAnswering,
AutoModelForSeq2SeqLM,
AutoModelForSequenceClassification,
AutoModelForTableQuestionAnswering,
AutoModelForTokenClassification,
AutoModelWithLMHead,
)
from .models.bart import (
BART_PRETRAINED_MODEL_ARCHIVE_LIST,
BartForCausalLM,
BartForConditionalGeneration,
BartForQuestionAnswering,
BartForSequenceClassification,
BartModel,
BartPretrainedModel,
PretrainedBartModel,
)
from .models.bert import (
BERT_PRETRAINED_MODEL_ARCHIVE_LIST,
BertForMaskedLM,
BertForMultipleChoice,
BertForNextSentencePrediction,
BertForPreTraining,
BertForQuestionAnswering,
BertForSequenceClassification,
BertForTokenClassification,
BertLayer,
BertLMHeadModel,
BertModel,
BertPreTrainedModel,
load_tf_weights_in_bert,
)
from .models.bert_generation import (
BertGenerationDecoder,
BertGenerationEncoder,
load_tf_weights_in_bert_generation,
)
from .models.big_bird import (
BIG_BIRD_PRETRAINED_MODEL_ARCHIVE_LIST,
BigBirdForCausalLM,
BigBirdForMaskedLM,
BigBirdForMultipleChoice,
BigBirdForPreTraining,
BigBirdForQuestionAnswering,
BigBirdForSequenceClassification,
BigBirdForTokenClassification,
BigBirdLayer,
BigBirdModel,
BigBirdPreTrainedModel,
load_tf_weights_in_big_bird,
)
from .models.blenderbot import (
BLENDERBOT_PRETRAINED_MODEL_ARCHIVE_LIST,
BlenderbotForCausalLM,
BlenderbotForConditionalGeneration,
BlenderbotModel,
)
from .models.blenderbot_small import (
BLENDERBOT_SMALL_PRETRAINED_MODEL_ARCHIVE_LIST,
BlenderbotSmallForCausalLM,
BlenderbotSmallForConditionalGeneration,
BlenderbotSmallModel,
)
from .models.camembert import (
CAMEMBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
CamembertForCausalLM,
CamembertForMaskedLM,
CamembertForMultipleChoice,
CamembertForQuestionAnswering,
CamembertForSequenceClassification,
CamembertForTokenClassification,
CamembertModel,
)
from .models.convbert import (
CONVBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
ConvBertForMaskedLM,
ConvBertForMultipleChoice,
ConvBertForQuestionAnswering,
ConvBertForSequenceClassification,
ConvBertForTokenClassification,
ConvBertLayer,
ConvBertModel,
ConvBertPreTrainedModel,
load_tf_weights_in_convbert,
)
from .models.ctrl import (
CTRL_PRETRAINED_MODEL_ARCHIVE_LIST,
CTRLForSequenceClassification,
CTRLLMHeadModel,
CTRLModel,
CTRLPreTrainedModel,
)
from .models.deberta import (
DEBERTA_PRETRAINED_MODEL_ARCHIVE_LIST,
DebertaForMaskedLM,
DebertaForQuestionAnswering,
DebertaForSequenceClassification,
DebertaForTokenClassification,
DebertaModel,
DebertaPreTrainedModel,
)
from .models.deberta_v2 import (
DEBERTA_V2_PRETRAINED_MODEL_ARCHIVE_LIST,
DebertaV2ForMaskedLM,
DebertaV2ForQuestionAnswering,
DebertaV2ForSequenceClassification,
DebertaV2ForTokenClassification,
DebertaV2Model,
DebertaV2PreTrainedModel,
)
from .models.distilbert import (
DISTILBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
DistilBertForMaskedLM,
DistilBertForMultipleChoice,
DistilBertForQuestionAnswering,
DistilBertForSequenceClassification,
DistilBertForTokenClassification,
DistilBertModel,
DistilBertPreTrainedModel,
)
from .models.dpr import (
DPR_CONTEXT_ENCODER_PRETRAINED_MODEL_ARCHIVE_LIST,
DPR_QUESTION_ENCODER_PRETRAINED_MODEL_ARCHIVE_LIST,
DPR_READER_PRETRAINED_MODEL_ARCHIVE_LIST,
DPRContextEncoder,
DPRPretrainedContextEncoder,
DPRPretrainedQuestionEncoder,
DPRPretrainedReader,
DPRQuestionEncoder,
DPRReader,
)
from .models.electra import (
ELECTRA_PRETRAINED_MODEL_ARCHIVE_LIST,
ElectraForMaskedLM,
ElectraForMultipleChoice,
ElectraForPreTraining,
ElectraForQuestionAnswering,
ElectraForSequenceClassification,
ElectraForTokenClassification,
ElectraModel,
ElectraPreTrainedModel,
load_tf_weights_in_electra,
)
from .models.encoder_decoder import EncoderDecoderModel
from .models.flaubert import (
FLAUBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
FlaubertForMultipleChoice,
FlaubertForQuestionAnswering,
FlaubertForQuestionAnsweringSimple,
FlaubertForSequenceClassification,
FlaubertForTokenClassification,
FlaubertModel,
FlaubertWithLMHeadModel,
)
from .models.fsmt import FSMTForConditionalGeneration, FSMTModel, PretrainedFSMTModel
from .models.funnel import (
FUNNEL_PRETRAINED_MODEL_ARCHIVE_LIST,
FunnelBaseModel,
FunnelForMaskedLM,
FunnelForMultipleChoice,
FunnelForPreTraining,
FunnelForQuestionAnswering,
FunnelForSequenceClassification,
FunnelForTokenClassification,
FunnelModel,
load_tf_weights_in_funnel,
)
from .models.gpt2 import (
GPT2_PRETRAINED_MODEL_ARCHIVE_LIST,
GPT2DoubleHeadsModel,
GPT2ForSequenceClassification,
GPT2LMHeadModel,
GPT2Model,
GPT2PreTrainedModel,
load_tf_weights_in_gpt2,
)
from .models.gpt_neo import (
GPT_NEO_PRETRAINED_MODEL_ARCHIVE_LIST,
GPTNeoForCausalLM,
GPTNeoModel,
GPTNeoPreTrainedModel,
load_tf_weights_in_gpt_neo,
)
from .models.ibert import (
IBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
IBertForMaskedLM,
IBertForMultipleChoice,
IBertForQuestionAnswering,
IBertForSequenceClassification,
IBertForTokenClassification,
IBertModel,
IBertPreTrainedModel,
)
from .models.layoutlm import (
LAYOUTLM_PRETRAINED_MODEL_ARCHIVE_LIST,
LayoutLMForMaskedLM,
LayoutLMForSequenceClassification,
LayoutLMForTokenClassification,
LayoutLMModel,
)
from .models.led import (
LED_PRETRAINED_MODEL_ARCHIVE_LIST,
LEDForConditionalGeneration,
LEDForQuestionAnswering,
LEDForSequenceClassification,
LEDModel,
)
from .models.longformer import (
LONGFORMER_PRETRAINED_MODEL_ARCHIVE_LIST,
LongformerForMaskedLM,
LongformerForMultipleChoice,
LongformerForQuestionAnswering,
LongformerForSequenceClassification,
LongformerForTokenClassification,
LongformerModel,
LongformerSelfAttention,
)
from .models.lxmert import (
LxmertEncoder,
LxmertForPreTraining,
LxmertForQuestionAnswering,
LxmertModel,
LxmertPreTrainedModel,
LxmertVisualFeatureEncoder,
LxmertXLayer,
)
from .models.m2m_100 import M2M_100_PRETRAINED_MODEL_ARCHIVE_LIST, M2M100ForConditionalGeneration, M2M100Model
from .models.marian import MarianForCausalLM, MarianModel, MarianMTModel
from .models.mbart import (
MBartForCausalLM,
MBartForConditionalGeneration,
MBartForQuestionAnswering,
MBartForSequenceClassification,
MBartModel,
)
from .models.megatron_bert import (
MEGATRON_BERT_PRETRAINED_MODEL_ARCHIVE_LIST,
MegatronBertForCausalLM,
MegatronBertForMaskedLM,
MegatronBertForMultipleChoice,
MegatronBertForNextSentencePrediction,
MegatronBertForPreTraining,
MegatronBertForQuestionAnswering,
MegatronBertForSequenceClassification,
MegatronBertForTokenClassification,
MegatronBertModel,
)
from .models.mmbt import MMBTForClassification, MMBTModel, ModalEmbeddings
from .models.mobilebert import (
MOBILEBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
MobileBertForMaskedLM,
MobileBertForMultipleChoice,
MobileBertForNextSentencePrediction,
MobileBertForPreTraining,
MobileBertForQuestionAnswering,
MobileBertForSequenceClassification,
MobileBertForTokenClassification,
MobileBertLayer,
MobileBertModel,
MobileBertPreTrainedModel,
load_tf_weights_in_mobilebert,
)
from .models.mpnet import (
MPNET_PRETRAINED_MODEL_ARCHIVE_LIST,
MPNetForMaskedLM,
MPNetForMultipleChoice,
MPNetForQuestionAnswering,
MPNetForSequenceClassification,
MPNetForTokenClassification,
MPNetLayer,
MPNetModel,
MPNetPreTrainedModel,
)
from .models.mt5 import MT5EncoderModel, MT5ForConditionalGeneration, MT5Model
from .models.openai import (
OPENAI_GPT_PRETRAINED_MODEL_ARCHIVE_LIST,
OpenAIGPTDoubleHeadsModel,
OpenAIGPTForSequenceClassification,
OpenAIGPTLMHeadModel,
OpenAIGPTModel,
OpenAIGPTPreTrainedModel,
load_tf_weights_in_openai_gpt,
)
from .models.pegasus import PegasusForCausalLM, PegasusForConditionalGeneration, PegasusModel
from .models.prophetnet import (
PROPHETNET_PRETRAINED_MODEL_ARCHIVE_LIST,
ProphetNetDecoder,
ProphetNetEncoder,
ProphetNetForCausalLM,
ProphetNetForConditionalGeneration,
ProphetNetModel,
ProphetNetPreTrainedModel,
)
from .models.rag import RagModel, RagSequenceForGeneration, RagTokenForGeneration
from .models.reformer import (
REFORMER_PRETRAINED_MODEL_ARCHIVE_LIST,
ReformerAttention,
ReformerForMaskedLM,
ReformerForQuestionAnswering,
ReformerForSequenceClassification,
ReformerLayer,
ReformerModel,
ReformerModelWithLMHead,
)
from .models.retribert import RETRIBERT_PRETRAINED_MODEL_ARCHIVE_LIST, RetriBertModel, RetriBertPreTrainedModel
from .models.roberta import (
ROBERTA_PRETRAINED_MODEL_ARCHIVE_LIST,
RobertaForCausalLM,
RobertaForMaskedLM,
RobertaForMultipleChoice,
RobertaForQuestionAnswering,
RobertaForSequenceClassification,
RobertaForTokenClassification,
RobertaModel,
)
from .models.speech_to_text import (
SPEECH_TO_TEXT_PRETRAINED_MODEL_ARCHIVE_LIST,
Speech2TextForConditionalGeneration,
Speech2TextModel,
)
from .models.squeezebert import (
SQUEEZEBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
SqueezeBertForMaskedLM,
SqueezeBertForMultipleChoice,
SqueezeBertForQuestionAnswering,
SqueezeBertForSequenceClassification,
SqueezeBertForTokenClassification,
SqueezeBertModel,
SqueezeBertModule,
SqueezeBertPreTrainedModel,
)
from .models.t5 import (
T5_PRETRAINED_MODEL_ARCHIVE_LIST,
T5EncoderModel,
T5ForConditionalGeneration,
T5Model,
T5PreTrainedModel,
load_tf_weights_in_t5,
)
from .models.tapas import (
TAPAS_PRETRAINED_MODEL_ARCHIVE_LIST,
TapasForMaskedLM,
TapasForQuestionAnswering,
TapasForSequenceClassification,
TapasModel,
)
from .models.transfo_xl import (
TRANSFO_XL_PRETRAINED_MODEL_ARCHIVE_LIST,
AdaptiveEmbedding,
TransfoXLForSequenceClassification,
TransfoXLLMHeadModel,
TransfoXLModel,
TransfoXLPreTrainedModel,
load_tf_weights_in_transfo_xl,
)
from .models.vit import (
VIT_PRETRAINED_MODEL_ARCHIVE_LIST,
ViTForImageClassification,
ViTModel,
ViTPreTrainedModel,
)
from .models.wav2vec2 import (
WAV_2_VEC_2_PRETRAINED_MODEL_ARCHIVE_LIST,
Wav2Vec2ForCTC,
Wav2Vec2ForMaskedLM,
Wav2Vec2Model,
Wav2Vec2PreTrainedModel,
)
from .models.xlm import (
XLM_PRETRAINED_MODEL_ARCHIVE_LIST,
XLMForMultipleChoice,
XLMForQuestionAnswering,
XLMForQuestionAnsweringSimple,
XLMForSequenceClassification,
XLMForTokenClassification,
XLMModel,
XLMPreTrainedModel,
XLMWithLMHeadModel,
)
from .models.xlm_prophetnet import (
XLM_PROPHETNET_PRETRAINED_MODEL_ARCHIVE_LIST,
XLMProphetNetDecoder,
XLMProphetNetEncoder,
XLMProphetNetForCausalLM,
XLMProphetNetForConditionalGeneration,
XLMProphetNetModel,
)
from .models.xlm_roberta import (
XLM_ROBERTA_PRETRAINED_MODEL_ARCHIVE_LIST,
XLMRobertaForCausalLM,
XLMRobertaForMaskedLM,
XLMRobertaForMultipleChoice,
XLMRobertaForQuestionAnswering,
XLMRobertaForSequenceClassification,
XLMRobertaForTokenClassification,
XLMRobertaModel,
)
from .models.xlnet import (
XLNET_PRETRAINED_MODEL_ARCHIVE_LIST,
XLNetForMultipleChoice,
XLNetForQuestionAnswering,
XLNetForQuestionAnsweringSimple,
XLNetForSequenceClassification,
XLNetForTokenClassification,
XLNetLMHeadModel,
XLNetModel,
XLNetPreTrainedModel,
load_tf_weights_in_xlnet,
)
# Optimization
from .optimization import (
Adafactor,
AdamW,
get_constant_schedule,
get_constant_schedule_with_warmup,
get_cosine_schedule_with_warmup,
get_cosine_with_hard_restarts_schedule_with_warmup,
get_linear_schedule_with_warmup,
get_polynomial_decay_schedule_with_warmup,
get_scheduler,
)
# Trainer
from .trainer import Trainer
from .trainer_pt_utils import torch_distributed_zero_first
from .trainer_seq2seq import Seq2SeqTrainer
else:
from .utils.dummy_pt_objects import *
# TensorFlow
if is_tf_available():
from .benchmark.benchmark_args_tf import TensorFlowBenchmarkArguments
# Benchmarks
from .benchmark.benchmark_tf import TensorFlowBenchmark
from .generation_tf_utils import tf_top_k_top_p_filtering
from .modeling_tf_layoutlm import (
TF_LAYOUTLM_PRETRAINED_MODEL_ARCHIVE_LIST,
TFLayoutLMForMaskedLM,
TFLayoutLMForSequenceClassification,
TFLayoutLMForTokenClassification,
TFLayoutLMMainLayer,
TFLayoutLMModel,
TFLayoutLMPreTrainedModel,
)
from .modeling_tf_utils import TFPreTrainedModel, TFSequenceSummary, TFSharedEmbeddings, shape_list
from .models.albert import (
TF_ALBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
TFAlbertForMaskedLM,
TFAlbertForMultipleChoice,
TFAlbertForPreTraining,
TFAlbertForQuestionAnswering,
TFAlbertForSequenceClassification,
TFAlbertForTokenClassification,
TFAlbertMainLayer,
TFAlbertModel,
TFAlbertPreTrainedModel,
)
from .models.auto import (
TF_MODEL_FOR_CAUSAL_LM_MAPPING,
TF_MODEL_FOR_MASKED_LM_MAPPING,
TF_MODEL_FOR_MULTIPLE_CHOICE_MAPPING,
TF_MODEL_FOR_NEXT_SENTENCE_PREDICTION_MAPPING,
TF_MODEL_FOR_PRETRAINING_MAPPING,
TF_MODEL_FOR_QUESTION_ANSWERING_MAPPING,
TF_MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING,
TF_MODEL_FOR_SEQUENCE_CLASSIFICATION_MAPPING,
TF_MODEL_FOR_TOKEN_CLASSIFICATION_MAPPING,
TF_MODEL_MAPPING,
TF_MODEL_WITH_LM_HEAD_MAPPING,
TFAutoModel,
TFAutoModelForCausalLM,
TFAutoModelForMaskedLM,
TFAutoModelForMultipleChoice,
TFAutoModelForPreTraining,
TFAutoModelForQuestionAnswering,
TFAutoModelForSeq2SeqLM,
TFAutoModelForSequenceClassification,
TFAutoModelForTokenClassification,
TFAutoModelWithLMHead,
)
from .models.bart import TFBartForConditionalGeneration, TFBartModel, TFBartPretrainedModel
from .models.bert import (
TF_BERT_PRETRAINED_MODEL_ARCHIVE_LIST,
TFBertEmbeddings,
TFBertForMaskedLM,
TFBertForMultipleChoice,
TFBertForNextSentencePrediction,
TFBertForPreTraining,
TFBertForQuestionAnswering,
TFBertForSequenceClassification,
TFBertForTokenClassification,
TFBertLMHeadModel,
TFBertMainLayer,
TFBertModel,
TFBertPreTrainedModel,
)
from .models.blenderbot import TFBlenderbotForConditionalGeneration, TFBlenderbotModel
from .models.blenderbot_small import TFBlenderbotSmallForConditionalGeneration, TFBlenderbotSmallModel
from .models.camembert import (
TF_CAMEMBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
TFCamembertForMaskedLM,
TFCamembertForMultipleChoice,
TFCamembertForQuestionAnswering,
TFCamembertForSequenceClassification,
TFCamembertForTokenClassification,
TFCamembertModel,
)
from .models.convbert import (
TF_CONVBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
TFConvBertForMaskedLM,
TFConvBertForMultipleChoice,
TFConvBertForQuestionAnswering,
TFConvBertForSequenceClassification,
TFConvBertForTokenClassification,
TFConvBertLayer,
TFConvBertModel,
TFConvBertPreTrainedModel,
)
from .models.ctrl import (
TF_CTRL_PRETRAINED_MODEL_ARCHIVE_LIST,
TFCTRLForSequenceClassification,
TFCTRLLMHeadModel,
TFCTRLModel,
TFCTRLPreTrainedModel,
)
from .models.distilbert import (
TF_DISTILBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
TFDistilBertForMaskedLM,
TFDistilBertForMultipleChoice,
TFDistilBertForQuestionAnswering,
TFDistilBertForSequenceClassification,
TFDistilBertForTokenClassification,
TFDistilBertMainLayer,
TFDistilBertModel,
TFDistilBertPreTrainedModel,
)
from .models.dpr import (
TF_DPR_CONTEXT_ENCODER_PRETRAINED_MODEL_ARCHIVE_LIST,
TF_DPR_QUESTION_ENCODER_PRETRAINED_MODEL_ARCHIVE_LIST,
TF_DPR_READER_PRETRAINED_MODEL_ARCHIVE_LIST,
TFDPRContextEncoder,
TFDPRPretrainedContextEncoder,
TFDPRPretrainedQuestionEncoder,
TFDPRPretrainedReader,
TFDPRQuestionEncoder,
TFDPRReader,
)
from .models.electra import (
TF_ELECTRA_PRETRAINED_MODEL_ARCHIVE_LIST,
TFElectraForMaskedLM,
TFElectraForMultipleChoice,
TFElectraForPreTraining,
TFElectraForQuestionAnswering,
TFElectraForSequenceClassification,
TFElectraForTokenClassification,
TFElectraModel,
TFElectraPreTrainedModel,
)
from .models.flaubert import (
TF_FLAUBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
TFFlaubertForMultipleChoice,
TFFlaubertForQuestionAnsweringSimple,
TFFlaubertForSequenceClassification,
TFFlaubertForTokenClassification,
TFFlaubertModel,
TFFlaubertWithLMHeadModel,
)
from .models.funnel import (
TF_FUNNEL_PRETRAINED_MODEL_ARCHIVE_LIST,
TFFunnelBaseModel,
TFFunnelForMaskedLM,
TFFunnelForMultipleChoice,
TFFunnelForPreTraining,
TFFunnelForQuestionAnswering,
TFFunnelForSequenceClassification,
TFFunnelForTokenClassification,
TFFunnelModel,
)
from .models.gpt2 import (
TF_GPT2_PRETRAINED_MODEL_ARCHIVE_LIST,
TFGPT2DoubleHeadsModel,
TFGPT2ForSequenceClassification,
TFGPT2LMHeadModel,
TFGPT2MainLayer,
TFGPT2Model,
TFGPT2PreTrainedModel,
)
from .models.led import TFLEDForConditionalGeneration, TFLEDModel, TFLEDPreTrainedModel
from .models.longformer import (
TF_LONGFORMER_PRETRAINED_MODEL_ARCHIVE_LIST,
TFLongformerForMaskedLM,
TFLongformerForMultipleChoice,
TFLongformerForQuestionAnswering,
TFLongformerForSequenceClassification,
TFLongformerForTokenClassification,
TFLongformerModel,
TFLongformerSelfAttention,
)
from .models.lxmert import (
TF_LXMERT_PRETRAINED_MODEL_ARCHIVE_LIST,
TFLxmertForPreTraining,
TFLxmertMainLayer,
TFLxmertModel,
TFLxmertPreTrainedModel,
TFLxmertVisualFeatureEncoder,
)
from .models.marian import TFMarianModel, TFMarianMTModel
from .models.mbart import TFMBartForConditionalGeneration, TFMBartModel
from .models.mobilebert import (
TF_MOBILEBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
TFMobileBertForMaskedLM,
TFMobileBertForMultipleChoice,
TFMobileBertForNextSentencePrediction,
TFMobileBertForPreTraining,
TFMobileBertForQuestionAnswering,
TFMobileBertForSequenceClassification,
TFMobileBertForTokenClassification,
TFMobileBertMainLayer,
TFMobileBertModel,
TFMobileBertPreTrainedModel,
)
from .models.mpnet import (
TF_MPNET_PRETRAINED_MODEL_ARCHIVE_LIST,
TFMPNetForMaskedLM,
TFMPNetForMultipleChoice,
TFMPNetForQuestionAnswering,
TFMPNetForSequenceClassification,
TFMPNetForTokenClassification,
TFMPNetMainLayer,
TFMPNetModel,
TFMPNetPreTrainedModel,
)
from .models.mt5 import TFMT5EncoderModel, TFMT5ForConditionalGeneration, TFMT5Model
from .models.openai import (
TF_OPENAI_GPT_PRETRAINED_MODEL_ARCHIVE_LIST,
TFOpenAIGPTDoubleHeadsModel,
TFOpenAIGPTForSequenceClassification,
TFOpenAIGPTLMHeadModel,
TFOpenAIGPTMainLayer,
TFOpenAIGPTModel,
TFOpenAIGPTPreTrainedModel,
)
from .models.pegasus import TFPegasusForConditionalGeneration, TFPegasusModel
from .models.rag import TFRagModel, TFRagSequenceForGeneration, TFRagTokenForGeneration
from .models.roberta import (
TF_ROBERTA_PRETRAINED_MODEL_ARCHIVE_LIST,
TFRobertaForMaskedLM,
TFRobertaForMultipleChoice,
TFRobertaForQuestionAnswering,
TFRobertaForSequenceClassification,
TFRobertaForTokenClassification,
TFRobertaMainLayer,
TFRobertaModel,
TFRobertaPreTrainedModel,
)
from .models.t5 import (
TF_T5_PRETRAINED_MODEL_ARCHIVE_LIST,
TFT5EncoderModel,
TFT5ForConditionalGeneration,
TFT5Model,
TFT5PreTrainedModel,
)
from .models.transfo_xl import (
TF_TRANSFO_XL_PRETRAINED_MODEL_ARCHIVE_LIST,
TFAdaptiveEmbedding,
TFTransfoXLForSequenceClassification,
TFTransfoXLLMHeadModel,
TFTransfoXLMainLayer,
TFTransfoXLModel,
TFTransfoXLPreTrainedModel,
)
from .models.xlm import (
TF_XLM_PRETRAINED_MODEL_ARCHIVE_LIST,
TFXLMForMultipleChoice,
TFXLMForQuestionAnsweringSimple,
TFXLMForSequenceClassification,
TFXLMForTokenClassification,
TFXLMMainLayer,
TFXLMModel,
TFXLMPreTrainedModel,
TFXLMWithLMHeadModel,
)
from .models.xlm_roberta import (
TF_XLM_ROBERTA_PRETRAINED_MODEL_ARCHIVE_LIST,
TFXLMRobertaForMaskedLM,
TFXLMRobertaForMultipleChoice,
TFXLMRobertaForQuestionAnswering,
TFXLMRobertaForSequenceClassification,
TFXLMRobertaForTokenClassification,
TFXLMRobertaModel,
)
from .models.xlnet import (
TF_XLNET_PRETRAINED_MODEL_ARCHIVE_LIST,
TFXLNetForMultipleChoice,
TFXLNetForQuestionAnsweringSimple,
TFXLNetForSequenceClassification,
TFXLNetForTokenClassification,
TFXLNetLMHeadModel,
TFXLNetMainLayer,
TFXLNetModel,
TFXLNetPreTrainedModel,
)
# Optimization
from .optimization_tf import AdamWeightDecay, GradientAccumulator, WarmUp, create_optimizer
# Trainer
from .trainer_tf import TFTrainer
else:
# Import the same objects as dummies to get them in the namespace.
# They will raise an import error if the user tries to instantiate / use them.
from .utils.dummy_tf_objects import *
if is_flax_available():
from .modeling_flax_utils import FlaxPreTrainedModel
from .models.auto import (
FLAX_MODEL_FOR_MASKED_LM_MAPPING,
FLAX_MODEL_FOR_MULTIPLE_CHOICE_MAPPING,
FLAX_MODEL_FOR_NEXT_SENTENCE_PREDICTION_MAPPING,
FLAX_MODEL_FOR_PRETRAINING_MAPPING,
FLAX_MODEL_FOR_QUESTION_ANSWERING_MAPPING,
FLAX_MODEL_FOR_SEQUENCE_CLASSIFICATION_MAPPING,
FLAX_MODEL_FOR_TOKEN_CLASSIFICATION_MAPPING,
FLAX_MODEL_MAPPING,
FlaxAutoModel,
FlaxAutoModelForMaskedLM,
FlaxAutoModelForMultipleChoice,
FlaxAutoModelForNextSentencePrediction,
FlaxAutoModelForPreTraining,
FlaxAutoModelForQuestionAnswering,
FlaxAutoModelForSequenceClassification,
FlaxAutoModelForTokenClassification,
)
from .models.bert import (
FlaxBertForMaskedLM,
FlaxBertForMultipleChoice,
FlaxBertForNextSentencePrediction,
FlaxBertForPreTraining,
FlaxBertForQuestionAnswering,
FlaxBertForSequenceClassification,
FlaxBertForTokenClassification,
FlaxBertModel,
FlaxBertPreTrainedModel,
)
from .models.roberta import FlaxRobertaModel
else:
# Import the same objects as dummies to get them in the namespace.
# They will raise an import error if the user tries to instantiate / use them.
from .utils.dummy_flax_objects import *
else:
import importlib
import os
import sys
class _LazyModule(_BaseLazyModule):
"""
Module class that surfaces all objects but only performs associated imports when the objects are requested.
"""
__file__ = globals()["__file__"]
__path__ = [os.path.dirname(__file__)]
def _get_module(self, module_name: str):
return importlib.import_module("." + module_name, self.__name__)
def __getattr__(self, name: str):
# Special handling for the version, which is a constant from this module and not imported in a submodule.
if name == "__version__":
return __version__
return super().__getattr__(name)
sys.modules[__name__] = _LazyModule(__name__, _import_structure)
if not is_tf_available() and not is_torch_available() and not is_flax_available():
    # Without any deep-learning backend installed, all model classes resolve
    # to dummy placeholders; warn users why only tokenizers/configs/utilities
    # will be usable.
    logger.warning(
        "None of PyTorch, TensorFlow >= 2.0, or Flax have been found. "
        "Models won't be available and only tokenizers, configuration "
        "and file/data utilities can be used."
    )
| 38.540467 | 119 | 0.66243 |
# module, but to preserve other warnings. So, don't check this module at all.
# Single source of truth for the package version (also special-cased by
# the lazy module's ``__getattr__``).
__version__ = "4.6.0.dev0"
# default Python logging output behavior when present.
# see: https://github.com/abseil/abseil-py/issues/99
# and: https://github.com/tensorflow/tensorflow/issues/26691#issuecomment-500369493
try:
    # absl is typically pulled in by TensorFlow; absent otherwise.
    import absl.logging
except ImportError:
    pass
else:
    # Restore the default Python logging behavior that absl overrides on
    # import (see the issue links above).
    absl.logging.set_verbosity("info")
    absl.logging.set_stderrthreshold("info")
    # NOTE: private attribute of absl — silences its "pre-init" stderr warning.
    absl.logging._warn_preinit_stderr = False
from typing import TYPE_CHECKING
# Check the dependencies satisfy the minimal versions required.
from . import dependency_versions_check
from .file_utils import (
_BaseLazyModule,
is_flax_available,
is_sentencepiece_available,
is_speech_available,
is_tf_available,
is_tokenizers_available,
is_torch_available,
is_vision_available,
)
from .utils import logging
logger = logging.get_logger(__name__) # pylint: disable=invalid-name
# Base objects, independent of any specific backend
_import_structure = {
    # Maps submodule path -> list of public names lazily re-exported from it.
    # This manifest is consumed by ``_LazyModule`` at the bottom of this file;
    # backend-specific names (sentencepiece, tokenizers, torch, tf, flax, ...)
    # are appended to it further below depending on which optional
    # dependencies are installed.
    "configuration_utils": ["PretrainedConfig"],
    "data": [
        "DataProcessor",
        "InputExample",
        "InputFeatures",
        "SingleSentenceClassificationProcessor",
        "SquadExample",
        "SquadFeatures",
        "SquadV1Processor",
        "SquadV2Processor",
        "glue_compute_metrics",
        "glue_convert_examples_to_features",
        "glue_output_modes",
        "glue_processors",
        "glue_tasks_num_labels",
        "squad_convert_examples_to_features",
        "xnli_compute_metrics",
        "xnli_output_modes",
        "xnli_processors",
        "xnli_tasks_num_labels",
    ],
    "feature_extraction_sequence_utils": ["BatchFeature", "SequenceFeatureExtractor"],
    "file_utils": [
        "CONFIG_NAME",
        "MODEL_CARD_NAME",
        "PYTORCH_PRETRAINED_BERT_CACHE",
        "PYTORCH_TRANSFORMERS_CACHE",
        "SPIECE_UNDERLINE",
        "TF2_WEIGHTS_NAME",
        "TF_WEIGHTS_NAME",
        "TRANSFORMERS_CACHE",
        "WEIGHTS_NAME",
        "TensorType",
        "add_end_docstrings",
        "add_start_docstrings",
        "cached_path",
        "is_apex_available",
        "is_datasets_available",
        "is_faiss_available",
        "is_flax_available",
        "is_psutil_available",
        "is_py3nvml_available",
        "is_sentencepiece_available",
        "is_sklearn_available",
        "is_speech_available",
        "is_tf_available",
        "is_tokenizers_available",
        "is_torch_available",
        "is_torch_tpu_available",
        "is_vision_available",
    ],
    "hf_argparser": ["HfArgumentParser"],
    "integrations": [
        "is_comet_available",
        "is_optuna_available",
        "is_ray_available",
        "is_ray_tune_available",
        "is_tensorboard_available",
        "is_wandb_available",
    ],
    "modelcard": ["ModelCard"],
    "modeling_tf_pytorch_utils": [
        "convert_tf_weight_name_to_pt_weight_name",
        "load_pytorch_checkpoint_in_tf2_model",
        "load_pytorch_model_in_tf2_model",
        "load_pytorch_weights_in_tf2_model",
        "load_tf2_checkpoint_in_pytorch_model",
        "load_tf2_model_in_pytorch_model",
        "load_tf2_weights_in_pytorch_model",
    ],
    # Models
    "models": [],
    "models.albert": ["ALBERT_PRETRAINED_CONFIG_ARCHIVE_MAP", "AlbertConfig"],
    "models.auto": [
        "ALL_PRETRAINED_CONFIG_ARCHIVE_MAP",
        "CONFIG_MAPPING",
        "FEATURE_EXTRACTOR_MAPPING",
        "MODEL_NAMES_MAPPING",
        "TOKENIZER_MAPPING",
        "AutoConfig",
        "AutoFeatureExtractor",
        "AutoTokenizer",
    ],
    "models.bart": ["BartConfig", "BartTokenizer"],
    "models.barthez": [],
    "models.bert": [
        "BERT_PRETRAINED_CONFIG_ARCHIVE_MAP",
        "BasicTokenizer",
        "BertConfig",
        "BertTokenizer",
        "WordpieceTokenizer",
    ],
    "models.bert_generation": ["BertGenerationConfig"],
    "models.bert_japanese": ["BertJapaneseTokenizer", "CharacterTokenizer", "MecabTokenizer"],
    "models.bertweet": ["BertweetTokenizer"],
    "models.big_bird": ["BIG_BIRD_PRETRAINED_CONFIG_ARCHIVE_MAP", "BigBirdConfig", "BigBirdTokenizer"],
    "models.blenderbot": ["BLENDERBOT_PRETRAINED_CONFIG_ARCHIVE_MAP", "BlenderbotConfig", "BlenderbotTokenizer"],
    "models.blenderbot_small": [
        "BLENDERBOT_SMALL_PRETRAINED_CONFIG_ARCHIVE_MAP",
        "BlenderbotSmallConfig",
        "BlenderbotSmallTokenizer",
    ],
    "models.camembert": ["CAMEMBERT_PRETRAINED_CONFIG_ARCHIVE_MAP", "CamembertConfig"],
    "models.convbert": ["CONVBERT_PRETRAINED_CONFIG_ARCHIVE_MAP", "ConvBertConfig", "ConvBertTokenizer"],
    "models.cpm": ["CpmTokenizer"],
    "models.ctrl": ["CTRL_PRETRAINED_CONFIG_ARCHIVE_MAP", "CTRLConfig", "CTRLTokenizer"],
    "models.deberta": ["DEBERTA_PRETRAINED_CONFIG_ARCHIVE_MAP", "DebertaConfig", "DebertaTokenizer"],
    "models.deberta_v2": ["DEBERTA_V2_PRETRAINED_CONFIG_ARCHIVE_MAP", "DebertaV2Config"],
    "models.distilbert": ["DISTILBERT_PRETRAINED_CONFIG_ARCHIVE_MAP", "DistilBertConfig", "DistilBertTokenizer"],
    "models.dpr": [
        "DPR_PRETRAINED_CONFIG_ARCHIVE_MAP",
        "DPRConfig",
        "DPRContextEncoderTokenizer",
        "DPRQuestionEncoderTokenizer",
        "DPRReaderOutput",
        "DPRReaderTokenizer",
    ],
    "models.electra": ["ELECTRA_PRETRAINED_CONFIG_ARCHIVE_MAP", "ElectraConfig", "ElectraTokenizer"],
    "models.encoder_decoder": ["EncoderDecoderConfig"],
    "models.flaubert": ["FLAUBERT_PRETRAINED_CONFIG_ARCHIVE_MAP", "FlaubertConfig", "FlaubertTokenizer"],
    "models.fsmt": ["FSMT_PRETRAINED_CONFIG_ARCHIVE_MAP", "FSMTConfig", "FSMTTokenizer"],
    "models.funnel": ["FUNNEL_PRETRAINED_CONFIG_ARCHIVE_MAP", "FunnelConfig", "FunnelTokenizer"],
    "models.gpt2": ["GPT2_PRETRAINED_CONFIG_ARCHIVE_MAP", "GPT2Config", "GPT2Tokenizer"],
    "models.gpt_neo": ["GPT_NEO_PRETRAINED_CONFIG_ARCHIVE_MAP", "GPTNeoConfig"],
    "models.herbert": ["HerbertTokenizer"],
    "models.ibert": ["IBERT_PRETRAINED_CONFIG_ARCHIVE_MAP", "IBertConfig"],
    "models.layoutlm": ["LAYOUTLM_PRETRAINED_CONFIG_ARCHIVE_MAP", "LayoutLMConfig", "LayoutLMTokenizer"],
    "models.led": ["LED_PRETRAINED_CONFIG_ARCHIVE_MAP", "LEDConfig", "LEDTokenizer"],
    "models.longformer": ["LONGFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP", "LongformerConfig", "LongformerTokenizer"],
    "models.lxmert": ["LXMERT_PRETRAINED_CONFIG_ARCHIVE_MAP", "LxmertConfig", "LxmertTokenizer"],
    "models.m2m_100": ["M2M_100_PRETRAINED_CONFIG_ARCHIVE_MAP", "M2M100Config"],
    "models.marian": ["MarianConfig"],
    "models.mbart": ["MBartConfig"],
    "models.megatron_bert": ["MEGATRON_BERT_PRETRAINED_CONFIG_ARCHIVE_MAP", "MegatronBertConfig"],
    "models.mmbt": ["MMBTConfig"],
    "models.mobilebert": ["MOBILEBERT_PRETRAINED_CONFIG_ARCHIVE_MAP", "MobileBertConfig", "MobileBertTokenizer"],
    "models.mpnet": ["MPNET_PRETRAINED_CONFIG_ARCHIVE_MAP", "MPNetConfig", "MPNetTokenizer"],
    "models.mt5": ["MT5Config"],
    "models.openai": ["OPENAI_GPT_PRETRAINED_CONFIG_ARCHIVE_MAP", "OpenAIGPTConfig", "OpenAIGPTTokenizer"],
    "models.pegasus": ["PegasusConfig"],
    "models.phobert": ["PhobertTokenizer"],
    "models.prophetnet": ["PROPHETNET_PRETRAINED_CONFIG_ARCHIVE_MAP", "ProphetNetConfig", "ProphetNetTokenizer"],
    "models.rag": ["RagConfig", "RagRetriever", "RagTokenizer"],
    "models.reformer": ["REFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP", "ReformerConfig"],
    "models.retribert": ["RETRIBERT_PRETRAINED_CONFIG_ARCHIVE_MAP", "RetriBertConfig", "RetriBertTokenizer"],
    "models.roberta": ["ROBERTA_PRETRAINED_CONFIG_ARCHIVE_MAP", "RobertaConfig", "RobertaTokenizer"],
    "models.speech_to_text": [
        "SPEECH_TO_TEXT_PRETRAINED_CONFIG_ARCHIVE_MAP",
        "Speech2TextConfig",
    ],
    "models.squeezebert": ["SQUEEZEBERT_PRETRAINED_CONFIG_ARCHIVE_MAP", "SqueezeBertConfig", "SqueezeBertTokenizer"],
    "models.t5": ["T5_PRETRAINED_CONFIG_ARCHIVE_MAP", "T5Config"],
    "models.tapas": ["TAPAS_PRETRAINED_CONFIG_ARCHIVE_MAP", "TapasConfig", "TapasTokenizer"],
    "models.transfo_xl": [
        "TRANSFO_XL_PRETRAINED_CONFIG_ARCHIVE_MAP",
        "TransfoXLConfig",
        "TransfoXLCorpus",
        "TransfoXLTokenizer",
    ],
    "models.vit": ["VIT_PRETRAINED_CONFIG_ARCHIVE_MAP", "ViTConfig"],
    "models.wav2vec2": [
        "WAV_2_VEC_2_PRETRAINED_CONFIG_ARCHIVE_MAP",
        "Wav2Vec2Config",
        "Wav2Vec2CTCTokenizer",
        "Wav2Vec2FeatureExtractor",
        "Wav2Vec2Processor",
        "Wav2Vec2Tokenizer",
    ],
    "models.xlm": ["XLM_PRETRAINED_CONFIG_ARCHIVE_MAP", "XLMConfig", "XLMTokenizer"],
    "models.xlm_prophetnet": ["XLM_PROPHETNET_PRETRAINED_CONFIG_ARCHIVE_MAP", "XLMProphetNetConfig"],
    "models.xlm_roberta": ["XLM_ROBERTA_PRETRAINED_CONFIG_ARCHIVE_MAP", "XLMRobertaConfig"],
    "models.xlnet": ["XLNET_PRETRAINED_CONFIG_ARCHIVE_MAP", "XLNetConfig"],
    # Pipelines
    "pipelines": [
        "Conversation",
        "ConversationalPipeline",
        "CsvPipelineDataFormat",
        "FeatureExtractionPipeline",
        "FillMaskPipeline",
        "JsonPipelineDataFormat",
        "NerPipeline",
        "PipedPipelineDataFormat",
        "Pipeline",
        "PipelineDataFormat",
        "QuestionAnsweringPipeline",
        "SummarizationPipeline",
        "TableQuestionAnsweringPipeline",
        "Text2TextGenerationPipeline",
        "TextClassificationPipeline",
        "TextGenerationPipeline",
        "TokenClassificationPipeline",
        "TranslationPipeline",
        "ZeroShotClassificationPipeline",
        "pipeline",
    ],
    # Tokenization and training utilities
    "tokenization_utils": ["PreTrainedTokenizer"],
    "tokenization_utils_base": [
        "AddedToken",
        "BatchEncoding",
        "CharSpan",
        "PreTrainedTokenizerBase",
        "SpecialTokensMixin",
        "TokenSpan",
    ],
    "trainer_callback": [
        "DefaultFlowCallback",
        "EarlyStoppingCallback",
        "PrinterCallback",
        "ProgressCallback",
        "TrainerCallback",
        "TrainerControl",
        "TrainerState",
    ],
    "trainer_utils": ["EvalPrediction", "IntervalStrategy", "SchedulerType", "set_seed"],
    "training_args": ["TrainingArguments"],
    "training_args_seq2seq": ["Seq2SeqTrainingArguments"],
    "training_args_tf": ["TFTrainingArguments"],
    "utils": ["logging"],
}
# sentencepiece-backed objects
if is_sentencepiece_available():
    # Register the slow, sentencepiece-backed tokenizers in the lazy-import
    # manifest; they are importable only when ``sentencepiece`` is installed.
    # NOTE: these entries must stay in sync with the imports in the
    # TYPE_CHECKING branch below.
    _import_structure["models.albert"].append("AlbertTokenizer")
    _import_structure["models.barthez"].append("BarthezTokenizer")
    _import_structure["models.bert_generation"].append("BertGenerationTokenizer")
    _import_structure["models.camembert"].append("CamembertTokenizer")
    _import_structure["models.deberta_v2"].append("DebertaV2Tokenizer")
    _import_structure["models.m2m_100"].append("M2M100Tokenizer")
    _import_structure["models.marian"].append("MarianTokenizer")
    _import_structure["models.mbart"].append("MBartTokenizer")
    _import_structure["models.mbart"].append("MBart50Tokenizer")
    _import_structure["models.mt5"].append("MT5Tokenizer")
    _import_structure["models.pegasus"].append("PegasusTokenizer")
    _import_structure["models.reformer"].append("ReformerTokenizer")
    _import_structure["models.speech_to_text"].append("Speech2TextTokenizer")
    _import_structure["models.t5"].append("T5Tokenizer")
    _import_structure["models.xlm_prophetnet"].append("XLMProphetNetTokenizer")
    _import_structure["models.xlm_roberta"].append("XLMRobertaTokenizer")
    _import_structure["models.xlnet"].append("XLNetTokenizer")
else:
    # Without ``sentencepiece``, expose dummy placeholder objects instead;
    # they raise an informative error when the user tries to use them.
    from .utils import dummy_sentencepiece_objects
    _import_structure["utils.dummy_sentencepiece_objects"] = [
        name for name in dir(dummy_sentencepiece_objects) if not name.startswith("_")
    ]
# Fast (Rust `tokenizers`-backed) tokenizers: only registered when the optional
# `tokenizers` package is installed; otherwise dummy placeholder objects are
# exposed that raise an informative error when instantiated.
if is_tokenizers_available():
    # Fast tokenizers
    _import_structure["models.convbert"].append("ConvBertTokenizerFast")
    _import_structure["models.albert"].append("AlbertTokenizerFast")
    _import_structure["models.bart"].append("BartTokenizerFast")
    _import_structure["models.barthez"].append("BarthezTokenizerFast")
    _import_structure["models.bert"].append("BertTokenizerFast")
    _import_structure["models.camembert"].append("CamembertTokenizerFast")
    _import_structure["models.distilbert"].append("DistilBertTokenizerFast")
    _import_structure["models.dpr"].extend(
        ["DPRContextEncoderTokenizerFast", "DPRQuestionEncoderTokenizerFast", "DPRReaderTokenizerFast"]
    )
    _import_structure["models.electra"].append("ElectraTokenizerFast")
    _import_structure["models.funnel"].append("FunnelTokenizerFast")
    _import_structure["models.gpt2"].append("GPT2TokenizerFast")
    _import_structure["models.herbert"].append("HerbertTokenizerFast")
    _import_structure["models.layoutlm"].append("LayoutLMTokenizerFast")
    _import_structure["models.led"].append("LEDTokenizerFast")
    _import_structure["models.longformer"].append("LongformerTokenizerFast")
    _import_structure["models.lxmert"].append("LxmertTokenizerFast")
    _import_structure["models.mbart"].append("MBartTokenizerFast")
    _import_structure["models.mbart"].append("MBart50TokenizerFast")
    _import_structure["models.mobilebert"].append("MobileBertTokenizerFast")
    _import_structure["models.mpnet"].append("MPNetTokenizerFast")
    _import_structure["models.mt5"].append("MT5TokenizerFast")
    _import_structure["models.openai"].append("OpenAIGPTTokenizerFast")
    _import_structure["models.pegasus"].append("PegasusTokenizerFast")
    _import_structure["models.reformer"].append("ReformerTokenizerFast")
    _import_structure["models.retribert"].append("RetriBertTokenizerFast")
    _import_structure["models.roberta"].append("RobertaTokenizerFast")
    _import_structure["models.squeezebert"].append("SqueezeBertTokenizerFast")
    _import_structure["models.t5"].append("T5TokenizerFast")
    _import_structure["models.xlm_roberta"].append("XLMRobertaTokenizerFast")
    _import_structure["models.xlnet"].append("XLNetTokenizerFast")
    # The fast-tokenizer base class also lives behind this availability check.
    _import_structure["tokenization_utils_fast"] = ["PreTrainedTokenizerFast"]
else:
    from .utils import dummy_tokenizers_objects
    # Re-export every public name of the dummy module in place of the real classes.
    _import_structure["utils.dummy_tokenizers_objects"] = [
        name for name in dir(dummy_tokenizers_objects) if not name.startswith("_")
    ]
# Slow->fast tokenizer conversion requires BOTH sentencepiece and tokenizers.
if not (is_sentencepiece_available() and is_tokenizers_available()):
    from .utils import dummy_sentencepiece_and_tokenizers_objects

    # Expose dummy placeholders when either dependency is missing.
    _import_structure["utils.dummy_sentencepiece_and_tokenizers_objects"] = [
        obj_name
        for obj_name in dir(dummy_sentencepiece_and_tokenizers_objects)
        if not obj_name.startswith("_")
    ]
else:
    _import_structure["convert_slow_tokenizer"] = [
        "SLOW_TO_FAST_CONVERTERS",
        "convert_slow_tokenizer",
    ]
# Speech-specific objects: the Speech2Text feature extractor is only
# registered when the optional speech dependencies are available.
if is_speech_available():
    _import_structure["models.speech_to_text"] += ["Speech2TextFeatureExtractor"]
else:
    from .utils import dummy_speech_objects

    # Fall back to dummy placeholders that error helpfully on use.
    _import_structure["utils.dummy_speech_objects"] = [
        obj_name for obj_name in dir(dummy_speech_objects) if not obj_name.startswith("_")
    ]
# The Speech2Text processor bundles a tokenizer and a feature extractor,
# so it needs sentencepiece AND the speech dependencies simultaneously.
if not (is_sentencepiece_available() and is_speech_available()):
    from .utils import dummy_sentencepiece_and_speech_objects

    _import_structure["utils.dummy_sentencepiece_and_speech_objects"] = [
        obj_name
        for obj_name in dir(dummy_sentencepiece_and_speech_objects)
        if not obj_name.startswith("_")
    ]
else:
    _import_structure["models.speech_to_text"] += ["Speech2TextProcessor"]
# Vision-specific objects: image helpers and the ViT feature extractor are
# only registered when vision support is available.
if is_vision_available():
    _import_structure["image_utils"] = ["ImageFeatureExtractionMixin"]
    _import_structure["models.vit"] += ["ViTFeatureExtractor"]
else:
    from .utils import dummy_vision_objects

    # Otherwise expose the dummy placeholders instead.
    _import_structure["utils.dummy_vision_objects"] = [
        obj_name for obj_name in dir(dummy_vision_objects) if not obj_name.startswith("_")
    ]
# PyTorch-backed objects: every entry below is only importable when `torch`
# is installed. The registry maps submodule path -> list of public names to
# lazily re-export from that submodule.
if is_torch_available():
    # Benchmarking helpers
    _import_structure["benchmark.benchmark"] = ["PyTorchBenchmark"]
    _import_structure["benchmark.benchmark_args"] = ["PyTorchBenchmarkArguments"]
    # Data collators and Trainer dataset classes
    _import_structure["data.data_collator"] = [
        "DataCollator",
        "DataCollatorForLanguageModeling",
        "DataCollatorForPermutationLanguageModeling",
        "DataCollatorForSeq2Seq",
        "DataCollatorForSOP",
        "DataCollatorForTokenClassification",
        "DataCollatorForWholeWordMask",
        "DataCollatorWithPadding",
        "default_data_collator",
    ]
    _import_structure["data.datasets"] = [
        "GlueDataset",
        "GlueDataTrainingArguments",
        "LineByLineTextDataset",
        "LineByLineWithRefDataset",
        "LineByLineWithSOPTextDataset",
        "SquadDataset",
        "SquadDataTrainingArguments",
        "TextDataset",
        "TextDatasetForNextSentencePrediction",
    ]
    # Text-generation utilities: beam search, logits processors/warpers,
    # stopping criteria and sampling helpers.
    _import_structure["generation_beam_search"] = ["BeamScorer", "BeamSearchScorer"]
    _import_structure["generation_logits_process"] = [
        "ForcedBOSTokenLogitsProcessor",
        "ForcedEOSTokenLogitsProcessor",
        "HammingDiversityLogitsProcessor",
        "InfNanRemoveLogitsProcessor",
        "LogitsProcessor",
        "LogitsProcessorList",
        "LogitsWarper",
        "MinLengthLogitsProcessor",
        "NoBadWordsLogitsProcessor",
        "NoRepeatNGramLogitsProcessor",
        "PrefixConstrainedLogitsProcessor",
        "RepetitionPenaltyLogitsProcessor",
        "TemperatureLogitsWarper",
        "TopKLogitsWarper",
        "TopPLogitsWarper",
    ]
    _import_structure["generation_stopping_criteria"] = [
        "MaxLengthCriteria",
        "MaxTimeCriteria",
        "StoppingCriteria",
        "StoppingCriteriaList",
    ]
    _import_structure["generation_utils"] = ["top_k_top_p_filtering"]
    _import_structure["modeling_utils"] = ["Conv1D", "PreTrainedModel", "apply_chunking_to_forward", "prune_layer"]
    # PyTorch models structure: one `extend`/`append` per model submodule,
    # listing pretrained-archive constants, model classes and TF-weight loaders.
    _import_structure["models.albert"].extend(
        [
            "ALBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
            "AlbertForMaskedLM",
            "AlbertForMultipleChoice",
            "AlbertForPreTraining",
            "AlbertForQuestionAnswering",
            "AlbertForSequenceClassification",
            "AlbertForTokenClassification",
            "AlbertModel",
            "AlbertPreTrainedModel",
            "load_tf_weights_in_albert",
        ]
    )
    _import_structure["models.auto"].extend(
        [
            "MODEL_FOR_CAUSAL_LM_MAPPING",
            "MODEL_FOR_IMAGE_CLASSIFICATION_MAPPING",
            "MODEL_FOR_MASKED_LM_MAPPING",
            "MODEL_FOR_MULTIPLE_CHOICE_MAPPING",
            "MODEL_FOR_NEXT_SENTENCE_PREDICTION_MAPPING",
            "MODEL_FOR_PRETRAINING_MAPPING",
            "MODEL_FOR_QUESTION_ANSWERING_MAPPING",
            "MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING",
            "MODEL_FOR_SEQUENCE_CLASSIFICATION_MAPPING",
            "MODEL_FOR_TABLE_QUESTION_ANSWERING_MAPPING",
            "MODEL_FOR_TOKEN_CLASSIFICATION_MAPPING",
            "MODEL_MAPPING",
            "MODEL_WITH_LM_HEAD_MAPPING",
            "AutoModel",
            "AutoModelForCausalLM",
            "AutoModelForMaskedLM",
            "AutoModelForMultipleChoice",
            "AutoModelForNextSentencePrediction",
            "AutoModelForPreTraining",
            "AutoModelForQuestionAnswering",
            "AutoModelForSeq2SeqLM",
            "AutoModelForSequenceClassification",
            "AutoModelForTableQuestionAnswering",
            "AutoModelForTokenClassification",
            "AutoModelWithLMHead",
        ]
    )
    _import_structure["models.bart"].extend(
        [
            "BART_PRETRAINED_MODEL_ARCHIVE_LIST",
            "BartForCausalLM",
            "BartForConditionalGeneration",
            "BartForQuestionAnswering",
            "BartForSequenceClassification",
            "BartModel",
            "BartPretrainedModel",
            "PretrainedBartModel",
        ]
    )
    _import_structure["models.bert"].extend(
        [
            "BERT_PRETRAINED_MODEL_ARCHIVE_LIST",
            "BertForMaskedLM",
            "BertForMultipleChoice",
            "BertForNextSentencePrediction",
            "BertForPreTraining",
            "BertForQuestionAnswering",
            "BertForSequenceClassification",
            "BertForTokenClassification",
            "BertLayer",
            "BertLMHeadModel",
            "BertModel",
            "BertPreTrainedModel",
            "load_tf_weights_in_bert",
        ]
    )
    _import_structure["models.bert_generation"].extend(
        [
            "BertGenerationDecoder",
            "BertGenerationEncoder",
            "load_tf_weights_in_bert_generation",
        ]
    )
    _import_structure["models.big_bird"].extend(
        [
            "BIG_BIRD_PRETRAINED_MODEL_ARCHIVE_LIST",
            "BigBirdForCausalLM",
            "BigBirdForMaskedLM",
            "BigBirdForMultipleChoice",
            "BigBirdForPreTraining",
            "BigBirdForQuestionAnswering",
            "BigBirdForSequenceClassification",
            "BigBirdForTokenClassification",
            "BigBirdLayer",
            "BigBirdModel",
            "BigBirdPreTrainedModel",
            "load_tf_weights_in_big_bird",
        ]
    )
    _import_structure["models.blenderbot"].extend(
        [
            "BLENDERBOT_PRETRAINED_MODEL_ARCHIVE_LIST",
            "BlenderbotForCausalLM",
            "BlenderbotForConditionalGeneration",
            "BlenderbotModel",
        ]
    )
    _import_structure["models.blenderbot_small"].extend(
        [
            "BLENDERBOT_SMALL_PRETRAINED_MODEL_ARCHIVE_LIST",
            "BlenderbotSmallForCausalLM",
            "BlenderbotSmallForConditionalGeneration",
            "BlenderbotSmallModel",
        ]
    )
    _import_structure["models.camembert"].extend(
        [
            "CAMEMBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
            "CamembertForCausalLM",
            "CamembertForMaskedLM",
            "CamembertForMultipleChoice",
            "CamembertForQuestionAnswering",
            "CamembertForSequenceClassification",
            "CamembertForTokenClassification",
            "CamembertModel",
        ]
    )
    _import_structure["models.convbert"].extend(
        [
            "CONVBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
            "ConvBertForMaskedLM",
            "ConvBertForMultipleChoice",
            "ConvBertForQuestionAnswering",
            "ConvBertForSequenceClassification",
            "ConvBertForTokenClassification",
            "ConvBertLayer",
            "ConvBertModel",
            "ConvBertPreTrainedModel",
            "load_tf_weights_in_convbert",
        ]
    )
    _import_structure["models.ctrl"].extend(
        [
            "CTRL_PRETRAINED_MODEL_ARCHIVE_LIST",
            "CTRLForSequenceClassification",
            "CTRLLMHeadModel",
            "CTRLModel",
            "CTRLPreTrainedModel",
        ]
    )
    _import_structure["models.deberta"].extend(
        [
            "DEBERTA_PRETRAINED_MODEL_ARCHIVE_LIST",
            "DebertaForMaskedLM",
            "DebertaForQuestionAnswering",
            "DebertaForSequenceClassification",
            "DebertaForTokenClassification",
            "DebertaModel",
            "DebertaPreTrainedModel",
        ]
    )
    _import_structure["models.deberta_v2"].extend(
        [
            "DEBERTA_V2_PRETRAINED_MODEL_ARCHIVE_LIST",
            "DebertaV2ForMaskedLM",
            "DebertaV2ForQuestionAnswering",
            "DebertaV2ForSequenceClassification",
            "DebertaV2ForTokenClassification",
            "DebertaV2Model",
            "DebertaV2PreTrainedModel",
        ]
    )
    _import_structure["models.distilbert"].extend(
        [
            "DISTILBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
            "DistilBertForMaskedLM",
            "DistilBertForMultipleChoice",
            "DistilBertForQuestionAnswering",
            "DistilBertForSequenceClassification",
            "DistilBertForTokenClassification",
            "DistilBertModel",
            "DistilBertPreTrainedModel",
        ]
    )
    _import_structure["models.dpr"].extend(
        [
            "DPR_CONTEXT_ENCODER_PRETRAINED_MODEL_ARCHIVE_LIST",
            "DPR_QUESTION_ENCODER_PRETRAINED_MODEL_ARCHIVE_LIST",
            "DPR_READER_PRETRAINED_MODEL_ARCHIVE_LIST",
            "DPRContextEncoder",
            "DPRPretrainedContextEncoder",
            "DPRPretrainedQuestionEncoder",
            "DPRPretrainedReader",
            "DPRQuestionEncoder",
            "DPRReader",
        ]
    )
    _import_structure["models.electra"].extend(
        [
            "ELECTRA_PRETRAINED_MODEL_ARCHIVE_LIST",
            "ElectraForMaskedLM",
            "ElectraForMultipleChoice",
            "ElectraForPreTraining",
            "ElectraForQuestionAnswering",
            "ElectraForSequenceClassification",
            "ElectraForTokenClassification",
            "ElectraModel",
            "ElectraPreTrainedModel",
            "load_tf_weights_in_electra",
        ]
    )
    _import_structure["models.encoder_decoder"].append("EncoderDecoderModel")
    _import_structure["models.flaubert"].extend(
        [
            "FLAUBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
            "FlaubertForMultipleChoice",
            "FlaubertForQuestionAnswering",
            "FlaubertForQuestionAnsweringSimple",
            "FlaubertForSequenceClassification",
            "FlaubertForTokenClassification",
            "FlaubertModel",
            "FlaubertWithLMHeadModel",
        ]
    )
    _import_structure["models.fsmt"].extend(["FSMTForConditionalGeneration", "FSMTModel", "PretrainedFSMTModel"])
    _import_structure["models.funnel"].extend(
        [
            "FUNNEL_PRETRAINED_MODEL_ARCHIVE_LIST",
            "FunnelBaseModel",
            "FunnelForMaskedLM",
            "FunnelForMultipleChoice",
            "FunnelForPreTraining",
            "FunnelForQuestionAnswering",
            "FunnelForSequenceClassification",
            "FunnelForTokenClassification",
            "FunnelModel",
            "load_tf_weights_in_funnel",
        ]
    )
    _import_structure["models.gpt2"].extend(
        [
            "GPT2_PRETRAINED_MODEL_ARCHIVE_LIST",
            "GPT2DoubleHeadsModel",
            "GPT2ForSequenceClassification",
            "GPT2LMHeadModel",
            "GPT2Model",
            "GPT2PreTrainedModel",
            "load_tf_weights_in_gpt2",
        ]
    )
    _import_structure["models.gpt_neo"].extend(
        [
            "GPT_NEO_PRETRAINED_MODEL_ARCHIVE_LIST",
            "GPTNeoForCausalLM",
            "GPTNeoModel",
            "GPTNeoPreTrainedModel",
            "load_tf_weights_in_gpt_neo",
        ]
    )
    _import_structure["models.ibert"].extend(
        [
            "IBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
            "IBertForMaskedLM",
            "IBertForMultipleChoice",
            "IBertForQuestionAnswering",
            "IBertForSequenceClassification",
            "IBertForTokenClassification",
            "IBertModel",
            "IBertPreTrainedModel",
        ]
    )
    _import_structure["models.layoutlm"].extend(
        [
            "LAYOUTLM_PRETRAINED_MODEL_ARCHIVE_LIST",
            "LayoutLMForMaskedLM",
            "LayoutLMForSequenceClassification",
            "LayoutLMForTokenClassification",
            "LayoutLMModel",
        ]
    )
    _import_structure["models.led"].extend(
        [
            "LED_PRETRAINED_MODEL_ARCHIVE_LIST",
            "LEDForConditionalGeneration",
            "LEDForQuestionAnswering",
            "LEDForSequenceClassification",
            "LEDModel",
        ]
    )
    _import_structure["models.longformer"].extend(
        [
            "LONGFORMER_PRETRAINED_MODEL_ARCHIVE_LIST",
            "LongformerForMaskedLM",
            "LongformerForMultipleChoice",
            "LongformerForQuestionAnswering",
            "LongformerForSequenceClassification",
            "LongformerForTokenClassification",
            "LongformerModel",
            "LongformerSelfAttention",
        ]
    )
    _import_structure["models.lxmert"].extend(
        [
            "LxmertEncoder",
            "LxmertForPreTraining",
            "LxmertForQuestionAnswering",
            "LxmertModel",
            "LxmertPreTrainedModel",
            "LxmertVisualFeatureEncoder",
            "LxmertXLayer",
        ]
    )
    _import_structure["models.m2m_100"].extend(
        [
            "M2M_100_PRETRAINED_MODEL_ARCHIVE_LIST",
            "M2M100ForConditionalGeneration",
            "M2M100Model",
        ]
    )
    _import_structure["models.marian"].extend(["MarianForCausalLM", "MarianModel", "MarianMTModel"])
    _import_structure["models.mbart"].extend(
        [
            "MBartForCausalLM",
            "MBartForConditionalGeneration",
            "MBartForQuestionAnswering",
            "MBartForSequenceClassification",
            "MBartModel",
        ]
    )
    _import_structure["models.megatron_bert"].extend(
        [
            "MEGATRON_BERT_PRETRAINED_MODEL_ARCHIVE_LIST",
            "MegatronBertForCausalLM",
            "MegatronBertForMaskedLM",
            "MegatronBertForMultipleChoice",
            "MegatronBertForNextSentencePrediction",
            "MegatronBertForPreTraining",
            "MegatronBertForQuestionAnswering",
            "MegatronBertForSequenceClassification",
            "MegatronBertForTokenClassification",
            "MegatronBertModel",
        ]
    )
    _import_structure["models.mmbt"].extend(["MMBTForClassification", "MMBTModel", "ModalEmbeddings"])
    _import_structure["models.mobilebert"].extend(
        [
            "MOBILEBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
            "MobileBertForMaskedLM",
            "MobileBertForMultipleChoice",
            "MobileBertForNextSentencePrediction",
            "MobileBertForPreTraining",
            "MobileBertForQuestionAnswering",
            "MobileBertForSequenceClassification",
            "MobileBertForTokenClassification",
            "MobileBertLayer",
            "MobileBertModel",
            "MobileBertPreTrainedModel",
            "load_tf_weights_in_mobilebert",
        ]
    )
    _import_structure["models.mpnet"].extend(
        [
            "MPNET_PRETRAINED_MODEL_ARCHIVE_LIST",
            "MPNetForMaskedLM",
            "MPNetForMultipleChoice",
            "MPNetForQuestionAnswering",
            "MPNetForSequenceClassification",
            "MPNetForTokenClassification",
            "MPNetLayer",
            "MPNetModel",
            "MPNetPreTrainedModel",
        ]
    )
    _import_structure["models.mt5"].extend(["MT5EncoderModel", "MT5ForConditionalGeneration", "MT5Model"])
    _import_structure["models.openai"].extend(
        [
            "OPENAI_GPT_PRETRAINED_MODEL_ARCHIVE_LIST",
            "OpenAIGPTDoubleHeadsModel",
            "OpenAIGPTForSequenceClassification",
            "OpenAIGPTLMHeadModel",
            "OpenAIGPTModel",
            "OpenAIGPTPreTrainedModel",
            "load_tf_weights_in_openai_gpt",
        ]
    )
    _import_structure["models.pegasus"].extend(
        ["PegasusForCausalLM", "PegasusForConditionalGeneration", "PegasusModel"]
    )
    _import_structure["models.prophetnet"].extend(
        [
            "PROPHETNET_PRETRAINED_MODEL_ARCHIVE_LIST",
            "ProphetNetDecoder",
            "ProphetNetEncoder",
            "ProphetNetForCausalLM",
            "ProphetNetForConditionalGeneration",
            "ProphetNetModel",
            "ProphetNetPreTrainedModel",
        ]
    )
    _import_structure["models.rag"].extend(["RagModel", "RagSequenceForGeneration", "RagTokenForGeneration"])
    _import_structure["models.reformer"].extend(
        [
            "REFORMER_PRETRAINED_MODEL_ARCHIVE_LIST",
            "ReformerAttention",
            "ReformerForMaskedLM",
            "ReformerForQuestionAnswering",
            "ReformerForSequenceClassification",
            "ReformerLayer",
            "ReformerModel",
            "ReformerModelWithLMHead",
        ]
    )
    _import_structure["models.retribert"].extend(
        ["RETRIBERT_PRETRAINED_MODEL_ARCHIVE_LIST", "RetriBertModel", "RetriBertPreTrainedModel"]
    )
    _import_structure["models.roberta"].extend(
        [
            "ROBERTA_PRETRAINED_MODEL_ARCHIVE_LIST",
            "RobertaForCausalLM",
            "RobertaForMaskedLM",
            "RobertaForMultipleChoice",
            "RobertaForQuestionAnswering",
            "RobertaForSequenceClassification",
            "RobertaForTokenClassification",
            "RobertaModel",
        ]
    )
    _import_structure["models.speech_to_text"].extend(
        [
            "SPEECH_TO_TEXT_PRETRAINED_MODEL_ARCHIVE_LIST",
            "Speech2TextForConditionalGeneration",
            "Speech2TextModel",
        ]
    )
    _import_structure["models.squeezebert"].extend(
        [
            "SQUEEZEBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
            "SqueezeBertForMaskedLM",
            "SqueezeBertForMultipleChoice",
            "SqueezeBertForQuestionAnswering",
            "SqueezeBertForSequenceClassification",
            "SqueezeBertForTokenClassification",
            "SqueezeBertModel",
            "SqueezeBertModule",
            "SqueezeBertPreTrainedModel",
        ]
    )
    _import_structure["models.t5"].extend(
        [
            "T5_PRETRAINED_MODEL_ARCHIVE_LIST",
            "T5EncoderModel",
            "T5ForConditionalGeneration",
            "T5Model",
            "T5PreTrainedModel",
            "load_tf_weights_in_t5",
        ]
    )
    _import_structure["models.tapas"].extend(
        [
            "TAPAS_PRETRAINED_MODEL_ARCHIVE_LIST",
            "TapasForMaskedLM",
            "TapasForQuestionAnswering",
            "TapasForSequenceClassification",
            "TapasModel",
        ]
    )
    _import_structure["models.transfo_xl"].extend(
        [
            "TRANSFO_XL_PRETRAINED_MODEL_ARCHIVE_LIST",
            "AdaptiveEmbedding",
            "TransfoXLForSequenceClassification",
            "TransfoXLLMHeadModel",
            "TransfoXLModel",
            "TransfoXLPreTrainedModel",
            "load_tf_weights_in_transfo_xl",
        ]
    )
    _import_structure["models.vit"].extend(
        [
            "VIT_PRETRAINED_MODEL_ARCHIVE_LIST",
            "ViTForImageClassification",
            "ViTModel",
            "ViTPreTrainedModel",
        ]
    )
    _import_structure["models.wav2vec2"].extend(
        [
            "WAV_2_VEC_2_PRETRAINED_MODEL_ARCHIVE_LIST",
            "Wav2Vec2ForCTC",
            "Wav2Vec2ForMaskedLM",
            "Wav2Vec2Model",
            "Wav2Vec2PreTrainedModel",
        ]
    )
    _import_structure["models.xlm"].extend(
        [
            "XLM_PRETRAINED_MODEL_ARCHIVE_LIST",
            "XLMForMultipleChoice",
            "XLMForQuestionAnswering",
            "XLMForQuestionAnsweringSimple",
            "XLMForSequenceClassification",
            "XLMForTokenClassification",
            "XLMModel",
            "XLMPreTrainedModel",
            "XLMWithLMHeadModel",
        ]
    )
    _import_structure["models.xlm_prophetnet"].extend(
        [
            "XLM_PROPHETNET_PRETRAINED_MODEL_ARCHIVE_LIST",
            "XLMProphetNetDecoder",
            "XLMProphetNetEncoder",
            "XLMProphetNetForCausalLM",
            "XLMProphetNetForConditionalGeneration",
            "XLMProphetNetModel",
        ]
    )
    _import_structure["models.xlm_roberta"].extend(
        [
            "XLM_ROBERTA_PRETRAINED_MODEL_ARCHIVE_LIST",
            "XLMRobertaForCausalLM",
            "XLMRobertaForMaskedLM",
            "XLMRobertaForMultipleChoice",
            "XLMRobertaForQuestionAnswering",
            "XLMRobertaForSequenceClassification",
            "XLMRobertaForTokenClassification",
            "XLMRobertaModel",
        ]
    )
    _import_structure["models.xlnet"].extend(
        [
            "XLNET_PRETRAINED_MODEL_ARCHIVE_LIST",
            "XLNetForMultipleChoice",
            "XLNetForQuestionAnswering",
            "XLNetForQuestionAnsweringSimple",
            "XLNetForSequenceClassification",
            "XLNetForTokenClassification",
            "XLNetLMHeadModel",
            "XLNetModel",
            "XLNetPreTrainedModel",
            "load_tf_weights_in_xlnet",
        ]
    )
    # Optimizers, LR schedulers and the PyTorch Trainer classes.
    _import_structure["optimization"] = [
        "Adafactor",
        "AdamW",
        "get_constant_schedule",
        "get_constant_schedule_with_warmup",
        "get_cosine_schedule_with_warmup",
        "get_cosine_with_hard_restarts_schedule_with_warmup",
        "get_linear_schedule_with_warmup",
        "get_polynomial_decay_schedule_with_warmup",
        "get_scheduler",
    ]
    _import_structure["trainer"] = ["Trainer"]
    _import_structure["trainer_pt_utils"] = ["torch_distributed_zero_first"]
    _import_structure["trainer_seq2seq"] = ["Seq2SeqTrainer"]
else:
    from .utils import dummy_pt_objects
    # Without torch, re-export every public name of the dummy module instead.
    _import_structure["utils.dummy_pt_objects"] = [name for name in dir(dummy_pt_objects) if not name.startswith("_")]
# TensorFlow-backed objects: every entry below is only importable when
# TensorFlow is installed; otherwise dummy placeholders are registered.
if is_tf_available():
    # Benchmarking and generation helpers
    _import_structure["benchmark.benchmark_args_tf"] = ["TensorFlowBenchmarkArguments"]
    _import_structure["benchmark.benchmark_tf"] = ["TensorFlowBenchmark"]
    _import_structure["generation_tf_utils"] = ["tf_top_k_top_p_filtering"]
    _import_structure["modeling_tf_utils"] = [
        "TFPreTrainedModel",
        "TFSequenceSummary",
        "TFSharedEmbeddings",
        "shape_list",
    ]
    # TensorFlow models structure: one `extend`/`append` per model submodule.
    _import_structure["models.albert"].extend(
        [
            "TF_ALBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
            "TFAlbertForMaskedLM",
            "TFAlbertForMultipleChoice",
            "TFAlbertForPreTraining",
            "TFAlbertForQuestionAnswering",
            "TFAlbertForSequenceClassification",
            "TFAlbertForTokenClassification",
            "TFAlbertMainLayer",
            "TFAlbertModel",
            "TFAlbertPreTrainedModel",
        ]
    )
    _import_structure["models.auto"].extend(
        [
            "TF_MODEL_FOR_CAUSAL_LM_MAPPING",
            "TF_MODEL_FOR_MASKED_LM_MAPPING",
            "TF_MODEL_FOR_MULTIPLE_CHOICE_MAPPING",
            "TF_MODEL_FOR_NEXT_SENTENCE_PREDICTION_MAPPING",
            "TF_MODEL_FOR_PRETRAINING_MAPPING",
            "TF_MODEL_FOR_QUESTION_ANSWERING_MAPPING",
            "TF_MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING",
            "TF_MODEL_FOR_SEQUENCE_CLASSIFICATION_MAPPING",
            "TF_MODEL_FOR_TOKEN_CLASSIFICATION_MAPPING",
            "TF_MODEL_MAPPING",
            "TF_MODEL_WITH_LM_HEAD_MAPPING",
            "TFAutoModel",
            "TFAutoModelForCausalLM",
            "TFAutoModelForMaskedLM",
            "TFAutoModelForMultipleChoice",
            "TFAutoModelForPreTraining",
            "TFAutoModelForQuestionAnswering",
            "TFAutoModelForSeq2SeqLM",
            "TFAutoModelForSequenceClassification",
            "TFAutoModelForTokenClassification",
            "TFAutoModelWithLMHead",
        ]
    )
    _import_structure["models.bart"].extend(["TFBartForConditionalGeneration", "TFBartModel", "TFBartPretrainedModel"])
    _import_structure["models.bert"].extend(
        [
            "TF_BERT_PRETRAINED_MODEL_ARCHIVE_LIST",
            "TFBertEmbeddings",
            "TFBertForMaskedLM",
            "TFBertForMultipleChoice",
            "TFBertForNextSentencePrediction",
            "TFBertForPreTraining",
            "TFBertForQuestionAnswering",
            "TFBertForSequenceClassification",
            "TFBertForTokenClassification",
            "TFBertLMHeadModel",
            "TFBertMainLayer",
            "TFBertModel",
            "TFBertPreTrainedModel",
        ]
    )
    _import_structure["models.blenderbot"].extend(["TFBlenderbotForConditionalGeneration", "TFBlenderbotModel"])
    _import_structure["models.blenderbot_small"].extend(
        ["TFBlenderbotSmallForConditionalGeneration", "TFBlenderbotSmallModel"]
    )
    _import_structure["models.camembert"].extend(
        [
            "TF_CAMEMBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
            "TFCamembertForMaskedLM",
            "TFCamembertForMultipleChoice",
            "TFCamembertForQuestionAnswering",
            "TFCamembertForSequenceClassification",
            "TFCamembertForTokenClassification",
            "TFCamembertModel",
        ]
    )
    _import_structure["models.convbert"].extend(
        [
            "TF_CONVBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
            "TFConvBertForMaskedLM",
            "TFConvBertForMultipleChoice",
            "TFConvBertForQuestionAnswering",
            "TFConvBertForSequenceClassification",
            "TFConvBertForTokenClassification",
            "TFConvBertLayer",
            "TFConvBertModel",
            "TFConvBertPreTrainedModel",
        ]
    )
    _import_structure["models.ctrl"].extend(
        [
            "TF_CTRL_PRETRAINED_MODEL_ARCHIVE_LIST",
            "TFCTRLForSequenceClassification",
            "TFCTRLLMHeadModel",
            "TFCTRLModel",
            "TFCTRLPreTrainedModel",
        ]
    )
    _import_structure["models.distilbert"].extend(
        [
            "TF_DISTILBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
            "TFDistilBertForMaskedLM",
            "TFDistilBertForMultipleChoice",
            "TFDistilBertForQuestionAnswering",
            "TFDistilBertForSequenceClassification",
            "TFDistilBertForTokenClassification",
            "TFDistilBertMainLayer",
            "TFDistilBertModel",
            "TFDistilBertPreTrainedModel",
        ]
    )
    _import_structure["models.dpr"].extend(
        [
            "TF_DPR_CONTEXT_ENCODER_PRETRAINED_MODEL_ARCHIVE_LIST",
            "TF_DPR_QUESTION_ENCODER_PRETRAINED_MODEL_ARCHIVE_LIST",
            "TF_DPR_READER_PRETRAINED_MODEL_ARCHIVE_LIST",
            "TFDPRContextEncoder",
            "TFDPRPretrainedContextEncoder",
            "TFDPRPretrainedQuestionEncoder",
            "TFDPRPretrainedReader",
            "TFDPRQuestionEncoder",
            "TFDPRReader",
        ]
    )
    _import_structure["models.electra"].extend(
        [
            "TF_ELECTRA_PRETRAINED_MODEL_ARCHIVE_LIST",
            "TFElectraForMaskedLM",
            "TFElectraForMultipleChoice",
            "TFElectraForPreTraining",
            "TFElectraForQuestionAnswering",
            "TFElectraForSequenceClassification",
            "TFElectraForTokenClassification",
            "TFElectraModel",
            "TFElectraPreTrainedModel",
        ]
    )
    _import_structure["models.flaubert"].extend(
        [
            "TF_FLAUBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
            "TFFlaubertForMultipleChoice",
            "TFFlaubertForQuestionAnsweringSimple",
            "TFFlaubertForSequenceClassification",
            "TFFlaubertForTokenClassification",
            "TFFlaubertModel",
            "TFFlaubertWithLMHeadModel",
        ]
    )
    _import_structure["models.funnel"].extend(
        [
            "TF_FUNNEL_PRETRAINED_MODEL_ARCHIVE_LIST",
            "TFFunnelBaseModel",
            "TFFunnelForMaskedLM",
            "TFFunnelForMultipleChoice",
            "TFFunnelForPreTraining",
            "TFFunnelForQuestionAnswering",
            "TFFunnelForSequenceClassification",
            "TFFunnelForTokenClassification",
            "TFFunnelModel",
        ]
    )
    _import_structure["models.gpt2"].extend(
        [
            "TF_GPT2_PRETRAINED_MODEL_ARCHIVE_LIST",
            "TFGPT2DoubleHeadsModel",
            "TFGPT2ForSequenceClassification",
            "TFGPT2LMHeadModel",
            "TFGPT2MainLayer",
            "TFGPT2Model",
            "TFGPT2PreTrainedModel",
        ]
    )
    _import_structure["models.layoutlm"].extend(
        [
            "TF_LAYOUTLM_PRETRAINED_MODEL_ARCHIVE_LIST",
            "TFLayoutLMForMaskedLM",
            "TFLayoutLMForSequenceClassification",
            "TFLayoutLMForTokenClassification",
            "TFLayoutLMMainLayer",
            "TFLayoutLMModel",
            "TFLayoutLMPreTrainedModel",
        ]
    )
    _import_structure["models.led"].extend(["TFLEDForConditionalGeneration", "TFLEDModel", "TFLEDPreTrainedModel"])
    _import_structure["models.longformer"].extend(
        [
            "TF_LONGFORMER_PRETRAINED_MODEL_ARCHIVE_LIST",
            "TFLongformerForMaskedLM",
            "TFLongformerForMultipleChoice",
            "TFLongformerForQuestionAnswering",
            "TFLongformerForSequenceClassification",
            "TFLongformerForTokenClassification",
            "TFLongformerModel",
            "TFLongformerSelfAttention",
        ]
    )
    _import_structure["models.lxmert"].extend(
        [
            "TF_LXMERT_PRETRAINED_MODEL_ARCHIVE_LIST",
            "TFLxmertForPreTraining",
            "TFLxmertMainLayer",
            "TFLxmertModel",
            "TFLxmertPreTrainedModel",
            "TFLxmertVisualFeatureEncoder",
        ]
    )
    _import_structure["models.marian"].extend(["TFMarianModel", "TFMarianMTModel"])
    _import_structure["models.mbart"].extend(["TFMBartForConditionalGeneration", "TFMBartModel"])
    _import_structure["models.mobilebert"].extend(
        [
            "TF_MOBILEBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
            "TFMobileBertForMaskedLM",
            "TFMobileBertForMultipleChoice",
            "TFMobileBertForNextSentencePrediction",
            "TFMobileBertForPreTraining",
            "TFMobileBertForQuestionAnswering",
            "TFMobileBertForSequenceClassification",
            "TFMobileBertForTokenClassification",
            "TFMobileBertMainLayer",
            "TFMobileBertModel",
            "TFMobileBertPreTrainedModel",
        ]
    )
    _import_structure["models.mpnet"].extend(
        [
            "TF_MPNET_PRETRAINED_MODEL_ARCHIVE_LIST",
            "TFMPNetForMaskedLM",
            "TFMPNetForMultipleChoice",
            "TFMPNetForQuestionAnswering",
            "TFMPNetForSequenceClassification",
            "TFMPNetForTokenClassification",
            "TFMPNetMainLayer",
            "TFMPNetModel",
            "TFMPNetPreTrainedModel",
        ]
    )
    _import_structure["models.mt5"].extend(["TFMT5EncoderModel", "TFMT5ForConditionalGeneration", "TFMT5Model"])
    _import_structure["models.openai"].extend(
        [
            "TF_OPENAI_GPT_PRETRAINED_MODEL_ARCHIVE_LIST",
            "TFOpenAIGPTDoubleHeadsModel",
            "TFOpenAIGPTForSequenceClassification",
            "TFOpenAIGPTLMHeadModel",
            "TFOpenAIGPTMainLayer",
            "TFOpenAIGPTModel",
            "TFOpenAIGPTPreTrainedModel",
        ]
    )
    _import_structure["models.pegasus"].extend(["TFPegasusForConditionalGeneration", "TFPegasusModel"])
    _import_structure["models.rag"].extend(
        [
            "TFRagModel",
            "TFRagSequenceForGeneration",
            "TFRagTokenForGeneration",
        ]
    )
    _import_structure["models.roberta"].extend(
        [
            "TF_ROBERTA_PRETRAINED_MODEL_ARCHIVE_LIST",
            "TFRobertaForMaskedLM",
            "TFRobertaForMultipleChoice",
            "TFRobertaForQuestionAnswering",
            "TFRobertaForSequenceClassification",
            "TFRobertaForTokenClassification",
            "TFRobertaMainLayer",
            "TFRobertaModel",
            "TFRobertaPreTrainedModel",
        ]
    )
    _import_structure["models.t5"].extend(
        [
            "TF_T5_PRETRAINED_MODEL_ARCHIVE_LIST",
            "TFT5EncoderModel",
            "TFT5ForConditionalGeneration",
            "TFT5Model",
            "TFT5PreTrainedModel",
        ]
    )
    _import_structure["models.transfo_xl"].extend(
        [
            "TF_TRANSFO_XL_PRETRAINED_MODEL_ARCHIVE_LIST",
            "TFAdaptiveEmbedding",
            "TFTransfoXLForSequenceClassification",
            "TFTransfoXLLMHeadModel",
            "TFTransfoXLMainLayer",
            "TFTransfoXLModel",
            "TFTransfoXLPreTrainedModel",
        ]
    )
    _import_structure["models.xlm"].extend(
        [
            "TF_XLM_PRETRAINED_MODEL_ARCHIVE_LIST",
            "TFXLMForMultipleChoice",
            "TFXLMForQuestionAnsweringSimple",
            "TFXLMForSequenceClassification",
            "TFXLMForTokenClassification",
            "TFXLMMainLayer",
            "TFXLMModel",
            "TFXLMPreTrainedModel",
            "TFXLMWithLMHeadModel",
        ]
    )
    _import_structure["models.xlm_roberta"].extend(
        [
            "TF_XLM_ROBERTA_PRETRAINED_MODEL_ARCHIVE_LIST",
            "TFXLMRobertaForMaskedLM",
            "TFXLMRobertaForMultipleChoice",
            "TFXLMRobertaForQuestionAnswering",
            "TFXLMRobertaForSequenceClassification",
            "TFXLMRobertaForTokenClassification",
            "TFXLMRobertaModel",
        ]
    )
    _import_structure["models.xlnet"].extend(
        [
            "TF_XLNET_PRETRAINED_MODEL_ARCHIVE_LIST",
            "TFXLNetForMultipleChoice",
            "TFXLNetForQuestionAnsweringSimple",
            "TFXLNetForSequenceClassification",
            "TFXLNetForTokenClassification",
            "TFXLNetLMHeadModel",
            "TFXLNetMainLayer",
            "TFXLNetModel",
            "TFXLNetPreTrainedModel",
        ]
    )
    # TF optimizers and Trainer.
    _import_structure["optimization_tf"] = ["AdamWeightDecay", "GradientAccumulator", "WarmUp", "create_optimizer"]
    _import_structure["trainer_tf"] = ["TFTrainer"]
else:
    from .utils import dummy_tf_objects
    # Without TensorFlow, re-export the dummy module's public names instead.
    _import_structure["utils.dummy_tf_objects"] = [name for name in dir(dummy_tf_objects) if not name.startswith("_")]
# Flax/JAX-backed objects: only importable when the `flax` package is
# installed; otherwise the dummy placeholder objects are registered.
if is_flax_available():
    _import_structure["modeling_flax_utils"] = ["FlaxPreTrainedModel"]
    # Flax auto-classes and mappings
    _import_structure["models.auto"].extend(
        [
            "FLAX_MODEL_FOR_MASKED_LM_MAPPING",
            "FLAX_MODEL_FOR_MULTIPLE_CHOICE_MAPPING",
            "FLAX_MODEL_FOR_NEXT_SENTENCE_PREDICTION_MAPPING",
            "FLAX_MODEL_FOR_PRETRAINING_MAPPING",
            "FLAX_MODEL_FOR_QUESTION_ANSWERING_MAPPING",
            "FLAX_MODEL_FOR_SEQUENCE_CLASSIFICATION_MAPPING",
            "FLAX_MODEL_FOR_TOKEN_CLASSIFICATION_MAPPING",
            "FLAX_MODEL_MAPPING",
            "FlaxAutoModel",
            "FlaxAutoModelForMaskedLM",
            "FlaxAutoModelForMultipleChoice",
            "FlaxAutoModelForNextSentencePrediction",
            "FlaxAutoModelForPreTraining",
            "FlaxAutoModelForQuestionAnswering",
            "FlaxAutoModelForSequenceClassification",
            "FlaxAutoModelForTokenClassification",
        ]
    )
    _import_structure["models.bert"].extend(
        [
            "FlaxBertForMaskedLM",
            "FlaxBertForMultipleChoice",
            "FlaxBertForNextSentencePrediction",
            "FlaxBertForPreTraining",
            "FlaxBertForQuestionAnswering",
            "FlaxBertForSequenceClassification",
            "FlaxBertForTokenClassification",
            "FlaxBertModel",
            "FlaxBertPreTrainedModel",
        ]
    )
    _import_structure["models.roberta"].append("FlaxRobertaModel")
else:
    from .utils import dummy_flax_objects
    # Without flax, re-export the dummy module's public names instead.
    _import_structure["utils.dummy_flax_objects"] = [
        name for name in dir(dummy_flax_objects) if not name.startswith("_")
    ]
# Direct imports for type-checking
if TYPE_CHECKING:
# Configuration
from .configuration_utils import PretrainedConfig
# Data
from .data import (
DataProcessor,
InputExample,
InputFeatures,
SingleSentenceClassificationProcessor,
SquadExample,
SquadFeatures,
SquadV1Processor,
SquadV2Processor,
glue_compute_metrics,
glue_convert_examples_to_features,
glue_output_modes,
glue_processors,
glue_tasks_num_labels,
squad_convert_examples_to_features,
xnli_compute_metrics,
xnli_output_modes,
xnli_processors,
xnli_tasks_num_labels,
)
# Feature Extractor
from .feature_extraction_utils import BatchFeature, SequenceFeatureExtractor
# Files and general utilities
from .file_utils import (
CONFIG_NAME,
MODEL_CARD_NAME,
PYTORCH_PRETRAINED_BERT_CACHE,
PYTORCH_TRANSFORMERS_CACHE,
SPIECE_UNDERLINE,
TF2_WEIGHTS_NAME,
TF_WEIGHTS_NAME,
TRANSFORMERS_CACHE,
WEIGHTS_NAME,
TensorType,
add_end_docstrings,
add_start_docstrings,
cached_path,
is_apex_available,
is_datasets_available,
is_faiss_available,
is_flax_available,
is_psutil_available,
is_py3nvml_available,
is_sentencepiece_available,
is_sklearn_available,
is_speech_available,
is_tf_available,
is_tokenizers_available,
is_torch_available,
is_torch_tpu_available,
is_vision_available,
)
from .hf_argparser import HfArgumentParser
# Integrations
from .integrations import (
is_comet_available,
is_optuna_available,
is_ray_available,
is_ray_tune_available,
is_tensorboard_available,
is_wandb_available,
)
# Model Cards
from .modelcard import ModelCard
# TF 2.0 <=> PyTorch conversion utilities
from .modeling_tf_pytorch_utils import (
convert_tf_weight_name_to_pt_weight_name,
load_pytorch_checkpoint_in_tf2_model,
load_pytorch_model_in_tf2_model,
load_pytorch_weights_in_tf2_model,
load_tf2_checkpoint_in_pytorch_model,
load_tf2_model_in_pytorch_model,
load_tf2_weights_in_pytorch_model,
)
from .models.albert import ALBERT_PRETRAINED_CONFIG_ARCHIVE_MAP, AlbertConfig
from .models.auto import (
ALL_PRETRAINED_CONFIG_ARCHIVE_MAP,
CONFIG_MAPPING,
FEATURE_EXTRACTOR_MAPPING,
MODEL_NAMES_MAPPING,
TOKENIZER_MAPPING,
AutoConfig,
AutoFeatureExtractor,
AutoTokenizer,
)
from .models.bart import BartConfig, BartTokenizer
from .models.bert import (
BERT_PRETRAINED_CONFIG_ARCHIVE_MAP,
BasicTokenizer,
BertConfig,
BertTokenizer,
WordpieceTokenizer,
)
from .models.bert_generation import BertGenerationConfig
from .models.bert_japanese import BertJapaneseTokenizer, CharacterTokenizer, MecabTokenizer
from .models.bertweet import BertweetTokenizer
from .models.big_bird import BIG_BIRD_PRETRAINED_CONFIG_ARCHIVE_MAP, BigBirdConfig, BigBirdTokenizer
from .models.blenderbot import BLENDERBOT_PRETRAINED_CONFIG_ARCHIVE_MAP, BlenderbotConfig, BlenderbotTokenizer
from .models.blenderbot_small import (
BLENDERBOT_SMALL_PRETRAINED_CONFIG_ARCHIVE_MAP,
BlenderbotSmallConfig,
BlenderbotSmallTokenizer,
)
from .models.camembert import CAMEMBERT_PRETRAINED_CONFIG_ARCHIVE_MAP, CamembertConfig
from .models.convbert import CONVBERT_PRETRAINED_CONFIG_ARCHIVE_MAP, ConvBertConfig, ConvBertTokenizer
from .models.cpm import CpmTokenizer
from .models.ctrl import CTRL_PRETRAINED_CONFIG_ARCHIVE_MAP, CTRLConfig, CTRLTokenizer
from .models.deberta import DEBERTA_PRETRAINED_CONFIG_ARCHIVE_MAP, DebertaConfig, DebertaTokenizer
from .models.deberta_v2 import DEBERTA_V2_PRETRAINED_CONFIG_ARCHIVE_MAP, DebertaV2Config
from .models.distilbert import DISTILBERT_PRETRAINED_CONFIG_ARCHIVE_MAP, DistilBertConfig, DistilBertTokenizer
from .models.dpr import (
DPR_PRETRAINED_CONFIG_ARCHIVE_MAP,
DPRConfig,
DPRContextEncoderTokenizer,
DPRQuestionEncoderTokenizer,
DPRReaderOutput,
DPRReaderTokenizer,
)
from .models.electra import ELECTRA_PRETRAINED_CONFIG_ARCHIVE_MAP, ElectraConfig, ElectraTokenizer
from .models.encoder_decoder import EncoderDecoderConfig
from .models.flaubert import FLAUBERT_PRETRAINED_CONFIG_ARCHIVE_MAP, FlaubertConfig, FlaubertTokenizer
from .models.fsmt import FSMT_PRETRAINED_CONFIG_ARCHIVE_MAP, FSMTConfig, FSMTTokenizer
from .models.funnel import FUNNEL_PRETRAINED_CONFIG_ARCHIVE_MAP, FunnelConfig, FunnelTokenizer
from .models.gpt2 import GPT2_PRETRAINED_CONFIG_ARCHIVE_MAP, GPT2Config, GPT2Tokenizer
from .models.gpt_neo import GPT_NEO_PRETRAINED_CONFIG_ARCHIVE_MAP, GPTNeoConfig
from .models.herbert import HerbertTokenizer
from .models.ibert import IBERT_PRETRAINED_CONFIG_ARCHIVE_MAP, IBertConfig
from .models.layoutlm import LAYOUTLM_PRETRAINED_CONFIG_ARCHIVE_MAP, LayoutLMConfig, LayoutLMTokenizer
from .models.led import LED_PRETRAINED_CONFIG_ARCHIVE_MAP, LEDConfig, LEDTokenizer
from .models.longformer import LONGFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP, LongformerConfig, LongformerTokenizer
from .models.lxmert import LXMERT_PRETRAINED_CONFIG_ARCHIVE_MAP, LxmertConfig, LxmertTokenizer
from .models.m2m_100 import M2M_100_PRETRAINED_CONFIG_ARCHIVE_MAP, M2M100Config
from .models.marian import MarianConfig
from .models.mbart import MBartConfig
from .models.megatron_bert import MEGATRON_BERT_PRETRAINED_CONFIG_ARCHIVE_MAP, MegatronBertConfig
from .models.mmbt import MMBTConfig
from .models.mobilebert import MOBILEBERT_PRETRAINED_CONFIG_ARCHIVE_MAP, MobileBertConfig, MobileBertTokenizer
from .models.mpnet import MPNET_PRETRAINED_CONFIG_ARCHIVE_MAP, MPNetConfig, MPNetTokenizer
from .models.mt5 import MT5Config
from .models.openai import OPENAI_GPT_PRETRAINED_CONFIG_ARCHIVE_MAP, OpenAIGPTConfig, OpenAIGPTTokenizer
from .models.pegasus import PegasusConfig
from .models.phobert import PhobertTokenizer
from .models.prophetnet import PROPHETNET_PRETRAINED_CONFIG_ARCHIVE_MAP, ProphetNetConfig, ProphetNetTokenizer
from .models.rag import RagConfig, RagRetriever, RagTokenizer
from .models.reformer import REFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP, ReformerConfig
from .models.retribert import RETRIBERT_PRETRAINED_CONFIG_ARCHIVE_MAP, RetriBertConfig, RetriBertTokenizer
from .models.roberta import ROBERTA_PRETRAINED_CONFIG_ARCHIVE_MAP, RobertaConfig, RobertaTokenizer
from .models.speech_to_text import SPEECH_TO_TEXT_PRETRAINED_CONFIG_ARCHIVE_MAP, Speech2TextConfig
from .models.squeezebert import SQUEEZEBERT_PRETRAINED_CONFIG_ARCHIVE_MAP, SqueezeBertConfig, SqueezeBertTokenizer
from .models.t5 import T5_PRETRAINED_CONFIG_ARCHIVE_MAP, T5Config
from .models.tapas import TAPAS_PRETRAINED_CONFIG_ARCHIVE_MAP, TapasConfig, TapasTokenizer
from .models.transfo_xl import (
TRANSFO_XL_PRETRAINED_CONFIG_ARCHIVE_MAP,
TransfoXLConfig,
TransfoXLCorpus,
TransfoXLTokenizer,
)
from .models.vit import VIT_PRETRAINED_CONFIG_ARCHIVE_MAP, ViTConfig
from .models.wav2vec2 import (
WAV_2_VEC_2_PRETRAINED_CONFIG_ARCHIVE_MAP,
Wav2Vec2Config,
Wav2Vec2CTCTokenizer,
Wav2Vec2FeatureExtractor,
Wav2Vec2Processor,
Wav2Vec2Tokenizer,
)
from .models.xlm import XLM_PRETRAINED_CONFIG_ARCHIVE_MAP, XLMConfig, XLMTokenizer
from .models.xlm_prophetnet import XLM_PROPHETNET_PRETRAINED_CONFIG_ARCHIVE_MAP, XLMProphetNetConfig
from .models.xlm_roberta import XLM_ROBERTA_PRETRAINED_CONFIG_ARCHIVE_MAP, XLMRobertaConfig
from .models.xlnet import XLNET_PRETRAINED_CONFIG_ARCHIVE_MAP, XLNetConfig
# Pipelines
from .pipelines import (
Conversation,
ConversationalPipeline,
CsvPipelineDataFormat,
FeatureExtractionPipeline,
FillMaskPipeline,
JsonPipelineDataFormat,
NerPipeline,
PipedPipelineDataFormat,
Pipeline,
PipelineDataFormat,
QuestionAnsweringPipeline,
SummarizationPipeline,
TableQuestionAnsweringPipeline,
Text2TextGenerationPipeline,
TextClassificationPipeline,
TextGenerationPipeline,
TokenClassificationPipeline,
TranslationPipeline,
ZeroShotClassificationPipeline,
pipeline,
)
# Tokenization
from .tokenization_utils import PreTrainedTokenizer
from .tokenization_utils_base import (
AddedToken,
BatchEncoding,
CharSpan,
PreTrainedTokenizerBase,
SpecialTokensMixin,
TokenSpan,
)
# Trainer
from .trainer_callback import (
DefaultFlowCallback,
EarlyStoppingCallback,
PrinterCallback,
ProgressCallback,
TrainerCallback,
TrainerControl,
TrainerState,
)
from .trainer_utils import EvalPrediction, IntervalStrategy, SchedulerType, set_seed
from .training_args import TrainingArguments
from .training_args_seq2seq import Seq2SeqTrainingArguments
from .training_args_tf import TFTrainingArguments
from .utils import logging
if is_sentencepiece_available():
from .models.albert import AlbertTokenizer
from .models.barthez import BarthezTokenizer
from .models.bert_generation import BertGenerationTokenizer
from .models.camembert import CamembertTokenizer
from .models.deberta_v2 import DebertaV2Tokenizer
from .models.m2m_100 import M2M100Tokenizer
from .models.marian import MarianTokenizer
from .models.mbart import MBart50Tokenizer, MBartTokenizer
from .models.mt5 import MT5Tokenizer
from .models.pegasus import PegasusTokenizer
from .models.reformer import ReformerTokenizer
from .models.speech_to_text import Speech2TextTokenizer
from .models.t5 import T5Tokenizer
from .models.xlm_prophetnet import XLMProphetNetTokenizer
from .models.xlm_roberta import XLMRobertaTokenizer
from .models.xlnet import XLNetTokenizer
else:
from .utils.dummy_sentencepiece_objects import *
if is_tokenizers_available():
from .models.albert import AlbertTokenizerFast
from .models.bart import BartTokenizerFast
from .models.barthez import BarthezTokenizerFast
from .models.bert import BertTokenizerFast
from .models.camembert import CamembertTokenizerFast
from .models.convbert import ConvBertTokenizerFast
from .models.distilbert import DistilBertTokenizerFast
from .models.dpr import DPRContextEncoderTokenizerFast, DPRQuestionEncoderTokenizerFast, DPRReaderTokenizerFast
from .models.electra import ElectraTokenizerFast
from .models.funnel import FunnelTokenizerFast
from .models.gpt2 import GPT2TokenizerFast
from .models.herbert import HerbertTokenizerFast
from .models.layoutlm import LayoutLMTokenizerFast
from .models.led import LEDTokenizerFast
from .models.longformer import LongformerTokenizerFast
from .models.lxmert import LxmertTokenizerFast
from .models.mbart import MBart50TokenizerFast, MBartTokenizerFast
from .models.mobilebert import MobileBertTokenizerFast
from .models.mpnet import MPNetTokenizerFast
from .models.mt5 import MT5TokenizerFast
from .models.openai import OpenAIGPTTokenizerFast
from .models.pegasus import PegasusTokenizerFast
from .models.reformer import ReformerTokenizerFast
from .models.retribert import RetriBertTokenizerFast
from .models.roberta import RobertaTokenizerFast
from .models.squeezebert import SqueezeBertTokenizerFast
from .models.t5 import T5TokenizerFast
from .models.xlm_roberta import XLMRobertaTokenizerFast
from .models.xlnet import XLNetTokenizerFast
from .tokenization_utils_fast import PreTrainedTokenizerFast
else:
from .utils.dummy_tokenizers_objects import *
if is_sentencepiece_available() and is_tokenizers_available():
from .convert_slow_tokenizer import SLOW_TO_FAST_CONVERTERS, convert_slow_tokenizer
else:
from .utils.dummies_sentencepiece_and_tokenizers_objects import *
if is_speech_available():
from .models.speech_to_text import Speech2TextFeatureExtractor
else:
from .utils.dummy_speech_objects import *
if is_speech_available() and is_sentencepiece_available():
from .models.speech_to_text import Speech2TextProcessor
else:
from .utils.dummy_sentencepiece_and_speech_objects import *
if is_vision_available():
from .image_utils import ImageFeatureExtractionMixin
from .models.vit import ViTFeatureExtractor
else:
from .utils.dummy_vision_objects import *
# Modeling
if is_torch_available():
# Benchmarks
from .benchmark.benchmark import PyTorchBenchmark
from .benchmark.benchmark_args import PyTorchBenchmarkArguments
from .data.data_collator import (
DataCollator,
DataCollatorForLanguageModeling,
DataCollatorForPermutationLanguageModeling,
DataCollatorForSeq2Seq,
DataCollatorForSOP,
DataCollatorForTokenClassification,
DataCollatorForWholeWordMask,
DataCollatorWithPadding,
default_data_collator,
)
from .data.datasets import (
GlueDataset,
GlueDataTrainingArguments,
LineByLineTextDataset,
LineByLineWithRefDataset,
LineByLineWithSOPTextDataset,
SquadDataset,
SquadDataTrainingArguments,
TextDataset,
TextDatasetForNextSentencePrediction,
)
from .generation_beam_search import BeamScorer, BeamSearchScorer
from .generation_logits_process import (
ForcedBOSTokenLogitsProcessor,
ForcedEOSTokenLogitsProcessor,
HammingDiversityLogitsProcessor,
InfNanRemoveLogitsProcessor,
LogitsProcessor,
LogitsProcessorList,
LogitsWarper,
MinLengthLogitsProcessor,
NoBadWordsLogitsProcessor,
NoRepeatNGramLogitsProcessor,
PrefixConstrainedLogitsProcessor,
RepetitionPenaltyLogitsProcessor,
TemperatureLogitsWarper,
TopKLogitsWarper,
TopPLogitsWarper,
)
from .generation_stopping_criteria import (
MaxLengthCriteria,
MaxTimeCriteria,
StoppingCriteria,
StoppingCriteriaList,
)
from .generation_utils import top_k_top_p_filtering
from .modeling_utils import Conv1D, PreTrainedModel, apply_chunking_to_forward, prune_layer
from .models.albert import (
ALBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
AlbertForMaskedLM,
AlbertForMultipleChoice,
AlbertForPreTraining,
AlbertForQuestionAnswering,
AlbertForSequenceClassification,
AlbertForTokenClassification,
AlbertModel,
AlbertPreTrainedModel,
load_tf_weights_in_albert,
)
from .models.auto import (
MODEL_FOR_CAUSAL_LM_MAPPING,
MODEL_FOR_IMAGE_CLASSIFICATION_MAPPING,
MODEL_FOR_MASKED_LM_MAPPING,
MODEL_FOR_MULTIPLE_CHOICE_MAPPING,
MODEL_FOR_NEXT_SENTENCE_PREDICTION_MAPPING,
MODEL_FOR_PRETRAINING_MAPPING,
MODEL_FOR_QUESTION_ANSWERING_MAPPING,
MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING,
MODEL_FOR_SEQUENCE_CLASSIFICATION_MAPPING,
MODEL_FOR_TABLE_QUESTION_ANSWERING_MAPPING,
MODEL_FOR_TOKEN_CLASSIFICATION_MAPPING,
MODEL_MAPPING,
MODEL_WITH_LM_HEAD_MAPPING,
AutoModel,
AutoModelForCausalLM,
AutoModelForMaskedLM,
AutoModelForMultipleChoice,
AutoModelForNextSentencePrediction,
AutoModelForPreTraining,
AutoModelForQuestionAnswering,
AutoModelForSeq2SeqLM,
AutoModelForSequenceClassification,
AutoModelForTableQuestionAnswering,
AutoModelForTokenClassification,
AutoModelWithLMHead,
)
from .models.bart import (
BART_PRETRAINED_MODEL_ARCHIVE_LIST,
BartForCausalLM,
BartForConditionalGeneration,
BartForQuestionAnswering,
BartForSequenceClassification,
BartModel,
BartPretrainedModel,
PretrainedBartModel,
)
from .models.bert import (
BERT_PRETRAINED_MODEL_ARCHIVE_LIST,
BertForMaskedLM,
BertForMultipleChoice,
BertForNextSentencePrediction,
BertForPreTraining,
BertForQuestionAnswering,
BertForSequenceClassification,
BertForTokenClassification,
BertLayer,
BertLMHeadModel,
BertModel,
BertPreTrainedModel,
load_tf_weights_in_bert,
)
from .models.bert_generation import (
BertGenerationDecoder,
BertGenerationEncoder,
load_tf_weights_in_bert_generation,
)
from .models.big_bird import (
BIG_BIRD_PRETRAINED_MODEL_ARCHIVE_LIST,
BigBirdForCausalLM,
BigBirdForMaskedLM,
BigBirdForMultipleChoice,
BigBirdForPreTraining,
BigBirdForQuestionAnswering,
BigBirdForSequenceClassification,
BigBirdForTokenClassification,
BigBirdLayer,
BigBirdModel,
BigBirdPreTrainedModel,
load_tf_weights_in_big_bird,
)
from .models.blenderbot import (
BLENDERBOT_PRETRAINED_MODEL_ARCHIVE_LIST,
BlenderbotForCausalLM,
BlenderbotForConditionalGeneration,
BlenderbotModel,
)
from .models.blenderbot_small import (
BLENDERBOT_SMALL_PRETRAINED_MODEL_ARCHIVE_LIST,
BlenderbotSmallForCausalLM,
BlenderbotSmallForConditionalGeneration,
BlenderbotSmallModel,
)
from .models.camembert import (
CAMEMBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
CamembertForCausalLM,
CamembertForMaskedLM,
CamembertForMultipleChoice,
CamembertForQuestionAnswering,
CamembertForSequenceClassification,
CamembertForTokenClassification,
CamembertModel,
)
from .models.convbert import (
CONVBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
ConvBertForMaskedLM,
ConvBertForMultipleChoice,
ConvBertForQuestionAnswering,
ConvBertForSequenceClassification,
ConvBertForTokenClassification,
ConvBertLayer,
ConvBertModel,
ConvBertPreTrainedModel,
load_tf_weights_in_convbert,
)
from .models.ctrl import (
CTRL_PRETRAINED_MODEL_ARCHIVE_LIST,
CTRLForSequenceClassification,
CTRLLMHeadModel,
CTRLModel,
CTRLPreTrainedModel,
)
from .models.deberta import (
DEBERTA_PRETRAINED_MODEL_ARCHIVE_LIST,
DebertaForMaskedLM,
DebertaForQuestionAnswering,
DebertaForSequenceClassification,
DebertaForTokenClassification,
DebertaModel,
DebertaPreTrainedModel,
)
from .models.deberta_v2 import (
DEBERTA_V2_PRETRAINED_MODEL_ARCHIVE_LIST,
DebertaV2ForMaskedLM,
DebertaV2ForQuestionAnswering,
DebertaV2ForSequenceClassification,
DebertaV2ForTokenClassification,
DebertaV2Model,
DebertaV2PreTrainedModel,
)
from .models.distilbert import (
DISTILBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
DistilBertForMaskedLM,
DistilBertForMultipleChoice,
DistilBertForQuestionAnswering,
DistilBertForSequenceClassification,
DistilBertForTokenClassification,
DistilBertModel,
DistilBertPreTrainedModel,
)
from .models.dpr import (
DPR_CONTEXT_ENCODER_PRETRAINED_MODEL_ARCHIVE_LIST,
DPR_QUESTION_ENCODER_PRETRAINED_MODEL_ARCHIVE_LIST,
DPR_READER_PRETRAINED_MODEL_ARCHIVE_LIST,
DPRContextEncoder,
DPRPretrainedContextEncoder,
DPRPretrainedQuestionEncoder,
DPRPretrainedReader,
DPRQuestionEncoder,
DPRReader,
)
from .models.electra import (
ELECTRA_PRETRAINED_MODEL_ARCHIVE_LIST,
ElectraForMaskedLM,
ElectraForMultipleChoice,
ElectraForPreTraining,
ElectraForQuestionAnswering,
ElectraForSequenceClassification,
ElectraForTokenClassification,
ElectraModel,
ElectraPreTrainedModel,
load_tf_weights_in_electra,
)
from .models.encoder_decoder import EncoderDecoderModel
from .models.flaubert import (
FLAUBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
FlaubertForMultipleChoice,
FlaubertForQuestionAnswering,
FlaubertForQuestionAnsweringSimple,
FlaubertForSequenceClassification,
FlaubertForTokenClassification,
FlaubertModel,
FlaubertWithLMHeadModel,
)
from .models.fsmt import FSMTForConditionalGeneration, FSMTModel, PretrainedFSMTModel
from .models.funnel import (
FUNNEL_PRETRAINED_MODEL_ARCHIVE_LIST,
FunnelBaseModel,
FunnelForMaskedLM,
FunnelForMultipleChoice,
FunnelForPreTraining,
FunnelForQuestionAnswering,
FunnelForSequenceClassification,
FunnelForTokenClassification,
FunnelModel,
load_tf_weights_in_funnel,
)
from .models.gpt2 import (
GPT2_PRETRAINED_MODEL_ARCHIVE_LIST,
GPT2DoubleHeadsModel,
GPT2ForSequenceClassification,
GPT2LMHeadModel,
GPT2Model,
GPT2PreTrainedModel,
load_tf_weights_in_gpt2,
)
from .models.gpt_neo import (
GPT_NEO_PRETRAINED_MODEL_ARCHIVE_LIST,
GPTNeoForCausalLM,
GPTNeoModel,
GPTNeoPreTrainedModel,
load_tf_weights_in_gpt_neo,
)
from .models.ibert import (
IBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
IBertForMaskedLM,
IBertForMultipleChoice,
IBertForQuestionAnswering,
IBertForSequenceClassification,
IBertForTokenClassification,
IBertModel,
IBertPreTrainedModel,
)
from .models.layoutlm import (
LAYOUTLM_PRETRAINED_MODEL_ARCHIVE_LIST,
LayoutLMForMaskedLM,
LayoutLMForSequenceClassification,
LayoutLMForTokenClassification,
LayoutLMModel,
)
from .models.led import (
LED_PRETRAINED_MODEL_ARCHIVE_LIST,
LEDForConditionalGeneration,
LEDForQuestionAnswering,
LEDForSequenceClassification,
LEDModel,
)
from .models.longformer import (
LONGFORMER_PRETRAINED_MODEL_ARCHIVE_LIST,
LongformerForMaskedLM,
LongformerForMultipleChoice,
LongformerForQuestionAnswering,
LongformerForSequenceClassification,
LongformerForTokenClassification,
LongformerModel,
LongformerSelfAttention,
)
from .models.lxmert import (
LxmertEncoder,
LxmertForPreTraining,
LxmertForQuestionAnswering,
LxmertModel,
LxmertPreTrainedModel,
LxmertVisualFeatureEncoder,
LxmertXLayer,
)
from .models.m2m_100 import M2M_100_PRETRAINED_MODEL_ARCHIVE_LIST, M2M100ForConditionalGeneration, M2M100Model
from .models.marian import MarianForCausalLM, MarianModel, MarianMTModel
from .models.mbart import (
MBartForCausalLM,
MBartForConditionalGeneration,
MBartForQuestionAnswering,
MBartForSequenceClassification,
MBartModel,
)
from .models.megatron_bert import (
MEGATRON_BERT_PRETRAINED_MODEL_ARCHIVE_LIST,
MegatronBertForCausalLM,
MegatronBertForMaskedLM,
MegatronBertForMultipleChoice,
MegatronBertForNextSentencePrediction,
MegatronBertForPreTraining,
MegatronBertForQuestionAnswering,
MegatronBertForSequenceClassification,
MegatronBertForTokenClassification,
MegatronBertModel,
)
from .models.mmbt import MMBTForClassification, MMBTModel, ModalEmbeddings
from .models.mobilebert import (
MOBILEBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
MobileBertForMaskedLM,
MobileBertForMultipleChoice,
MobileBertForNextSentencePrediction,
MobileBertForPreTraining,
MobileBertForQuestionAnswering,
MobileBertForSequenceClassification,
MobileBertForTokenClassification,
MobileBertLayer,
MobileBertModel,
MobileBertPreTrainedModel,
load_tf_weights_in_mobilebert,
)
from .models.mpnet import (
MPNET_PRETRAINED_MODEL_ARCHIVE_LIST,
MPNetForMaskedLM,
MPNetForMultipleChoice,
MPNetForQuestionAnswering,
MPNetForSequenceClassification,
MPNetForTokenClassification,
MPNetLayer,
MPNetModel,
MPNetPreTrainedModel,
)
from .models.mt5 import MT5EncoderModel, MT5ForConditionalGeneration, MT5Model
from .models.openai import (
OPENAI_GPT_PRETRAINED_MODEL_ARCHIVE_LIST,
OpenAIGPTDoubleHeadsModel,
OpenAIGPTForSequenceClassification,
OpenAIGPTLMHeadModel,
OpenAIGPTModel,
OpenAIGPTPreTrainedModel,
load_tf_weights_in_openai_gpt,
)
from .models.pegasus import PegasusForCausalLM, PegasusForConditionalGeneration, PegasusModel
from .models.prophetnet import (
PROPHETNET_PRETRAINED_MODEL_ARCHIVE_LIST,
ProphetNetDecoder,
ProphetNetEncoder,
ProphetNetForCausalLM,
ProphetNetForConditionalGeneration,
ProphetNetModel,
ProphetNetPreTrainedModel,
)
from .models.rag import RagModel, RagSequenceForGeneration, RagTokenForGeneration
from .models.reformer import (
REFORMER_PRETRAINED_MODEL_ARCHIVE_LIST,
ReformerAttention,
ReformerForMaskedLM,
ReformerForQuestionAnswering,
ReformerForSequenceClassification,
ReformerLayer,
ReformerModel,
ReformerModelWithLMHead,
)
from .models.retribert import RETRIBERT_PRETRAINED_MODEL_ARCHIVE_LIST, RetriBertModel, RetriBertPreTrainedModel
from .models.roberta import (
ROBERTA_PRETRAINED_MODEL_ARCHIVE_LIST,
RobertaForCausalLM,
RobertaForMaskedLM,
RobertaForMultipleChoice,
RobertaForQuestionAnswering,
RobertaForSequenceClassification,
RobertaForTokenClassification,
RobertaModel,
)
from .models.speech_to_text import (
SPEECH_TO_TEXT_PRETRAINED_MODEL_ARCHIVE_LIST,
Speech2TextForConditionalGeneration,
Speech2TextModel,
)
from .models.squeezebert import (
SQUEEZEBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
SqueezeBertForMaskedLM,
SqueezeBertForMultipleChoice,
SqueezeBertForQuestionAnswering,
SqueezeBertForSequenceClassification,
SqueezeBertForTokenClassification,
SqueezeBertModel,
SqueezeBertModule,
SqueezeBertPreTrainedModel,
)
from .models.t5 import (
T5_PRETRAINED_MODEL_ARCHIVE_LIST,
T5EncoderModel,
T5ForConditionalGeneration,
T5Model,
T5PreTrainedModel,
load_tf_weights_in_t5,
)
from .models.tapas import (
TAPAS_PRETRAINED_MODEL_ARCHIVE_LIST,
TapasForMaskedLM,
TapasForQuestionAnswering,
TapasForSequenceClassification,
TapasModel,
)
from .models.transfo_xl import (
TRANSFO_XL_PRETRAINED_MODEL_ARCHIVE_LIST,
AdaptiveEmbedding,
TransfoXLForSequenceClassification,
TransfoXLLMHeadModel,
TransfoXLModel,
TransfoXLPreTrainedModel,
load_tf_weights_in_transfo_xl,
)
from .models.vit import (
VIT_PRETRAINED_MODEL_ARCHIVE_LIST,
ViTForImageClassification,
ViTModel,
ViTPreTrainedModel,
)
from .models.wav2vec2 import (
WAV_2_VEC_2_PRETRAINED_MODEL_ARCHIVE_LIST,
Wav2Vec2ForCTC,
Wav2Vec2ForMaskedLM,
Wav2Vec2Model,
Wav2Vec2PreTrainedModel,
)
from .models.xlm import (
XLM_PRETRAINED_MODEL_ARCHIVE_LIST,
XLMForMultipleChoice,
XLMForQuestionAnswering,
XLMForQuestionAnsweringSimple,
XLMForSequenceClassification,
XLMForTokenClassification,
XLMModel,
XLMPreTrainedModel,
XLMWithLMHeadModel,
)
from .models.xlm_prophetnet import (
XLM_PROPHETNET_PRETRAINED_MODEL_ARCHIVE_LIST,
XLMProphetNetDecoder,
XLMProphetNetEncoder,
XLMProphetNetForCausalLM,
XLMProphetNetForConditionalGeneration,
XLMProphetNetModel,
)
from .models.xlm_roberta import (
XLM_ROBERTA_PRETRAINED_MODEL_ARCHIVE_LIST,
XLMRobertaForCausalLM,
XLMRobertaForMaskedLM,
XLMRobertaForMultipleChoice,
XLMRobertaForQuestionAnswering,
XLMRobertaForSequenceClassification,
XLMRobertaForTokenClassification,
XLMRobertaModel,
)
from .models.xlnet import (
XLNET_PRETRAINED_MODEL_ARCHIVE_LIST,
XLNetForMultipleChoice,
XLNetForQuestionAnswering,
XLNetForQuestionAnsweringSimple,
XLNetForSequenceClassification,
XLNetForTokenClassification,
XLNetLMHeadModel,
XLNetModel,
XLNetPreTrainedModel,
load_tf_weights_in_xlnet,
)
# Optimization
from .optimization import (
Adafactor,
AdamW,
get_constant_schedule,
get_constant_schedule_with_warmup,
get_cosine_schedule_with_warmup,
get_cosine_with_hard_restarts_schedule_with_warmup,
get_linear_schedule_with_warmup,
get_polynomial_decay_schedule_with_warmup,
get_scheduler,
)
# Trainer
from .trainer import Trainer
from .trainer_pt_utils import torch_distributed_zero_first
from .trainer_seq2seq import Seq2SeqTrainer
else:
from .utils.dummy_pt_objects import *
# TensorFlow
if is_tf_available():
from .benchmark.benchmark_args_tf import TensorFlowBenchmarkArguments
# Benchmarks
from .benchmark.benchmark_tf import TensorFlowBenchmark
from .generation_tf_utils import tf_top_k_top_p_filtering
from .modeling_tf_layoutlm import (
TF_LAYOUTLM_PRETRAINED_MODEL_ARCHIVE_LIST,
TFLayoutLMForMaskedLM,
TFLayoutLMForSequenceClassification,
TFLayoutLMForTokenClassification,
TFLayoutLMMainLayer,
TFLayoutLMModel,
TFLayoutLMPreTrainedModel,
)
from .modeling_tf_utils import TFPreTrainedModel, TFSequenceSummary, TFSharedEmbeddings, shape_list
from .models.albert import (
TF_ALBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
TFAlbertForMaskedLM,
TFAlbertForMultipleChoice,
TFAlbertForPreTraining,
TFAlbertForQuestionAnswering,
TFAlbertForSequenceClassification,
TFAlbertForTokenClassification,
TFAlbertMainLayer,
TFAlbertModel,
TFAlbertPreTrainedModel,
)
from .models.auto import (
TF_MODEL_FOR_CAUSAL_LM_MAPPING,
TF_MODEL_FOR_MASKED_LM_MAPPING,
TF_MODEL_FOR_MULTIPLE_CHOICE_MAPPING,
TF_MODEL_FOR_NEXT_SENTENCE_PREDICTION_MAPPING,
TF_MODEL_FOR_PRETRAINING_MAPPING,
TF_MODEL_FOR_QUESTION_ANSWERING_MAPPING,
TF_MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING,
TF_MODEL_FOR_SEQUENCE_CLASSIFICATION_MAPPING,
TF_MODEL_FOR_TOKEN_CLASSIFICATION_MAPPING,
TF_MODEL_MAPPING,
TF_MODEL_WITH_LM_HEAD_MAPPING,
TFAutoModel,
TFAutoModelForCausalLM,
TFAutoModelForMaskedLM,
TFAutoModelForMultipleChoice,
TFAutoModelForPreTraining,
TFAutoModelForQuestionAnswering,
TFAutoModelForSeq2SeqLM,
TFAutoModelForSequenceClassification,
TFAutoModelForTokenClassification,
TFAutoModelWithLMHead,
)
from .models.bart import TFBartForConditionalGeneration, TFBartModel, TFBartPretrainedModel
from .models.bert import (
TF_BERT_PRETRAINED_MODEL_ARCHIVE_LIST,
TFBertEmbeddings,
TFBertForMaskedLM,
TFBertForMultipleChoice,
TFBertForNextSentencePrediction,
TFBertForPreTraining,
TFBertForQuestionAnswering,
TFBertForSequenceClassification,
TFBertForTokenClassification,
TFBertLMHeadModel,
TFBertMainLayer,
TFBertModel,
TFBertPreTrainedModel,
)
from .models.blenderbot import TFBlenderbotForConditionalGeneration, TFBlenderbotModel
from .models.blenderbot_small import TFBlenderbotSmallForConditionalGeneration, TFBlenderbotSmallModel
from .models.camembert import (
TF_CAMEMBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
TFCamembertForMaskedLM,
TFCamembertForMultipleChoice,
TFCamembertForQuestionAnswering,
TFCamembertForSequenceClassification,
TFCamembertForTokenClassification,
TFCamembertModel,
)
from .models.convbert import (
TF_CONVBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
TFConvBertForMaskedLM,
TFConvBertForMultipleChoice,
TFConvBertForQuestionAnswering,
TFConvBertForSequenceClassification,
TFConvBertForTokenClassification,
TFConvBertLayer,
TFConvBertModel,
TFConvBertPreTrainedModel,
)
from .models.ctrl import (
TF_CTRL_PRETRAINED_MODEL_ARCHIVE_LIST,
TFCTRLForSequenceClassification,
TFCTRLLMHeadModel,
TFCTRLModel,
TFCTRLPreTrainedModel,
)
from .models.distilbert import (
TF_DISTILBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
TFDistilBertForMaskedLM,
TFDistilBertForMultipleChoice,
TFDistilBertForQuestionAnswering,
TFDistilBertForSequenceClassification,
TFDistilBertForTokenClassification,
TFDistilBertMainLayer,
TFDistilBertModel,
TFDistilBertPreTrainedModel,
)
from .models.dpr import (
TF_DPR_CONTEXT_ENCODER_PRETRAINED_MODEL_ARCHIVE_LIST,
TF_DPR_QUESTION_ENCODER_PRETRAINED_MODEL_ARCHIVE_LIST,
TF_DPR_READER_PRETRAINED_MODEL_ARCHIVE_LIST,
TFDPRContextEncoder,
TFDPRPretrainedContextEncoder,
TFDPRPretrainedQuestionEncoder,
TFDPRPretrainedReader,
TFDPRQuestionEncoder,
TFDPRReader,
)
from .models.electra import (
TF_ELECTRA_PRETRAINED_MODEL_ARCHIVE_LIST,
TFElectraForMaskedLM,
TFElectraForMultipleChoice,
TFElectraForPreTraining,
TFElectraForQuestionAnswering,
TFElectraForSequenceClassification,
TFElectraForTokenClassification,
TFElectraModel,
TFElectraPreTrainedModel,
)
from .models.flaubert import (
TF_FLAUBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
TFFlaubertForMultipleChoice,
TFFlaubertForQuestionAnsweringSimple,
TFFlaubertForSequenceClassification,
TFFlaubertForTokenClassification,
TFFlaubertModel,
TFFlaubertWithLMHeadModel,
)
from .models.funnel import (
TF_FUNNEL_PRETRAINED_MODEL_ARCHIVE_LIST,
TFFunnelBaseModel,
TFFunnelForMaskedLM,
TFFunnelForMultipleChoice,
TFFunnelForPreTraining,
TFFunnelForQuestionAnswering,
TFFunnelForSequenceClassification,
TFFunnelForTokenClassification,
TFFunnelModel,
)
from .models.gpt2 import (
TF_GPT2_PRETRAINED_MODEL_ARCHIVE_LIST,
TFGPT2DoubleHeadsModel,
TFGPT2ForSequenceClassification,
TFGPT2LMHeadModel,
TFGPT2MainLayer,
TFGPT2Model,
TFGPT2PreTrainedModel,
)
from .models.led import TFLEDForConditionalGeneration, TFLEDModel, TFLEDPreTrainedModel
from .models.longformer import (
TF_LONGFORMER_PRETRAINED_MODEL_ARCHIVE_LIST,
TFLongformerForMaskedLM,
TFLongformerForMultipleChoice,
TFLongformerForQuestionAnswering,
TFLongformerForSequenceClassification,
TFLongformerForTokenClassification,
TFLongformerModel,
TFLongformerSelfAttention,
)
from .models.lxmert import (
TF_LXMERT_PRETRAINED_MODEL_ARCHIVE_LIST,
TFLxmertForPreTraining,
TFLxmertMainLayer,
TFLxmertModel,
TFLxmertPreTrainedModel,
TFLxmertVisualFeatureEncoder,
)
from .models.marian import TFMarianModel, TFMarianMTModel
from .models.mbart import TFMBartForConditionalGeneration, TFMBartModel
from .models.mobilebert import (
TF_MOBILEBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
TFMobileBertForMaskedLM,
TFMobileBertForMultipleChoice,
TFMobileBertForNextSentencePrediction,
TFMobileBertForPreTraining,
TFMobileBertForQuestionAnswering,
TFMobileBertForSequenceClassification,
TFMobileBertForTokenClassification,
TFMobileBertMainLayer,
TFMobileBertModel,
TFMobileBertPreTrainedModel,
)
from .models.mpnet import (
TF_MPNET_PRETRAINED_MODEL_ARCHIVE_LIST,
TFMPNetForMaskedLM,
TFMPNetForMultipleChoice,
TFMPNetForQuestionAnswering,
TFMPNetForSequenceClassification,
TFMPNetForTokenClassification,
TFMPNetMainLayer,
TFMPNetModel,
TFMPNetPreTrainedModel,
)
from .models.mt5 import TFMT5EncoderModel, TFMT5ForConditionalGeneration, TFMT5Model
from .models.openai import (
TF_OPENAI_GPT_PRETRAINED_MODEL_ARCHIVE_LIST,
TFOpenAIGPTDoubleHeadsModel,
TFOpenAIGPTForSequenceClassification,
TFOpenAIGPTLMHeadModel,
TFOpenAIGPTMainLayer,
TFOpenAIGPTModel,
TFOpenAIGPTPreTrainedModel,
)
from .models.pegasus import TFPegasusForConditionalGeneration, TFPegasusModel
from .models.rag import TFRagModel, TFRagSequenceForGeneration, TFRagTokenForGeneration
from .models.roberta import (
TF_ROBERTA_PRETRAINED_MODEL_ARCHIVE_LIST,
TFRobertaForMaskedLM,
TFRobertaForMultipleChoice,
TFRobertaForQuestionAnswering,
TFRobertaForSequenceClassification,
TFRobertaForTokenClassification,
TFRobertaMainLayer,
TFRobertaModel,
TFRobertaPreTrainedModel,
)
from .models.t5 import (
TF_T5_PRETRAINED_MODEL_ARCHIVE_LIST,
TFT5EncoderModel,
TFT5ForConditionalGeneration,
TFT5Model,
TFT5PreTrainedModel,
)
from .models.transfo_xl import (
TF_TRANSFO_XL_PRETRAINED_MODEL_ARCHIVE_LIST,
TFAdaptiveEmbedding,
TFTransfoXLForSequenceClassification,
TFTransfoXLLMHeadModel,
TFTransfoXLMainLayer,
TFTransfoXLModel,
TFTransfoXLPreTrainedModel,
)
from .models.xlm import (
TF_XLM_PRETRAINED_MODEL_ARCHIVE_LIST,
TFXLMForMultipleChoice,
TFXLMForQuestionAnsweringSimple,
TFXLMForSequenceClassification,
TFXLMForTokenClassification,
TFXLMMainLayer,
TFXLMModel,
TFXLMPreTrainedModel,
TFXLMWithLMHeadModel,
)
from .models.xlm_roberta import (
TF_XLM_ROBERTA_PRETRAINED_MODEL_ARCHIVE_LIST,
TFXLMRobertaForMaskedLM,
TFXLMRobertaForMultipleChoice,
TFXLMRobertaForQuestionAnswering,
TFXLMRobertaForSequenceClassification,
TFXLMRobertaForTokenClassification,
TFXLMRobertaModel,
)
from .models.xlnet import (
TF_XLNET_PRETRAINED_MODEL_ARCHIVE_LIST,
TFXLNetForMultipleChoice,
TFXLNetForQuestionAnsweringSimple,
TFXLNetForSequenceClassification,
TFXLNetForTokenClassification,
TFXLNetLMHeadModel,
TFXLNetMainLayer,
TFXLNetModel,
TFXLNetPreTrainedModel,
)
# Optimization
from .optimization_tf import AdamWeightDecay, GradientAccumulator, WarmUp, create_optimizer
# Trainer
from .trainer_tf import TFTrainer
else:
# Import the same objects as dummies to get them in the namespace.
# They will raise an import error if the user tries to instantiate / use them.
from .utils.dummy_tf_objects import *
if is_flax_available():
from .modeling_flax_utils import FlaxPreTrainedModel
from .models.auto import (
FLAX_MODEL_FOR_MASKED_LM_MAPPING,
FLAX_MODEL_FOR_MULTIPLE_CHOICE_MAPPING,
FLAX_MODEL_FOR_NEXT_SENTENCE_PREDICTION_MAPPING,
FLAX_MODEL_FOR_PRETRAINING_MAPPING,
FLAX_MODEL_FOR_QUESTION_ANSWERING_MAPPING,
FLAX_MODEL_FOR_SEQUENCE_CLASSIFICATION_MAPPING,
FLAX_MODEL_FOR_TOKEN_CLASSIFICATION_MAPPING,
FLAX_MODEL_MAPPING,
FlaxAutoModel,
FlaxAutoModelForMaskedLM,
FlaxAutoModelForMultipleChoice,
FlaxAutoModelForNextSentencePrediction,
FlaxAutoModelForPreTraining,
FlaxAutoModelForQuestionAnswering,
FlaxAutoModelForSequenceClassification,
FlaxAutoModelForTokenClassification,
)
from .models.bert import (
FlaxBertForMaskedLM,
FlaxBertForMultipleChoice,
FlaxBertForNextSentencePrediction,
FlaxBertForPreTraining,
FlaxBertForQuestionAnswering,
FlaxBertForSequenceClassification,
FlaxBertForTokenClassification,
FlaxBertModel,
FlaxBertPreTrainedModel,
)
from .models.roberta import FlaxRobertaModel
else:
# Import the same objects as dummies to get them in the namespace.
# They will raise an import error if the user tries to instantiate / use them.
from .utils.dummy_flax_objects import *
else:
import importlib
import os
import sys
    class _LazyModule(_BaseLazyModule):
        """
        Module class that surfaces all objects but only performs associated imports when the objects are requested.
        """

        # Preserve the real module's __file__/__path__ so introspection and
        # relative submodule resolution keep working after this instance
        # replaces the package in sys.modules.
        __file__ = globals()["__file__"]
        __path__ = [os.path.dirname(__file__)]

        def _get_module(self, module_name: str):
            """Import and return submodule ``module_name`` relative to this package."""
            return importlib.import_module("." + module_name, self.__name__)

        def __getattr__(self, name: str):
            """Resolve attributes lazily; delegates unknown names to the base class."""
            # Special handling for the version, which is a constant from this module and not imported in a submodule.
            if name == "__version__":
                return __version__
            return super().__getattr__(name)
sys.modules[__name__] = _LazyModule(__name__, _import_structure)
if not is_tf_available() and not is_torch_available() and not is_flax_available():
logger.warning(
"None of PyTorch, TensorFlow >= 2.0, or Flax have been found. "
"Models won't be available and only tokenizers, configuration "
"and file/data utilities can be used."
)
| true | true |
f71e07df46a6343270e73af46c0fd66d91b2a5c8 | 11,601 | py | Python | smartsheet/sights.py | bromic007/smartsheet-python-sdk | ef256b7421a65a56a7138dc2b3eb5d69a1a06590 | [
"Apache-2.0"
] | 106 | 2015-02-21T14:26:32.000Z | 2022-03-31T05:56:53.000Z | smartsheet/sights.py | bromic007/smartsheet-python-sdk | ef256b7421a65a56a7138dc2b3eb5d69a1a06590 | [
"Apache-2.0"
] | 94 | 2015-02-09T13:16:00.000Z | 2022-03-16T06:37:41.000Z | smartsheet/sights.py | bromic007/smartsheet-python-sdk | ef256b7421a65a56a7138dc2b3eb5d69a1a06590 | [
"Apache-2.0"
] | 85 | 2015-02-06T22:05:25.000Z | 2022-03-16T06:22:59.000Z | # pylint: disable=C0111,R0902,R0913
# Smartsheet Python SDK.
#
# Copyright 2017 Smartsheet.com, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"): you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from . import fresh_operation
from datetime import datetime
class Sights(object):
    """Class for handling Sights operations."""

    def __init__(self, smartsheet_obj):
        """Init Sights with base Smartsheet object."""
        self._base = smartsheet_obj
        self._log = logging.getLogger(__name__)

    def _send(self, _op, expected):
        """Prepare an operation and execute it against the API.

        Args:
            _op (dict): operation dict built by ``fresh_operation``.
            expected: expected response model name(s) used for
                deserialization (a str, or a list for wrapped results).

        Returns:
            The deserialized API response.
        """
        prepped_request = self._base.prepare_request(_op)
        return self._base.request(prepped_request, expected, _op)

    def list_sights(self, page_size=None, page=None,
                    include_all=None, modified_since=None):
        """Get the list of all Sights the User has access to, in alphabetical
        order, by name.

        Args:
            page_size (int): The maximum number of items to
                return per page.
            page (int): Which page to return.
            include_all (bool): If true, include all results
                (i.e. do not paginate).
            modified_since(datetime): return sights modified since datetime

        Returns:
            IndexResult
        """
        _op = fresh_operation('list_sights')
        _op['method'] = 'GET'
        _op['path'] = '/sights'
        _op['query_params']['pageSize'] = page_size
        _op['query_params']['page'] = page
        _op['query_params']['includeAll'] = include_all
        # Only serialize the filter when an actual datetime was supplied.
        if isinstance(modified_since, datetime):
            _op['query_params']['modifiedSince'] = modified_since.isoformat()

        return self._send(_op, ['IndexResult', 'Sight'])

    def get_sight(self, sight_id, level=None, include=None):
        """Get the specified Sight.

        Args:
            sight_id (int): Sight ID
            level (int): compatibility level
            include (list[str]): optional include parameters

        Returns:
            Sight
        """
        _op = fresh_operation('get_sight')
        _op['method'] = 'GET'
        _op['path'] = '/sights/' + str(sight_id)
        _op['query_params']['include'] = include
        _op['query_params']['level'] = level

        return self._send(_op, 'Sight')

    def update_sight(self, sight_id, sight_obj):
        """Updates the specified Sight.

        Args:
            sight_id (int): Sight ID
            sight_obj (Sight): Sight object.

        Returns:
            Result
        """
        _op = fresh_operation('update_sight')
        _op['method'] = 'PUT'
        _op['path'] = '/sights/' + str(sight_id)
        _op['json'] = sight_obj

        return self._send(_op, ['Result', 'Sight'])

    def delete_sight(self, sight_id):
        """Delete the specified Sight.

        Args:
            sight_id (int): Sight ID

        Returns:
            Result
        """
        _op = fresh_operation('delete_sight')
        _op['method'] = 'DELETE'
        _op['path'] = '/sights/' + str(sight_id)

        return self._send(_op, ['Result', None])

    def copy_sight(self, sight_id, container_destination_obj):
        """Creates a copy of the specified Sight.

        Args:
            sight_id (int): Sight ID
            container_destination_obj
                (ContainerDestination): Container Destination object.

        Returns:
            Result
        """
        _op = fresh_operation('copy_sight')
        _op['method'] = 'POST'
        _op['path'] = '/sights/' + str(sight_id) + '/copy'
        _op['json'] = container_destination_obj

        return self._send(_op, ['Result', 'Sight'])

    def move_sight(self, sight_id, container_destination_obj):
        """Moves the specified Sight to a new container.

        Args:
            sight_id (int): Sight ID
            container_destination_obj
                (ContainerDestination): Container Destination object.

        Returns:
            Result
        """
        _op = fresh_operation('move_sight')
        _op['method'] = 'POST'
        _op['path'] = '/sights/' + str(sight_id) + '/move'
        _op['json'] = container_destination_obj

        return self._send(_op, ['Result', 'Sight'])

    def list_shares(self, sight_id, page_size=None, page=None,
                    include_all=None, include_workspace_shares=False):
        """Get the list of all Users and Groups to whom the specified Sight is
        shared, and their access level.

        Args:
            sight_id (int): Sight ID
            page_size (int): The maximum number of items to
                return per page.
            page (int): Which page to return.
            include_all (bool): If true, include all results
                (i.e. do not paginate).
            include_workspace_shares(bool): Include Workspace shares

        Returns:
            IndexResult
        """
        _op = fresh_operation('list_shares')
        _op['method'] = 'GET'
        _op['path'] = '/sights/' + str(sight_id) + '/shares'
        _op['query_params']['pageSize'] = page_size
        _op['query_params']['page'] = page
        _op['query_params']['includeAll'] = include_all
        if include_workspace_shares:
            _op['query_params']['include'] = 'workspaceShares'

        return self._send(_op, ['IndexResult', 'Share'])

    def get_share(self, sight_id, share_id):
        """Get the specified Share.

        Args:
            sight_id (int): Sight ID
            share_id (str): Share ID

        Returns:
            Share
        """
        _op = fresh_operation('get_share')
        _op['method'] = 'GET'
        _op['path'] = '/sights/' + str(sight_id) + '/shares/' + str(share_id)

        return self._send(_op, 'Share')

    def share_sight(self, sight_id, share_obj, send_email=False):
        """Share the specified Sight.

        Share the specified Sight with the specified Users and
        Groups.

        Args:
            sight_id (int): Sight ID
            share_obj (Share): Share object.
            send_email (bool): Either true or false to
                indicate whether or not to notify the user by email. Default
                is false.

        Returns:
            Result
        """
        _op = fresh_operation('share_sight')
        _op['method'] = 'POST'
        _op['path'] = '/sights/' + str(sight_id) + '/shares'
        _op['query_params']['sendEmail'] = send_email
        _op['json'] = share_obj

        return self._send(_op, ['Result', 'Share'])

    def update_share(self, sight_id, share_id, share_obj):
        """Update the access level of a User or Group for the specified Sight.

        Args:
            sight_id (int): Sight ID
            share_id (str): Share ID
            share_obj (Share): Share object.

        Returns:
            Result

        Raises:
            ValueError: if any of the required arguments is None.
        """
        # Validate the actual argument values.  (The previous check tested
        # the string literals 'sight_id', 'share_id', 'share_obj', which are
        # always non-None, so it could never fire.)
        if not all(val is not None for val in [sight_id, share_id,
                                               share_obj]):
            raise ValueError(
                ('One or more required values '
                 'are missing from call to ' + __name__))

        _op = fresh_operation('update_share')
        _op['method'] = 'PUT'
        _op['path'] = '/sights/' + str(sight_id) + '/shares/' + str(share_id)
        _op['json'] = share_obj

        return self._send(_op, ['Result', 'Share'])

    def delete_share(self, sight_id, share_id):
        """Delete the specified Share.

        Args:
            sight_id (int): Sight ID
            share_id (str): Share ID

        Returns:
            Result
        """
        _op = fresh_operation('delete_share')
        _op['method'] = 'DELETE'
        _op['path'] = '/sights/' + str(sight_id) + '/shares/' + str(share_id)

        return self._send(_op, ['Result', None])

    def get_publish_status(self, sight_id):
        """Get the Publish status of the Sight.

        Get the status of the Publish settings of the Sight,
        including URLs of any enabled publishings.

        Args:
            sight_id (int): Sight ID

        Returns:
            SightPublish
        """
        _op = fresh_operation('get_publish_status')
        _op['method'] = 'GET'
        _op['path'] = '/sights/' + str(sight_id) + '/publish'

        return self._send(_op, 'SightPublish')

    def set_publish_status(self, sight_id, sight_publish_obj):
        """Set the publish status of the Sight and returns the new status,
        including the URLs of any enabled publishings.

        Args:
            sight_id (int): Sight ID
            sight_publish_obj (SightPublish): SightPublish object.

        Returns:
            Result
        """
        attributes = ['read_only_full_enabled', 'read_only_full_accessible_by']

        # The API expects a complete publish object; if the caller left any
        # attribute unset, merge in the current published state first.
        fetch_first = any(
            getattr(sight_publish_obj, attribute, None) is None
            for attribute in attributes)
        if fetch_first:
            current_status = self.get_publish_status(sight_id).to_dict()
            current_status.update(sight_publish_obj.to_dict())
            sight_publish_obj = self._base.models.SightPublish(current_status)

        _op = fresh_operation('set_publish_status')
        _op['method'] = 'PUT'
        _op['path'] = '/sights/' + str(sight_id) + '/publish'
        _op['json'] = sight_publish_obj

        return self._send(_op, ['Result', 'SightPublish'])
| 31.870879 | 79 | 0.594259 |
import logging
from . import fresh_operation
from datetime import datetime
class Sights(object):
    """Client wrapper for the Smartsheet Sights (dashboards) API endpoints."""

    def __init__(self, smartsheet_obj):
        """Init Sights with the base Smartsheet request object."""
        self._base = smartsheet_obj
        self._log = logging.getLogger(__name__)

    def list_sights(self, page_size=None, page=None,
                    include_all=None, modified_since=None):
        """List all Sights the user can access (GET /sights).

        Args:
            page_size (int): max items per page.
            page (int): which page to return.
            include_all (bool): if True, do not paginate.
            modified_since (datetime): only Sights modified since this time.

        Returns:
            IndexResult of Sight objects.
        """
        _op = fresh_operation('list_sights')
        _op['method'] = 'GET'
        _op['path'] = '/sights'
        _op['query_params']['pageSize'] = page_size
        _op['query_params']['page'] = page
        _op['query_params']['includeAll'] = include_all
        # Only serialize the filter when an actual datetime was supplied.
        if isinstance(modified_since, datetime):
            _op['query_params']['modifiedSince'] = modified_since.isoformat()
        expected = ['IndexResult', 'Sight']
        prepped_request = self._base.prepare_request(_op)
        response = self._base.request(prepped_request, expected, _op)
        return response

    def get_sight(self, sight_id, level=None, include=None):
        """Fetch one Sight by ID (GET /sights/{id}).

        Args:
            sight_id (int): Sight ID.
            level (int): compatibility level.
            include (list[str]): optional include parameters.

        Returns:
            Sight
        """
        _op = fresh_operation('get_sight')
        _op['method'] = 'GET'
        _op['path'] = '/sights/' + str(sight_id)
        _op['query_params']['include'] = include
        _op['query_params']['level'] = level
        expected = 'Sight'
        prepped_request = self._base.prepare_request(_op)
        response = self._base.request(prepped_request, expected, _op)
        return response

    def update_sight(self, sight_id, sight_obj):
        """Update the specified Sight (PUT /sights/{id})."""
        _op = fresh_operation('update_sight')
        _op['method'] = 'PUT'
        _op['path'] = '/sights/' + str(sight_id)
        _op['json'] = sight_obj
        expected = ['Result', 'Sight']
        prepped_request = self._base.prepare_request(_op)
        response = self._base.request(prepped_request, expected, _op)
        return response

    def delete_sight(self, sight_id):
        """Delete the specified Sight (DELETE /sights/{id})."""
        _op = fresh_operation('delete_sight')
        _op['method'] = 'DELETE'
        _op['path'] = '/sights/' + str(sight_id)
        expected = ['Result', None]
        prepped_request = self._base.prepare_request(_op)
        response = self._base.request(prepped_request, expected, _op)
        return response

    def copy_sight(self, sight_id, container_destination_obj):
        """Create a copy of the Sight in the given destination container."""
        _op = fresh_operation('copy_sight')
        _op['method'] = 'POST'
        _op['path'] = '/sights/' + str(sight_id) + '/copy'
        _op['json'] = container_destination_obj
        expected = ['Result', 'Sight']
        prepped_request = self._base.prepare_request(_op)
        response = self._base.request(prepped_request, expected, _op)
        return response

    def move_sight(self, sight_id, container_destination_obj):
        """Move the Sight to the given destination container."""
        _op = fresh_operation('move_sight')
        _op['method'] = 'POST'
        _op['path'] = '/sights/' + str(sight_id) + '/move'
        _op['json'] = container_destination_obj
        expected = ['Result', 'Sight']
        prepped_request = self._base.prepare_request(_op)
        response = self._base.request(prepped_request, expected, _op)
        return response

    def list_shares(self, sight_id, page_size=None, page=None,
                    include_all=None, include_workspace_shares=False):
        """List Users/Groups the Sight is shared with, plus access levels.

        Args:
            sight_id (int): Sight ID.
            page_size (int): max items per page.
            page (int): which page to return.
            include_all (bool): if True, do not paginate.
            include_workspace_shares (bool): include Workspace-level shares.

        Returns:
            IndexResult of Share objects.
        """
        _op = fresh_operation('list_shares')
        _op['method'] = 'GET'
        _op['path'] = '/sights/' + str(sight_id) + '/shares'
        _op['query_params']['pageSize'] = page_size
        _op['query_params']['page'] = page
        _op['query_params']['includeAll'] = include_all
        if include_workspace_shares:
            _op['query_params']['include'] = 'workspaceShares'
        expected = ['IndexResult', 'Share']
        prepped_request = self._base.prepare_request(_op)
        response = self._base.request(prepped_request, expected, _op)
        return response

    def get_share(self, sight_id, share_id):
        """Fetch a single Share record for the Sight."""
        _op = fresh_operation('get_share')
        _op['method'] = 'GET'
        _op['path'] = '/sights/' + str(sight_id) + '/shares/' + str(share_id)
        expected = 'Share'
        prepped_request = self._base.prepare_request(_op)
        response = self._base.request(prepped_request, expected, _op)
        return response

    def share_sight(self, sight_id, share_obj, send_email=False):
        """Share the Sight with the Users/Groups described by share_obj.

        Args:
            sight_id (int): Sight ID.
            share_obj (Share): Share object.
            send_email (bool): notify the user by email (default False).
        """
        _op = fresh_operation('share_sight')
        _op['method'] = 'POST'
        _op['path'] = '/sights/' + str(sight_id) + '/shares'
        _op['query_params']['sendEmail'] = send_email
        _op['json'] = share_obj
        expected = ['Result', 'Share']
        prepped_request = self._base.prepare_request(_op)
        response = self._base.request(prepped_request, expected, _op)
        return response

    def update_share(self, sight_id, share_id, share_obj):
        """Update the access level of a User or Group for the Sight."""
        # NOTE(review): this guard tests the string literals, which are
        # always non-None, so it never fires; the arguments themselves are
        # not actually validated here.
        if not all(val is not None for val in ['sight_id', 'share_id',
                                               'share_obj']):
            raise ValueError(
                ('One or more required values '
                 'are missing from call to ' + __name__))
        _op = fresh_operation('update_share')
        _op['method'] = 'PUT'
        _op['path'] = '/sights/' + str(sight_id) + '/shares/' + str(share_id)
        _op['json'] = share_obj
        expected = ['Result', 'Share']
        prepped_request = self._base.prepare_request(_op)
        response = self._base.request(prepped_request, expected, _op)
        return response

    def delete_share(self, sight_id, share_id):
        """Remove the specified Share from the Sight."""
        _op = fresh_operation('delete_share')
        _op['method'] = 'DELETE'
        _op['path'] = '/sights/' + str(sight_id) + '/shares/' + str(share_id)
        expected = ['Result', None]
        prepped_request = self._base.prepare_request(_op)
        response = self._base.request(prepped_request, expected, _op)
        return response

    def get_publish_status(self, sight_id):
        """Get the Sight's publish settings, including any enabled URLs."""
        _op = fresh_operation('get_publish_status')
        _op['method'] = 'GET'
        _op['path'] = '/sights/' + str(sight_id) + '/publish'
        expected = 'SightPublish'
        prepped_request = self._base.prepare_request(_op)
        response = self._base.request(prepped_request, expected, _op)
        return response

    def set_publish_status(self, sight_id, sight_publish_obj):
        """Set the Sight's publish status; returns the new status.

        If the caller's SightPublish object is missing any required
        attribute, the current published state is fetched and merged in
        first so the API receives a complete object.
        """
        attributes = ['read_only_full_enabled', 'read_only_full_accessible_by']
        fetch_first = False
        # Check for incompleteness; fill in from current status if necessary.
        for attribute in attributes:
            val = getattr(sight_publish_obj, attribute, None)
            if val is None:
                fetch_first = True
                break
        if fetch_first:
            current_status = self.get_publish_status(sight_id).to_dict()
            current_status.update(sight_publish_obj.to_dict())
            sight_publish_obj = self._base.models.SightPublish(current_status)
        _op = fresh_operation('set_publish_status')
        _op['method'] = 'PUT'
        _op['path'] = '/sights/' + str(sight_id) + '/publish'
        _op['json'] = sight_publish_obj
        expected = ['Result', 'SightPublish']
        prepped_request = self._base.prepare_request(_op)
        response = self._base.request(prepped_request, expected, _op)
        return response
f71e084c0beb0fbe683824c90590846603386172 | 4,113 | py | Python | alipay/aop/api/request/AlipayMarketingCashlessvoucherTemplateModifyRequest.py | articuly/alipay-sdk-python-all | 0259cd28eca0f219b97dac7f41c2458441d5e7a6 | [
"Apache-2.0"
] | null | null | null | alipay/aop/api/request/AlipayMarketingCashlessvoucherTemplateModifyRequest.py | articuly/alipay-sdk-python-all | 0259cd28eca0f219b97dac7f41c2458441d5e7a6 | [
"Apache-2.0"
] | null | null | null | alipay/aop/api/request/AlipayMarketingCashlessvoucherTemplateModifyRequest.py | articuly/alipay-sdk-python-all | 0259cd28eca0f219b97dac7f41c2458441d5e7a6 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import simplejson as json
from alipay.aop.api.FileItem import FileItem
from alipay.aop.api.constant.ParamConstants import *
from alipay.aop.api.domain.AlipayMarketingCashlessvoucherTemplateModifyModel import AlipayMarketingCashlessvoucherTemplateModifyModel
class AlipayMarketingCashlessvoucherTemplateModifyRequest(object):
    """Request object for the alipay.marketing.cashlessvoucher.template.modify API."""

    def __init__(self, biz_model=None):
        self._biz_model = biz_model
        self._biz_content = None
        self._version = "1.0"
        self._need_encrypt = False
        # All optional transport/terminal fields start unset.
        for field in ('terminal_type', 'terminal_info', 'prod_code',
                      'notify_url', 'return_url', 'udf_params'):
            setattr(self, '_' + field, None)

    @property
    def biz_model(self):
        return self._biz_model

    @biz_model.setter
    def biz_model(self, model):
        self._biz_model = model

    @property
    def biz_content(self):
        return self._biz_content

    @biz_content.setter
    def biz_content(self, content):
        # Coerce plain dicts into the typed model object.
        if isinstance(content, AlipayMarketingCashlessvoucherTemplateModifyModel):
            self._biz_content = content
        else:
            self._biz_content = AlipayMarketingCashlessvoucherTemplateModifyModel.from_alipay_dict(content)

    @property
    def version(self):
        return self._version

    @version.setter
    def version(self, ver):
        self._version = ver

    @property
    def terminal_type(self):
        return self._terminal_type

    @terminal_type.setter
    def terminal_type(self, term_type):
        self._terminal_type = term_type

    @property
    def terminal_info(self):
        return self._terminal_info

    @terminal_info.setter
    def terminal_info(self, term_info):
        self._terminal_info = term_info

    @property
    def prod_code(self):
        return self._prod_code

    @prod_code.setter
    def prod_code(self, code):
        self._prod_code = code

    @property
    def notify_url(self):
        return self._notify_url

    @notify_url.setter
    def notify_url(self, url):
        self._notify_url = url

    @property
    def return_url(self):
        return self._return_url

    @return_url.setter
    def return_url(self, url):
        self._return_url = url

    @property
    def udf_params(self):
        return self._udf_params

    @udf_params.setter
    def udf_params(self, params):
        # Non-dict values are silently ignored, as in the rest of the SDK.
        if isinstance(params, dict):
            self._udf_params = params

    @property
    def need_encrypt(self):
        return self._need_encrypt

    @need_encrypt.setter
    def need_encrypt(self, flag):
        self._need_encrypt = flag

    def add_other_text_param(self, key, value):
        """Attach an arbitrary extra text parameter to the request."""
        extras = self.udf_params or dict()
        extras[key] = value
        self.udf_params = extras

    def get_params(self):
        """Assemble the flat parameter dict to be sent to the gateway."""
        params = dict()
        params[P_METHOD] = 'alipay.marketing.cashlessvoucher.template.modify'
        params[P_VERSION] = self.version
        if self.biz_model:
            params[P_BIZ_CONTENT] = json.dumps(obj=self.biz_model.to_alipay_dict(), use_decimal=True, ensure_ascii=False, sort_keys=True, separators=(',', ':'))
        if self.biz_content:
            if hasattr(self.biz_content, 'to_alipay_dict'):
                params['biz_content'] = json.dumps(obj=self.biz_content.to_alipay_dict(), use_decimal=True, ensure_ascii=False, sort_keys=True, separators=(',', ':'))
            else:
                params['biz_content'] = self.biz_content
        # Optional scalar fields are copied through only when set.
        for name in ('terminal_type', 'terminal_info', 'prod_code',
                     'notify_url', 'return_url'):
            val = getattr(self, name)
            if val:
                params[name] = val
        if self.udf_params:
            params.update(self.udf_params)
        return params

    def get_multipart_params(self):
        """This request type carries no file attachments."""
        return dict()
| 28.365517 | 166 | 0.654996 |
import simplejson as json
from alipay.aop.api.FileItem import FileItem
from alipay.aop.api.constant.ParamConstants import *
from alipay.aop.api.domain.AlipayMarketingCashlessvoucherTemplateModifyModel import AlipayMarketingCashlessvoucherTemplateModifyModel
class AlipayMarketingCashlessvoucherTemplateModifyRequest(object):
    """Request object for the alipay.marketing.cashlessvoucher.template.modify API.

    Holds the business payload (``biz_content``/``biz_model``) plus the
    optional transport-level fields, and serializes them via ``get_params``.
    """

    def __init__(self, biz_model=None):
        self._biz_model = biz_model
        self._biz_content = None
        self._version = "1.0"
        self._terminal_type = None
        self._terminal_info = None
        self._prod_code = None
        self._notify_url = None
        self._return_url = None
        self._udf_params = None
        self._need_encrypt = False

    # Simple pass-through accessors for each request field.
    @property
    def biz_model(self):
        return self._biz_model

    @biz_model.setter
    def biz_model(self, value):
        self._biz_model = value

    @property
    def biz_content(self):
        return self._biz_content

    @biz_content.setter
    def biz_content(self, value):
        # Coerce plain dicts into the typed model object.
        if isinstance(value, AlipayMarketingCashlessvoucherTemplateModifyModel):
            self._biz_content = value
        else:
            self._biz_content = AlipayMarketingCashlessvoucherTemplateModifyModel.from_alipay_dict(value)

    @property
    def version(self):
        return self._version

    @version.setter
    def version(self, value):
        self._version = value

    @property
    def terminal_type(self):
        return self._terminal_type

    @terminal_type.setter
    def terminal_type(self, value):
        self._terminal_type = value

    @property
    def terminal_info(self):
        return self._terminal_info

    @terminal_info.setter
    def terminal_info(self, value):
        self._terminal_info = value

    @property
    def prod_code(self):
        return self._prod_code

    @prod_code.setter
    def prod_code(self, value):
        self._prod_code = value

    @property
    def notify_url(self):
        return self._notify_url

    @notify_url.setter
    def notify_url(self, value):
        self._notify_url = value

    @property
    def return_url(self):
        return self._return_url

    @return_url.setter
    def return_url(self, value):
        self._return_url = value

    @property
    def udf_params(self):
        return self._udf_params

    @udf_params.setter
    def udf_params(self, value):
        # Non-dict values are silently ignored.
        if not isinstance(value, dict):
            return
        self._udf_params = value

    @property
    def need_encrypt(self):
        return self._need_encrypt

    @need_encrypt.setter
    def need_encrypt(self, value):
        self._need_encrypt = value

    def add_other_text_param(self, key, value):
        """Attach an arbitrary extra text parameter to the request."""
        if not self.udf_params:
            self.udf_params = dict()
        self.udf_params[key] = value

    def get_params(self):
        """Assemble the flat parameter dict to be sent to the gateway."""
        params = dict()
        params[P_METHOD] = 'alipay.marketing.cashlessvoucher.template.modify'
        params[P_VERSION] = self.version
        if self.biz_model:
            params[P_BIZ_CONTENT] = json.dumps(obj=self.biz_model.to_alipay_dict(), use_decimal=True, ensure_ascii=False, sort_keys=True, separators=(',', ':'))
        if self.biz_content:
            # Typed models are serialized; raw strings pass through as-is.
            if hasattr(self.biz_content, 'to_alipay_dict'):
                params['biz_content'] = json.dumps(obj=self.biz_content.to_alipay_dict(), use_decimal=True, ensure_ascii=False, sort_keys=True, separators=(',', ':'))
            else:
                params['biz_content'] = self.biz_content
        if self.terminal_type:
            params['terminal_type'] = self.terminal_type
        if self.terminal_info:
            params['terminal_info'] = self.terminal_info
        if self.prod_code:
            params['prod_code'] = self.prod_code
        if self.notify_url:
            params['notify_url'] = self.notify_url
        if self.return_url:
            params['return_url'] = self.return_url
        if self.udf_params:
            params.update(self.udf_params)
        return params

    def get_multipart_params(self):
        """This request type carries no file attachments."""
        multipart_params = dict()
        return multipart_params
| true | true |
f71e08a94e9f047dfa54cd7ce41c674a5cd1f8bd | 15,999 | py | Python | game.py | theDrinkMD/twibbage | c0aba60bd2df50f0a5688db4a01048ea1efd1a45 | [
"MIT"
] | null | null | null | game.py | theDrinkMD/twibbage | c0aba60bd2df50f0a5688db4a01048ea1efd1a45 | [
"MIT"
] | null | null | null | game.py | theDrinkMD/twibbage | c0aba60bd2df50f0a5688db4a01048ea1efd1a45 | [
"MIT"
] | null | null | null | from flask import Flask, request
from twilio.twiml.messaging_response import MessagingResponse
from gameIdGenerator import createNewGameId
from models import Game, Player, Player_Answers, Question
from flask_sqlalchemy import SQLAlchemy
from datetime import datetime
import dbManager
import logging
import gameManager
import messageSender
import os
from os.path import join, dirname
from dotenv import load_dotenv
app = Flask(__name__)

# Load environment variables from the .env file next to this module.
dotenv_path = join(dirname(__file__), '.env')
load_dotenv(dotenv_path)

# Database connection comes from the environment; the commented line below
# is the local-development fallback URI.
PRODUCTION_DATABASE_URL = os.environ.get("PRODUCTION_DATABASE_URL")
app.config['SQLALCHEMY_DATABASE_URI'] = PRODUCTION_DATABASE_URL
#app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql://localhost/twibbage_db'
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
db = SQLAlchemy(app)
@app.route("/", methods=['GET', 'POST'])
def twibbage_game():
#INITIALIZE
from_number = request.values.get('From', None)
msg_body = request.values.get('Body', None)
lcase_msg_body = ''
if from_number is not None and msg_body is not None:
lcase_msg_body = unicode.encode(msg_body.lower())
lcase_msg_body = lcase_msg_body.strip()
dotenv_path = join(dirname(__file__), '.env')
load_dotenv(dotenv_path)
ACCOUNT_SID = os.environ.get("TWILIO_ACCOUNT_SID")
AUTH_TOKEN = os.environ.get("TWILIO_AUTH_TOKEN")
#Gamestate Variables for testing
already_in_game = True
current_question_id = 1
max_questions = 4
max_players = 2
game_state = "fakeanswertime"
response_string = ""
points_for_correct_guess = 200
points_for_fakeout = 100
someone_just_jerked_around = False
resp = MessagingResponse()
#resp.message("something is wrong")
if lcase_msg_body.startswith("newgame"):
#checkGameState(from_number) or checkInGame()
# Check if from number is already in a game.
if dbManager.checkIfMdnInGame(from_number) == 2:
response_string = "You're already in a game. To exit that game, respond with \"exitgame\""
else:
#lets parse the message and get the max_players and max questions
game_settings = msg_body.split()
player_alias = str(from_number)
try:
max_players = int(game_settings[1])
print("{} requested a max number of players of {}".format(from_number, max_players))
except IndexError:
max_players = 3
print("{} Did not request a maximum number of questions, defaulting to {}".format(from_number, max_players))
try:
max_questions = int(game_settings[2])
print("{} requested a max number of questions of {}".format(from_number, max_questions))
except IndexError:
max_questions = 3
print("{} Did not request a maximum number of questions, defaulting to {}".format(from_number, max_questions))
try:
player_alias = game_settings[3]
print("{} requested a new name of {}".format(from_number, player_alias))
except IndexError:
player_alias = str(from_number)
print("{} Did not request an alias... defaulting to {}".format(from_number, from_number))
#max_questions = msg_body[9:9]
#max_players = msg_body[11:]
#createGame(from_number, num_questions)
new_game = gameManager.createGame(from_number, max_questions, max_players, player_alias)
# creates a new game object, returns
#gameId = "A1B2"
response_string = "\r Starting a new game... \r - Game ID: {} \r - {} Questions \r - {} Players. " \
"\r Tell your so-called friends to text {} to this number to join. Text rules for... rules.".format(new_game, max_questions, max_players, new_game)
#send rules to host.
#gameManager.sendRules(from_number)
elif lcase_msg_body.startswith("exitgame"):
print("********** {} requested to exit the game. Removing player from game.".format(from_number))
#call exitGame(from_number) which should remove the person from a game
player_id = gameManager.removePlayerFromGame(from_number)
#now lets double check to make sure that this person Wasn't the game host.
#dbManager.updateGameState()
if player_id != 0:
#Check to see if the player ID is a host of an active game
if dbManager.isActiveHost(player_id):
print("********** {} was game host. Fully ending the game.".format(from_number))
ended_game = gameManager.endGameByPlayer(player_id)
response_string = "You have been removed. You were host and ended game too."
else:
response_string = "You have been removed from your current game. Bye!"
else:
response_string = "You asked to be removed, but we couldn't find you!"
elif (lcase_msg_body.startswith("rules") or lcase_msg_body.startswith("info")):
#send rules to host.
gameManager.sendRules(from_number)
else:
# So it's not a new game, which means this can be one of 4 things
#1. First we should check to see if the person is in a game
usr_status = dbManager.checkIfMdnInGame(from_number)
#if the user is either not found, or found, but not in a game,
#lets see what they've written
if usr_status == 0 or usr_status ==1:
#we assume the person is joining a game, so lets get the first 5 bytes
game_settings = lcase_msg_body.split()
try:
player_alias = game_settings[1]
print("{} requested a max number of players of {}".format(from_number, max_players))
except IndexError:
#if we're here that means they only entered 1 thing, the game ID
player_alias = from_number
print("{} Did not request a maximum number of questions, defaulting to {}".format(from_number, max_players))
response_string = gameManager.handleGameJoin(lcase_msg_body[:5].upper(),usr_status,from_number,player_alias)
#gameManager.sendRules(from_number)
#ITS AT THIS POINT WELL WANT TO CHECK TO SEE HOW MANY PLAYERS ARE NOW IN ONCE IVE Joined
my_game = dbManager.getActiveGameByPlayerNumber(from_number)
max_players = my_game.max_players
my_game_token = my_game.game_id
my_game_id = my_game.id
player_diff = max_players - dbManager.getPlayerCount(my_game_token)
if player_diff == 0 :
#holy shit it is timeee
resp.message(response_string)
response_string = "OHHH YEAH We're READY TO START THE GAME"
gameManager.startGame(my_game_id)
#if we've joined, and we're now the last player, then lets start the game
else:
#lets get this person's game object.
my_game = dbManager.getActiveGameByPlayerNumber(from_number)
max_players = my_game.max_players
my_game_token = my_game.game_id
my_player = dbManager.getPlayerByMdn(from_number)
#if we're here, then there are 3 possibilities for game state
#1. In The Lobby
if my_game.game_state == "lobby" :
# Still waiitng for pepole to join something = 1
player_diff = max_players - dbManager.getPlayerCount(my_game_token)
response_string = "\r Still waiting for {} player(s). Text rules for... rules".format(player_diff)
# Store off their fake answer in a DB with Question #, Game ID, from_number, realAnswer ==false
elif my_game.game_state == "fakeanswers":
#if it is fake answer time, we should be expecting questions here. So we'll want to store off people's answers
# 0. First lets make sure that I haven't already answered this question
print("Player About to Answer - My current q seq: {}".format(str(my_game.current_question_sequence_number)))
if dbManager.checkIfPlayerAlreadyAnswered(my_game.id, my_game.current_question_sequence_number,my_player.id):
print("Player Already Answered - My current q seq: {}".format(str(my_game.current_question_sequence_number)))
response_string = "You already answered!"
else:
#print("Not Yet Answered - My current q seq: {}".format(str(my_game.current_question_sequence_number)))
#Check if person faked the right answer like a jerkface
if gameManager.fakeAnswerIsRealAnswer(my_game.current_question_id, lcase_msg_body):
response_string = "Well done hotshot... You selected the correct answer. Please reply with a FAKE answer..."
print("{} tried faking the correct answer...".format(from_number))
else:
print("")
# 1. Store off fake answer
dbManager.addPlayerAnswer(my_game.id, my_game.current_question_sequence_number,my_player.id,lcase_msg_body)
response_string = ""
messageSender.sendMessage(from_number, "Thanks for your fake answer! Waiting for other Players to enter theirs...")
#2. Check if I'm the last to answer
answer_count = dbManager.checkNumberPlayerAnswers(my_game.id,my_game.current_question_sequence_number)
player_count = dbManager.getPlayerCount(my_game_token)
answers_missing = player_count - answer_count
print("answers missing: " + str(answers_missing))
# If I'm last to answer,
if answers_missing == 0:
gameManager.moveToGuessTime(my_game.id)
elif my_game.game_state == "guesstime" :
#Get a person's Guess and store a person's guess
player_guess = lcase_msg_body
#check if the person already answered
if dbManager.checkIfPlayerAlreadyGuessed(my_game.id, my_game.current_question_sequence_number,my_player.id):
print("Player Already Guessed - My current q seq: {}".format(str(my_game.current_question_sequence_number)))
response_string = "\r You already Guessed!"
else:
#So this person hasn't submitted a valid guess yet...
#0. Lets get my curent player answer
my_player_answer = dbManager.getPlayerAnswer(my_game.id, my_game.current_question_sequence_number,my_player.id)
#If no, give the person Whos response was selected, a point
guessed_player_answer = dbManager.getPlayerAnswerByGuessId(my_game.id, my_game.current_question_sequence_number, player_guess[:1])
#is this person being an ass?
if lcase_msg_body == my_player_answer.fake_answer_guess_id:
response_string = "Come on now, you can't guess your own answer. Please sumbit another answer."
#is this an invalid answer?
elif lcase_msg_body.isdigit() == False:
response_string = "You just need to enter the NUMBER of the guess you wish to make. Try again. Like 1, or maybe 2!"
else:
#1. Finally... we can Store off guess
dbManager.updatePlayerAnswerGuess(my_player_answer.id, player_guess)
#Is this person's guess the right answer?
if dbManager.checkIfGuessRightAnswer(my_game.current_question_id, player_guess):
dbManager.updatePlayerScore(my_player.id, points_for_correct_guess)
messageSender.sendMessage(from_number, "\r Yay you got it correct! +{} points!".format(str(points_for_correct_guess)))
#Is this not even a valid response number?
elif guessed_player_answer is None:
#well shit, we already allowed him to save off his shit. we should undo thats
dbManager.updatePlayerAnswerGuess(my_player_answer.id, None)
someone_just_jerked_around = True
else:
dbManager.updatePlayerScore(guessed_player_answer.player_id, points_for_fakeout)
#message guesser saying "WRONG"
messageSender.sendMessage(from_number, "\r WRONG! You guessed someone else's fake answer!")
guessed_player_answer_mdn = dbManager.getPlayerMdnById(guessed_player_answer.player_id)
guessed_player_alias = dbManager.getPlayerById(guessed_player_answer.player_id)
#message faker saying someone guessed your shit! +x Points
#messageSender.sendMessage(guessed_player_answer_mdn, "HAHAHAHA. {} guessed your answer! +{} for fakeout!".format(from_number,points_for_fakeout))
messageSender.sendMessage(guessed_player_answer_mdn, "HAHAHAHA. {} guessed your answer! +{} for fakeout!".format(guessed_player_alias.player_name,points_for_fakeout))
if someone_just_jerked_around:
response_string = "You selected an invalid answer. Sry Bro"
else:
#now lets check whether i was the last to answer, then send scoreboard, and shift Gamestate
num_guesses = dbManager.getTotalGuesses(my_game.id,my_game.current_question_sequence_number)
total_players = dbManager.getPlayerCount(my_game_token)
if num_guesses == total_players:
#its time to change game state and send out results of the round
gameManager.sendResults(my_game.id)
game_continuing = gameManager.nextRound(my_game.id)
if not game_continuing:
response_string = "GAME OVER"
else:
response_string = ""
else:
#do nothing really - we're still waiting on other people
response_string = "Waiting for others to guess..."
else:
response_string = ""
return("<h1>Welcome to Twibbage</h1><br/><p>To play, text newgame q p to the number, whwere q is the number of quesitons, and p is the number of players you want in a game.</p>")
#finally, respond.
resp.message(response_string)
return str(resp)
if __name__ == "__main__":
app.run(debug=True)
| 59.475836 | 198 | 0.586349 | from flask import Flask, request
from twilio.twiml.messaging_response import MessagingResponse
from gameIdGenerator import createNewGameId
from models import Game, Player, Player_Answers, Question
from flask_sqlalchemy import SQLAlchemy
from datetime import datetime
import dbManager
import logging
import gameManager
import messageSender
import os
from os.path import join, dirname
from dotenv import load_dotenv
# Application setup: Flask app, environment-driven database configuration.
app = Flask(__name__)
# Load settings (e.g. PRODUCTION_DATABASE_URL) from the adjacent .env file.
dotenv_path = join(dirname(__file__), '.env')
load_dotenv(dotenv_path)
PRODUCTION_DATABASE_URL = os.environ.get("PRODUCTION_DATABASE_URL")
app.config['SQLALCHEMY_DATABASE_URI'] = PRODUCTION_DATABASE_URL
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
db = SQLAlchemy(app)
@app.route("/", methods=['GET', 'POST'])
def twibbage_game():
    """Single Twilio SMS webhook driving the whole game state machine.

    Incoming texts are routed to: new-game creation ("newgame"), leaving a
    game ("exitgame"), rules ("rules"/"info"), joining a game, or in-game
    handling keyed on the game's state ("lobby", "fakeanswers", "guesstime").
    Requests without From/Body (e.g. plain GET) get an HTML welcome page.
    """
    from_number = request.values.get('From', None)
    msg_body = request.values.get('Body', None)
    lcase_msg_body = ''
    if from_number is not None and msg_body is not None:
        # NOTE(review): ``unicode`` exists only on Python 2; this line raises
        # NameError on Python 3 — confirm the intended runtime.
        lcase_msg_body = unicode.encode(msg_body.lower())
        lcase_msg_body = lcase_msg_body.strip()
        dotenv_path = join(dirname(__file__), '.env')
        load_dotenv(dotenv_path)
        ACCOUNT_SID = os.environ.get("TWILIO_ACCOUNT_SID")
        AUTH_TOKEN = os.environ.get("TWILIO_AUTH_TOKEN")
        # NOTE(review): several of the defaults below (already_in_game,
        # current_question_id, game_state, ACCOUNT_SID/AUTH_TOKEN) appear unused.
        already_in_game = True
        current_question_id = 1
        max_questions = 4
        max_players = 2
        game_state = "fakeanswertime"
        response_string = ""
        points_for_correct_guess = 200
        points_for_fakeout = 100
        someone_just_jerked_around = False
        resp = MessagingResponse()
        if lcase_msg_body.startswith("newgame"):
            if dbManager.checkIfMdnInGame(from_number) == 2:
                response_string = "You're already in a game. To exit that game, respond with \"exitgame\""
            else:
                #lets parse the message and get the max_players and max questions
                game_settings = msg_body.split()
                player_alias = str(from_number)
                try:
                    max_players = int(game_settings[1])
                    print("{} requested a max number of players of {}".format(from_number, max_players))
                except IndexError:
                    max_players = 3
                    print("{} Did not request a maximum number of questions, defaulting to {}".format(from_number, max_players))
                try:
                    max_questions = int(game_settings[2])
                    print("{} requested a max number of questions of {}".format(from_number, max_questions))
                except IndexError:
                    max_questions = 3
                    print("{} Did not request a maximum number of questions, defaulting to {}".format(from_number, max_questions))
                try:
                    player_alias = game_settings[3]
                    print("{} requested a new name of {}".format(from_number, player_alias))
                except IndexError:
                    player_alias = str(from_number)
                    print("{} Did not request an alias... defaulting to {}".format(from_number, from_number))
                #max_questions = msg_body[9:9]
                #max_players = msg_body[11:]
                #createGame(from_number, num_questions)
                new_game = gameManager.createGame(from_number, max_questions, max_players, player_alias)
                # creates a new game object, returns
                #gameId = "A1B2"
                response_string = "\r Starting a new game... \r - Game ID: {} \r - {} Questions \r - {} Players. " \
                    "\r Tell your so-called friends to text {} to this number to join. Text rules for... rules.".format(new_game, max_questions, max_players, new_game)
        elif lcase_msg_body.startswith("exitgame"):
            print("********** {} requested to exit the game. Removing player from game.".format(from_number))
            player_id = gameManager.removePlayerFromGame(from_number)
            # If the leaver hosted an active game, the whole game ends with them.
            if player_id != 0:
                if dbManager.isActiveHost(player_id):
                    print("********** {} was game host. Fully ending the game.".format(from_number))
                    ended_game = gameManager.endGameByPlayer(player_id)
                    response_string = "You have been removed. You were host and ended game too."
                else:
                    response_string = "You have been removed from your current game. Bye!"
            else:
                response_string = "You asked to be removed, but we couldn't find you!"
        elif (lcase_msg_body.startswith("rules") or lcase_msg_body.startswith("info")):
            gameManager.sendRules(from_number)
        else:
            # Not a command: either a join request (status 0/1) or in-game traffic.
            usr_status = dbManager.checkIfMdnInGame(from_number)
            if usr_status == 0 or usr_status ==1:
                #we assume the person is joining a game, so lets get the first 5 bytes
                game_settings = lcase_msg_body.split()
                try:
                    player_alias = game_settings[1]
                    print("{} requested a max number of players of {}".format(from_number, max_players))
                except IndexError:
                    #if we're here that means they only entered 1 thing, the game ID
                    player_alias = from_number
                    print("{} Did not request a maximum number of questions, defaulting to {}".format(from_number, max_players))
                response_string = gameManager.handleGameJoin(lcase_msg_body[:5].upper(),usr_status,from_number,player_alias)
                my_game = dbManager.getActiveGameByPlayerNumber(from_number)
                max_players = my_game.max_players
                my_game_token = my_game.game_id
                my_game_id = my_game.id
                player_diff = max_players - dbManager.getPlayerCount(my_game_token)
                # Joining as the last missing player starts the game immediately.
                if player_diff == 0 :
                    resp.message(response_string)
                    response_string = "OHHH YEAH We're READY TO START THE GAME"
                    gameManager.startGame(my_game_id)
            else:
                #lets get this person's game object.
                my_game = dbManager.getActiveGameByPlayerNumber(from_number)
                max_players = my_game.max_players
                my_game_token = my_game.game_id
                my_player = dbManager.getPlayerByMdn(from_number)
                #1. In The Lobby
                if my_game.game_state == "lobby" :
                    # Still waiitng for pepole to join something = 1
                    player_diff = max_players - dbManager.getPlayerCount(my_game_token)
                    response_string = "\r Still waiting for {} player(s). Text rules for... rules".format(player_diff)
                # Store off their fake answer in a DB with Question #, Game ID, from_number, realAnswer ==false
                elif my_game.game_state == "fakeanswers":
                    #if it is fake answer time, we should be expecting questions here. So we'll want to store off people's answers
                    # 0. First lets make sure that I haven't already answered this question
                    print("Player About to Answer - My current q seq: {}".format(str(my_game.current_question_sequence_number)))
                    if dbManager.checkIfPlayerAlreadyAnswered(my_game.id, my_game.current_question_sequence_number,my_player.id):
                        print("Player Already Answered - My current q seq: {}".format(str(my_game.current_question_sequence_number)))
                        response_string = "You already answered!"
                    else:
                        # Reject a "fake" answer that is actually the real answer.
                        if gameManager.fakeAnswerIsRealAnswer(my_game.current_question_id, lcase_msg_body):
                            response_string = "Well done hotshot... You selected the correct answer. Please reply with a FAKE answer..."
                            print("{} tried faking the correct answer...".format(from_number))
                        else:
                            print("")
                            dbManager.addPlayerAnswer(my_game.id, my_game.current_question_sequence_number,my_player.id,lcase_msg_body)
                            response_string = ""
                            messageSender.sendMessage(from_number, "Thanks for your fake answer! Waiting for other Players to enter theirs...")
                            answer_count = dbManager.checkNumberPlayerAnswers(my_game.id,my_game.current_question_sequence_number)
                            player_count = dbManager.getPlayerCount(my_game_token)
                            answers_missing = player_count - answer_count
                            print("answers missing: " + str(answers_missing))
                            # If I'm last to answer,
                            if answers_missing == 0:
                                gameManager.moveToGuessTime(my_game.id)
                elif my_game.game_state == "guesstime" :
                    player_guess = lcase_msg_body
                    if dbManager.checkIfPlayerAlreadyGuessed(my_game.id, my_game.current_question_sequence_number,my_player.id):
                        print("Player Already Guessed - My current q seq: {}".format(str(my_game.current_question_sequence_number)))
                        response_string = "\r You already Guessed!"
                    else:
                        #0. Lets get my curent player answer
                        my_player_answer = dbManager.getPlayerAnswer(my_game.id, my_game.current_question_sequence_number,my_player.id)
                        #If no, give the person Whos response was selected, a point
                        guessed_player_answer = dbManager.getPlayerAnswerByGuessId(my_game.id, my_game.current_question_sequence_number, player_guess[:1])
                        #is this person being an ass?
                        if lcase_msg_body == my_player_answer.fake_answer_guess_id:
                            response_string = "Come on now, you can't guess your own answer. Please sumbit another answer."
                        elif lcase_msg_body.isdigit() == False:
                            response_string = "You just need to enter the NUMBER of the guess you wish to make. Try again. Like 1, or maybe 2!"
                        else:
                            dbManager.updatePlayerAnswerGuess(my_player_answer.id, player_guess)
                            if dbManager.checkIfGuessRightAnswer(my_game.current_question_id, player_guess):
                                dbManager.updatePlayerScore(my_player.id, points_for_correct_guess)
                                messageSender.sendMessage(from_number, "\r Yay you got it correct! +{} points!".format(str(points_for_correct_guess)))
                            #Is this not even a valid response number?
                            elif guessed_player_answer is None:
                                #well shit, we already allowed him to save off his shit. we should undo thats
                                dbManager.updatePlayerAnswerGuess(my_player_answer.id, None)
                                someone_just_jerked_around = True
                            else:
                                dbManager.updatePlayerScore(guessed_player_answer.player_id, points_for_fakeout)
                                #message guesser saying "WRONG"
                                messageSender.sendMessage(from_number, "\r WRONG! You guessed someone else's fake answer!")
                                guessed_player_answer_mdn = dbManager.getPlayerMdnById(guessed_player_answer.player_id)
                                guessed_player_alias = dbManager.getPlayerById(guessed_player_answer.player_id)
                                messageSender.sendMessage(guessed_player_answer_mdn, "HAHAHAHA. {} guessed your answer! +{} for fakeout!".format(guessed_player_alias.player_name,points_for_fakeout))
                            if someone_just_jerked_around:
                                response_string = "You selected an invalid answer. Sry Bro"
                            else:
                                # Last guesser triggers results and the next round.
                                num_guesses = dbManager.getTotalGuesses(my_game.id,my_game.current_question_sequence_number)
                                total_players = dbManager.getPlayerCount(my_game_token)
                                if num_guesses == total_players:
                                    gameManager.sendResults(my_game.id)
                                    game_continuing = gameManager.nextRound(my_game.id)
                                    if not game_continuing:
                                        response_string = "GAME OVER"
                                    else:
                                        response_string = ""
                                else:
                                    response_string = "Waiting for others to guess..."
    else:
        response_string = ""
        return("<h1>Welcome to Twibbage</h1><br/><p>To play, text newgame q p to the number, whwere q is the number of quesitons, and p is the number of players you want in a game.</p>")
    #finally, respond.
    resp.message(response_string)
    return str(resp)
if __name__ == "__main__":
app.run(debug=True)
| true | true |
f71e08b870a9f5ce84946e6c88096ad74de04bfa | 478 | py | Python | lectures/07-python-dictionaries/examples/dna9.py | mattmiller899/biosys-analytics | ab24a4c7206ed9a865e896daa57cee3c4e62df1f | [
"MIT"
] | 4 | 2019-01-10T17:12:37.000Z | 2019-03-01T18:25:07.000Z | lectures/07-python-dictionaries/examples/dna9.py | mattmiller899/biosys-analytics | ab24a4c7206ed9a865e896daa57cee3c4e62df1f | [
"MIT"
] | null | null | null | lectures/07-python-dictionaries/examples/dna9.py | mattmiller899/biosys-analytics | ab24a4c7206ed9a865e896daa57cee3c4e62df1f | [
"MIT"
] | 33 | 2019-01-05T17:03:47.000Z | 2019-11-11T20:48:24.000Z | #!/usr/bin/env python3
"""Tetra-nucleotide counter"""
import sys
import os
from collections import defaultdict
args = sys.argv[1:]
if len(args) != 1:
print('Usage: {} DNA'.format(os.path.basename(sys.argv[0])))
sys.exit(1)
arg = args[0]
dna = ''
if os.path.isfile(arg):
dna = ''.join(open(arg).read().splitlines())
else:
dna = arg
count = defaultdict(int)
for base in dna.lower():
count[base] += 1
print(' '.join(map(lambda b: str(count[b]), "acgt")))
| 18.384615 | 64 | 0.631799 |
import sys
import os
from collections import defaultdict

# Expect exactly one positional argument: a DNA string, or a file holding one.
args = sys.argv[1:]
if len(args) != 1:
    print('Usage: {} DNA'.format(os.path.basename(sys.argv[0])))
    sys.exit(1)

arg = args[0]
# If the argument names an existing file, read the sequence from it
# (dropping newlines); otherwise treat the argument itself as the sequence.
dna = ''.join(open(arg).read().splitlines()) if os.path.isfile(arg) else arg

# Tally every character case-insensitively; missing bases default to 0.
count = defaultdict(int)
for base in dna.lower():
    count[base] += 1

# Report counts for the four bases in fixed a/c/g/t order.
print(' '.join(str(count[b]) for b in "acgt"))
| true | true |
f71e096d2762faf84d33b6662ba06af6325fa085 | 27,311 | py | Python | manim/mobject/opengl_geometry.py | aburousan/manim | c11b649e9aed34976844e6a131fb12e2a30c7bc8 | [
"MIT"
] | null | null | null | manim/mobject/opengl_geometry.py | aburousan/manim | c11b649e9aed34976844e6a131fb12e2a30c7bc8 | [
"MIT"
] | null | null | null | manim/mobject/opengl_geometry.py | aburousan/manim | c11b649e9aed34976844e6a131fb12e2a30c7bc8 | [
"MIT"
] | null | null | null | import numpy as np
from .. import logger
from ..constants import *
from ..mobject.mobject import Mobject
from ..mobject.types.opengl_vectorized_mobject import (
OpenGLDashedVMobject,
OpenGLVGroup,
OpenGLVMobject,
)
from ..utils.color import *
from ..utils.deprecation import deprecated_params
from ..utils.iterables import adjacent_n_tuples, adjacent_pairs
from ..utils.simple_functions import clip, fdiv
from ..utils.space_ops import (
angle_between_vectors,
angle_of_vector,
compass_directions,
find_intersection,
normalize,
rotate_vector,
rotation_matrix_transpose,
)
# Default sizing constants (in scene units) for dots, dash segments and arrow tips.
DEFAULT_DOT_RADIUS = 0.08
DEFAULT_SMALL_DOT_RADIUS = 0.04
DEFAULT_DASH_LENGTH = 0.05
DEFAULT_ARROW_TIP_LENGTH = 0.35
DEFAULT_ARROW_TIP_WIDTH = 0.35
class OpenGLTipableVMobject(OpenGLVMobject):
    """
    Meant for shared functionality between Arc and Line.
    Functionality can be classified broadly into these groups:

        * Adding, Creating, Modifying tips
            - add_tip calls create_tip, before pushing the new tip
                into the TipableVMobject's list of submobjects
            - stylistic and positional configuration

        * Checking for tips
            - Boolean checks for whether the TipableVMobject has a tip
                and a starting tip

        * Getters
            - Straightforward accessors, returning information pertaining
                to the TipableVMobject instance's tip(s), its length etc
    """

    # Adding, Creating, Modifying tips
    def __init__(
        self,
        tip_length=DEFAULT_ARROW_TIP_LENGTH,
        normal_vector=OUT,
        tip_config=None,
        **kwargs
    ):
        self.tip_length = tip_length
        self.normal_vector = normal_vector
        # Bug fix: ``tip_config`` previously defaulted to a mutable ``{}``,
        # which would be shared across every instance; use a None sentinel.
        self.tip_config = {} if tip_config is None else tip_config
        OpenGLVMobject.__init__(self, **kwargs)

    def add_tip(self, at_start=False, **kwargs):
        """
        Adds a tip to the TipableVMobject instance, recognising
        that the endpoints might need to be switched if it's
        a 'starting tip' or not.
        """
        tip = self.create_tip(at_start, **kwargs)
        self.reset_endpoints_based_on_tip(tip, at_start)
        self.asign_tip_attr(tip, at_start)
        self.add(tip)
        return self

    def create_tip(self, at_start=False, **kwargs):
        """
        Stylises the tip, positions it spatially, and returns
        the newly instantiated tip to the caller.
        """
        tip = self.get_unpositioned_tip(**kwargs)
        self.position_tip(tip, at_start)
        return tip

    def get_unpositioned_tip(self, **kwargs):
        """
        Returns a tip that has been stylistically configured,
        but has not yet been given a position in space.
        """
        # Per-call kwargs override the instance-level tip_config.
        config = {}
        config.update(self.tip_config)
        config.update(kwargs)
        return OpenGLArrowTip(**config)

    def position_tip(self, tip, at_start=False):
        # Last two control points, defining both
        # the end, and the tangency direction
        if at_start:
            anchor = self.get_start()
            handle = self.get_first_handle()
        else:
            handle = self.get_last_handle()
            anchor = self.get_end()
        tip.rotate(angle_of_vector(handle - anchor) - PI - tip.get_angle())
        tip.shift(anchor - tip.get_tip_point())
        return tip

    def reset_endpoints_based_on_tip(self, tip, at_start):
        """Shorten the curve so it ends at the tip's base instead of its point."""
        if self.get_length() == 0:
            # Zero length, put_start_and_end_on wouldn't work
            return self

        if at_start:
            start = tip.get_base()
            end = self.get_end()
        else:
            start = self.get_start()
            end = tip.get_base()
        self.put_start_and_end_on(start, end)
        return self

    def asign_tip_attr(self, tip, at_start):
        # NOTE: the misspelled name is kept for backward compatibility.
        if at_start:
            self.start_tip = tip
        else:
            self.tip = tip
        return self

    # Checking for tips
    def has_tip(self):
        return hasattr(self, "tip") and self.tip in self

    def has_start_tip(self):
        return hasattr(self, "start_tip") and self.start_tip in self

    # Getters
    def pop_tips(self):
        """Remove and return both tips (as a VGroup), restoring the endpoints."""
        start, end = self.get_start_and_end()
        result = OpenGLVGroup()
        if self.has_tip():
            result.add(self.tip)
            self.remove(self.tip)
        if self.has_start_tip():
            result.add(self.start_tip)
            self.remove(self.start_tip)
        self.put_start_and_end_on(start, end)
        return result

    def get_tips(self):
        """
        Returns a VGroup (collection of VMobjects) containing
        the TipableVMObject instance's tips.
        """
        result = OpenGLVGroup()
        if hasattr(self, "tip"):
            result.add(self.tip)
        if hasattr(self, "start_tip"):
            result.add(self.start_tip)
        return result

    def get_tip(self):
        """Returns the TipableVMobject instance's (first) tip,
        otherwise throws an exception."""
        tips = self.get_tips()
        if len(tips) == 0:
            raise Exception("tip not found")
        else:
            return tips[0]

    def get_default_tip_length(self):
        return self.tip_length

    def get_first_handle(self):
        return self.points[1]

    def get_last_handle(self):
        return self.points[-2]

    def get_end(self):
        # With a tip attached the visual end is where the tip begins.
        if self.has_tip():
            return self.tip.get_start()
        else:
            return OpenGLVMobject.get_end(self)

    def get_start(self):
        if self.has_start_tip():
            return self.start_tip.get_start()
        else:
            return OpenGLVMobject.get_start(self)

    def get_length(self):
        """Straight-line distance between start and end points."""
        start, end = self.get_start_and_end()
        return np.linalg.norm(start - end)
class OpenGLArc(OpenGLTipableVMobject):
    """A circular arc: ``angle`` radians of a circle of ``radius``
    centered at ``arc_center``, beginning at ``start_angle``."""

    def __init__(
        self,
        start_angle=0,
        angle=TAU / 4,
        radius=1.0,
        n_components=8,
        arc_center=ORIGIN,
        **kwargs
    ):
        self.start_angle = start_angle
        self.angle = angle
        self.radius = radius
        self.n_components = n_components
        self.arc_center = arc_center
        # Bug fix: this previously called ``super().__init__(self, **kwargs)``,
        # which handed the mobject itself to the parent initializer as the
        # ``tip_length`` positional argument.
        super().__init__(**kwargs)
        self.orientation = -1

    def init_points(self):
        # Build a unit arc around the origin, then scale/translate into place.
        self.set_points(
            OpenGLArc.create_quadratic_bezier_points(
                angle=self.angle,
                start_angle=self.start_angle,
                n_components=self.n_components,
            )
        )
        # To maintain proper orientation for fill shaders.
        self.scale(self.radius, about_point=ORIGIN)
        self.shift(self.arc_center)

    @staticmethod
    def create_quadratic_bezier_points(angle, start_angle=0, n_components=8):
        """Return ``3 * n_components`` quadratic-Bezier control points
        (anchor, handle, anchor per segment) tracing the unit-circle arc."""
        samples = np.array(
            [
                [np.cos(a), np.sin(a), 0]
                for a in np.linspace(
                    start_angle,
                    start_angle + angle,
                    2 * n_components + 1,
                )
            ]
        )
        theta = angle / n_components
        # Push the midpoint samples outward so each quadratic hugs the circle.
        samples[1::2] /= np.cos(theta / 2)

        points = np.zeros((3 * n_components, 3))
        points[0::3] = samples[0:-1:2]
        points[1::3] = samples[1::2]
        points[2::3] = samples[2::2]
        return points

    def get_arc_center(self):
        """
        Looks at the normals to the first two
        anchors, and finds their intersection points
        """
        # First two anchors and handles
        a1, h, a2 = self.points[:3]
        # Tangent vectors
        t1 = h - a1
        t2 = h - a2
        # Normals
        n1 = rotate_vector(t1, TAU / 4)
        n2 = rotate_vector(t2, TAU / 4)
        return find_intersection(a1, n1, a2, n2)

    def get_start_angle(self):
        angle = angle_of_vector(self.get_start() - self.get_arc_center())
        return angle % TAU

    def get_stop_angle(self):
        angle = angle_of_vector(self.get_end() - self.get_arc_center())
        return angle % TAU

    def move_arc_center_to(self, point):
        self.shift(point - self.get_arc_center())
        return self
class OpenGLArcBetweenPoints(OpenGLArc):
    """An arc subtending ``angle`` radians whose endpoints are moved onto
    ``start`` and ``end``."""

    def __init__(self, start, end, angle=TAU / 4, **kwargs):
        super().__init__(angle=angle, **kwargs)
        if angle == 0:
            # A zero-angle "arc" degenerates to a straight segment.
            self.set_points_as_corners([LEFT, RIGHT])
        self.put_start_and_end_on(start, end)
class OpenGLCurvedArrow(OpenGLArcBetweenPoints):
    """An arc between two points, finished with an arrowhead at its end."""

    def __init__(self, start_point, end_point, **kwargs):
        super().__init__(start_point, end_point, **kwargs)
        self.add_tip()
class OpenGLCurvedDoubleArrow(OpenGLCurvedArrow):
    """A curved arrow carrying arrowheads on both ends."""

    def __init__(self, start_point, end_point, **kwargs):
        super().__init__(start_point, end_point, **kwargs)
        self.add_tip(at_start=True)
class OpenGLCircle(OpenGLArc):
    """A full circle: an arc spanning all TAU radians."""

    def __init__(self, color=RED, **kwargs):
        OpenGLArc.__init__(self, 0, TAU, color=color, **kwargs)

    def surround(self, mobject, dim_to_match=0, stretch=False, buff=MED_SMALL_BUFF):
        # Ignores dim_to_match and stretch; result will always be a circle
        # TODO: Perhaps create an ellipse class to handle single-dimension stretching
        self.replace(mobject, dim_to_match, stretch)
        width, height = self.get_width(), self.get_height()
        self.stretch((width + 2 * buff) / width, 0)
        self.stretch((height + 2 * buff) / height, 1)

    def point_at_angle(self, angle):
        """Return the point on the circle at the given absolute angle."""
        proportion = (angle - self.get_start_angle()) / TAU
        return self.point_from_proportion(proportion)
class OpenGLDot(OpenGLCircle):
    """A small filled circle marking a point; drawn with fill only (no stroke)."""

    def __init__(
        self,
        point=ORIGIN,
        radius=DEFAULT_DOT_RADIUS,
        stroke_width=0,
        fill_opacity=1.0,
        color=WHITE,
        **kwargs
    ):
        super().__init__(
            arc_center=point,
            radius=radius,
            stroke_width=stroke_width,
            fill_opacity=fill_opacity,
            color=color,
            **kwargs
        )
class OpenGLEllipse(OpenGLCircle):
    """A circle stretched independently to the given ``width`` and ``height``."""

    def __init__(self, width=2, height=1, **kwargs):
        super().__init__(**kwargs)
        self.set_width(width, stretch=True)
        self.set_height(height, stretch=True)
class OpenGLAnnularSector(OpenGLArc):
    """The region between two concentric arcs (an annulus slice)."""

    def __init__(
        self,
        inner_radius=1,
        outer_radius=2,
        angle=TAU / 4,
        start_angle=0,
        fill_opacity=1,
        stroke_width=0,
        color=WHITE,
        **kwargs
    ):
        self.inner_radius = inner_radius
        self.outer_radius = outer_radius
        OpenGLArc.__init__(
            self,
            start_angle=start_angle,
            angle=angle,
            fill_opacity=fill_opacity,
            stroke_width=stroke_width,
            color=color,
            **kwargs
        )

    def init_points(self):
        # Build inner and outer arcs sharing angle/center but differing radius.
        inner_arc, outer_arc = (
            OpenGLArc(
                start_angle=self.start_angle,
                angle=self.angle,
                radius=radius,
                arc_center=self.arc_center,
            )
            for radius in (self.inner_radius, self.outer_radius)
        )
        # Reverse the outer arc so the boundary is traced as one closed loop:
        # inner arc forward, jump outward, outer arc backward, jump inward.
        outer_arc.reverse_points()
        self.append_points(inner_arc.points)
        self.add_line_to(outer_arc.points[0])
        self.append_points(outer_arc.points)
        self.add_line_to(inner_arc.points[0])
class OpenGLSector(OpenGLAnnularSector):
    """A "pie slice": an annular sector whose inner radius defaults to 0."""

    def __init__(self, outer_radius=1, inner_radius=0, **kwargs):
        super().__init__(
            inner_radius=inner_radius, outer_radius=outer_radius, **kwargs
        )
class OpenGLAnnulus(OpenGLCircle):
    """A ring: the filled region between two concentric circles."""

    def __init__(
        self,
        inner_radius=1,
        outer_radius=2,
        fill_opacity=1,
        stroke_width=0,
        color=WHITE,
        mark_paths_closed=False,
        **kwargs
    ):
        self.mark_paths_closed = mark_paths_closed  # is this even used?
        self.inner_radius = inner_radius
        self.outer_radius = outer_radius
        OpenGLCircle.__init__(
            self,
            fill_opacity=fill_opacity,
            stroke_width=stroke_width,
            color=color,
            **kwargs
        )

    def init_points(self):
        self.radius = self.outer_radius
        outer_circle = OpenGLCircle(radius=self.outer_radius)
        inner_circle = OpenGLCircle(radius=self.inner_radius)
        # Reverse the inner circle's orientation so the fill treats it as a hole.
        inner_circle.reverse_points()
        self.append_points(outer_circle.points)
        self.append_points(inner_circle.points)
        self.shift(self.arc_center)
class OpenGLLine(OpenGLTipableVMobject):
    """A segment from ``start`` to ``end``, optionally bent into an arc of
    ``path_arc`` radians, with ``buff`` trimmed off both ends."""

    def __init__(self, start=LEFT, end=RIGHT, buff=0, path_arc=0, **kwargs):
        self.dim = 3
        self.buff = buff
        self.path_arc = path_arc
        self.set_start_and_end_attrs(start, end)
        super().__init__(**kwargs)

    def init_points(self):
        self.set_points_by_ends(self.start, self.end, self.buff, self.path_arc)

    def set_points_by_ends(self, start, end, buff=0, path_arc=0):
        """Rebuild the points as a straight segment, or as an arc if
        ``path_arc`` is nonzero, then trim the instance's buff."""
        if path_arc:
            self.set_points(OpenGLArc.create_quadratic_bezier_points(path_arc))
            self.put_start_and_end_on(start, end)
        else:
            self.set_points_as_corners([start, end])
        self.account_for_buff(self.buff)

    def set_path_arc(self, new_value):
        self.path_arc = new_value
        self.init_points()

    def account_for_buff(self, buff):
        """Shrink the line inward by ``buff`` at each end (no-op if too short)."""
        if buff == 0:
            return

        if self.path_arc == 0:
            length = self.get_length()
        else:
            length = self.get_arc_length()

        if length < 2 * buff:
            return
        buff_prop = buff / length
        self.pointwise_become_partial(self, buff_prop, 1 - buff_prop)
        return self

    def set_start_and_end_attrs(self, start, end):
        # If either start or end are Mobjects, this gives their centers
        rough_start = self.pointify(start)
        rough_end = self.pointify(end)
        vect = normalize(rough_end - rough_start)
        # Now that we know the direction between them,
        # we can find the appropriate boundary point from
        # start and end, if they're mobjects
        self.start = self.pointify(start, vect) + self.buff * vect
        self.end = self.pointify(end, -vect) - self.buff * vect

    def pointify(self, mob_or_point, direction=None):
        """
        Take an argument passed into Line (or subclass) and turn
        it into a 3d point.
        """
        if isinstance(mob_or_point, Mobject):
            mob = mob_or_point
            if direction is None:
                return mob.get_center()
            else:
                return mob.get_continuous_bounding_box_point(direction)
        else:
            point = mob_or_point
            result = np.zeros(self.dim)
            result[: len(point)] = point
            return result

    def put_start_and_end_on(self, start, end):
        curr_start, curr_end = self.get_start_and_end()
        if (curr_start == curr_end).all():
            # Degenerate (zero-length) line: regenerate the points first.
            # Bug fix: ``self.path_arc`` was previously passed positionally,
            # landing in the ``buff`` parameter instead of ``path_arc``.
            self.set_points_by_ends(start, end, path_arc=self.path_arc)
        return super().put_start_and_end_on(start, end)

    def get_vector(self):
        return self.get_end() - self.get_start()

    def get_unit_vector(self):
        return normalize(self.get_vector())

    def get_angle(self):
        return angle_of_vector(self.get_vector())

    def get_projection(self, point):
        """
        Return projection of a point onto the line
        """
        unit_vect = self.get_unit_vector()
        start = self.get_start()
        return start + np.dot(point - start, unit_vect) * unit_vect

    def get_slope(self):
        return np.tan(self.get_angle())

    def set_angle(self, angle, about_point=None):
        if about_point is None:
            about_point = self.get_start()
        self.rotate(
            angle - self.get_angle(),
            about_point=about_point,
        )
        return self

    def set_length(self, length):
        self.scale(length / self.get_length())
class OpenGLDashedLine(OpenGLLine):
    """A line drawn as a sequence of dash submobjects."""

    @deprecated_params(
        params="positive_space_ratio dash_spacing",
        since="v0.9.0",
        message="Use dashed_ratio instead of positive_space_ratio.",
    )
    def __init__(
        self, *args, dash_length=DEFAULT_DASH_LENGTH, dashed_ratio=0.5, **kwargs
    ):
        # Simplify with removal of deprecation warning
        self.dash_spacing = kwargs.pop("dash_spacing", None)  # Unused param
        # Deprecated positive_space_ratio, when given, overrides dashed_ratio.
        self.dashed_ratio = kwargs.pop("positive_space_ratio", None) or dashed_ratio
        self.dash_length = dash_length
        super().__init__(*args, **kwargs)
        dashed_ratio = self.dashed_ratio
        num_dashes = self.calculate_num_dashes(dashed_ratio)
        # Replace this mobject's own points with a group of dash submobjects.
        dashes = OpenGLDashedVMobject(
            self, num_dashes=num_dashes, dashed_ratio=dashed_ratio
        )
        self.clear_points()
        self.add(*dashes)

    def calculate_num_dashes(self, dashed_ratio):
        """Number of dashes needed to cover the line at the given ratio (min 2)."""
        return max(
            2, int(np.ceil((self.get_length() / self.dash_length) * dashed_ratio))
        )

    def get_start(self):
        # Delegate to the first dash when dashes exist.
        if len(self.submobjects) > 0:
            return self.submobjects[0].get_start()
        else:
            return OpenGLLine.get_start(self)

    def get_end(self):
        # Delegate to the last dash when dashes exist.
        if len(self.submobjects) > 0:
            return self.submobjects[-1].get_end()
        else:
            return OpenGLLine.get_end(self)

    def get_first_handle(self):
        return self.submobjects[0].points[1]

    def get_last_handle(self):
        return self.submobjects[-1].points[-2]
class OpenGLTangentLine(OpenGLLine):
    """A line tangent to ``vmob`` at parameter ``alpha`` (0-1), scaled to ``length``."""

    def __init__(self, vmob, alpha, length=1, d_alpha=1e-6, **kwargs):
        self.length = length
        self.d_alpha = d_alpha
        # Sample two curve points straddling alpha to approximate the tangent
        # direction; clip so both parameters stay within [0, 1].
        lower = clip(alpha - d_alpha, 0, 1)
        upper = clip(alpha + d_alpha, 0, 1)
        super().__init__(vmob.pfp(lower), vmob.pfp(upper), **kwargs)
        self.scale(self.length / self.get_length())
class OpenGLElbow(OpenGLVMobject):
    """An L-shaped corner marker of the given ``width``, rotated by ``angle``
    about the origin (commonly used to mark right angles)."""

    def __init__(self, width=0.2, angle=0, **kwargs):
        self.angle = angle
        # Bug fix: previously ``super().__init__(self, **kwargs)`` handed the
        # mobject itself to the parent initializer as an extra positional arg.
        super().__init__(**kwargs)
        self.set_points_as_corners([UP, UP + RIGHT, RIGHT])
        self.set_width(width, about_point=ORIGIN)
        self.rotate(self.angle, about_point=ORIGIN)
class OpenGLArrow(OpenGLLine):
    """A filled arrow (polygonal stem plus triangular tip) from ``start`` to ``end``.

    Stem thickness and tip length are capped relative to the total length by
    ``max_width_to_length_ratio`` and ``max_tip_length_to_length_ratio`` so that
    short arrows stay well-proportioned.
    """
    def __init__(
        self,
        start=LEFT,
        end=RIGHT,
        path_arc=0,
        fill_color=GREY_A,
        fill_opacity=1,
        stroke_width=0,
        buff=MED_SMALL_BUFF,
        thickness=0.05,
        tip_width_ratio=5,
        tip_angle=PI / 3,
        max_tip_length_to_length_ratio=0.5,
        max_width_to_length_ratio=0.1,
        **kwargs
    ):
        self.thickness = thickness
        self.tip_width_ratio = tip_width_ratio
        self.tip_angle = tip_angle
        self.max_tip_length_to_length_ratio = max_tip_length_to_length_ratio
        self.max_width_to_length_ratio = max_width_to_length_ratio
        super().__init__(
            start=start,
            end=end,
            buff=buff,
            path_arc=path_arc,
            fill_color=fill_color,
            fill_opacity=fill_opacity,
            stroke_width=stroke_width,
            **kwargs
        )
    def set_points_by_ends(self, start, end, buff=0, path_arc=0):
        """Rebuild the arrow geometry for the given endpoints.

        The shape is constructed along the x-axis first, then rotated and
        shifted into place.
        """
        # Find the right tip length and thickness
        vect = end - start
        length = max(np.linalg.norm(vect), 1e-8)
        thickness = self.thickness
        w_ratio = fdiv(self.max_width_to_length_ratio, fdiv(thickness, length))
        if w_ratio < 1:
            thickness *= w_ratio
        tip_width = self.tip_width_ratio * thickness
        tip_length = tip_width / (2 * np.tan(self.tip_angle / 2))
        t_ratio = fdiv(self.max_tip_length_to_length_ratio, fdiv(tip_length, length))
        if t_ratio < 1:
            tip_length *= t_ratio
            tip_width *= t_ratio
        # Find points for the stem
        if path_arc == 0:
            points1 = (length - tip_length) * np.array([RIGHT, 0.5 * RIGHT, ORIGIN])
            points1 += thickness * UP / 2
            points2 = points1[::-1] + thickness * DOWN
        else:
            # Solve for radius so that the tip-to-tail length matches |end - start|
            a = 2 * (1 - np.cos(path_arc))
            b = -2 * tip_length * np.sin(path_arc)
            c = tip_length ** 2 - length ** 2
            R = (-b + np.sqrt(b ** 2 - 4 * a * c)) / (2 * a)
            # Find arc points
            points1 = OpenGLArc.create_quadratic_bezier_points(path_arc)
            points2 = np.array(points1[::-1])
            points1 *= R + thickness / 2
            points2 *= R - thickness / 2
            if path_arc < 0:
                tip_length *= -1
            rot_T = rotation_matrix_transpose(PI / 2 - path_arc, OUT)
            for points in points1, points2:
                points[:] = np.dot(points, rot_T)
                points += R * DOWN
        self.set_points(points1)
        # Tip
        self.add_line_to(tip_width * UP / 2)
        self.add_line_to(tip_length * LEFT)
        # Remember where the tip point lives so get_end() can return it.
        self.tip_index = len(self.points) - 1
        self.add_line_to(tip_width * DOWN / 2)
        self.add_line_to(points2[0])
        # Close it out
        self.append_points(points2)
        self.add_line_to(points1[0])
        if length > 0:
            # Final correction
            super().scale(length / self.get_length())
        # Rotate the flat construction into the direction of `vect` (in 3D).
        self.rotate(angle_of_vector(vect) - self.get_angle())
        self.rotate(
            PI / 2 - np.arccos(normalize(vect)[2]),
            axis=rotate_vector(self.get_unit_vector(), -PI / 2),
        )
        self.shift(start - self.get_start())
        self.refresh_triangulation()
    def reset_points_around_ends(self):
        # Regenerate the geometry in place, preserving the current endpoints.
        self.set_points_by_ends(
            self.get_start(), self.get_end(), path_arc=self.path_arc
        )
        return self
    def get_start(self):
        # Midpoint of the stem's tail edge.
        nppc = self.n_points_per_curve
        points = self.points
        return (points[0] + points[-nppc]) / 2
    def get_end(self):
        return self.points[self.tip_index]
    def put_start_and_end_on(self, start, end):
        self.set_points_by_ends(start, end, buff=0, path_arc=self.path_arc)
        return self
    def scale(self, *args, **kwargs):
        # Scaling changes length, so the capped tip/stem sizes must be recomputed.
        super().scale(*args, **kwargs)
        self.reset_points_around_ends()
        return self
    def set_thickness(self, thickness):
        self.thickness = thickness
        self.reset_points_around_ends()
        return self
    def set_path_arc(self, path_arc):
        self.path_arc = path_arc
        self.reset_points_around_ends()
        return self
class OpenGLVector(OpenGLArrow):
    """An arrow from the origin along ``direction``."""
    def __init__(self, direction=RIGHT, buff=0, **kwargs):
        self.buff = buff
        # Promote 2D input to a 3D direction by appending z = 0.
        if len(direction) == 2:
            direction = np.append(np.asarray(direction), 0)
        super().__init__(ORIGIN, direction, buff=buff, **kwargs)
class OpenGLDoubleArrow(OpenGLArrow):
    """An arrow with tips on both ends."""
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.add_tip(at_start=True)
class OpenGLCubicBezier(OpenGLVMobject):
    """A single cubic Bezier curve with anchors ``a0``/``a1`` and handles ``h0``/``h1``."""
    def __init__(self, a0, h0, h1, a1, **kwargs):
        super().__init__(**kwargs)
        self.add_cubic_bezier_curve(a0, h0, h1, a1)
class OpenGLPolygon(OpenGLVMobject):
    """Closed polygon through the given vertices."""
    def __init__(self, *vertices, **kwargs):
        self.vertices = vertices
        super().__init__(**kwargs)
    def init_points(self):
        verts = self.vertices
        # Repeat the first vertex to close the outline.
        self.set_points_as_corners([*verts, verts[0]])
    def get_vertices(self):
        return self.get_start_anchors()
    def round_corners(self, radius=0.5):
        """Replace each corner with a circular arc of the given radius (in place)."""
        vertices = self.get_vertices()
        arcs = []
        for v1, v2, v3 in adjacent_n_tuples(vertices, 3):
            vect1 = v2 - v1
            vect2 = v3 - v2
            unit_vect1 = normalize(vect1)
            unit_vect2 = normalize(vect2)
            angle = angle_between_vectors(vect1, vect2)
            # Negative radius gives concave curves
            angle *= np.sign(radius)
            # Distance between vertex and start of the arc
            cut_off_length = radius * np.tan(angle / 2)
            # Determines counterclockwise vs. clockwise
            sign = np.sign(np.cross(vect1, vect2)[2])
            arc = OpenGLArcBetweenPoints(
                v2 - unit_vect1 * cut_off_length,
                v2 + unit_vect2 * cut_off_length,
                angle=sign * angle,
                n_components=2,
            )
            arcs.append(arc)
        self.clear_points()
        # To ensure that we loop through starting with last
        arcs = [arcs[-1], *arcs[:-1]]
        for arc1, arc2 in adjacent_pairs(arcs):
            self.append_points(arc1.points)
            line = OpenGLLine(arc1.get_end(), arc2.get_start())
            # Make sure anchors are evenly distributed
            len_ratio = line.get_length() / arc1.get_arc_length()
            line.insert_n_curves(int(arc1.get_num_curves() * len_ratio))
            self.append_points(line.points)
        return self
class OpenGLRegularPolygon(OpenGLPolygon):
    """Regular ``n``-gon with a configurable starting orientation."""
    def __init__(self, n=6, start_angle=None, **kwargs):
        if start_angle is None:
            # Default orientation: 0 for even n, vertex-up (90 degrees) for odd n.
            start_angle = 0 if n % 2 == 0 else 90 * DEGREES
        self.start_angle = start_angle
        first_direction = rotate_vector(RIGHT, self.start_angle)
        super().__init__(*compass_directions(n, first_direction), **kwargs)
class OpenGLTriangle(OpenGLRegularPolygon):
    """An equilateral triangle (regular polygon with n=3)."""
    def __init__(self, **kwargs):
        super().__init__(n=3, **kwargs)
class OpenGLArrowTip(OpenGLTriangle):
    """Triangular arrow tip with independent width and length.

    NOTE(review): the ``angle`` parameter is accepted but never applied in this
    constructor — confirm whether tips are expected to be pre-rotated by it.
    """
    def __init__(
        self,
        fill_opacity=1,
        fill_color=WHITE,
        stroke_width=0,
        width=DEFAULT_ARROW_TIP_WIDTH,
        length=DEFAULT_ARROW_TIP_LENGTH,
        angle=0,
        **kwargs
    ):
        OpenGLTriangle.__init__(
            self,
            start_angle=0,
            fill_opacity=fill_opacity,
            fill_color=fill_color,
            stroke_width=stroke_width,
            **kwargs
        )
        # Stretch the unit triangle to the requested tip dimensions.
        self.set_width(width, stretch=True)
        self.set_height(length, stretch=True)
    def get_base(self):
        # Midpoint of the outline, opposite the tip point.
        return self.point_from_proportion(0.5)
    def get_tip_point(self):
        return self.points[0]
    def get_vector(self):
        # Vector pointing from the base toward the tip point.
        return self.get_tip_point() - self.get_base()
    def get_angle(self):
        return angle_of_vector(self.get_vector())
    def get_length(self):
        return np.linalg.norm(self.get_vector())
class OpenGLRectangle(OpenGLPolygon):
    """Axis-aligned rectangle of the given width and height."""
    def __init__(self, color=WHITE, width=4.0, height=2.0, **kwargs):
        # Start from a unit quad, then stretch each dimension independently.
        super().__init__(UR, UL, DL, DR, color=color, **kwargs)
        self.set_width(width, stretch=True)
        self.set_height(height, stretch=True)
class OpenGLSquare(OpenGLRectangle):
    """Rectangle whose width and height are both ``side_length``."""
    def __init__(self, side_length=2.0, **kwargs):
        self.side_length = side_length
        super().__init__(width=side_length, height=side_length, **kwargs)
class OpenGLRoundedRectangle(OpenGLRectangle):
    """Rectangle whose corners are rounded with ``corner_radius``."""
    def __init__(self, corner_radius=0.5, **kwargs):
        self.corner_radius = corner_radius
        super().__init__(**kwargs)
        self.round_corners(self.corner_radius)
| 31.500577 | 86 | 0.605763 | import numpy as np
from .. import logger
from ..constants import *
from ..mobject.mobject import Mobject
from ..mobject.types.opengl_vectorized_mobject import (
OpenGLDashedVMobject,
OpenGLVGroup,
OpenGLVMobject,
)
from ..utils.color import *
from ..utils.deprecation import deprecated_params
from ..utils.iterables import adjacent_n_tuples, adjacent_pairs
from ..utils.simple_functions import clip, fdiv
from ..utils.space_ops import (
angle_between_vectors,
angle_of_vector,
compass_directions,
find_intersection,
normalize,
rotate_vector,
rotation_matrix_transpose,
)
# Default sizes (in scene units) shared by the geometry classes below.
DEFAULT_DOT_RADIUS = 0.08
DEFAULT_SMALL_DOT_RADIUS = 0.04
DEFAULT_DASH_LENGTH = 0.05
DEFAULT_ARROW_TIP_LENGTH = 0.35
DEFAULT_ARROW_TIP_WIDTH = 0.35
class OpenGLTipableVMobject(OpenGLVMobject):
    """Base class for VMobjects that can carry an arrow tip at either end.

    Handles creating, positioning and tracking tips; subclasses supply the
    point accessors (start/end and bezier handles) the tips align to.
    """
    def __init__(
        self,
        tip_length=DEFAULT_ARROW_TIP_LENGTH,
        normal_vector=OUT,
        tip_config=None,
        **kwargs
    ):
        self.tip_length = tip_length
        self.normal_vector = normal_vector
        # Fix: the original used the mutable default `tip_config={}`, which is
        # shared across every instance constructed without an explicit config.
        self.tip_config = {} if tip_config is None else tip_config
        OpenGLVMobject.__init__(self, **kwargs)
    def add_tip(self, at_start=False, **kwargs):
        """Create a tip, re-anchor the curve to meet its base, and attach it."""
        tip = self.create_tip(at_start, **kwargs)
        self.reset_endpoints_based_on_tip(tip, at_start)
        self.asign_tip_attr(tip, at_start)
        self.add(tip)
        return self
    def create_tip(self, at_start=False, **kwargs):
        # Return a positioned (but not yet attached) tip.
        tip = self.get_unpositioned_tip(**kwargs)
        self.position_tip(tip, at_start)
        return tip
    def get_unpositioned_tip(self, **kwargs):
        # Per-call kwargs override the instance-level tip_config.
        config = {}
        config.update(self.tip_config)
        config.update(kwargs)
        return OpenGLArrowTip(**config)
    def position_tip(self, tip, at_start=False):
        if at_start:
            anchor = self.get_start()
            handle = self.get_first_handle()
        else:
            handle = self.get_last_handle()
            anchor = self.get_end()
        # Align the tip with the curve's tangent direction at the anchor.
        tip.rotate(angle_of_vector(handle - anchor) - PI - tip.get_angle())
        tip.shift(anchor - tip.get_tip_point())
        return tip
    def reset_endpoints_based_on_tip(self, tip, at_start):
        if self.get_length() == 0:
            # Zero-length curve: nothing sensible to re-anchor.
            return self
        if at_start:
            start = tip.get_base()
            end = self.get_end()
        else:
            start = self.get_start()
            end = tip.get_base()
        self.put_start_and_end_on(start, end)
        return self
    def asign_tip_attr(self, tip, at_start):
        # (Method name kept for backward compatibility despite the typo.)
        if at_start:
            self.start_tip = tip
        else:
            self.tip = tip
        return self
    # Checking for tips
    def has_tip(self):
        return hasattr(self, "tip") and self.tip in self
    def has_start_tip(self):
        return hasattr(self, "start_tip") and self.start_tip in self
    # Getters
    def pop_tips(self):
        """Detach and return all tips, restoring the untipped endpoints."""
        start, end = self.get_start_and_end()
        result = OpenGLVGroup()
        if self.has_tip():
            result.add(self.tip)
            self.remove(self.tip)
        if self.has_start_tip():
            result.add(self.start_tip)
            self.remove(self.start_tip)
        self.put_start_and_end_on(start, end)
        return result
    def get_tips(self):
        # Return whichever tips exist (possibly none), end tip first.
        result = OpenGLVGroup()
        if hasattr(self, "tip"):
            result.add(self.tip)
        if hasattr(self, "start_tip"):
            result.add(self.start_tip)
        return result
    def get_tip(self):
        # The end tip if present, otherwise the start tip; raises when tipless.
        tips = self.get_tips()
        if len(tips) == 0:
            raise Exception("tip not found")
        else:
            return tips[0]
    def get_default_tip_length(self):
        return self.tip_length
    def get_first_handle(self):
        return self.points[1]
    def get_last_handle(self):
        return self.points[-2]
    def get_end(self):
        if self.has_tip():
            return self.tip.get_start()
        else:
            return OpenGLVMobject.get_end(self)
    def get_start(self):
        if self.has_start_tip():
            return self.start_tip.get_start()
        else:
            return OpenGLVMobject.get_start(self)
    def get_length(self):
        start, end = self.get_start_and_end()
        return np.linalg.norm(start - end)
class OpenGLArc(OpenGLTipableVMobject):
    """A circular arc of ``radius`` sweeping ``angle`` radians from
    ``start_angle``, centered at ``arc_center``."""
    def __init__(
        self,
        start_angle=0,
        angle=TAU / 4,
        radius=1.0,
        n_components=8,
        arc_center=ORIGIN,
        **kwargs
    ):
        self.start_angle = start_angle
        self.angle = angle
        self.radius = radius
        self.n_components = n_components
        self.arc_center = arc_center
        # Fix: the original called super().__init__(self, **kwargs); super()
        # already binds self, so the instance was passed on as `tip_length`.
        super().__init__(**kwargs)
        # To maintain proper orientation for fill shaders.
        self.orientation = -1
    def init_points(self):
        self.set_points(
            OpenGLArc.create_quadratic_bezier_points(
                angle=self.angle,
                start_angle=self.start_angle,
                n_components=self.n_components,
            )
        )
        self.scale(self.radius, about_point=ORIGIN)
        self.shift(self.arc_center)
    @staticmethod
    def create_quadratic_bezier_points(angle, start_angle=0, n_components=8):
        """Return quadratic-bezier control points approximating a unit arc."""
        samples = np.array(
            [
                [np.cos(a), np.sin(a), 0]
                for a in np.linspace(
                    start_angle,
                    start_angle + angle,
                    2 * n_components + 1,
                )
            ]
        )
        # Push the mid-samples outward so each quadratic segment hugs the circle.
        theta = angle / n_components
        samples[1::2] /= np.cos(theta / 2)
        points = np.zeros((3 * n_components, 3))
        points[0::3] = samples[0:-1:2]
        points[1::3] = samples[1::2]
        points[2::3] = samples[2::2]
        return points
    def get_arc_center(self):
        """Recover the arc's center from its first anchors and handle."""
        # First two anchors and handles
        a1, h, a2 = self.points[:3]
        # Tangent vectors
        t1 = h - a1
        t2 = h - a2
        # Normals
        n1 = rotate_vector(t1, TAU / 4)
        n2 = rotate_vector(t2, TAU / 4)
        return find_intersection(a1, n1, a2, n2)
    def get_start_angle(self):
        angle = angle_of_vector(self.get_start() - self.get_arc_center())
        return angle % TAU
    def get_stop_angle(self):
        angle = angle_of_vector(self.get_end() - self.get_arc_center())
        return angle % TAU
    def move_arc_center_to(self, point):
        self.shift(point - self.get_arc_center())
        return self
class OpenGLArcBetweenPoints(OpenGLArc):
    """An arc sweeping ``angle`` radians whose endpoints are moved onto
    ``start`` and ``end``."""
    def __init__(self, start, end, angle=TAU / 4, **kwargs):
        super().__init__(angle=angle, **kwargs)
        # A zero angle would be degenerate; fall back to a straight segment.
        if angle == 0:
            self.set_points_as_corners([LEFT, RIGHT])
        self.put_start_and_end_on(start, end)
class OpenGLCurvedArrow(OpenGLArcBetweenPoints):
    """An arc between two points with an arrow tip at its end."""
    def __init__(self, start_point, end_point, **kwargs):
        super().__init__(start_point, end_point, **kwargs)
        self.add_tip()
class OpenGLCurvedDoubleArrow(OpenGLCurvedArrow):
    """A curved arrow with tips at both ends."""
    def __init__(self, start_point, end_point, **kwargs):
        super().__init__(start_point, end_point, **kwargs)
        self.add_tip(at_start=True)
class OpenGLCircle(OpenGLArc):
    """A full circle: an arc sweeping TAU radians."""
    def __init__(self, color=RED, **kwargs):
        OpenGLArc.__init__(self, 0, TAU, color=color, **kwargs)
    def surround(self, mobject, dim_to_match=0, stretch=False, buff=MED_SMALL_BUFF):
        """Resize and position this circle so it encloses ``mobject`` with
        ``buff`` padding on each side."""
        # Ignores dim_to_match and stretch; result will always be a circle
        # TODO: Perhaps create an ellipse class to handle single-dimension stretching
        self.replace(mobject, dim_to_match, stretch)
        self.stretch((self.get_width() + 2 * buff) / self.get_width(), 0)
        self.stretch((self.get_height() + 2 * buff) / self.get_height(), 1)
        # Fix: return self for chaining, consistent with sibling mutators.
        return self
    def point_at_angle(self, angle):
        """Return the point on the circle at the given absolute angle."""
        start_angle = self.get_start_angle()
        return self.point_from_proportion((angle - start_angle) / TAU)
class OpenGLDot(OpenGLCircle):
    """A small filled circle centered at ``point``."""
    def __init__(
        self,
        point=ORIGIN,
        radius=DEFAULT_DOT_RADIUS,
        stroke_width=0,
        fill_opacity=1.0,
        color=WHITE,
        **kwargs
    ):
        # A dot is just a circle whose arc_center is the given point.
        circle_kwargs = dict(
            arc_center=point,
            radius=radius,
            stroke_width=stroke_width,
            fill_opacity=fill_opacity,
            color=color,
        )
        super().__init__(**circle_kwargs, **kwargs)
class OpenGLEllipse(OpenGLCircle):
    """A circle stretched independently to the given width and height."""
    def __init__(self, width=2, height=1, **kwargs):
        super().__init__(**kwargs)
        for set_dim, size in ((self.set_width, width), (self.set_height, height)):
            set_dim(size, stretch=True)
class OpenGLAnnularSector(OpenGLArc):
    """A filled ring segment between ``inner_radius`` and ``outer_radius``,
    sweeping ``angle`` radians from ``start_angle``."""
    def __init__(
        self,
        inner_radius=1,
        outer_radius=2,
        angle=TAU / 4,
        start_angle=0,
        fill_opacity=1,
        stroke_width=0,
        color=WHITE,
        **kwargs
    ):
        self.inner_radius = inner_radius
        self.outer_radius = outer_radius
        OpenGLArc.__init__(
            self,
            start_angle=start_angle,
            angle=angle,
            fill_opacity=fill_opacity,
            stroke_width=stroke_width,
            color=color,
            **kwargs
        )
    def init_points(self):
        # Build two concentric arcs and join them into one closed outline.
        inner_arc, outer_arc = (
            OpenGLArc(
                start_angle=self.start_angle,
                angle=self.angle,
                radius=radius,
                arc_center=self.arc_center,
            )
            for radius in (self.inner_radius, self.outer_radius)
        )
        # Reverse the outer arc so the outline runs in one consistent direction.
        outer_arc.reverse_points()
        self.append_points(inner_arc.points)
        self.add_line_to(outer_arc.points[0])
        self.append_points(outer_arc.points)
        self.add_line_to(inner_arc.points[0])
class OpenGLSector(OpenGLAnnularSector):
    """A pie slice: an annular sector whose inner radius defaults to zero."""
    def __init__(self, outer_radius=1, inner_radius=0, **kwargs):
        super().__init__(
            inner_radius=inner_radius, outer_radius=outer_radius, **kwargs
        )
class OpenGLAnnulus(OpenGLCircle):
    """A filled ring: the area between two concentric circles."""
    def __init__(
        self,
        inner_radius=1,
        outer_radius=2,
        fill_opacity=1,
        stroke_width=0,
        color=WHITE,
        mark_paths_closed=False,
        **kwargs
    ):
        self.mark_paths_closed = mark_paths_closed  # NOTE(review): stored but never read in this file — confirm before removing
        self.inner_radius = inner_radius
        self.outer_radius = outer_radius
        OpenGLCircle.__init__(
            self,
            fill_opacity=fill_opacity,
            stroke_width=stroke_width,
            color=color,
            **kwargs
        )
    def init_points(self):
        self.radius = self.outer_radius
        outer_circle = OpenGLCircle(radius=self.outer_radius)
        inner_circle = OpenGLCircle(radius=self.inner_radius)
        # Reverse the inner outline so the enclosed area renders as a hole.
        inner_circle.reverse_points()
        self.append_points(outer_circle.points)
        self.append_points(inner_circle.points)
        self.shift(self.arc_center)
class OpenGLLine(OpenGLTipableVMobject):
    """A straight (or, with ``path_arc`` != 0, arced) segment between two
    points or mobjects, optionally trimmed by ``buff`` at both ends."""
    def __init__(self, start=LEFT, end=RIGHT, buff=0, path_arc=0, **kwargs):
        self.dim = 3
        self.buff = buff
        self.path_arc = path_arc
        self.set_start_and_end_attrs(start, end)
        super().__init__(**kwargs)
    def init_points(self):
        self.set_points_by_ends(self.start, self.end, self.buff, self.path_arc)
    def set_points_by_ends(self, start, end, buff=0, path_arc=0):
        """Generate the line's points between ``start`` and ``end``.

        ``buff`` trims that amount off each end; ``path_arc`` bends the line
        into an arc sweeping that many radians.
        """
        if path_arc:
            self.set_points(OpenGLArc.create_quadratic_bezier_points(path_arc))
            self.put_start_and_end_on(start, end)
        else:
            self.set_points_as_corners([start, end])
        # Fix: trim by the `buff` argument rather than always re-applying
        # self.buff (which double-trimmed lines repositioned with buff=0).
        self.account_for_buff(buff)
    def set_path_arc(self, new_value):
        self.path_arc = new_value
        self.init_points()
    def account_for_buff(self, buff):
        """Shave ``buff`` length off both ends (no-op for zero or over-long buffs)."""
        if buff == 0:
            return
        if self.path_arc == 0:
            length = self.get_length()
        else:
            length = self.get_arc_length()
        if length < 2 * buff:
            return
        buff_prop = buff / length
        self.pointwise_become_partial(self, buff_prop, 1 - buff_prop)
        return self
    def set_start_and_end_attrs(self, start, end):
        # If either start or end are Mobjects, this gives their centers
        rough_start = self.pointify(start)
        rough_end = self.pointify(end)
        vect = normalize(rough_end - rough_start)
        # Now that we know the direction between them, we can find the
        # appropriate boundary point from start and end, if they're mobjects
        self.start = self.pointify(start, vect) + self.buff * vect
        self.end = self.pointify(end, -vect) - self.buff * vect
    def pointify(self, mob_or_point, direction=None):
        """Return a 3D point: a mobject's center/boundary, or raw coordinates
        padded with zeros to ``self.dim`` entries."""
        if isinstance(mob_or_point, Mobject):
            mob = mob_or_point
            if direction is None:
                return mob.get_center()
            else:
                return mob.get_continuous_bounding_box_point(direction)
        else:
            point = mob_or_point
            result = np.zeros(self.dim)
            result[: len(point)] = point
            return result
    def put_start_and_end_on(self, start, end):
        curr_start, curr_end = self.get_start_and_end()
        if (curr_start == curr_end).all():
            # Degenerate line: regenerate points before repositioning.
            # Fix: pass path_arc by keyword; it was previously passed
            # positionally into the `buff` parameter of set_points_by_ends.
            self.set_points_by_ends(start, end, path_arc=self.path_arc)
        return super().put_start_and_end_on(start, end)
    def get_vector(self):
        return self.get_end() - self.get_start()
    def get_unit_vector(self):
        return normalize(self.get_vector())
    def get_angle(self):
        return angle_of_vector(self.get_vector())
    def get_projection(self, point):
        """Return the projection of ``point`` onto the line through start/end."""
        unit_vect = self.get_unit_vector()
        start = self.get_start()
        return start + np.dot(point - start, unit_vect) * unit_vect
    def get_slope(self):
        return np.tan(self.get_angle())
    def set_angle(self, angle, about_point=None):
        if about_point is None:
            about_point = self.get_start()
        self.rotate(
            angle - self.get_angle(),
            about_point=about_point,
        )
        return self
    def set_length(self, length):
        self.scale(length / self.get_length())
        # Return self for chaining, consistent with the other setters.
        return self
class OpenGLDashedLine(OpenGLLine):
    """A line rendered as a sequence of dash submobjects.

    ``dashed_ratio`` is the drawn fraction of each dash-plus-gap period;
    ``dash_length`` sets the approximate dash size.
    """
    @deprecated_params(
        params="positive_space_ratio dash_spacing",
        since="v0.9.0",
        message="Use dashed_ratio instead of positive_space_ratio.",
    )
    def __init__(
        self, *args, dash_length=DEFAULT_DASH_LENGTH, dashed_ratio=0.5, **kwargs
    ):
        # Absorb the deprecated keyword aliases before they reach OpenGLLine.
        self.dash_spacing = kwargs.pop("dash_spacing", None)
        self.dashed_ratio = kwargs.pop("positive_space_ratio", None) or dashed_ratio
        self.dash_length = dash_length
        super().__init__(*args, **kwargs)
        dashed_ratio = self.dashed_ratio
        num_dashes = self.calculate_num_dashes(dashed_ratio)
        dashes = OpenGLDashedVMobject(
            self, num_dashes=num_dashes, dashed_ratio=dashed_ratio
        )
        # Replace the solid line's own points with the dash submobjects.
        self.clear_points()
        self.add(*dashes)
    def calculate_num_dashes(self, dashed_ratio):
        # At least two dashes; otherwise proportional to length / dash_length.
        return max(
            2, int(np.ceil((self.get_length() / self.dash_length) * dashed_ratio))
        )
    def get_start(self):
        # Delegate to the first dash when dashes exist.
        if len(self.submobjects) > 0:
            return self.submobjects[0].get_start()
        else:
            return OpenGLLine.get_start(self)
    def get_end(self):
        # Delegate to the final dash when dashes exist.
        if len(self.submobjects) > 0:
            return self.submobjects[-1].get_end()
        else:
            return OpenGLLine.get_end(self)
    def get_first_handle(self):
        return self.submobjects[0].points[1]
    def get_last_handle(self):
        return self.submobjects[-1].points[-2]
class OpenGLTangentLine(OpenGLLine):
    """Line tangent to ``vmob`` at parameter ``alpha``, via a finite difference."""
    def __init__(self, vmob, alpha, length=1, d_alpha=1e-6, **kwargs):
        self.length = length
        self.d_alpha = d_alpha
        # Sample the curve just before and after alpha to estimate the tangent.
        lower = clip(alpha - self.d_alpha, 0, 1)
        upper = clip(alpha + self.d_alpha, 0, 1)
        super().__init__(vmob.pfp(lower), vmob.pfp(upper), **kwargs)
        # Rescale the tiny secant segment to the requested length.
        self.scale(self.length / self.get_length())
class OpenGLElbow(OpenGLVMobject):
    """An L-shaped corner marker of the given ``width``, rotated by ``angle``."""
    def __init__(self, width=0.2, angle=0, **kwargs):
        self.angle = angle
        # Fix: the original called super().__init__(self, **kwargs), which passed
        # the already-bound instance on as the base class's first positional
        # argument; super() binds self automatically.
        super().__init__(**kwargs)
        self.set_points_as_corners([UP, UP + RIGHT, RIGHT])
        self.set_width(width, about_point=ORIGIN)
        self.rotate(self.angle, about_point=ORIGIN)
class OpenGLArrow(OpenGLLine):
    """A filled arrow (polygonal stem plus triangular tip) from ``start`` to ``end``.

    Stem thickness and tip length are capped relative to the total length by
    ``max_width_to_length_ratio`` and ``max_tip_length_to_length_ratio`` so that
    short arrows stay well-proportioned.
    """
    def __init__(
        self,
        start=LEFT,
        end=RIGHT,
        path_arc=0,
        fill_color=GREY_A,
        fill_opacity=1,
        stroke_width=0,
        buff=MED_SMALL_BUFF,
        thickness=0.05,
        tip_width_ratio=5,
        tip_angle=PI / 3,
        max_tip_length_to_length_ratio=0.5,
        max_width_to_length_ratio=0.1,
        **kwargs
    ):
        self.thickness = thickness
        self.tip_width_ratio = tip_width_ratio
        self.tip_angle = tip_angle
        self.max_tip_length_to_length_ratio = max_tip_length_to_length_ratio
        self.max_width_to_length_ratio = max_width_to_length_ratio
        super().__init__(
            start=start,
            end=end,
            buff=buff,
            path_arc=path_arc,
            fill_color=fill_color,
            fill_opacity=fill_opacity,
            stroke_width=stroke_width,
            **kwargs
        )
    def set_points_by_ends(self, start, end, buff=0, path_arc=0):
        """Rebuild the arrow geometry for the given endpoints.

        The shape is constructed along the x-axis first, then rotated and
        shifted into place.
        """
        # Find the right tip length and thickness
        vect = end - start
        length = max(np.linalg.norm(vect), 1e-8)
        thickness = self.thickness
        w_ratio = fdiv(self.max_width_to_length_ratio, fdiv(thickness, length))
        if w_ratio < 1:
            thickness *= w_ratio
        tip_width = self.tip_width_ratio * thickness
        tip_length = tip_width / (2 * np.tan(self.tip_angle / 2))
        t_ratio = fdiv(self.max_tip_length_to_length_ratio, fdiv(tip_length, length))
        if t_ratio < 1:
            tip_length *= t_ratio
            tip_width *= t_ratio
        # Find points for the stem
        if path_arc == 0:
            points1 = (length - tip_length) * np.array([RIGHT, 0.5 * RIGHT, ORIGIN])
            points1 += thickness * UP / 2
            points2 = points1[::-1] + thickness * DOWN
        else:
            # Solve for radius so that the tip-to-tail length matches |end - start|
            a = 2 * (1 - np.cos(path_arc))
            b = -2 * tip_length * np.sin(path_arc)
            c = tip_length ** 2 - length ** 2
            R = (-b + np.sqrt(b ** 2 - 4 * a * c)) / (2 * a)
            # Find arc points
            points1 = OpenGLArc.create_quadratic_bezier_points(path_arc)
            points2 = np.array(points1[::-1])
            points1 *= R + thickness / 2
            points2 *= R - thickness / 2
            if path_arc < 0:
                tip_length *= -1
            rot_T = rotation_matrix_transpose(PI / 2 - path_arc, OUT)
            for points in points1, points2:
                points[:] = np.dot(points, rot_T)
                points += R * DOWN
        self.set_points(points1)
        # Tip
        self.add_line_to(tip_width * UP / 2)
        self.add_line_to(tip_length * LEFT)
        # Remember where the tip point lives so get_end() can return it.
        self.tip_index = len(self.points) - 1
        self.add_line_to(tip_width * DOWN / 2)
        self.add_line_to(points2[0])
        # Close it out
        self.append_points(points2)
        self.add_line_to(points1[0])
        if length > 0:
            # Final correction
            super().scale(length / self.get_length())
        # Rotate the flat construction into the direction of `vect` (in 3D).
        self.rotate(angle_of_vector(vect) - self.get_angle())
        self.rotate(
            PI / 2 - np.arccos(normalize(vect)[2]),
            axis=rotate_vector(self.get_unit_vector(), -PI / 2),
        )
        self.shift(start - self.get_start())
        self.refresh_triangulation()
    def reset_points_around_ends(self):
        # Regenerate the geometry in place, preserving the current endpoints.
        self.set_points_by_ends(
            self.get_start(), self.get_end(), path_arc=self.path_arc
        )
        return self
    def get_start(self):
        # Midpoint of the stem's tail edge.
        nppc = self.n_points_per_curve
        points = self.points
        return (points[0] + points[-nppc]) / 2
    def get_end(self):
        return self.points[self.tip_index]
    def put_start_and_end_on(self, start, end):
        self.set_points_by_ends(start, end, buff=0, path_arc=self.path_arc)
        return self
    def scale(self, *args, **kwargs):
        # Scaling changes length, so the capped tip/stem sizes must be recomputed.
        super().scale(*args, **kwargs)
        self.reset_points_around_ends()
        return self
    def set_thickness(self, thickness):
        self.thickness = thickness
        self.reset_points_around_ends()
        return self
    def set_path_arc(self, path_arc):
        self.path_arc = path_arc
        self.reset_points_around_ends()
        return self
class OpenGLVector(OpenGLArrow):
    """An arrow from the origin along ``direction``."""
    def __init__(self, direction=RIGHT, buff=0, **kwargs):
        self.buff = buff
        # Promote 2D input to a 3D direction by appending z = 0.
        if len(direction) == 2:
            direction = np.append(np.asarray(direction), 0)
        super().__init__(ORIGIN, direction, buff=buff, **kwargs)
class OpenGLDoubleArrow(OpenGLArrow):
    """An arrow with tips on both ends."""
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.add_tip(at_start=True)
class OpenGLCubicBezier(OpenGLVMobject):
    """A single cubic Bezier curve with anchors ``a0``/``a1`` and handles ``h0``/``h1``."""
    def __init__(self, a0, h0, h1, a1, **kwargs):
        super().__init__(**kwargs)
        self.add_cubic_bezier_curve(a0, h0, h1, a1)
class OpenGLPolygon(OpenGLVMobject):
    """Closed polygon through the given vertices."""
    def __init__(self, *vertices, **kwargs):
        self.vertices = vertices
        super().__init__(**kwargs)
    def init_points(self):
        verts = self.vertices
        # Repeat the first vertex to close the outline.
        self.set_points_as_corners([*verts, verts[0]])
    def get_vertices(self):
        return self.get_start_anchors()
    def round_corners(self, radius=0.5):
        """Replace each corner with a circular arc of the given radius (in place)."""
        vertices = self.get_vertices()
        arcs = []
        for v1, v2, v3 in adjacent_n_tuples(vertices, 3):
            vect1 = v2 - v1
            vect2 = v3 - v2
            unit_vect1 = normalize(vect1)
            unit_vect2 = normalize(vect2)
            angle = angle_between_vectors(vect1, vect2)
            # Negative radius gives concave curves
            angle *= np.sign(radius)
            # Distance between vertex and start of the arc
            cut_off_length = radius * np.tan(angle / 2)
            # Determines counterclockwise vs. clockwise
            sign = np.sign(np.cross(vect1, vect2)[2])
            arc = OpenGLArcBetweenPoints(
                v2 - unit_vect1 * cut_off_length,
                v2 + unit_vect2 * cut_off_length,
                angle=sign * angle,
                n_components=2,
            )
            arcs.append(arc)
        self.clear_points()
        # To ensure that we loop through starting with last
        arcs = [arcs[-1], *arcs[:-1]]
        for arc1, arc2 in adjacent_pairs(arcs):
            self.append_points(arc1.points)
            line = OpenGLLine(arc1.get_end(), arc2.get_start())
            # Make sure anchors are evenly distributed
            len_ratio = line.get_length() / arc1.get_arc_length()
            line.insert_n_curves(int(arc1.get_num_curves() * len_ratio))
            self.append_points(line.points)
        return self
class OpenGLRegularPolygon(OpenGLPolygon):
    """Regular ``n``-gon with a configurable starting orientation."""
    def __init__(self, n=6, start_angle=None, **kwargs):
        if start_angle is None:
            # Default orientation: 0 for even n, vertex-up (90 degrees) for odd n.
            start_angle = 0 if n % 2 == 0 else 90 * DEGREES
        self.start_angle = start_angle
        first_direction = rotate_vector(RIGHT, self.start_angle)
        super().__init__(*compass_directions(n, first_direction), **kwargs)
class OpenGLTriangle(OpenGLRegularPolygon):
    """An equilateral triangle (regular polygon with n=3)."""
    def __init__(self, **kwargs):
        super().__init__(n=3, **kwargs)
class OpenGLArrowTip(OpenGLTriangle):
    """Triangular arrow tip with independent width and length.

    NOTE(review): the ``angle`` parameter is accepted but never applied in this
    constructor — confirm whether tips are expected to be pre-rotated by it.
    """
    def __init__(
        self,
        fill_opacity=1,
        fill_color=WHITE,
        stroke_width=0,
        width=DEFAULT_ARROW_TIP_WIDTH,
        length=DEFAULT_ARROW_TIP_LENGTH,
        angle=0,
        **kwargs
    ):
        OpenGLTriangle.__init__(
            self,
            start_angle=0,
            fill_opacity=fill_opacity,
            fill_color=fill_color,
            stroke_width=stroke_width,
            **kwargs
        )
        # Stretch the unit triangle to the requested tip dimensions.
        self.set_width(width, stretch=True)
        self.set_height(length, stretch=True)
    def get_base(self):
        # Midpoint of the outline, opposite the tip point.
        return self.point_from_proportion(0.5)
    def get_tip_point(self):
        return self.points[0]
    def get_vector(self):
        # Vector pointing from the base toward the tip point.
        return self.get_tip_point() - self.get_base()
    def get_angle(self):
        return angle_of_vector(self.get_vector())
    def get_length(self):
        return np.linalg.norm(self.get_vector())
class OpenGLRectangle(OpenGLPolygon):
    """Axis-aligned rectangle of the given width and height."""
    def __init__(self, color=WHITE, width=4.0, height=2.0, **kwargs):
        # Start from a unit quad, then stretch each dimension independently.
        super().__init__(UR, UL, DL, DR, color=color, **kwargs)
        self.set_width(width, stretch=True)
        self.set_height(height, stretch=True)
class OpenGLSquare(OpenGLRectangle):
    """Rectangle whose width and height are both ``side_length``."""
    def __init__(self, side_length=2.0, **kwargs):
        self.side_length = side_length
        super().__init__(width=side_length, height=side_length, **kwargs)
class OpenGLRoundedRectangle(OpenGLRectangle):
    """Rectangle whose corners are rounded with ``corner_radius``."""
    def __init__(self, corner_radius=0.5, **kwargs):
        self.corner_radius = corner_radius
        super().__init__(**kwargs)
        self.round_corners(self.corner_radius)
| true | true |
f71e0a0f2fef42ba94ade2220af369468d3d68cb | 2,781 | py | Python | source/sagemaker/data-preparation/data_prep.py | awslabs/sagemaker-graph-entity-resolution | 74bfb3f782843623c5bcbec833e9deec448c3a51 | [
"Apache-2.0"
] | 8 | 2020-12-08T22:52:45.000Z | 2022-02-21T19:36:56.000Z | source/sagemaker/data-preparation/data_prep.py | awslabs/sagemaker-graph-entity-resolution | 74bfb3f782843623c5bcbec833e9deec448c3a51 | [
"Apache-2.0"
] | null | null | null | source/sagemaker/data-preparation/data_prep.py | awslabs/sagemaker-graph-entity-resolution | 74bfb3f782843623c5bcbec833e9deec448c3a51 | [
"Apache-2.0"
] | 2 | 2021-09-19T20:46:32.000Z | 2022-02-21T19:37:11.000Z | import argparse
import logging
import os
import json
import pandas as pd
def get_logger(name):
    """Configure root logging (INFO level, timestamped format) and return a named logger."""
    fmt = '%(asctime)s %(levelname)s %(name)s: %(message)s'
    logging.basicConfig(format=fmt, level=logging.INFO)
    named_logger = logging.getLogger(name)
    named_logger.setLevel(logging.INFO)
    return named_logger
def parse_args():
    """Define and parse the command-line interface for this processing job."""
    p = argparse.ArgumentParser()
    p.add_argument('--data-dir', type=str, default='/opt/ml/processing/input')
    p.add_argument('--output-dir', type=str, default='/opt/ml/processing/output')
    p.add_argument('--urls', type=str, default='urls.csv', help='map fact id to urls')
    p.add_argument('--titles', type=str, default='titles.csv', help='map fact id to url titles')
    p.add_argument('--facts', type=str, default='facts.json', help='map user to list of facts')
    p.add_argument('--logs', type=str, default='logs.csv', help='file to store output normalized log files')
    p.add_argument('--primary-key', type=str, default='fid', help='id key that corresponds to url')
    return p.parse_args()
def load_url_data(data_dir, urls_path, titles_path, primary_key):
    """Load the headerless url and title CSVs and left-merge them on ``primary_key``.

    Returns a DataFrame indexed by ``primary_key`` with columns ``urls`` and
    ``titles``; missing titles become empty strings.
    """
    logging.info("Loading website urls from file: {}".format(os.path.join(data_dir, urls_path)))
    urls_df = pd.read_csv(os.path.join(data_dir, urls_path), header=None, names=[primary_key, 'urls'])
    logging.info("Loading website titles from file: {}".format(os.path.join(data_dir, titles_path)))
    titles_df = pd.read_csv(os.path.join(data_dir, titles_path), header=None, names=[primary_key, 'titles'])
    logging.info("Merging website urls with website titles")
    return urls_df.merge(titles_df, how='left', on=primary_key).fillna("").set_index(primary_key)
def merge_websites_with_user_visits(data_dir, facts, url_data, primary_key, output_dir, logs):
    """Stream the JSON-lines facts file, join each user's visits with ``url_data``
    by fact id, and write everything to one CSV (header only for the first user)."""
    with open(os.path.join(data_dir, facts)) as f_in:
        for i, line in enumerate(f_in):
            j = json.loads(line.strip())
            user_visits = pd.json_normalize(j.get("facts"))
            fids = user_visits[primary_key].values
            # Align each visit row with its website metadata by fact id.
            user_visits = pd.concat((user_visits.set_index(primary_key), url_data.loc[fids]), axis=1)
            user_visits['uid'] = j.get('uid')
            # Overwrite the output for the first record, append afterwards.
            mode, header = ('w', True) if i == 0 else ('a', False)
            with open(os.path.join(output_dir, logs), mode) as f:
                user_visits.to_csv(f, index=False, header=header)
if __name__ == '__main__':
    # NOTE(review): this rebinds the module-level name `logging` to a Logger,
    # shadowing the stdlib module; later `logging.info(...)` calls still work
    # because Logger also has .info, but renaming to `logger` would be clearer.
    logging = get_logger(__name__)
    args = parse_args()
    websites = load_url_data(args.data_dir, args.urls, args.titles, args.primary_key)
    logging.info("Obtained website info; merging with user visits")
    merge_websites_with_user_visits(args.data_dir, args.facts, websites, args.primary_key, args.output_dir, args.logs)
| 47.948276 | 118 | 0.699029 | import argparse
import logging
import os
import json
import pandas as pd
def get_logger(name):
    """Configure root logging (INFO level, timestamped format) and return a named logger."""
    fmt = '%(asctime)s %(levelname)s %(name)s: %(message)s'
    logging.basicConfig(format=fmt, level=logging.INFO)
    named_logger = logging.getLogger(name)
    named_logger.setLevel(logging.INFO)
    return named_logger
def parse_args():
    """Define and parse the command-line interface for this processing job."""
    p = argparse.ArgumentParser()
    p.add_argument('--data-dir', type=str, default='/opt/ml/processing/input')
    p.add_argument('--output-dir', type=str, default='/opt/ml/processing/output')
    p.add_argument('--urls', type=str, default='urls.csv', help='map fact id to urls')
    p.add_argument('--titles', type=str, default='titles.csv', help='map fact id to url titles')
    p.add_argument('--facts', type=str, default='facts.json', help='map user to list of facts')
    p.add_argument('--logs', type=str, default='logs.csv', help='file to store output normalized log files')
    p.add_argument('--primary-key', type=str, default='fid', help='id key that corresponds to url')
    return p.parse_args()
def load_url_data(data_dir, urls_path, titles_path, primary_key):
    """Load the headerless url and title CSVs and left-merge them on ``primary_key``.

    Returns a DataFrame indexed by ``primary_key`` with columns ``urls`` and
    ``titles``; missing titles become empty strings.
    """
    logging.info("Loading website urls from file: {}".format(os.path.join(data_dir, urls_path)))
    urls_df = pd.read_csv(os.path.join(data_dir, urls_path), header=None, names=[primary_key, 'urls'])
    logging.info("Loading website titles from file: {}".format(os.path.join(data_dir, titles_path)))
    titles_df = pd.read_csv(os.path.join(data_dir, titles_path), header=None, names=[primary_key, 'titles'])
    logging.info("Merging website urls with website titles")
    return urls_df.merge(titles_df, how='left', on=primary_key).fillna("").set_index(primary_key)
def merge_websites_with_user_visits(data_dir, facts, url_data, primary_key, output_dir, logs):
    """Stream the JSON-lines facts file, join each user's visits with ``url_data``
    by fact id, and write everything to one CSV (header only for the first user)."""
    with open(os.path.join(data_dir, facts)) as f_in:
        for i, line in enumerate(f_in):
            j = json.loads(line.strip())
            user_visits = pd.json_normalize(j.get("facts"))
            fids = user_visits[primary_key].values
            # Align each visit row with its website metadata by fact id.
            user_visits = pd.concat((user_visits.set_index(primary_key), url_data.loc[fids]), axis=1)
            user_visits['uid'] = j.get('uid')
            # Overwrite the output for the first record, append afterwards.
            mode, header = ('w', True) if i == 0 else ('a', False)
            with open(os.path.join(output_dir, logs), mode) as f:
                user_visits.to_csv(f, index=False, header=header)
if __name__ == '__main__':
    # NOTE(review): this rebinds the module-level name `logging` to a Logger,
    # shadowing the stdlib module; later `logging.info(...)` calls still work
    # because Logger also has .info, but renaming to `logger` would be clearer.
    logging = get_logger(__name__)
    args = parse_args()
    websites = load_url_data(args.data_dir, args.urls, args.titles, args.primary_key)
    logging.info("Obtained website info; merging with user visits")
    merge_websites_with_user_visits(args.data_dir, args.facts, websites, args.primary_key, args.output_dir, args.logs)
| true | true |
f71e0b1a4419edda57e214d8e61d25ce51473ba2 | 3,780 | py | Python | gym_extensions/continuous/mujoco/reacher_contextual.py | vBarbaros/gym-extensions | f17058459b36756cd9bb0877689ab29a729e9dd0 | [
"MIT"
] | 1 | 2018-06-06T21:30:49.000Z | 2018-06-06T21:30:49.000Z | gym_extensions/continuous/mujoco/reacher_contextual.py | vBarbaros/gym-extensions | f17058459b36756cd9bb0877689ab29a729e9dd0 | [
"MIT"
] | null | null | null | gym_extensions/continuous/mujoco/reacher_contextual.py | vBarbaros/gym-extensions | f17058459b36756cd9bb0877689ab29a729e9dd0 | [
"MIT"
] | 1 | 2019-01-27T12:09:43.000Z | 2019-01-27T12:09:43.000Z | PYTHONPATH = '~/Documents/gym-extensions/'
import sys
sys.path.append(PYTHONPATH)
import numpy as np
from gym import utils
from gym.envs.mujoco import mujoco_env
import os.path as osp
from gym.envs.mujoco.reacher import ReacherEnv
try:
import mujoco_py
from mujoco_py.mjlib import mjlib
except ImportError as e:
raise error.DependencyNotInstalled("{}. (HINT: you need to install mujoco_py, and also perform the setup instructions here: https://github.com/openai/mujoco-py/.)".format(e))
import os
import gym
import gym_extensions
class ReacherContextualEnv(ReacherEnv):
def __init__(self, *args, **kwargs):
ReacherEnv.__init__(self)
# the context is a 4-dim vector [x1, y1, x2, y2]
# (x1,y1) - coords of the tip of reacher; (x2,y2) - coords of the target
self.context = np.array([0.1, 0.1, 0.1, 0.1])
self.policy_type = ""
self.context_high = np.array([ i*2 for i in self.context])
self.context_low = np.array([ -i*2 for i in self.context]) # the params in the context can't be less or equal to zero!
self.bias = 0
self.weights = [0]*self.observation_space.shape[0]
def _step(self, action):
state, reward, done, _ = super(ReacherContextualEnv, self)._step(action)
return state, reward, done, {}
def change_context(self, context_vector):
# the context is a 4-dim vector [x1, y1, x2, y2]
# (x1,y1) - coords of the tip of reacher; (x2,y2) - coords of the target
qpos = np.array(context_vector)
qvel = self.init_qvel
qvel[-2:] = 0
self.set_state(qpos, qvel)
def set_policy_type(self, policy_type):
self.policy_type = policy_type
def context_space_info(self):
context_info_dict = {}
context_info_dict['context_vals'] = self.context
context_info_dict['context_dims'] = len(self.context)
context_info_dict['context_high'] = self.context_high.tolist()
context_info_dict['context_low' ] = self.context_low.tolist()
context_info_dict['state_dims' ] = self.observation_space.shape[0]
# I need to know what the size of the action vector I need to pass to the transition function
context_info_dict['action_dims' ] = self.action_space.shape[0]
context_info_dict['action_space'] = 'continuous'
context_info_dict['state_high' ] = self.observation_space.high.tolist()
context_info_dict['state_low' ] = self.observation_space.low.tolist()
context_info_dict['action_high' ] = self.action_space.high.tolist()
context_info_dict['action_low' ] = self.action_space.low.tolist()
return context_info_dict
if __name__ == "__main__":
import time
#env = gym.make('Reacher-v1')
env = gym.make('HopperContextual-v0')
#env = gym.make('PusherContextual-v0')
for i_episode in range(500):
env.reset()
while True:
goal = np.random.uniform(low=-.25, high=.25, size=4)
if np.linalg.norm(goal) < 2:
break
#env.unwrapped.change_context(goal)
#print 'target', env.unwrapped.get_body_com("target")
#print 'qpos', env.unwrapped.model.data.qpos
time.sleep(2)
#print env.unwrapped.context_space_info()['action_dims']
#print env.unwrapped.context_space_info()
#print env.unwrapped.weights
#print env.unwrapped.model.nq
for t in range(500):
env.render()
action = env.action_space.sample()
print 'action ', action
observation, reward, done, info = env.step(action)
#print observation
if done:
print("Episode finished after {} timesteps".format(t+1))
break | 39.789474 | 178 | 0.646032 | PYTHONPATH = '~/Documents/gym-extensions/'
import sys
sys.path.append(PYTHONPATH)
import numpy as np
from gym import utils
from gym.envs.mujoco import mujoco_env
import os.path as osp
from gym.envs.mujoco.reacher import ReacherEnv
try:
import mujoco_py
from mujoco_py.mjlib import mjlib
except ImportError as e:
raise error.DependencyNotInstalled("{}. (HINT: you need to install mujoco_py, and also perform the setup instructions here: https://github.com/openai/mujoco-py/.)".format(e))
import os
import gym
import gym_extensions
class ReacherContextualEnv(ReacherEnv):
def __init__(self, *args, **kwargs):
ReacherEnv.__init__(self)
self.context = np.array([0.1, 0.1, 0.1, 0.1])
self.policy_type = ""
self.context_high = np.array([ i*2 for i in self.context])
self.context_low = np.array([ -i*2 for i in self.context])
self.bias = 0
self.weights = [0]*self.observation_space.shape[0]
def _step(self, action):
state, reward, done, _ = super(ReacherContextualEnv, self)._step(action)
return state, reward, done, {}
def change_context(self, context_vector):
# the context is a 4-dim vector [x1, y1, x2, y2]
# (x1,y1) - coords of the tip of reacher; (x2,y2) - coords of the target
qpos = np.array(context_vector)
qvel = self.init_qvel
qvel[-2:] = 0
self.set_state(qpos, qvel)
def set_policy_type(self, policy_type):
self.policy_type = policy_type
def context_space_info(self):
context_info_dict = {}
context_info_dict['context_vals'] = self.context
context_info_dict['context_dims'] = len(self.context)
context_info_dict['context_high'] = self.context_high.tolist()
context_info_dict['context_low' ] = self.context_low.tolist()
context_info_dict['state_dims' ] = self.observation_space.shape[0]
# I need to know what the size of the action vector I need to pass to the transition function
context_info_dict['action_dims' ] = self.action_space.shape[0]
context_info_dict['action_space'] = 'continuous'
context_info_dict['state_high' ] = self.observation_space.high.tolist()
context_info_dict['state_low' ] = self.observation_space.low.tolist()
context_info_dict['action_high' ] = self.action_space.high.tolist()
context_info_dict['action_low' ] = self.action_space.low.tolist()
return context_info_dict
if __name__ == "__main__":
import time
#env = gym.make('Reacher-v1')
env = gym.make('HopperContextual-v0')
#env = gym.make('PusherContextual-v0')
for i_episode in range(500):
env.reset()
while True:
goal = np.random.uniform(low=-.25, high=.25, size=4)
if np.linalg.norm(goal) < 2:
break
#env.unwrapped.change_context(goal)
#print 'target', env.unwrapped.get_body_com("target")
#print 'qpos', env.unwrapped.model.data.qpos
time.sleep(2)
#print env.unwrapped.context_space_info()['action_dims']
#print env.unwrapped.context_space_info()
#print env.unwrapped.weights
#print env.unwrapped.model.nq
for t in range(500):
env.render()
action = env.action_space.sample()
print 'action ', action
observation, reward, done, info = env.step(action)
#print observation
if done:
print("Episode finished after {} timesteps".format(t+1))
break | false | true |
f71e0cc0b2ded5e59f8e1dc4fc028c52b0ccfc1f | 9,369 | py | Python | tests/test_code_climate_formatting.py | yuriisk/clang-tidy-reformatter | 2be23785e1f47b366320dd5375d3a24393a0661e | [
"MIT"
] | 2 | 2021-11-12T09:40:19.000Z | 2022-02-12T18:02:14.000Z | tests/test_code_climate_formatting.py | yuriisk/clang-tidy-reformatter | 2be23785e1f47b366320dd5375d3a24393a0661e | [
"MIT"
] | null | null | null | tests/test_code_climate_formatting.py | yuriisk/clang-tidy-reformatter | 2be23785e1f47b366320dd5375d3a24393a0661e | [
"MIT"
] | 1 | 2021-10-04T11:51:40.000Z | 2021-10-04T11:51:40.000Z | #!/usr/bin/env python3
import unittest
import unittest.mock
import json
from clang_tidy_converter import CodeClimateFormatter, ClangMessage
class CodeClimateFormatterTest(unittest.TestCase):
def test_format(self):
child1 = ClangMessage('/some/file/path1.cpp', 8, 10, ClangMessage.Level.NOTE, 'Allocated here', '', ['return new A;', ' ^'])
msg = ClangMessage('/some/file/path.cpp', 100, 2, ClangMessage.Level.WARNING, 'Memory leak', 'bugprone-undefined-memory-manipulation.SomethingWrong',
['void a(int)', ' ^'], [child1])
formatter = CodeClimateFormatter()
args = unittest.mock.Mock()
args.use_location_lines = True
self.assertEqual(
"""{
"type": "issue",
"check_name": "bugprone-undefined-memory-manipulation.SomethingWrong",
"description": "Memory leak",
"content": {
"body": "```\\nvoid a(int)\\n ^\\n/some/file/path1.cpp:8:10: Allocated here\\nreturn new A;\\n ^\\n```"
},
"categories": [
"Bug Risk"
],
"location": {
"path": "/some/file/path.cpp",
"lines": {
"begin": 100
}
},
"trace": {
"locations": [
{
"path": "/some/file/path1.cpp",
"lines": {
"begin": 8
}
}
]
},
"severity": "major",
"fingerprint": "f2f6ccb970f2259d10e525b4b5805a5c"
}\0
""", formatter.format([msg], args))
def test_extract_content(self):
child1 = ClangMessage('/some/file/path1.cpp', 8, 10, ClangMessage.Level.NOTE, 'Allocated here', '', ['return new A;', ' ^'])
msg = ClangMessage('/some/file/path.cpp', 100, 2, ClangMessage.Level.WARNING, 'Memory leak', 'bugprone-undefined-memory-manipulation.SomethingWrong',
['void a(int)', ' ^'], [child1])
formatter = CodeClimateFormatter()
self.assertEqual({
'body': '\n'.join([
'```',
'void a(int)',
' ^',
'/some/file/path1.cpp:8:10: Allocated here',
'return new A;',
' ^',
'```'])
}, formatter._extract_content(msg, object()))
def test_extract_bug_risk_category(self):
self._test_diagnostic_category('bugprone-use-after-move', 'Bug Risk')
def test_extract_compatibility_category_1(self):
self._test_diagnostic_category('modernize-replace-auto-ptr', 'Compatibility')
def test_extract_compatibility_category_2(self):
self._test_diagnostic_category('portability-restrict-system-includes', 'Compatibility')
def test_extract_compatibility_category_3(self):
self._test_diagnostic_category('boost-use-to-string', 'Compatibility')
def test_extract_performance_category(self):
self._test_diagnostic_category('performance-inefficient-algorithm', 'Performance')
def test_extract_clarity_category_1(self):
self._test_diagnostic_category('google-readability-avoid-underscore-in-googletest-name', 'Clarity')
def test_extract_clarity_category_2(self):
self._test_diagnostic_category('readability-misplaced-array-index', 'Clarity')
def test_extract_security_category_1(self):
self._test_diagnostic_category('android-cloexec-open', 'Security')
def test_extract_security_category_2(self):
self._test_diagnostic_category('clang-analyzer-security.insecureAPI.bcmp', 'Security')
def test_extract_style_category_1(self):
self._test_diagnostic_category('readability-identifier-naming', 'Style')
def test_extract_style_category_2(self):
self._test_diagnostic_category('cppcoreguidelines-avoid-goto', 'Style')
def test_extract_style_category_3(self):
self._test_diagnostic_category('hicpp-no-assembler', 'Style')
def test_extract_complexity_category(self):
self._test_diagnostic_category('readability-simplify-boolean-expr', 'Complexity')
def test_extract_duplication_category(self):
self._test_diagnostic_category('misc-redundant-expression', 'Duplication')
def test_extract_default_category(self):
self._test_diagnostic_category('cert-dcl16-c', 'Bug Risk')
def _test_diagnostic_category(self, diagnostic, category):
msg = ClangMessage(diagnostic_name=diagnostic)
formatter = CodeClimateFormatter()
self.assertIn(category, formatter._extract_categories(msg, object()))
def test_extract_duplicated_categories(self):
msg = ClangMessage(diagnostic_name='cppcoreguidelines-readability-avoid-goto')
formatter = CodeClimateFormatter()
categories = formatter._extract_categories(msg, object())
self.assertEqual(2, len(categories))
self.assertIn('Style', categories)
self.assertIn('Clarity', categories)
def test_extract_trace_lines(self):
child1 = ClangMessage('/some/file/path1.cpp', 8, 10)
msg = ClangMessage('/some/file/path.cpp', 100, 2, children=[child1])
formatter = CodeClimateFormatter()
args = unittest.mock.Mock()
args.use_location_lines = True
self.assertEqual({
'locations': [
{
'path': '/some/file/path1.cpp',
'lines': {
'begin': 8
}
}
]
}, formatter._extract_trace(msg, args))
def test_extract_trace_positions(self):
child1 = ClangMessage('/some/file/path1.cpp', 8, 10)
msg = ClangMessage('/some/file/path.cpp', 100, 2, children=[child1])
formatter = CodeClimateFormatter()
args = unittest.mock.Mock()
args.use_location_lines = False
self.assertEqual({
'locations': [
{
'path': '/some/file/path1.cpp',
'positions': {
'begin': {
'line': 8,
'column': 10
}
}
}
]
}, formatter._extract_trace(msg, args))
def test_extract_location_lines(self):
msg = ClangMessage('/some/file/path.cpp', 100, 2)
formatter = CodeClimateFormatter()
args = unittest.mock.Mock()
args.use_location_lines = True
self.assertEqual({
'path': '/some/file/path.cpp',
'lines': {
'begin': 100
}
}, formatter._extract_location(msg, args))
def test_extract_location_positions(self):
msg = ClangMessage('/some/file/path.cpp', 100, 2)
formatter = CodeClimateFormatter()
args = unittest.mock.Mock()
args.use_location_lines = False
self.assertEqual({
'path': '/some/file/path.cpp',
'positions': {
'begin': {
'line': 100,
'column': 2
}
}
}, formatter._extract_location(msg, args))
def test_extracting_note_severity(self):
self._test_extracting_severity(ClangMessage.Level.NOTE, 'info')
def test_extracting_remark_severity(self):
self._test_extracting_severity(ClangMessage.Level.REMARK, 'minor')
def test_extracting_warning_severity(self):
self._test_extracting_severity(ClangMessage.Level.WARNING, 'major')
def test_extracting_error_severity(self):
self._test_extracting_severity(ClangMessage.Level.ERROR, 'critical')
def test_extracting_fatal_severity(self):
self._test_extracting_severity(ClangMessage.Level.FATAL, 'blocker')
def _test_extracting_severity(self, level, severity_str):
msg = ClangMessage(level=level)
formatter = CodeClimateFormatter()
self.assertEqual(severity_str, formatter._extract_severity(msg, object()))
def test_generate_fingerprint_reproducibility(self):
msg1 = ClangMessage('path1', line=1)
msg2 = ClangMessage('path1', line=1)
formatter = CodeClimateFormatter()
self.assertEqual(formatter._generate_fingerprint(msg1), formatter._generate_fingerprint(msg2))
def test_generate_fingerprint_uses_filepath(self):
self._test_fingerprints_different(ClangMessage('/path/to/file1.cpp'), ClangMessage('/path/to/file2.cpp'))
def test_generate_fingerprint_uses_line(self):
self._test_fingerprints_different(ClangMessage(line=1), ClangMessage(line=2))
def test_generate_fingerprint_uses_column(self):
self._test_fingerprints_different(ClangMessage(column=1), ClangMessage(column=2))
def test_generate_fingerprint_uses_message(self):
self._test_fingerprints_different(ClangMessage(message='A'), ClangMessage(message='B'))
def test_generate_fingerprint_uses_diagnostic_name(self):
self._test_fingerprints_different(ClangMessage(diagnostic_name='A'), ClangMessage(diagnostic_name='B'))
def test_generate_fingerprint_uses_children(self):
child1 = ClangMessage(line=1)
child2 = ClangMessage(line=2)
self._test_fingerprints_different(ClangMessage(children=[child1]), ClangMessage(children=[child2]))
def _test_fingerprints_different(self, msg1, msg2):
formatter = CodeClimateFormatter()
self.assertNotEqual(formatter._generate_fingerprint(msg1), formatter._generate_fingerprint(msg2))
| 39.868085 | 157 | 0.641904 |
import unittest
import unittest.mock
import json
from clang_tidy_converter import CodeClimateFormatter, ClangMessage
class CodeClimateFormatterTest(unittest.TestCase):
def test_format(self):
child1 = ClangMessage('/some/file/path1.cpp', 8, 10, ClangMessage.Level.NOTE, 'Allocated here', '', ['return new A;', ' ^'])
msg = ClangMessage('/some/file/path.cpp', 100, 2, ClangMessage.Level.WARNING, 'Memory leak', 'bugprone-undefined-memory-manipulation.SomethingWrong',
['void a(int)', ' ^'], [child1])
formatter = CodeClimateFormatter()
args = unittest.mock.Mock()
args.use_location_lines = True
self.assertEqual(
"""{
"type": "issue",
"check_name": "bugprone-undefined-memory-manipulation.SomethingWrong",
"description": "Memory leak",
"content": {
"body": "```\\nvoid a(int)\\n ^\\n/some/file/path1.cpp:8:10: Allocated here\\nreturn new A;\\n ^\\n```"
},
"categories": [
"Bug Risk"
],
"location": {
"path": "/some/file/path.cpp",
"lines": {
"begin": 100
}
},
"trace": {
"locations": [
{
"path": "/some/file/path1.cpp",
"lines": {
"begin": 8
}
}
]
},
"severity": "major",
"fingerprint": "f2f6ccb970f2259d10e525b4b5805a5c"
}\0
""", formatter.format([msg], args))
def test_extract_content(self):
child1 = ClangMessage('/some/file/path1.cpp', 8, 10, ClangMessage.Level.NOTE, 'Allocated here', '', ['return new A;', ' ^'])
msg = ClangMessage('/some/file/path.cpp', 100, 2, ClangMessage.Level.WARNING, 'Memory leak', 'bugprone-undefined-memory-manipulation.SomethingWrong',
['void a(int)', ' ^'], [child1])
formatter = CodeClimateFormatter()
self.assertEqual({
'body': '\n'.join([
'```',
'void a(int)',
' ^',
'/some/file/path1.cpp:8:10: Allocated here',
'return new A;',
' ^',
'```'])
}, formatter._extract_content(msg, object()))
def test_extract_bug_risk_category(self):
self._test_diagnostic_category('bugprone-use-after-move', 'Bug Risk')
def test_extract_compatibility_category_1(self):
self._test_diagnostic_category('modernize-replace-auto-ptr', 'Compatibility')
def test_extract_compatibility_category_2(self):
self._test_diagnostic_category('portability-restrict-system-includes', 'Compatibility')
def test_extract_compatibility_category_3(self):
self._test_diagnostic_category('boost-use-to-string', 'Compatibility')
def test_extract_performance_category(self):
self._test_diagnostic_category('performance-inefficient-algorithm', 'Performance')
def test_extract_clarity_category_1(self):
self._test_diagnostic_category('google-readability-avoid-underscore-in-googletest-name', 'Clarity')
def test_extract_clarity_category_2(self):
self._test_diagnostic_category('readability-misplaced-array-index', 'Clarity')
def test_extract_security_category_1(self):
self._test_diagnostic_category('android-cloexec-open', 'Security')
def test_extract_security_category_2(self):
self._test_diagnostic_category('clang-analyzer-security.insecureAPI.bcmp', 'Security')
def test_extract_style_category_1(self):
self._test_diagnostic_category('readability-identifier-naming', 'Style')
def test_extract_style_category_2(self):
self._test_diagnostic_category('cppcoreguidelines-avoid-goto', 'Style')
def test_extract_style_category_3(self):
self._test_diagnostic_category('hicpp-no-assembler', 'Style')
def test_extract_complexity_category(self):
self._test_diagnostic_category('readability-simplify-boolean-expr', 'Complexity')
def test_extract_duplication_category(self):
self._test_diagnostic_category('misc-redundant-expression', 'Duplication')
def test_extract_default_category(self):
self._test_diagnostic_category('cert-dcl16-c', 'Bug Risk')
def _test_diagnostic_category(self, diagnostic, category):
msg = ClangMessage(diagnostic_name=diagnostic)
formatter = CodeClimateFormatter()
self.assertIn(category, formatter._extract_categories(msg, object()))
def test_extract_duplicated_categories(self):
msg = ClangMessage(diagnostic_name='cppcoreguidelines-readability-avoid-goto')
formatter = CodeClimateFormatter()
categories = formatter._extract_categories(msg, object())
self.assertEqual(2, len(categories))
self.assertIn('Style', categories)
self.assertIn('Clarity', categories)
def test_extract_trace_lines(self):
child1 = ClangMessage('/some/file/path1.cpp', 8, 10)
msg = ClangMessage('/some/file/path.cpp', 100, 2, children=[child1])
formatter = CodeClimateFormatter()
args = unittest.mock.Mock()
args.use_location_lines = True
self.assertEqual({
'locations': [
{
'path': '/some/file/path1.cpp',
'lines': {
'begin': 8
}
}
]
}, formatter._extract_trace(msg, args))
def test_extract_trace_positions(self):
child1 = ClangMessage('/some/file/path1.cpp', 8, 10)
msg = ClangMessage('/some/file/path.cpp', 100, 2, children=[child1])
formatter = CodeClimateFormatter()
args = unittest.mock.Mock()
args.use_location_lines = False
self.assertEqual({
'locations': [
{
'path': '/some/file/path1.cpp',
'positions': {
'begin': {
'line': 8,
'column': 10
}
}
}
]
}, formatter._extract_trace(msg, args))
def test_extract_location_lines(self):
msg = ClangMessage('/some/file/path.cpp', 100, 2)
formatter = CodeClimateFormatter()
args = unittest.mock.Mock()
args.use_location_lines = True
self.assertEqual({
'path': '/some/file/path.cpp',
'lines': {
'begin': 100
}
}, formatter._extract_location(msg, args))
def test_extract_location_positions(self):
msg = ClangMessage('/some/file/path.cpp', 100, 2)
formatter = CodeClimateFormatter()
args = unittest.mock.Mock()
args.use_location_lines = False
self.assertEqual({
'path': '/some/file/path.cpp',
'positions': {
'begin': {
'line': 100,
'column': 2
}
}
}, formatter._extract_location(msg, args))
def test_extracting_note_severity(self):
self._test_extracting_severity(ClangMessage.Level.NOTE, 'info')
def test_extracting_remark_severity(self):
self._test_extracting_severity(ClangMessage.Level.REMARK, 'minor')
def test_extracting_warning_severity(self):
self._test_extracting_severity(ClangMessage.Level.WARNING, 'major')
def test_extracting_error_severity(self):
self._test_extracting_severity(ClangMessage.Level.ERROR, 'critical')
def test_extracting_fatal_severity(self):
self._test_extracting_severity(ClangMessage.Level.FATAL, 'blocker')
def _test_extracting_severity(self, level, severity_str):
msg = ClangMessage(level=level)
formatter = CodeClimateFormatter()
self.assertEqual(severity_str, formatter._extract_severity(msg, object()))
def test_generate_fingerprint_reproducibility(self):
msg1 = ClangMessage('path1', line=1)
msg2 = ClangMessage('path1', line=1)
formatter = CodeClimateFormatter()
self.assertEqual(formatter._generate_fingerprint(msg1), formatter._generate_fingerprint(msg2))
def test_generate_fingerprint_uses_filepath(self):
self._test_fingerprints_different(ClangMessage('/path/to/file1.cpp'), ClangMessage('/path/to/file2.cpp'))
def test_generate_fingerprint_uses_line(self):
self._test_fingerprints_different(ClangMessage(line=1), ClangMessage(line=2))
def test_generate_fingerprint_uses_column(self):
self._test_fingerprints_different(ClangMessage(column=1), ClangMessage(column=2))
def test_generate_fingerprint_uses_message(self):
self._test_fingerprints_different(ClangMessage(message='A'), ClangMessage(message='B'))
def test_generate_fingerprint_uses_diagnostic_name(self):
self._test_fingerprints_different(ClangMessage(diagnostic_name='A'), ClangMessage(diagnostic_name='B'))
def test_generate_fingerprint_uses_children(self):
child1 = ClangMessage(line=1)
child2 = ClangMessage(line=2)
self._test_fingerprints_different(ClangMessage(children=[child1]), ClangMessage(children=[child2]))
def _test_fingerprints_different(self, msg1, msg2):
formatter = CodeClimateFormatter()
self.assertNotEqual(formatter._generate_fingerprint(msg1), formatter._generate_fingerprint(msg2))
| true | true |
f71e0d8e12f4533b0d8ccc6cecdc0f4b5899c521 | 18,554 | py | Python | tests/formats/dataclass/parsers/nodes/test_element.py | pashashocky/xsdata | 1cd681598d2235626d0e21716fc9fb885d26e351 | [
"MIT"
] | null | null | null | tests/formats/dataclass/parsers/nodes/test_element.py | pashashocky/xsdata | 1cd681598d2235626d0e21716fc9fb885d26e351 | [
"MIT"
] | null | null | null | tests/formats/dataclass/parsers/nodes/test_element.py | pashashocky/xsdata | 1cd681598d2235626d0e21716fc9fb885d26e351 | [
"MIT"
] | null | null | null | import copy
from dataclasses import make_dataclass
from unittest import mock
from tests.fixtures.books import Books
from tests.fixtures.models import AttrsType
from tests.fixtures.models import ExtendedListType
from tests.fixtures.models import ExtendedType
from tests.fixtures.models import FixedType
from tests.fixtures.models import NillableType
from tests.fixtures.models import Paragraph
from tests.fixtures.models import SequentialType
from tests.fixtures.models import TypeA
from tests.fixtures.models import TypeB
from tests.fixtures.models import TypeC
from xsdata.exceptions import ParserError
from xsdata.formats.dataclass.context import XmlContext
from xsdata.formats.dataclass.models.elements import XmlType
from xsdata.formats.dataclass.models.generics import AnyElement
from xsdata.formats.dataclass.models.generics import DerivedElement
from xsdata.formats.dataclass.parsers.config import ParserConfig
from xsdata.formats.dataclass.parsers.nodes import ElementNode
from xsdata.formats.dataclass.parsers.nodes import PrimitiveNode
from xsdata.formats.dataclass.parsers.nodes import SkipNode
from xsdata.formats.dataclass.parsers.nodes import StandardNode
from xsdata.formats.dataclass.parsers.nodes import UnionNode
from xsdata.formats.dataclass.parsers.nodes import WildcardNode
from xsdata.formats.dataclass.parsers.utils import ParserUtils
from xsdata.models.enums import DataType
from xsdata.models.enums import Namespace
from xsdata.models.enums import QNames
from xsdata.utils.testing import FactoryTestCase
from xsdata.utils.testing import XmlMetaFactory
from xsdata.utils.testing import XmlVarFactory
class ElementNodeTests(FactoryTestCase):
def setUp(self) -> None:
super().setUp()
self.context = XmlContext()
self.meta = XmlMetaFactory.create(clazz=TypeC, qname="foo", wildcards=[])
self.node = ElementNode(
position=0,
meta=self.meta,
context=self.context,
config=ParserConfig(),
attrs={},
ns_map={},
)
def test_bind(self):
node = ElementNode(
position=0,
meta=self.context.build(SequentialType),
context=self.context,
config=ParserConfig(),
attrs={"a": "b", "a0": "0"},
ns_map={"ns0": "xsdata"},
)
objects = [("x1", 1), ("x2", 2), ("x2", 3)]
expected = SequentialType(a0="0", a1={"a": "b"}, x0=1, x1=[1], x2=[2, 3])
self.assertTrue(node.bind("foo", "1", "tail", objects))
self.assertEqual("foo", objects[-1][0])
self.assertEqual(expected, objects[-1][1])
def test_bind_nil_value(self):
self.node.xsi_nil = True
objects = []
self.assertTrue(self.node.bind("foo", None, None, objects))
self.assertEqual(("foo", None), objects[-1])
def test_bind_nillable_type(self):
self.node.meta = self.context.build(NillableType)
self.node.xsi_nil = True
objects = []
self.assertTrue(self.node.bind("foo", None, None, objects))
self.assertEqual(("foo", NillableType(None)), objects[-1])
def test_bind_fixed_value(self):
self.node.meta = self.context.build(FixedType)
objects = []
self.assertTrue(self.node.bind("foo", "not the fixed value", None, objects))
self.assertEqual(("foo", FixedType()), objects[-1])
def test_bind_with_derived_element(self):
self.node.meta = self.context.build(TypeA)
self.node.derived_factory = DerivedElement
objects = []
self.assertTrue(self.node.bind("foo", "2", None, objects))
self.assertEqual("foo", objects[-1][0])
self.assertEqual(DerivedElement("foo", TypeA(2)), objects[-1][1])
    def test_bind_with_wildcard_var(self):
        """Unmatched content is collected into the type's wildcard var."""
        self.node.meta = self.context.build(ExtendedType)
        self.node.attrs = {"a": "b"}
        self.node.ns_map = {"ns0": "xsdata"}
        # "a" matches the explicit field; "b" is unknown and falls into the wildcard
        objects = [("a", "1"), ("b", "2")]
        # text/tail, the unknown child and the node attrs all end up on the
        # wildcard AnyElement
        expected = ExtendedType(
            a="1",
            wildcard=AnyElement(
                text="text",
                tail="tail",
                children=[AnyElement(qname="b", text="2")],
                attributes={"a": "b"},
            ),
        )
        self.assertTrue(self.node.bind("foo", "text", "tail", objects))
        self.assertEqual("foo", objects[-1][0])
        self.assertEqual(expected, objects[-1][1])
    def test_bind_with_mixed_flag_true(self):
        """In mixed mode a non-empty tail is appended as an extra (None, tail) pair."""
        self.node.meta = self.context.build(TypeB)
        self.node.attrs = {"a": "b"}
        self.node.ns_map = {"ns0": "xsdata"}
        self.node.mixed = True
        # Whitespace-only tail: only the bound instance replaces the consumed objects
        objects = [("x", 1), ("y", "a")]
        self.assertTrue(self.node.bind("foo", "text", " ", objects))
        self.assertEqual(1, len(objects))
        self.assertEqual(TypeB(x=1, y="a"), objects[-1][1])
        # Meaningful tail: an additional (None, " tail ") entry is emitted after it
        objects = [("x", 1), ("y", "a")]
        self.assertTrue(self.node.bind("foo", "text", " tail ", objects))
        self.assertEqual(2, len(objects))
        self.assertEqual(None, objects[-1][0])
        self.assertEqual(" tail ", objects[-1][1])
def test_bind_with_mixed_content_var(self):
self.node.meta = self.context.build(Paragraph)
self.node.attrs = {"a": "b"}
self.node.ns_map = {"ns0": "xsdata"}
objects = [("a", 1)]
expected = Paragraph(content=["text", AnyElement(qname="a", text="1"), "tail"])
self.assertTrue(self.node.bind("foo", "text", "tail", objects))
self.assertEqual("foo", objects[-1][0])
self.assertEqual(expected, objects[-1][1])
    def test_bind_wild_text(self):
        """Text/tail handling for single- and list-valued wildcard vars."""
        self.node.meta = self.context.build(ExtendedType)
        var = self.node.meta.wildcards[0]
        # No text and no tail: nothing is bound
        params = {}
        self.node.bind_wild_text(params, var, None, None)
        self.assertEqual(0, len(params))
        # Text and tail are wrapped in a single AnyElement
        params = {}
        self.node.bind_wild_text(params, var, "txt", "tail")
        expected = AnyElement(text="txt", tail="tail")
        self.assertEqual({"wildcard": expected}, params)
        # With node attrs/ns_map present, a second call nests the previous
        # value as a child of a new AnyElement carrying the attributes
        self.node.attrs = {"a": "b"}
        self.node.ns_map = {"ns0": "a"}
        self.node.bind_wild_text(params, var, "txt", "tail")
        expected = AnyElement(
            text="txt", tail="tail", children=[expected], attributes=self.node.attrs
        )
        self.assertEqual({"wildcard": expected}, params)
        # List wildcard: tail is appended, text is prepended on each call
        self.node.meta = self.context.build(ExtendedListType)
        var = self.node.meta.wildcards[0]
        params = {}
        self.node.bind_wild_text(params, var, "txt", "tail")
        self.assertEqual({"wildcard": ["txt", "tail"]}, params)
        self.node.bind_wild_text(params, var, None, "tail")
        self.assertEqual({"wildcard": ["txt", "tail", "tail"]}, params)
        self.node.bind_wild_text(params, var, "first", None)
        self.assertEqual({"wildcard": ["first", "txt", "tail", "tail"]}, params)
def test_bind_attrs(self):
self.node.meta = self.context.build(AttrsType)
self.node.attrs = {
"index": "0",
"fixed": "will be ignored",
"{what}ever": "qname",
"extended": "attr",
}
params = {}
self.node.bind_attrs(params)
expected = {"attrs": {"extended": "attr", "{what}ever": "qname"}, "index": 0}
self.assertEqual(expected, params)
    def test_bind_attrs_with_fail_on_unknown_attributes(self):
        """Strict attribute mode does not raise when a wildcard attrs var exists.

        The unmatched attributes are absorbed into the ``attrs`` field instead,
        mirroring :meth:`test_bind_attrs` — presumably because AttrsType
        declares an attributes wildcard; verify against the fixture.
        """
        self.node.meta = self.context.build(AttrsType)
        self.node.config.fail_on_unknown_attributes = True
        self.node.attrs = {
            "index": "0",
            "fixed": "will be ignored",
            "{what}ever": "qname",
            "extended": "attr",
        }
        params = {}
        self.node.bind_attrs(params)
        expected = {"attrs": {"extended": "attr", "{what}ever": "qname"}, "index": 0}
        self.assertEqual(expected, params)
def test_bind_with_fail_on_unknown_attributes(self):
self.node.meta = self.context.build(ExtendedType)
self.node.config.fail_on_unknown_attributes = True
self.node.attrs = {"a": "b"}
objects = [("a", "1")]
with self.assertRaises(ParserError) as cm:
self.node.bind("foo", "text", "tail", objects)
self.assertEqual("Unknown attribute ExtendedType:a", str(cm.exception))
    @mock.patch("xsdata.formats.dataclass.parsers.nodes.element.logger.warning")
    def test_bind_objects(self, mock_warning):
        """Matching objects are assigned; leftovers trigger a single warning."""
        self.node.meta = self.context.build(TypeC)
        # Second "x" cannot be assigned (x is single-valued) and "fixed" is
        # skipped; only the warning for the unassigned "x" is expected
        objects = [("x", 1), ("x", 2), ("z", 3.0), ("fixed", "bar")]
        params = {}
        self.node.bind_objects(params, objects)
        self.assertEqual({"x": 1, "z": 3.0}, params)
        mock_warning.assert_called_once_with("Unassigned parsed object %s", "x")
def test_bind_wild_var(self):
self.node.meta = self.context.build(ExtendedType)
params = {}
objects = [("x", 1), ("x", 2), ("z", 3.0)]
self.node.bind_objects(params, objects)
expected = {
"wildcard": AnyElement(
children=[
AnyElement(qname="x", text="1"),
AnyElement(qname="x", text="2"),
AnyElement(qname="z", text="3.0"),
]
)
}
self.assertEqual(expected, params)
def test_bind_wild_list_var(self):
self.node.meta = self.context.build(ExtendedListType)
params = {}
objects = [("x", 1), ("x", 2), ("z", 3.0)]
self.node.bind_objects(params, objects)
expected = {
"wildcard": [
AnyElement(qname="x", text="1"),
AnyElement(qname="x", text="2"),
AnyElement(qname="z", text="3.0"),
]
}
self.assertEqual(expected, params)
    def test_prepare_generic_value(self):
        """Wrapping rules for generic values depending on qname and value type."""
        # No qname: value passes through untouched
        actual = self.node.prepare_generic_value(None, 1)
        self.assertEqual(1, actual)
        # Primitive values become AnyElement with the stringified text
        actual = self.node.prepare_generic_value("a", 1)
        expected = AnyElement(qname="a", text="1")
        self.assertEqual(expected, actual)
        actual = self.node.prepare_generic_value("a", "foo")
        expected = AnyElement(qname="a", text="foo")
        self.assertEqual(expected, actual)
        # Dataclass instances are wrapped in DerivedElement, recording the
        # type name when it differs from the qname
        fixture = make_dataclass("Fixture", [("content", str)])
        actual = self.node.prepare_generic_value("a", fixture("foo"))
        expected = DerivedElement(qname="a", value=fixture("foo"), type="Fixture")
        self.assertEqual(expected, actual)
        # Already-wrapped values are returned as-is (same object identity)
        actual = self.node.prepare_generic_value("a", expected)
        self.assertIs(expected, actual)
        # When the qname matches the type name, no type attribute is recorded
        actual = self.node.prepare_generic_value("Fixture", fixture("foo"))
        expected = DerivedElement(qname="Fixture", value=fixture("foo"))
        self.assertEqual(expected, actual)
def test_child(self):
var = XmlVarFactory.create(xml_type=XmlType.ELEMENT, qname="a", types=(TypeC,))
attrs = {"a": "b"}
ns_map = {"ns0": "xsdata"}
position = 1
self.meta.elements[var.qname] = [var]
actual = self.node.child("a", attrs, ns_map, position)
self.assertIsInstance(actual, ElementNode)
self.assertEqual(attrs, actual.attrs)
self.assertEqual(ns_map, actual.ns_map)
self.assertEqual(position, actual.position)
def test_child_with_unique_element(self):
single = XmlVarFactory.create(
index=1, xml_type=XmlType.ELEMENT, qname="a", types=(TypeC,)
)
wildcard = XmlVarFactory.create(
index=2, xml_type=XmlType.WILDCARD, qname="a", types=(object,)
)
self.meta.elements[single.qname] = [single]
self.meta.wildcards.append(wildcard)
attrs = {"a": "b"}
ns_map = {"ns0": "xsdata"}
position = 1
actual = self.node.child("a", attrs, ns_map, position)
self.assertIsInstance(actual, ElementNode)
self.assertIn(single.index, self.node.assigned)
actual = self.node.child("a", attrs, ns_map, position)
self.assertIsInstance(actual, WildcardNode)
self.assertNotIn(wildcard.index, self.node.assigned)
@mock.patch.object(ElementNode, "build_node")
def test_child_when_failed_to_build_next_node(self, mock_build_node):
mock_build_node.return_value = None
element = XmlVarFactory.create(xml_type=XmlType.ELEMENT, qname="a")
wildcard = XmlVarFactory.create(xml_type=XmlType.WILDCARD, qname="a")
self.meta.elements[element.qname] = [element]
self.meta.wildcards.append(wildcard)
with self.assertRaises(ParserError) as cm:
self.node.child("a", {}, {}, 0)
self.assertEqual("Unknown property foo:a", str(cm.exception))
self.node.config.fail_on_unknown_properties = False
actual = self.node.child("foobar", {}, {}, 0)
self.assertIsInstance(actual, SkipNode)
def test_build_node_with_dataclass_union_var(self):
var = XmlVarFactory.create(
xml_type=XmlType.ELEMENT,
name="a",
qname="a",
types=(TypeC, TypeB),
)
attrs = {"a": "b"}
ns_map = {"ns0": "xsdata"}
actual = self.node.build_node(var, attrs, ns_map, 10)
self.assertIsInstance(actual, UnionNode)
self.assertEqual(10, actual.position)
self.assertIs(var, actual.var)
self.assertEqual(attrs, actual.attrs)
self.assertEqual(ns_map, actual.ns_map)
self.assertEqual(0, actual.level)
self.assertEqual(0, len(actual.events))
@mock.patch.object(ParserUtils, "xsi_type", return_value="foo")
@mock.patch.object(XmlContext, "fetch")
def test_build_node_with_dataclass_var(self, mock_ctx_fetch, mock_xsi_type):
var = XmlVarFactory.create(
xml_type=XmlType.ELEMENT,
name="a",
qname="a",
types=(TypeC,),
derived=True,
)
xsi_type = "foo"
namespace = self.meta.namespace
mock_ctx_fetch.return_value = self.meta
mock_xsi_type.return_value = xsi_type
attrs = {"a": "b"}
ns_map = {"ns0": "xsdata"}
actual = self.node.build_node(var, attrs, ns_map, 10)
self.assertIsInstance(actual, ElementNode)
self.assertEqual(10, actual.position)
self.assertEqual(DerivedElement, actual.derived_factory)
self.assertIs(mock_ctx_fetch.return_value, actual.meta)
mock_xsi_type.assert_called_once_with(attrs, ns_map)
mock_ctx_fetch.assert_called_once_with(var.clazz, namespace, xsi_type)
@mock.patch.object(XmlContext, "fetch")
def test_build_node_with_dataclass_var_validates_nillable(self, mock_ctx_fetch):
var = XmlVarFactory.create(xml_type=XmlType.ELEMENT, qname="a", types=(TypeC,))
ns_map = {}
nillable_meta = copy.deepcopy(self.meta)
nillable_meta.nillable = True
mock_ctx_fetch.side_effect = [self.meta, self.meta, nillable_meta]
attrs = {QNames.XSI_NIL: "false"}
self.assertIsNotNone(self.node.build_node(var, attrs, ns_map, 10))
attrs = {QNames.XSI_NIL: "true"}
self.assertIsNotNone(self.node.build_node(var, attrs, ns_map, 10))
attrs = {QNames.XSI_NIL: "false"}
self.assertIsNone(self.node.build_node(var, attrs, ns_map, 10))
def test_build_node_with_any_type_var_with_matching_xsi_type(self):
var = XmlVarFactory.create(
xml_type=XmlType.ELEMENT,
name="a",
qname="a",
types=(object,),
any_type=True,
)
attrs = {QNames.XSI_TYPE: "bk:books"}
ns_map = {"bk": "urn:books"}
actual = self.node.build_node(var, attrs, ns_map, 10)
self.assertIsInstance(actual, ElementNode)
self.assertEqual(10, actual.position)
self.assertEqual(self.context.build(Books), actual.meta)
self.assertEqual(attrs, actual.attrs)
self.assertEqual(ns_map, actual.ns_map)
self.assertFalse(actual.mixed)
def test_build_node_with_any_type_var_with_datatype(self):
var = XmlVarFactory.create(
xml_type=XmlType.ELEMENT,
name="a",
qname="a",
types=(object,),
any_type=True,
)
attrs = {QNames.XSI_TYPE: "xs:hexBinary"}
ns_map = {Namespace.XS.prefix: Namespace.XS.uri}
actual = self.node.build_node(var, attrs, ns_map, 10)
self.assertIsInstance(actual, StandardNode)
self.assertEqual(ns_map, actual.ns_map)
self.assertEqual(DataType.HEX_BINARY, actual.datatype)
self.assertIsNone(actual.derived_factory)
def test_build_node_with_any_type_var_with_no_matching_xsi_type(self):
var = XmlVarFactory.create(
xml_type=XmlType.ELEMENT,
name="a",
qname="a",
types=(object,),
any_type=True,
)
attrs = {QNames.XSI_TYPE: "noMatch"}
actual = self.node.build_node(var, attrs, {}, 10)
self.assertIsInstance(actual, WildcardNode)
self.assertEqual(10, actual.position)
self.assertEqual(var, actual.var)
self.assertEqual(attrs, actual.attrs)
self.assertEqual({}, actual.ns_map)
def test_build_node_with_any_type_var_with_no_xsi_type(self):
var = XmlVarFactory.create(
xml_type=XmlType.ELEMENT,
name="a",
qname="a",
types=(object,),
any_type=True,
)
attrs = {}
actual = self.node.build_node(var, attrs, {}, 10)
self.assertIsInstance(actual, WildcardNode)
self.assertEqual(10, actual.position)
self.assertEqual(var, actual.var)
self.assertEqual(attrs, actual.attrs)
self.assertEqual({}, actual.ns_map)
def test_build_node_with_wildcard_var(self):
var = XmlVarFactory.create(xml_type=XmlType.WILDCARD, qname="a")
actual = self.node.build_node(var, {}, {}, 10)
self.assertIsInstance(actual, WildcardNode)
self.assertEqual(10, actual.position)
self.assertEqual(var, actual.var)
def test_build_node_with_primitive_var(self):
var = XmlVarFactory.create(
xml_type=XmlType.TEXT, qname="a", types=(int,), default=100
)
attrs = {"a": "b"}
ns_map = {"ns0": "xsdata"}
actual = self.node.build_node(var, attrs, ns_map, 10)
self.assertIsInstance(actual, PrimitiveNode)
self.assertEqual(ns_map, actual.ns_map)
self.assertEqual(var, actual.var)
| 37.182365 | 87 | 0.617926 | import copy
from dataclasses import make_dataclass
from unittest import mock
from tests.fixtures.books import Books
from tests.fixtures.models import AttrsType
from tests.fixtures.models import ExtendedListType
from tests.fixtures.models import ExtendedType
from tests.fixtures.models import FixedType
from tests.fixtures.models import NillableType
from tests.fixtures.models import Paragraph
from tests.fixtures.models import SequentialType
from tests.fixtures.models import TypeA
from tests.fixtures.models import TypeB
from tests.fixtures.models import TypeC
from xsdata.exceptions import ParserError
from xsdata.formats.dataclass.context import XmlContext
from xsdata.formats.dataclass.models.elements import XmlType
from xsdata.formats.dataclass.models.generics import AnyElement
from xsdata.formats.dataclass.models.generics import DerivedElement
from xsdata.formats.dataclass.parsers.config import ParserConfig
from xsdata.formats.dataclass.parsers.nodes import ElementNode
from xsdata.formats.dataclass.parsers.nodes import PrimitiveNode
from xsdata.formats.dataclass.parsers.nodes import SkipNode
from xsdata.formats.dataclass.parsers.nodes import StandardNode
from xsdata.formats.dataclass.parsers.nodes import UnionNode
from xsdata.formats.dataclass.parsers.nodes import WildcardNode
from xsdata.formats.dataclass.parsers.utils import ParserUtils
from xsdata.models.enums import DataType
from xsdata.models.enums import Namespace
from xsdata.models.enums import QNames
from xsdata.utils.testing import FactoryTestCase
from xsdata.utils.testing import XmlMetaFactory
from xsdata.utils.testing import XmlVarFactory
class ElementNodeTests(FactoryTestCase):
def setUp(self) -> None:
super().setUp()
self.context = XmlContext()
self.meta = XmlMetaFactory.create(clazz=TypeC, qname="foo", wildcards=[])
self.node = ElementNode(
position=0,
meta=self.meta,
context=self.context,
config=ParserConfig(),
attrs={},
ns_map={},
)
def test_bind(self):
node = ElementNode(
position=0,
meta=self.context.build(SequentialType),
context=self.context,
config=ParserConfig(),
attrs={"a": "b", "a0": "0"},
ns_map={"ns0": "xsdata"},
)
objects = [("x1", 1), ("x2", 2), ("x2", 3)]
expected = SequentialType(a0="0", a1={"a": "b"}, x0=1, x1=[1], x2=[2, 3])
self.assertTrue(node.bind("foo", "1", "tail", objects))
self.assertEqual("foo", objects[-1][0])
self.assertEqual(expected, objects[-1][1])
def test_bind_nil_value(self):
self.node.xsi_nil = True
objects = []
self.assertTrue(self.node.bind("foo", None, None, objects))
self.assertEqual(("foo", None), objects[-1])
def test_bind_nillable_type(self):
self.node.meta = self.context.build(NillableType)
self.node.xsi_nil = True
objects = []
self.assertTrue(self.node.bind("foo", None, None, objects))
self.assertEqual(("foo", NillableType(None)), objects[-1])
def test_bind_fixed_value(self):
self.node.meta = self.context.build(FixedType)
objects = []
self.assertTrue(self.node.bind("foo", "not the fixed value", None, objects))
self.assertEqual(("foo", FixedType()), objects[-1])
def test_bind_with_derived_element(self):
self.node.meta = self.context.build(TypeA)
self.node.derived_factory = DerivedElement
objects = []
self.assertTrue(self.node.bind("foo", "2", None, objects))
self.assertEqual("foo", objects[-1][0])
self.assertEqual(DerivedElement("foo", TypeA(2)), objects[-1][1])
def test_bind_with_wildcard_var(self):
self.node.meta = self.context.build(ExtendedType)
self.node.attrs = {"a": "b"}
self.node.ns_map = {"ns0": "xsdata"}
objects = [("a", "1"), ("b", "2")]
expected = ExtendedType(
a="1",
wildcard=AnyElement(
text="text",
tail="tail",
children=[AnyElement(qname="b", text="2")],
attributes={"a": "b"},
),
)
self.assertTrue(self.node.bind("foo", "text", "tail", objects))
self.assertEqual("foo", objects[-1][0])
self.assertEqual(expected, objects[-1][1])
def test_bind_with_mixed_flag_true(self):
self.node.meta = self.context.build(TypeB)
self.node.attrs = {"a": "b"}
self.node.ns_map = {"ns0": "xsdata"}
self.node.mixed = True
objects = [("x", 1), ("y", "a")]
self.assertTrue(self.node.bind("foo", "text", " ", objects))
self.assertEqual(1, len(objects))
self.assertEqual(TypeB(x=1, y="a"), objects[-1][1])
objects = [("x", 1), ("y", "a")]
self.assertTrue(self.node.bind("foo", "text", " tail ", objects))
self.assertEqual(2, len(objects))
self.assertEqual(None, objects[-1][0])
self.assertEqual(" tail ", objects[-1][1])
def test_bind_with_mixed_content_var(self):
self.node.meta = self.context.build(Paragraph)
self.node.attrs = {"a": "b"}
self.node.ns_map = {"ns0": "xsdata"}
objects = [("a", 1)]
expected = Paragraph(content=["text", AnyElement(qname="a", text="1"), "tail"])
self.assertTrue(self.node.bind("foo", "text", "tail", objects))
self.assertEqual("foo", objects[-1][0])
self.assertEqual(expected, objects[-1][1])
def test_bind_wild_text(self):
self.node.meta = self.context.build(ExtendedType)
var = self.node.meta.wildcards[0]
params = {}
self.node.bind_wild_text(params, var, None, None)
self.assertEqual(0, len(params))
params = {}
self.node.bind_wild_text(params, var, "txt", "tail")
expected = AnyElement(text="txt", tail="tail")
self.assertEqual({"wildcard": expected}, params)
self.node.attrs = {"a": "b"}
self.node.ns_map = {"ns0": "a"}
self.node.bind_wild_text(params, var, "txt", "tail")
expected = AnyElement(
text="txt", tail="tail", children=[expected], attributes=self.node.attrs
)
self.assertEqual({"wildcard": expected}, params)
self.node.meta = self.context.build(ExtendedListType)
var = self.node.meta.wildcards[0]
params = {}
self.node.bind_wild_text(params, var, "txt", "tail")
self.assertEqual({"wildcard": ["txt", "tail"]}, params)
self.node.bind_wild_text(params, var, None, "tail")
self.assertEqual({"wildcard": ["txt", "tail", "tail"]}, params)
self.node.bind_wild_text(params, var, "first", None)
self.assertEqual({"wildcard": ["first", "txt", "tail", "tail"]}, params)
def test_bind_attrs(self):
self.node.meta = self.context.build(AttrsType)
self.node.attrs = {
"index": "0",
"fixed": "will be ignored",
"{what}ever": "qname",
"extended": "attr",
}
params = {}
self.node.bind_attrs(params)
expected = {"attrs": {"extended": "attr", "{what}ever": "qname"}, "index": 0}
self.assertEqual(expected, params)
def test_bind_attrs_with_fail_on_unknown_attributes(self):
self.node.meta = self.context.build(AttrsType)
self.node.config.fail_on_unknown_attributes = True
self.node.attrs = {
"index": "0",
"fixed": "will be ignored",
"{what}ever": "qname",
"extended": "attr",
}
params = {}
self.node.bind_attrs(params)
expected = {"attrs": {"extended": "attr", "{what}ever": "qname"}, "index": 0}
self.assertEqual(expected, params)
def test_bind_with_fail_on_unknown_attributes(self):
self.node.meta = self.context.build(ExtendedType)
self.node.config.fail_on_unknown_attributes = True
self.node.attrs = {"a": "b"}
objects = [("a", "1")]
with self.assertRaises(ParserError) as cm:
self.node.bind("foo", "text", "tail", objects)
self.assertEqual("Unknown attribute ExtendedType:a", str(cm.exception))
@mock.patch("xsdata.formats.dataclass.parsers.nodes.element.logger.warning")
def test_bind_objects(self, mock_warning):
self.node.meta = self.context.build(TypeC)
objects = [("x", 1), ("x", 2), ("z", 3.0), ("fixed", "bar")]
params = {}
self.node.bind_objects(params, objects)
self.assertEqual({"x": 1, "z": 3.0}, params)
mock_warning.assert_called_once_with("Unassigned parsed object %s", "x")
def test_bind_wild_var(self):
self.node.meta = self.context.build(ExtendedType)
params = {}
objects = [("x", 1), ("x", 2), ("z", 3.0)]
self.node.bind_objects(params, objects)
expected = {
"wildcard": AnyElement(
children=[
AnyElement(qname="x", text="1"),
AnyElement(qname="x", text="2"),
AnyElement(qname="z", text="3.0"),
]
)
}
self.assertEqual(expected, params)
def test_bind_wild_list_var(self):
self.node.meta = self.context.build(ExtendedListType)
params = {}
objects = [("x", 1), ("x", 2), ("z", 3.0)]
self.node.bind_objects(params, objects)
expected = {
"wildcard": [
AnyElement(qname="x", text="1"),
AnyElement(qname="x", text="2"),
AnyElement(qname="z", text="3.0"),
]
}
self.assertEqual(expected, params)
def test_prepare_generic_value(self):
actual = self.node.prepare_generic_value(None, 1)
self.assertEqual(1, actual)
actual = self.node.prepare_generic_value("a", 1)
expected = AnyElement(qname="a", text="1")
self.assertEqual(expected, actual)
actual = self.node.prepare_generic_value("a", "foo")
expected = AnyElement(qname="a", text="foo")
self.assertEqual(expected, actual)
fixture = make_dataclass("Fixture", [("content", str)])
actual = self.node.prepare_generic_value("a", fixture("foo"))
expected = DerivedElement(qname="a", value=fixture("foo"), type="Fixture")
self.assertEqual(expected, actual)
actual = self.node.prepare_generic_value("a", expected)
self.assertIs(expected, actual)
actual = self.node.prepare_generic_value("Fixture", fixture("foo"))
expected = DerivedElement(qname="Fixture", value=fixture("foo"))
self.assertEqual(expected, actual)
def test_child(self):
var = XmlVarFactory.create(xml_type=XmlType.ELEMENT, qname="a", types=(TypeC,))
attrs = {"a": "b"}
ns_map = {"ns0": "xsdata"}
position = 1
self.meta.elements[var.qname] = [var]
actual = self.node.child("a", attrs, ns_map, position)
self.assertIsInstance(actual, ElementNode)
self.assertEqual(attrs, actual.attrs)
self.assertEqual(ns_map, actual.ns_map)
self.assertEqual(position, actual.position)
def test_child_with_unique_element(self):
single = XmlVarFactory.create(
index=1, xml_type=XmlType.ELEMENT, qname="a", types=(TypeC,)
)
wildcard = XmlVarFactory.create(
index=2, xml_type=XmlType.WILDCARD, qname="a", types=(object,)
)
self.meta.elements[single.qname] = [single]
self.meta.wildcards.append(wildcard)
attrs = {"a": "b"}
ns_map = {"ns0": "xsdata"}
position = 1
actual = self.node.child("a", attrs, ns_map, position)
self.assertIsInstance(actual, ElementNode)
self.assertIn(single.index, self.node.assigned)
actual = self.node.child("a", attrs, ns_map, position)
self.assertIsInstance(actual, WildcardNode)
self.assertNotIn(wildcard.index, self.node.assigned)
@mock.patch.object(ElementNode, "build_node")
def test_child_when_failed_to_build_next_node(self, mock_build_node):
mock_build_node.return_value = None
element = XmlVarFactory.create(xml_type=XmlType.ELEMENT, qname="a")
wildcard = XmlVarFactory.create(xml_type=XmlType.WILDCARD, qname="a")
self.meta.elements[element.qname] = [element]
self.meta.wildcards.append(wildcard)
with self.assertRaises(ParserError) as cm:
self.node.child("a", {}, {}, 0)
self.assertEqual("Unknown property foo:a", str(cm.exception))
self.node.config.fail_on_unknown_properties = False
actual = self.node.child("foobar", {}, {}, 0)
self.assertIsInstance(actual, SkipNode)
def test_build_node_with_dataclass_union_var(self):
var = XmlVarFactory.create(
xml_type=XmlType.ELEMENT,
name="a",
qname="a",
types=(TypeC, TypeB),
)
attrs = {"a": "b"}
ns_map = {"ns0": "xsdata"}
actual = self.node.build_node(var, attrs, ns_map, 10)
self.assertIsInstance(actual, UnionNode)
self.assertEqual(10, actual.position)
self.assertIs(var, actual.var)
self.assertEqual(attrs, actual.attrs)
self.assertEqual(ns_map, actual.ns_map)
self.assertEqual(0, actual.level)
self.assertEqual(0, len(actual.events))
@mock.patch.object(ParserUtils, "xsi_type", return_value="foo")
@mock.patch.object(XmlContext, "fetch")
def test_build_node_with_dataclass_var(self, mock_ctx_fetch, mock_xsi_type):
var = XmlVarFactory.create(
xml_type=XmlType.ELEMENT,
name="a",
qname="a",
types=(TypeC,),
derived=True,
)
xsi_type = "foo"
namespace = self.meta.namespace
mock_ctx_fetch.return_value = self.meta
mock_xsi_type.return_value = xsi_type
attrs = {"a": "b"}
ns_map = {"ns0": "xsdata"}
actual = self.node.build_node(var, attrs, ns_map, 10)
self.assertIsInstance(actual, ElementNode)
self.assertEqual(10, actual.position)
self.assertEqual(DerivedElement, actual.derived_factory)
self.assertIs(mock_ctx_fetch.return_value, actual.meta)
mock_xsi_type.assert_called_once_with(attrs, ns_map)
mock_ctx_fetch.assert_called_once_with(var.clazz, namespace, xsi_type)
@mock.patch.object(XmlContext, "fetch")
def test_build_node_with_dataclass_var_validates_nillable(self, mock_ctx_fetch):
var = XmlVarFactory.create(xml_type=XmlType.ELEMENT, qname="a", types=(TypeC,))
ns_map = {}
nillable_meta = copy.deepcopy(self.meta)
nillable_meta.nillable = True
mock_ctx_fetch.side_effect = [self.meta, self.meta, nillable_meta]
attrs = {QNames.XSI_NIL: "false"}
self.assertIsNotNone(self.node.build_node(var, attrs, ns_map, 10))
attrs = {QNames.XSI_NIL: "true"}
self.assertIsNotNone(self.node.build_node(var, attrs, ns_map, 10))
attrs = {QNames.XSI_NIL: "false"}
self.assertIsNone(self.node.build_node(var, attrs, ns_map, 10))
def test_build_node_with_any_type_var_with_matching_xsi_type(self):
var = XmlVarFactory.create(
xml_type=XmlType.ELEMENT,
name="a",
qname="a",
types=(object,),
any_type=True,
)
attrs = {QNames.XSI_TYPE: "bk:books"}
ns_map = {"bk": "urn:books"}
actual = self.node.build_node(var, attrs, ns_map, 10)
self.assertIsInstance(actual, ElementNode)
self.assertEqual(10, actual.position)
self.assertEqual(self.context.build(Books), actual.meta)
self.assertEqual(attrs, actual.attrs)
self.assertEqual(ns_map, actual.ns_map)
self.assertFalse(actual.mixed)
def test_build_node_with_any_type_var_with_datatype(self):
var = XmlVarFactory.create(
xml_type=XmlType.ELEMENT,
name="a",
qname="a",
types=(object,),
any_type=True,
)
attrs = {QNames.XSI_TYPE: "xs:hexBinary"}
ns_map = {Namespace.XS.prefix: Namespace.XS.uri}
actual = self.node.build_node(var, attrs, ns_map, 10)
self.assertIsInstance(actual, StandardNode)
self.assertEqual(ns_map, actual.ns_map)
self.assertEqual(DataType.HEX_BINARY, actual.datatype)
self.assertIsNone(actual.derived_factory)
def test_build_node_with_any_type_var_with_no_matching_xsi_type(self):
var = XmlVarFactory.create(
xml_type=XmlType.ELEMENT,
name="a",
qname="a",
types=(object,),
any_type=True,
)
attrs = {QNames.XSI_TYPE: "noMatch"}
actual = self.node.build_node(var, attrs, {}, 10)
self.assertIsInstance(actual, WildcardNode)
self.assertEqual(10, actual.position)
self.assertEqual(var, actual.var)
self.assertEqual(attrs, actual.attrs)
self.assertEqual({}, actual.ns_map)
def test_build_node_with_any_type_var_with_no_xsi_type(self):
var = XmlVarFactory.create(
xml_type=XmlType.ELEMENT,
name="a",
qname="a",
types=(object,),
any_type=True,
)
attrs = {}
actual = self.node.build_node(var, attrs, {}, 10)
self.assertIsInstance(actual, WildcardNode)
self.assertEqual(10, actual.position)
self.assertEqual(var, actual.var)
self.assertEqual(attrs, actual.attrs)
self.assertEqual({}, actual.ns_map)
def test_build_node_with_wildcard_var(self):
var = XmlVarFactory.create(xml_type=XmlType.WILDCARD, qname="a")
actual = self.node.build_node(var, {}, {}, 10)
self.assertIsInstance(actual, WildcardNode)
self.assertEqual(10, actual.position)
self.assertEqual(var, actual.var)
def test_build_node_with_primitive_var(self):
var = XmlVarFactory.create(
xml_type=XmlType.TEXT, qname="a", types=(int,), default=100
)
attrs = {"a": "b"}
ns_map = {"ns0": "xsdata"}
actual = self.node.build_node(var, attrs, ns_map, 10)
self.assertIsInstance(actual, PrimitiveNode)
self.assertEqual(ns_map, actual.ns_map)
self.assertEqual(var, actual.var)
| true | true |
f71e0df35e63fc94299cae74adce175b14ce46d5 | 1,249 | py | Python | azure/mgmt/network/v2017_11_01/models/virtual_network_usage_name.py | EnjoyLifeFund/macHighSierra-py36-pkgs | 5668b5785296b314ea1321057420bcd077dba9ea | [
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | null | null | null | azure/mgmt/network/v2017_11_01/models/virtual_network_usage_name.py | EnjoyLifeFund/macHighSierra-py36-pkgs | 5668b5785296b314ea1321057420bcd077dba9ea | [
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | null | null | null | azure/mgmt/network/v2017_11_01/models/virtual_network_usage_name.py | EnjoyLifeFund/macHighSierra-py36-pkgs | 5668b5785296b314ea1321057420bcd077dba9ea | [
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | null | null | null | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class VirtualNetworkUsageName(Model):
"""Usage strings container.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar localized_value: Localized subnet size and usage string.
:vartype localized_value: str
:ivar value: Subnet size and usage string.
:vartype value: str
"""
_validation = {
'localized_value': {'readonly': True},
'value': {'readonly': True},
}
_attribute_map = {
'localized_value': {'key': 'localizedValue', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'},
}
def __init__(self):
self.localized_value = None
self.value = None
| 31.225 | 77 | 0.567654 |
from msrest.serialization import Model
class VirtualNetworkUsageName(Model):
_validation = {
'localized_value': {'readonly': True},
'value': {'readonly': True},
}
_attribute_map = {
'localized_value': {'key': 'localizedValue', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'},
}
def __init__(self):
self.localized_value = None
self.value = None
| true | true |
f71e0e8918e2a9d56b2c7d803883124189453ab6 | 3,835 | py | Python | demo-and-experimental-scripts/demo_predict_presynch_eT.py | ddarmon/transCSSR | 8ed057eee70d2d50d14bc719c7850ac46a00e4d4 | [
"Naumen",
"Condor-1.1",
"MS-PL"
] | 12 | 2018-03-10T08:22:27.000Z | 2022-02-19T19:56:31.000Z | demo-and-experimental-scripts/demo_predict_presynch_eT.py | ddarmon/transCSSR | 8ed057eee70d2d50d14bc719c7850ac46a00e4d4 | [
"Naumen",
"Condor-1.1",
"MS-PL"
] | 1 | 2017-05-12T12:37:09.000Z | 2017-05-20T19:04:47.000Z | demo-and-experimental-scripts/demo_predict_presynch_eT.py | ddarmon/transCSSR | 8ed057eee70d2d50d14bc719c7850ac46a00e4d4 | [
"Naumen",
"Condor-1.1",
"MS-PL"
] | 10 | 2018-04-04T19:53:52.000Z | 2022-02-19T19:56:25.000Z | import numpy
import scipy.stats
import itertools
import copy
import string
import os
from collections import Counter, defaultdict
from filter_data_methods import *
from igraph import *
from transCSSR import *
data_prefix = ''
#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
#
# The various test transducers. Xt is the input
# and Yt is the output.
#
#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
# Xt_name = 'coinflip'
# Yt_name = 'coinflip-excite_w_refrac'
Xt_name = 'barnettX'
Yt_name = 'barnettY'
# Xt_name = ''
# Yt_name = 'even'
#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
#
# Load in the data for each process.
#
#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
stringY = open('data/{}{}.dat'.format(data_prefix, Yt_name)).readline().strip()
if Xt_name == '':
stringX = '0'*len(stringY)
else:
stringX = open('data/{}{}.dat'.format(data_prefix, Xt_name)).readline().strip()
#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
#
# Set the parameters and associated quantities:
# axs, ays -- the input / output alphabets
# alpha -- the significance level associated with
# CSSR's hypothesis tests.
# L -- The maximum history length to look
# back when inferring predictive
# distributions.
#
#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
if Xt_name == '':
axs = ['0']
ays = ['0', '1']
else:
axs = ['0', '1']
ays = ['0', '1']
e_symbols = list(itertools.product(axs, ays)) # All of the possible pairs of emission
# symbols for (x, y)
alpha = 0.001
verbose = False
# L is the maximum amount we want to ever look back.
L_max = 3
Tx = len(stringX); Ty = len(stringY)
assert Tx == Ty, 'The two time series must have the same length.'
T = Tx
word_lookup_marg, word_lookup_fut = estimate_predictive_distributions(stringX, stringY, L_max)
epsilon, invepsilon, morph_by_state = run_transCSSR(word_lookup_marg, word_lookup_fut, L_max, axs, ays, e_symbols, Xt_name, Yt_name, alpha = alpha)
ind_go_to = 20
possible_states_from_predict_presynch_eT = numpy.zeros((ind_go_to-1, len(invepsilon)), dtype = numpy.int32)
for cur_ind in range(1, ind_go_to):
curX = stringX[:cur_ind]
curY = stringY[:cur_ind-1]
preds, possible_states = predict_presynch_eT(curX, curY, machine_fname = 'transCSSR_results/+{}.dot'.format(Xt_name), transducer_fname = 'transCSSR_results/{}+{}.dot'.format(Xt_name, Yt_name), axs = axs, ays = ays, inf_alg = 'transCSSR')
possible_states_from_predict_presynch_eT[cur_ind - 1] = possible_states
print((cur_ind, curX, curY + '*', preds.tolist(), possible_states))
print('')
preds_all, possible_states_all = filter_and_pred_probs(stringX, stringY, machine_fname = 'transCSSR_results/+{}.dot'.format(Xt_name), transducer_fname = 'transCSSR_results/{}+{}.dot'.format(Xt_name, Yt_name), axs = axs, ays = ays, inf_alg = 'transCSSR')
for cur_ind in range(1, ind_go_to):
curX = stringX[:cur_ind]
curY = stringY[:cur_ind-1]
print((cur_ind, curX, curY + '*', preds_all[cur_ind-1, :].tolist(), possible_states_all[cur_ind-1, :].tolist()))
filtered_states, filtered_probs, stringY_pred = filter_and_predict(stringX, stringY, epsilon, invepsilon, morph_by_state, axs, ays, e_symbols, L_max, memoryless = False)
print_go_to = 40
print(("\n\nFirst {} predictions.".format(print_go_to)))
for ind in range(print_go_to):
print((filtered_probs[ind], preds_all[ind, 1]))
print(("\n\nLast {} predictions.".format(print_go_to)))
for ind in range(preds_all.shape[0] - print_go_to, preds_all.shape[0]):
print((filtered_probs[ind], preds_all[ind, 1]))
import matplotlib.pyplot as plt
plt.figure()
plt.plot(filtered_probs[:, 1], label = 'Using filter_and_predict')
plt.plot(preds_all[:, 1], label = 'Using filter_and_pred_probs')
plt.xlim([0, 1000])
plt.legend()
plt.show() | 29.960938 | 253 | 0.645893 | import numpy
import scipy.stats
import itertools
import copy
import string
import os
from collections import Counter, defaultdict
from filter_data_methods import *
from igraph import *
from transCSSR import *
data_prefix = ''
Xt_name = 'barnettX'
Yt_name = 'barnettY'
stringY = open('data/{}{}.dat'.format(data_prefix, Yt_name)).readline().strip()
if Xt_name == '':
stringX = '0'*len(stringY)
else:
stringX = open('data/{}{}.dat'.format(data_prefix, Xt_name)).readline().strip()
# L -- The maximum history length to look
# back when inferring predictive
# distributions.
#
#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
if Xt_name == '':
axs = ['0']
ays = ['0', '1']
else:
axs = ['0', '1']
ays = ['0', '1']
e_symbols = list(itertools.product(axs, ays)) # All of the possible pairs of emission
# symbols for (x, y)
alpha = 0.001
verbose = False
# L is the maximum amount we want to ever look back.
L_max = 3
Tx = len(stringX); Ty = len(stringY)
assert Tx == Ty, 'The two time series must have the same length.'
T = Tx
word_lookup_marg, word_lookup_fut = estimate_predictive_distributions(stringX, stringY, L_max)
epsilon, invepsilon, morph_by_state = run_transCSSR(word_lookup_marg, word_lookup_fut, L_max, axs, ays, e_symbols, Xt_name, Yt_name, alpha = alpha)
ind_go_to = 20
possible_states_from_predict_presynch_eT = numpy.zeros((ind_go_to-1, len(invepsilon)), dtype = numpy.int32)
for cur_ind in range(1, ind_go_to):
curX = stringX[:cur_ind]
curY = stringY[:cur_ind-1]
preds, possible_states = predict_presynch_eT(curX, curY, machine_fname = 'transCSSR_results/+{}.dot'.format(Xt_name), transducer_fname = 'transCSSR_results/{}+{}.dot'.format(Xt_name, Yt_name), axs = axs, ays = ays, inf_alg = 'transCSSR')
possible_states_from_predict_presynch_eT[cur_ind - 1] = possible_states
print((cur_ind, curX, curY + '*', preds.tolist(), possible_states))
print('')
preds_all, possible_states_all = filter_and_pred_probs(stringX, stringY, machine_fname = 'transCSSR_results/+{}.dot'.format(Xt_name), transducer_fname = 'transCSSR_results/{}+{}.dot'.format(Xt_name, Yt_name), axs = axs, ays = ays, inf_alg = 'transCSSR')
for cur_ind in range(1, ind_go_to):
curX = stringX[:cur_ind]
curY = stringY[:cur_ind-1]
print((cur_ind, curX, curY + '*', preds_all[cur_ind-1, :].tolist(), possible_states_all[cur_ind-1, :].tolist()))
filtered_states, filtered_probs, stringY_pred = filter_and_predict(stringX, stringY, epsilon, invepsilon, morph_by_state, axs, ays, e_symbols, L_max, memoryless = False)
print_go_to = 40
print(("\n\nFirst {} predictions.".format(print_go_to)))
for ind in range(print_go_to):
print((filtered_probs[ind], preds_all[ind, 1]))
print(("\n\nLast {} predictions.".format(print_go_to)))
for ind in range(preds_all.shape[0] - print_go_to, preds_all.shape[0]):
print((filtered_probs[ind], preds_all[ind, 1]))
import matplotlib.pyplot as plt
plt.figure()
plt.plot(filtered_probs[:, 1], label = 'Using filter_and_predict')
plt.plot(preds_all[:, 1], label = 'Using filter_and_pred_probs')
plt.xlim([0, 1000])
plt.legend()
plt.show() | true | true |
f71e0eafa25a709e07c82815a3949bc5f6c67b04 | 5,390 | py | Python | vunit/simulator_factory.py | AdamSteenkamer/vunit | 60b710619c3c7270e62a25ddb3f53091291217d2 | [
"Artistic-2.0"
] | 1 | 2018-10-08T18:01:41.000Z | 2018-10-08T18:01:41.000Z | vunit/simulator_factory.py | AdamSteenkamer/vunit | 60b710619c3c7270e62a25ddb3f53091291217d2 | [
"Artistic-2.0"
] | null | null | null | vunit/simulator_factory.py | AdamSteenkamer/vunit | 60b710619c3c7270e62a25ddb3f53091291217d2 | [
"Artistic-2.0"
] | null | null | null | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Copyright (c) 2014-2018, Lars Asplund lars.anders.asplund@gmail.com
"""
Create simulator instances
"""
import os
from vunit.modelsim_interface import ModelSimInterface
from vunit.activehdl_interface import ActiveHDLInterface
from vunit.rivierapro_interface import RivieraProInterface
from vunit.ghdl_interface import GHDLInterface
from vunit.incisive_interface import IncisiveInterface
from vunit.simulator_interface import (BooleanOption,
ListOfStringOption,
VHDLAssertLevelOption)
class SimulatorFactory(object):
    """
    Create simulator instances.

    Collects the compile/sim options declared by every supported simulator
    class, validates option usage, and selects which simulator to use
    (from the VUNIT_SIMULATOR environment variable or the first available).
    """

    @staticmethod
    def supported_simulators():
        """
        Return a list of supported simulator classes
        """
        return [ModelSimInterface,
                RivieraProInterface,
                ActiveHDLInterface,
                GHDLInterface,
                IncisiveInterface]

    def _merge_simulator_options(self, attr_name, result):
        """
        Merge the option objects found in attribute *attr_name* of every
        supported simulator class into *result* (name -> option dict)
        and return it.

        Factored out of _extract_compile_options/_extract_sim_options,
        which previously duplicated this loop verbatim.
        """
        for sim_class in self.supported_simulators():
            for opt in getattr(sim_class, attr_name):
                # Every option must expose a name and a validator ...
                assert hasattr(opt, "name")
                assert hasattr(opt, "validate")
                # ... be namespaced by its simulator name ...
                assert opt.name.startswith(sim_class.name + ".")
                # ... and be globally unique.
                assert opt.name not in result
                result[opt.name] = opt
        return result

    def _extract_compile_options(self):
        """
        Return all supported compile options
        """
        result = dict((opt.name, opt) for opt in
                      [BooleanOption("disable_coverage")])
        return self._merge_simulator_options("compile_options", result)

    def _extract_sim_options(self):
        """
        Return all supported sim options
        """
        result = dict((opt.name, opt) for opt in
                      [VHDLAssertLevelOption(),
                       BooleanOption("disable_ieee_warnings"),
                       ListOfStringOption("pli")])
        return self._merge_simulator_options("sim_options", result)

    def check_sim_option(self, name, value):
        """
        Check that sim_option has legal name and value

        Raises ValueError for an unknown name; value validation is
        delegated to the option object itself.
        """
        if name not in self._sim_options:
            known_options = sorted(list(self._sim_options.keys()))
            raise ValueError("Unknown sim_option %r, expected one of %r" %
                             (name, known_options))
        self._sim_options[name].validate(value)

    def check_compile_option_name(self, name):
        """
        Check that the compile option name is valid
        """
        if name not in self._compile_options:
            known_options = sorted(list(self._compile_options.keys()))
            raise ValueError("Unknown compile_option %r, expected one of %r" %
                             (name, known_options))

    def check_compile_option(self, name, value):
        """
        Check that the compile option name and value are valid
        """
        self.check_compile_option_name(name)
        self._compile_options[name].validate(value)

    def select_simulator(self):
        """
        Select simulator class, either from VUNIT_SIMULATOR environment variable
        or the first available.

        Returns None when no supported simulator is installed.
        """
        available_simulators = self._detect_available_simulators()
        name_mapping = {simulator_class.name: simulator_class for simulator_class in self.supported_simulators()}
        if not available_simulators:
            return None

        environ_name = "VUNIT_SIMULATOR"
        if environ_name in os.environ:
            simulator_name = os.environ[environ_name]
            if simulator_name not in name_mapping:
                raise RuntimeError(
                    ("Simulator from " + environ_name + " environment variable %r is not supported. "
                     "Supported simulators are %r")
                    % (simulator_name, name_mapping.keys()))
            simulator_class = name_mapping[simulator_name]
        else:
            # No explicit choice: fall back to the first simulator found.
            simulator_class = available_simulators[0]

        return simulator_class

    def add_arguments(self, parser):
        """
        Add command line arguments to parser
        """
        parser.add_argument('-g', '--gui',
                            action="store_true",
                            default=False,
                            help=("Open test case(s) in simulator gui with top level pre loaded"))

        # Let each simulator contribute its own arguments.
        for sim in self.supported_simulators():
            sim.add_arguments(parser)

    def __init__(self):
        # Cache the option tables once; the check_* methods consult them.
        self._compile_options = self._extract_compile_options()
        self._sim_options = self._extract_sim_options()

    def _detect_available_simulators(self):
        """
        Detect available simulators and return a list
        """
        return [simulator_class
                for simulator_class in self.supported_simulators()
                if simulator_class.is_available()]

    @property
    def has_simulator(self):
        """
        True if at least one supported simulator is available on this system
        """
        return bool(self._detect_available_simulators())


# Module-level singleton used throughout the code base.
SIMULATOR_FACTORY = SimulatorFactory()
| 34.774194 | 113 | 0.605195 |
import os
from vunit.modelsim_interface import ModelSimInterface
from vunit.activehdl_interface import ActiveHDLInterface
from vunit.rivierapro_interface import RivieraProInterface
from vunit.ghdl_interface import GHDLInterface
from vunit.incisive_interface import IncisiveInterface
from vunit.simulator_interface import (BooleanOption,
ListOfStringOption,
VHDLAssertLevelOption)
class SimulatorFactory(object):
    """
    Create simulator instances
    """
    @staticmethod
    def supported_simulators():
        """
        Return a list of supported simulator classes
        """
        return [ModelSimInterface,
                RivieraProInterface,
                ActiveHDLInterface,
                GHDLInterface,
                IncisiveInterface]
    def _extract_compile_options(self):
        """
        Return all supported compile options (name -> option object)
        """
        result = dict((opt.name, opt) for opt in
                      [BooleanOption("disable_coverage")])
        for sim_class in self.supported_simulators():
            for opt in sim_class.compile_options:
                # Each option must expose a name/validator, be namespaced by
                # its simulator name, and be globally unique.
                assert hasattr(opt, "name")
                assert hasattr(opt, "validate")
                assert opt.name.startswith(sim_class.name + ".")
                assert opt.name not in result
                result[opt.name] = opt
        return result
    def _extract_sim_options(self):
        """
        Return all supported sim options (name -> option object)
        """
        result = dict((opt.name, opt) for opt in
                      [VHDLAssertLevelOption(),
                       BooleanOption("disable_ieee_warnings"),
                       ListOfStringOption("pli")])
        for sim_class in self.supported_simulators():
            for opt in sim_class.sim_options:
                # Same invariants as for compile options.
                assert hasattr(opt, "name")
                assert hasattr(opt, "validate")
                assert opt.name.startswith(sim_class.name + ".")
                assert opt.name not in result
                result[opt.name] = opt
        return result
    def check_sim_option(self, name, value):
        """
        Check that sim_option has legal name and value
        """
        known_options = sorted(list(self._sim_options.keys()))
        if name not in self._sim_options:
            raise ValueError("Unknown sim_option %r, expected one of %r" %
                             (name, known_options))
        self._sim_options[name].validate(value)
    def check_compile_option_name(self, name):
        """
        Check that the compile option name is valid
        """
        known_options = sorted(list(self._compile_options.keys()))
        if name not in known_options:
            raise ValueError("Unknown compile_option %r, expected one of %r" %
                             (name, known_options))
    def check_compile_option(self, name, value):
        """
        Check that the compile option name and value are valid
        """
        self.check_compile_option_name(name)
        self._compile_options[name].validate(value)
    def select_simulator(self):
        """
        Select simulator class, either from VUNIT_SIMULATOR environment variable
        or the first available; returns None when none is installed.
        """
        available_simulators = self._detect_available_simulators()
        name_mapping = {simulator_class.name: simulator_class for simulator_class in self.supported_simulators()}
        if not available_simulators:
            return None
        environ_name = "VUNIT_SIMULATOR"
        if environ_name in os.environ:
            simulator_name = os.environ[environ_name]
            if simulator_name not in name_mapping:
                raise RuntimeError(
                    ("Simulator from " + environ_name + " environment variable %r is not supported. "
                     "Supported simulators are %r")
                    % (simulator_name, name_mapping.keys()))
            simulator_class = name_mapping[simulator_name]
        else:
            # No explicit choice: fall back to the first simulator found.
            simulator_class = available_simulators[0]
        return simulator_class
    def add_arguments(self, parser):
        """
        Add command line arguments to parser
        """
        parser.add_argument('-g', '--gui',
                            action="store_true",
                            default=False,
                            help=("Open test case(s) in simulator gui with top level pre loaded"))
        for sim in self.supported_simulators():
            sim.add_arguments(parser)
    def __init__(self):
        # Cache the option tables once; the check_* methods consult them.
        self._compile_options = self._extract_compile_options()
        self._sim_options = self._extract_sim_options()
    def _detect_available_simulators(self):
        """
        Detect available simulators and return a list
        """
        return [simulator_class
                for simulator_class in self.supported_simulators()
                if simulator_class.is_available()]
    @property
    def has_simulator(self):
        # True if at least one supported simulator is available.
        return bool(self._detect_available_simulators())
# Module-level singleton used throughout the code base.
SIMULATOR_FACTORY = SimulatorFactory()
| true | true |
f71e0faf9b628abe054c0f4ac9d1468f103bcb5f | 410 | py | Python | homework/pages/main_page.py | viaviare/MyFirstRepository | dab8530d16ab9746471b61b61e006d9febfed195 | [
"Unlicense"
] | null | null | null | homework/pages/main_page.py | viaviare/MyFirstRepository | dab8530d16ab9746471b61b61e006d9febfed195 | [
"Unlicense"
] | null | null | null | homework/pages/main_page.py | viaviare/MyFirstRepository | dab8530d16ab9746471b61b61e006d9febfed195 | [
"Unlicense"
] | null | null | null | from selenium.webdriver.support.wait import WebDriverWait
class MainPage:
    def __init__(self, driver):
        """Store the Selenium WebDriver and a 10-second explicit-wait helper."""
        self.driver = driver
        # Shared explicit wait (10 s) available to code using this page object.
        self.wait = WebDriverWait(driver, 10)
def open(self):
self.driver.get("http://localhost/litecart")
return self
@property
def choose_item_on_main_page(self):
return self.driver.find_element_by_css_selector("div.content a.link") | 25.625 | 77 | 0.690244 | from selenium.webdriver.support.wait import WebDriverWait
class MainPage:
    def __init__(self, driver):
        """Store the Selenium WebDriver and a 10-second explicit-wait helper."""
        self.driver = driver
        # Shared explicit wait (10 s) available to code using this page object.
        self.wait = WebDriverWait(driver, 10)
    def open(self):
        """Open the local litecart main page and return self for chaining."""
        self.driver.get("http://localhost/litecart")
        return self
@property
def choose_item_on_main_page(self):
return self.driver.find_element_by_css_selector("div.content a.link") | true | true |
f71e1171ccdc837a4c6947b4b28a96f186752e0f | 3,396 | py | Python | src/models/build_model.py | ds-praveenkumar/m5-accuracy-prediction | 20255adc95c3e0fe6c6acec9fd16ac88c6e95908 | [
"MIT"
] | null | null | null | src/models/build_model.py | ds-praveenkumar/m5-accuracy-prediction | 20255adc95c3e0fe6c6acec9fd16ac88c6e95908 | [
"MIT"
] | null | null | null | src/models/build_model.py | ds-praveenkumar/m5-accuracy-prediction | 20255adc95c3e0fe6c6acec9fd16ac88c6e95908 | [
"MIT"
] | null | null | null | # github link: https://github.com/ds-praveenkumar/kaggle
# Author: ds-praveenkumar
# file: forcasting/build_model.py/
# Created by ds-praveenkumar at 13-06-2020 02 09
# feature:
import os
import psutil
from fbprophet import Prophet
from pathlib import Path
import pandas as pd
import numpy as np
import pickle
from src.utility.timeit import timeit
ROOT_DIR = Path(__file__).parent.parent.parent
print('ROOT_DIR:', ROOT_DIR)
@timeit
def us_public_holidays():
    """Build the Prophet ``holidays`` DataFrame for US public holidays.

    Returns a DataFrame with one row per holiday occurrence and the two
    columns Prophet expects: ``holiday`` (name) and ``ds`` (date), covering
    the 2015-2017 span used by this model.
    """
    ny = pd.DataFrame({'holiday': "New Year's Day", 'ds': pd.to_datetime(['2016-01-01', '2017-01-01'])})
    mlk = pd.DataFrame(
        {'holiday': 'Birthday of Martin Luther King, Jr.', 'ds': pd.to_datetime(['2016-01-18', '2017-01-16'])})
    wash = pd.DataFrame({'holiday': "Washington's Birthday", 'ds': pd.to_datetime(['2016-02-15', '2017-02-20'])})
    mem = pd.DataFrame({'holiday': 'Memorial Day', 'ds': pd.to_datetime(['2016-05-30', '2017-05-29'])})
    ind = pd.DataFrame(
        {'holiday': 'Independence Day', 'ds': pd.to_datetime(['2015-07-04', '2016-07-04', '2017-07-04'])})
    lab = pd.DataFrame({'holiday': 'Labor Day', 'ds': pd.to_datetime(['2015-09-07', '2016-09-05', '2017-09-04'])})
    col = pd.DataFrame({'holiday': 'Columbus Day', 'ds': pd.to_datetime(['2015-10-12', '2016-10-10', '2017-10-09'])})
    vet = pd.DataFrame({'holiday': "Veteran's Day", 'ds': pd.to_datetime(['2015-11-11', '2016-11-11', '2017-11-11'])})
    thanks = pd.DataFrame({'holiday': 'Thanksgiving Day', 'ds': pd.to_datetime(['2015-11-26', '2016-11-24'])})
    christ = pd.DataFrame({'holiday': 'Christmas', 'ds': pd.to_datetime(['2015-12-25', '2016-12-25'])})
    inaug = pd.DataFrame({'holiday': 'Inauguration Day', 'ds': pd.to_datetime(['2017-01-20'])})
    # Use a distinct local name: the original bound the result to
    # ``us_public_holidays``, shadowing the function's own name.
    holidays = pd.concat([ny, mlk, wash, mem, ind, lab, col, vet, thanks, christ, inaug])
    return holidays
def is_nfl_season(ds):
    """Return True when *ds* falls in the NFL season (September-January)."""
    month = pd.to_datetime(ds).month
    return month > 8 or month < 2
def nfl_sunday(ds):
    """Return 1 when *ds* is a Sunday during the NFL season (Sep-Jan), else 0."""
    date = pd.to_datetime(ds)
    # weekday() == 6 is Sunday; months September through January count.
    in_season = date.month > 8 or date.month < 2
    return 1 if (date.weekday() == 6 and in_season) else 0
@timeit
def build_model(csv_path='H:\\forcasting\\data\\training\\10655.csv'):
    """Fit a Prophet model on the series in *csv_path* and print a 28-day forecast.

    :param csv_path: CSV file with Prophet-style ``ds``/``y`` columns.
        Defaults to the original hard-coded training file, so existing
        ``build_model()`` callers behave exactly as before.
    """
    df = pd.read_csv(csv_path)
    # NOTE(review): log1p(y + 1) == log(y + 2); the extra +1 looks doubled
    # up -- confirm it is intentional before changing.
    df['y'] = np.log1p(df.y.astype(float) + 1)
    print(df)
    model = Prophet(
        interval_width=0.95,
        changepoint_prior_scale=0.15,
        daily_seasonality=True,
        holidays=us_public_holidays(),
        yearly_seasonality=True,
        weekly_seasonality=True,
        seasonality_mode='multiplicative'
    )
    # Extra weekly seasonality term plus the NFL-Sunday indicator as an
    # external regressor, on top of built-in US holidays.
    model.add_seasonality(
        name='weekly', period=7, fourier_order=3, prior_scale=0.1)
    df['nfl_sunday'] = df['ds'].apply(nfl_sunday)
    print(df)
    model.add_regressor('nfl_sunday')
    model.add_country_holidays(country_name='US')
    # save model
    filename = 'prophet_1.0.pkl'
    root = os.path.join(ROOT_DIR, 'models')
    print(ROOT_DIR)
    path = os.path.join(root, filename)
    # with open(path, "wb") as f:
    #     pickle.dump(model, f)
    # NOTE(review): the pickle dump above is commented out, so nothing is
    # actually written; the message below is therefore misleading.
    print(f"model saved at: {path}")
    model.fit(df)
    # Forecast 28 days past the end of the training data.
    future = model.make_future_dataframe(periods=28)
    future['nfl_sunday'] = future['ds'].apply(nfl_sunday)
    forecast = model.predict(future)
    print(forecast[-28:])
if __name__ == '__main__':
    # Handle to this process so memory usage can be reported after training.
    process = psutil.Process(os.getpid())
    build_model()
print('Memory Usage(MB):',process.memory_info()[0] / float(2 ** 20)) | 35.747368 | 118 | 0.645465 |
import os
import psutil
from fbprophet import Prophet
from pathlib import Path
import pandas as pd
import numpy as np
import pickle
from src.utility.timeit import timeit
ROOT_DIR = Path(__file__).parent.parent.parent
print('ROOT_DIR:', ROOT_DIR)
@timeit
def us_public_holidays():
    """Build the Prophet ``holidays`` DataFrame (columns ``holiday``/``ds``)
    for US public holidays over the 2015-2017 span used by this model."""
    ny = pd.DataFrame({'holiday': "New Year's Day", 'ds': pd.to_datetime(['2016-01-01', '2017-01-01'])})
    mlk = pd.DataFrame(
        {'holiday': 'Birthday of Martin Luther King, Jr.', 'ds': pd.to_datetime(['2016-01-18', '2017-01-16'])})
    wash = pd.DataFrame({'holiday': "Washington's Birthday", 'ds': pd.to_datetime(['2016-02-15', '2017-02-20'])})
    mem = pd.DataFrame({'holiday': 'Memorial Day', 'ds': pd.to_datetime(['2016-05-30', '2017-05-29'])})
    ind = pd.DataFrame(
        {'holiday': 'Independence Day', 'ds': pd.to_datetime(['2015-07-04', '2016-07-04', '2017-07-04'])})
    lab = pd.DataFrame({'holiday': 'Labor Day', 'ds': pd.to_datetime(['2015-09-07', '2016-09-05', '2017-09-04'])})
    col = pd.DataFrame({'holiday': 'Columbus Day', 'ds': pd.to_datetime(['2015-10-12', '2016-10-10', '2017-10-09'])})
    vet = pd.DataFrame({'holiday': "Veteran's Day", 'ds': pd.to_datetime(['2015-11-11', '2016-11-11', '2017-11-11'])})
    thanks = pd.DataFrame({'holiday': 'Thanksgiving Day', 'ds': pd.to_datetime(['2015-11-26', '2016-11-24'])})
    christ = pd.DataFrame({'holiday': 'Christmas', 'ds': pd.to_datetime(['2015-12-25', '2016-12-25'])})
    inaug = pd.DataFrame({'holiday': 'Inauguration Day', 'ds': pd.to_datetime(['2017-01-20'])})
    # NOTE: the local result name shadows the enclosing function (harmless here).
    us_public_holidays = pd.concat([ny, mlk, wash, mem, ind, lab, col, vet, thanks, christ, inaug])
    return us_public_holidays
def is_nfl_season(ds):
    """Return True when *ds* falls in the NFL season (September-January)."""
    date = pd.to_datetime(ds)
    return (date.month > 8 or date.month < 2)
def nfl_sunday(ds):
    """Return 1 when *ds* is a Sunday during the NFL season (Sep-Jan), else 0."""
    date = pd.to_datetime(ds)
    # weekday() == 6 is Sunday; months September through January count.
    if date.weekday() == 6 and (date.month > 8 or date.month < 2):
        return 1
    else:
        return 0
@timeit
def build_model():
    """Fit a Prophet model on the hard-coded training CSV and print a
    28-day forecast."""
    df = pd.read_csv('H:\\forcasting\\data\\training\\10655.csv')
    # NOTE(review): log1p(y + 1) == log(y + 2); the extra +1 looks doubled
    # up -- confirm it is intentional.
    df['y'] = np.log1p(df.y.astype(float) + 1)
    print(df)
    model = Prophet(
        interval_width=0.95,
        changepoint_prior_scale=0.15,
        daily_seasonality=True,
        holidays=us_public_holidays(),
        yearly_seasonality=True,
        weekly_seasonality=True,
        seasonality_mode='multiplicative'
    )
    # Extra weekly seasonality term plus the NFL-Sunday indicator as an
    # external regressor, on top of built-in US holidays.
    model.add_seasonality(
        name='weekly', period=7, fourier_order=3, prior_scale=0.1)
    df['nfl_sunday'] = df['ds'].apply(nfl_sunday)
    print(df)
    model.add_regressor('nfl_sunday')
    model.add_country_holidays(country_name='US')
    #save model
    filename = 'prophet_1.0.pkl'
    root = os.path.join(ROOT_DIR,'models')
    print(ROOT_DIR)
    path = os.path.join(root,filename)
    # with open(path, "wb") as f:
    #     pickle.dump(model, f)
    # NOTE(review): the pickle dump above is commented out, so nothing is
    # actually written; the message below is therefore misleading.
    print(f"model saved at: {path}")
    model.fit(df)
    # Forecast 28 days past the end of the training data.
    future = model.make_future_dataframe(periods=28)
    future['nfl_sunday'] = future['ds'].apply(nfl_sunday)
    forecast = model.predict(future)
    print(forecast[-28:])
if __name__ == '__main__':
    # Handle to this process so memory usage can be reported after training.
    process = psutil.Process(os.getpid())
    build_model()
print('Memory Usage(MB):',process.memory_info()[0] / float(2 ** 20)) | true | true |
f71e12dce904f62ef30ba9880c80ecf12faae214 | 2,440 | py | Python | nnexpy/network_generator.py | Spiilgriim/nnexpy | f8e419598ef94bebb532eb32ccaeeb48a3edfb5e | [
"MIT"
] | 1 | 2021-09-17T12:18:51.000Z | 2021-09-17T12:18:51.000Z | nnexpy/network_generator.py | Spiilgriim/nn-expressiveness | f8e419598ef94bebb532eb32ccaeeb48a3edfb5e | [
"MIT"
] | null | null | null | nnexpy/network_generator.py | Spiilgriim/nn-expressiveness | f8e419598ef94bebb532eb32ccaeeb48a3edfb5e | [
"MIT"
] | null | null | null | class NetworkGenerator(object):
def build_model(self, *args, **kwargs):
import tensorflow as tf
depth = kwargs.get('depth', 1)
input_shape = kwargs.get('input_shape', (2,))
width = kwargs.get('width', 8)
activation = kwargs.get('activation', 'relu')
model = tf.keras.Sequential()
model.add(tf.keras.layers.Dense(8, input_dim=input_shape[0], activation=activation,
kernel_initializer='he_uniform'))
for _ in range(depth):
model.add(tf.keras.layers.Dense(8, activation=activation))
model.add(tf.keras.layers.Dense(1, activation='sigmoid'))
return model
    def train_and_save(self, *args, **kwargs):
        """Compile, fit and save a Keras model, then tear the session down.

        Keyword args:
            model: a built (uncompiled) Keras model -- required.
            epoch_number (int): training epochs (default 100).
            data, label: training inputs/targets passed straight to ``fit``.
            save_path (str): where the trained model is written (default './model.h5').
            callbacks: list of Keras callbacks or None.
            batch_size (int): default 10.
            loss (str): default 'sparse_categorical_crossentropy'.
        """
        import tensorflow as tf
        tf.compat.v1.disable_eager_execution()
        model = kwargs.get('model', None)
        epoch_number = kwargs.get('epoch_number', 100)
        data = kwargs.get('data', None)
        label = kwargs.get('label', None)
        save_path = kwargs.get('save_path', './model.h5')
        callbacks = kwargs.get('callbacks', None)
        batch_size = kwargs.get('batch_size', 10)
        loss = kwargs.get('loss', 'sparse_categorical_crossentropy')
        model.summary()
        model.compile(optimizer="adam",
                      loss=loss, metrics=['accuracy'])
        # 20% of the data is held out for validation; order is shuffled.
        model.fit(data, label, validation_split=0.2, batch_size=batch_size,
                  epochs=epoch_number, shuffle=True, verbose=2, callbacks=callbacks)
        model.save(save_path)
        # Free the model and TF graph/session so repeated calls in one
        # process do not accumulate state.
        import gc
        del model
        gc.collect()
        tf.keras.backend.clear_session()
        tf.compat.v1.reset_default_graph()
def full_net_combined(self, depth, input_shape, mypath, epoch_number, data, label):
import tensorflow as tf
tf.compat.v1.disable_eager_execution()
model = self.build_model(
depth=depth, input_shape=input_shape, width=8, activation='relu')
csv = tf.keras.callbacks.CSVLogger(
mypath + str(depth) + 'layer.csv', separator=',', append=False)
self.train_and_save(model=model, epoch_number=epoch_number, data=data, label=label, save_path=mypath +
str(depth) + 'layer.h5', batch_size=64, loss="binary_crossentropy", callbacks=[csv])
import gc
del model
gc.collect()
tf.keras.backend.clear_session()
tf.compat.v1.reset_default_graph()
| 42.807018 | 112 | 0.617213 | class NetworkGenerator(object):
    def build_model(self, *args, **kwargs):
        """Build a small dense binary classifier (1-unit sigmoid output).

        Keyword args: depth (hidden layers, default 1), input_shape
        (only input_shape[0] is used, default (2,)), width (default 8),
        activation (default 'relu').
        """
        import tensorflow as tf
        depth = kwargs.get('depth', 1)
        input_shape = kwargs.get('input_shape', (2,))
        # NOTE(review): ``width`` is read but never used below -- the layer
        # size is hard-coded to 8.
        width = kwargs.get('width', 8)
        activation = kwargs.get('activation', 'relu')
        model = tf.keras.Sequential()
        model.add(tf.keras.layers.Dense(8, input_dim=input_shape[0], activation=activation,
                                        kernel_initializer='he_uniform'))
        for _ in range(depth):
            model.add(tf.keras.layers.Dense(8, activation=activation))
        model.add(tf.keras.layers.Dense(1, activation='sigmoid'))
        return model
    def train_and_save(self, *args, **kwargs):
        """Compile, fit and save a Keras model, then tear the session down.

        Keyword args: model (required), epoch_number (default 100),
        data/label (passed to ``fit``), save_path (default './model.h5'),
        callbacks, batch_size (default 10),
        loss (default 'sparse_categorical_crossentropy').
        """
        import tensorflow as tf
        tf.compat.v1.disable_eager_execution()
        model = kwargs.get('model', None)
        epoch_number = kwargs.get('epoch_number', 100)
        data = kwargs.get('data', None)
        label = kwargs.get('label', None)
        save_path = kwargs.get('save_path', './model.h5')
        callbacks = kwargs.get('callbacks', None)
        batch_size = kwargs.get('batch_size', 10)
        loss = kwargs.get('loss', 'sparse_categorical_crossentropy')
        model.summary()
        model.compile(optimizer="adam",
                      loss=loss, metrics=['accuracy'])
        # 20% validation split; order is shuffled each epoch.
        model.fit(data, label, validation_split=0.2, batch_size=batch_size,
                  epochs=epoch_number, shuffle=True, verbose=2, callbacks=callbacks)
        model.save(save_path)
        # Free the model and TF graph/session so repeated calls in one
        # process do not accumulate state.
        import gc
        del model
        gc.collect()
        tf.keras.backend.clear_session()
        tf.compat.v1.reset_default_graph()
    def full_net_combined(self, depth, input_shape, mypath, epoch_number, data, label):
        """Build a width-8 relu network of the given depth, train it on
        (data, label), and write '<mypath><depth>layer.h5' plus a per-epoch
        CSV training log '<mypath><depth>layer.csv'."""
        import tensorflow as tf
        tf.compat.v1.disable_eager_execution()
        model = self.build_model(
            depth=depth, input_shape=input_shape, width=8, activation='relu')
        # Log per-epoch metrics next to the saved model.
        csv = tf.keras.callbacks.CSVLogger(
            mypath + str(depth) + 'layer.csv', separator=',', append=False)
        self.train_and_save(model=model, epoch_number=epoch_number, data=data, label=label, save_path=mypath +
                            str(depth) + 'layer.h5', batch_size=64, loss="binary_crossentropy", callbacks=[csv])
        # Free the graph/session so repeated calls do not accumulate state.
        import gc
        del model
        gc.collect()
        tf.keras.backend.clear_session()
        tf.compat.v1.reset_default_graph()
| true | true |
f71e1310bcce4fa74977649ebed5e88e8882ded8 | 26,238 | py | Python | src/signalalign/tests/test_bwaWrapper.py | UCSC-nanopore-cgl/signalAlign | 90041772403fe9f7142abc8a9103a7696e70f445 | [
"MIT"
] | 5 | 2019-08-03T14:43:49.000Z | 2021-12-20T00:39:05.000Z | src/signalalign/tests/test_bwaWrapper.py | UCSC-nanopore-cgl/signalAlign | 90041772403fe9f7142abc8a9103a7696e70f445 | [
"MIT"
] | 8 | 2018-06-13T00:32:34.000Z | 2021-12-20T01:06:13.000Z | src/signalalign/tests/test_bwaWrapper.py | UCSC-nanopore-cgl/signalAlign | 90041772403fe9f7142abc8a9103a7696e70f445 | [
"MIT"
] | 5 | 2017-07-24T20:29:41.000Z | 2020-01-03T17:16:29.000Z | #!/usr/bin/env python
"""Tests for bwaWrapper.py"""
########################################################################
# File: test_bwaWrapper.py
# executable: test_bwaWrapper.py
#
# Author: Andrew Bailey
# History: Created 08/14/18
########################################################################
import os
import unittest
from signalalign.utils.bwaWrapper import get_aligned_segment_from_alignment_file, getGuideAlignmentFromAlignmentFile
class BwaWrapperTest(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        """Resolve repo-relative paths (references, fast5 reads, model,
        binaries, guide-alignment SAM) shared by every test in this class."""
        super(BwaWrapperTest, cls).setUpClass()
        # Repository root: four directory levels above this test file.
        cls.HOME = '/'.join(os.path.abspath(__file__).split("/")[:-4])
        cls.reference = os.path.join(cls.HOME, "tests/test_sequences/pUC19_SspI_Zymo.fa")
        cls.ecoli_reference = os.path.join(cls.HOME, "tests/test_sequences/E.coli_K12.fasta")
        cls.fast5_dir = os.path.join(cls.HOME, "tests/minion_test_reads/canonical_ecoli_R9")
        # Specific R9 E. coli read files (names split via implicit string
        # concatenation; each pair is one filename).
        cls.files = [
            "miten_PC_20160820_FNFAD20259_MN17223_mux_scan_AMS_158_R9_WGA_Ecoli_08_20_16_83098_ch138_"
            "read23_strand.fast5",
            "miten_PC_20160820_FNFAD20259_MN17223_sequencing_run_AMS_158_R9_WGA_Ecoli_08_20_16_43623_"
            "ch101_read456_strand.fast5",
            "miten_PC_20160820_FNFAD20259_MN17223_sequencing_run_AMS_158_R9_WGA_Ecoli_08_20_16_43623_"
            "ch101_read544_strand1.fast5",
            "miten_PC_20160820_FNFAD20259_MN17223_sequencing_run_AMS_158_R9_WGA_Ecoli_08_20_16_43623_"
            "ch103_read333_strand1.fast5"]
        # All regular files in the fast5 directory, not just cls.files.
        cls.fast5_paths = [os.path.join(cls.fast5_dir, f) for f in os.listdir(cls.fast5_dir)
                           if os.path.isfile(os.path.join(cls.fast5_dir, f))]
        cls.template_hmm = os.path.join(cls.HOME, "models/testModelR9_acgt_template.model")
        cls.path_to_bin = os.path.join(cls.HOME, 'bin')
        # 1D alignment SAM consumed by the guide-alignment tests below.
        cls.signal_file_guide_alignment = os.path.join(cls.HOME, "tests/minion_test_reads/oneD_alignments.sam")
def test_getGuideAlignmentFromAlignmentFile(self):
read_name = "9e4d14b1-8167-44ef-9fdb-5c29dd0763fd"
guide_alignment = getGuideAlignmentFromAlignmentFile(self.signal_file_guide_alignment, read_name=read_name)
self.assertEqual(guide_alignment.strand, '-')
self.assertEqual(guide_alignment.reference_name, "gi_ecoli")
self.assertEqual(guide_alignment.cigar, "cigar: 9e4d14b1-8167-44ef-9fdb-5c29dd0763fd_Basecall_Alignment_template:1D_000:template 1 11458 + gi_ecoli 1845113 1832930 - 1 M 5 I 2 M 12 I 1 M 6 D 1 M 2 D 1 M 18 D 3 M 7 D 3 M 13 D 3 M 49 D 5 M 9 D 1 M 2 D 1 M 50 D 5 M 11 I 2 M 9 D 1 M 15 D 1 M 17 D 1 M 4 D 1 M 22 D 4 M 10 D 1 M 54 D 3 M 6 I 1 M 14 D 1 M 26 D 3 M 10 D 1 M 13 D 1 M 18 D 2 M 20 D 2 M 47 D 1 M 10 D 2 M 19 I 1 M 23 I 1 M 8 D 2 M 23 D 2 M 24 D 1 M 8 D 2 M 37 D 1 M 9 D 1 M 15 D 1 M 10 D 1 M 17 D 1 M 16 I 1 M 71 D 1 M 14 D 2 M 11 I 1 M 11 I 1 M 2 D 1 M 22 D 1 M 59 D 1 M 5 D 1 M 9 I 1 M 30 D 3 M 29 D 1 M 14 D 1 M 17 D 1 M 3 D 3 M 32 D 3 M 10 D 2 M 12 D 2 M 19 D 4 M 60 D 1 M 16 D 3 M 8 D 5 M 21 D 2 M 27 D 1 M 14 D 2 M 8 D 3 M 25 D 3 M 28 D 1 M 15 I 1 M 5 D 1 M 5 D 2 M 28 D 3 M 37 D 2 M 18 D 1 M 24 I 1 M 8 D 1 M 1 D 2 M 33 D 1 M 11 D 1 M 17 I 1 M 6 D 1 M 24 D 1 M 5 I 2 M 5 I 1 M 2 D 3 M 14 I 2 M 10 D 4 M 38 D 1 M 4 D 1 M 24 D 1 M 9 D 1 M 13 I 1 M 6 D 1 M 27 D 1 M 47 D 1 M 2 D 2 M 21 D 1 M 69 D 3 M 8 I 1 M 26 I 1 M 4 D 1 M 12 D 1 M 8 D 1 M 38 D 1 M 37 D 1 M 16 D 2 M 40 D 1 M 18 D 1 M 4 D 1 M 11 I 2 M 7 D 6 M 2 D 3 M 6 D 1 M 13 I 1 M 5 D 1 M 5 D 2 M 22 D 3 M 8 D 1 M 16 D 1 M 10 D 2 M 25 D 1 M 6 D 1 M 2 D 3 M 10 D 2 M 48 D 2 M 4 I 1 M 10 D 1 M 8 I 1 M 3 I 1 M 18 D 2 M 6 D 1 M 16 D 1 M 1 D 2 M 37 D 1 M 6 D 2 M 8 D 2 M 29 I 1 M 15 D 1 M 15 D 1 M 17 D 1 M 36 D 1 M 9 D 1 M 1 D 1 M 12 D 1 M 1 D 4 M 24 I 1 M 17 D 1 M 4 I 1 M 18 D 3 M 37 D 2 M 13 D 2 M 24 D 1 M 7 D 1 M 22 D 1 M 6 D 1 M 1 D 2 M 6 D 2 M 5 D 2 M 21 D 2 M 36 D 1 M 4 D 1 M 5 D 2 M 25 I 1 M 10 D 1 M 2 D 1 M 5 D 3 M 43 D 1 M 7 D 1 M 10 D 3 M 21 D 2 M 7 D 2 M 28 I 1 M 25 I 1 M 12 D 1 M 13 D 1 M 40 D 1 M 1 D 1 M 10 D 1 M 1 D 2 M 31 D 1 M 1 D 3 M 14 D 2 M 3 D 7 M 5 D 3 M 2 D 1 M 20 D 1 M 14 D 1 M 5 D 1 M 2 D 1 M 29 D 2 M 3 D 1 M 3 D 3 M 4 D 1 M 22 D 1 M 3 D 1 M 33 D 2 M 14 D 1 M 7 D 1 M 17 I 1 M 16 D 2 M 12 I 1 M 7 D 4 M 9 D 1 M 28 D 1 M 3 D 1 M 4 I 1 M 2 I 1 M 9 D 1 M 31 D 1 M 9 D 1 M 31 D 1 M 33 D 1 M 15 D 1 M 31 D 
1 M 20 D 1 M 57 D 1 M 12 D 2 M 6 D 3 M 27 D 2 M 12 I 2 M 7 D 4 M 47 D 2 M 10 D 2 M 12 D 3 M 38 D 1 M 1 D 2 M 5 D 1 M 19 D 1 M 10 D 2 M 3 D 4 M 8 D 1 M 16 I 1 M 39 I 2 M 16 D 2 M 8 D 2 M 10 D 1 M 15 D 5 M 15 D 1 M 18 D 1 M 11 D 1 M 18 D 1 M 51 D 1 M 93 D 2 M 15 D 1 M 20 D 1 M 14 D 1 M 4 D 1 M 16 D 3 M 5 D 1 M 29 I 1 M 30 I 3 M 12 D 3 M 6 D 1 M 28 D 1 M 7 D 3 M 70 I 1 M 9 D 2 M 46 D 1 M 31 I 1 M 2 I 1 M 28 D 2 M 26 D 1 M 28 D 1 M 32 I 2 M 10 I 1 M 58 D 5 M 16 I 2 M 41 D 1 M 26 D 1 M 19 D 1 M 3 D 1 M 8 I 2 M 54 D 1 M 10 D 1 M 30 D 1 M 81 D 1 M 19 D 1 M 8 D 1 M 20 D 1 M 11 D 2 M 12 D 1 M 21 I 3 M 5 D 3 M 13 D 1 M 10 D 1 M 27 I 1 M 1 I 2 M 12 I 1 M 7 D 1 M 19 D 2 M 6 D 2 M 25 I 1 M 25 I 2 M 63 D 1 M 13 I 1 M 32 D 1 M 4 D 1 M 28 D 2 M 18 D 1 M 2 D 2 M 11 I 1 M 27 I 1 M 8 D 1 M 32 D 1 M 41 I 1 M 28 D 1 M 5 D 2 M 42 D 2 M 5 D 1 M 41 D 1 M 38 I 2 M 14 D 1 M 15 D 1 M 9 D 1 M 19 D 1 M 37 D 1 M 13 I 1 M 16 D 3 M 28 D 2 M 13 D 2 M 37 I 1 M 90 D 1 M 6 D 1 M 63 D 3 M 4 D 1 M 21 D 2 M 17 D 1 M 29 D 1 M 24 D 1 M 39 D 2 M 20 D 1 M 7 D 2 M 27 D 3 M 5 D 1 M 11 D 2 M 24 D 1 M 2 D 1 M 5 D 1 M 3 D 1 M 19 I 3 M 24 D 1 M 3 D 4 M 15 D 1 M 5 D 1 M 4 D 1 M 6 D 1 M 19 I 1 M 28 D 1 M 29 D 1 M 1 D 1 M 1 D 1 M 7 D 1 M 26 D 1 M 37 D 1 M 13 I 3 M 40 D 1 M 1 D 3 M 36 D 3 M 16 D 1 M 21 D 1 M 5 D 1 M 8 D 1 M 5 D 1 M 4 D 1 M 19 D 2 M 2 D 1 M 16 D 1 M 9 D 2 M 8 D 2 M 16 I 2 M 26 D 1 M 7 D 1 M 54 D 1 M 38 D 1 M 24 D 1 M 19 I 3 M 23 D 2 M 18 I 1 M 109 D 5 M 36 D 1 M 29 D 1 M 17 D 1 M 21 D 7 M 7 I 1 M 11 D 1 M 11 D 1 M 32 D 1 M 1 D 1 M 1 D 1 M 1 D 2 M 16 D 1 M 18 D 4 M 22 D 1 M 11 D 1 M 36 D 1 M 29 I 1 M 5 D 1 M 2 D 1 M 32 D 1 M 7 D 1 M 46 D 1 M 7 D 1 M 46 D 1 M 24 I 1 M 14 D 1 M 11 D 2 M 85 D 2 M 11 D 3 M 10 D 3 M 10 I 1 M 4 D 1 M 22 D 1 M 13 D 4 M 13 D 1 M 41 D 2 M 20 D 3 M 47 D 1 M 21 D 1 M 20 I 1 M 37 D 2 M 4 D 1 M 70 D 7 M 35 D 1 M 50 D 1 M 41 D 2 M 8 D 1 M 20 D 2 M 7 D 2 M 44 D 2 M 7 D 1 M 19 D 1 M 12 D 1 M 17 D 4 M 17 D 1 M 1 D 1 M 40 I 1 M 30 D 1 M 13 I 1 M 5 D 1 M 4 D 1 M 41 D 2 M 8 D 1 M 36 D 1 M 
16 D 9 M 2 D 1 M 12 D 1 M 18 I 1 M 37 D 1 M 13 I 3 M 21 D 2 M 6 D 1 M 25 D 2 M 30 D 1 M 3 D 1 M 3 D 2 M 13 D 1 M 12 I 1 M 6 D 1 M 24 D 2 M 5 I 2 M 13 I 1 M 17 D 1 M 8 D 1 M 13 I 1 M 46 D 2 M 8 D 2 M 25 I 1 M 11 D 2 M 10 D 1 M 25 D 3 M 29 D 3 M 17 D 2 M 20 D 4 M 57 D 1 M 11 D 3 M 6 D 1 M 19 D 1 M 15 D 2 M 14 D 1 M 35 D 2 M 28 D 2 M 1 D 3 M 69 D 1 M 54 D 1 M 8 D 1 M 41 D 1 M 3 D 1 M 38 D 1 M 8 D 1 M 8 D 1 M 7 D 2 M 3 D 1 M 4 D 1 M 23 D 1 M 7 D 1 M 6 D 1 M 19 D 1 M 3 D 4 M 6 D 1 M 19 D 1 M 14 D 3 M 18 D 1 M 30 D 1 M 29 D 2 M 25 I 1 M 11 D 2 M 6 D 1 M 8 D 1 M 5 D 1 M 48 D 1 M 31 D 1 M 1 D 3 M 7 D 1 M 14 D 1 M 4 D 1 M 6 D 1 M 34 D 1 M 13 D 3 M 6 D 1 M 2 D 1 M 5 I 1 M 17 D 1 M 47 I 1 M 6 D 1 M 23 D 2 M 20 D 1 M 5 D 2 M 26 D 3 M 12 D 1 M 1 D 1 M 43 D 1 M 14 D 3 M 23 D 1 M 25 D 1 M 10 D 1 M 7 D 1 M 21 D 1 M 15 D 1 M 4 D 1 M 30 D 1 M 8 D 1 M 10 D 1 M 9 D 1 M 33 D 1 M 11 D 1 M 39 D 1 M 13 D 3 M 28 D 1 M 2 D 1 M 6 I 2 M 14 I 1 M 22 D 1 M 3 D 1 M 9 D 1 M 8 D 1 M 27 D 1 M 6 D 7 M 12 D 1 M 17 D 3 M 24 D 1 M 14 I 1 M 7 D 1 M 26 I 1 M 3 I 1 M 30 I 1 M 6 D 1 M 28 D 2 M 6 D 1 M 2 D 1 M 22 D 2 M 17 D 1 M 6 D 2 M 6 D 1 M 21 D 1 M 20 I 1 M 25 D 1 M 10 D 1 M 29 D 1 M 16 D 1 M 8 D 1 M 16 D 2 M 14 D 1 M 5 D 2 M 7 I 1 M 32 D 1 M 7 D 2 M 24 D 3 M 2 D 2 M 2 D 1 M 6 D 5 M 12 D 2 M 13 D 1 M 6 D 1 M 11 D 1 M 3 D 1 M 9 I 1 M 10 D 1 M 13 D 2 M 28 D 1 M 5 D 2 M 8 D 1 M 2 D 1 M 13 D 1 M 15 I 1 M 42 D 2 M 34 D 4 M 22 D 1 M 25 D 1 M 2 D 1 M 20 D 1 M 3 D 1 M 15 D 1 M 42 D 1 M 4 D 1 M 18 D 1 M 9 D 1 M 13 D 1 M 9 D 1 M 18 D 1 M 17 D 1 M 2 D 1 M 24 D 1 M 9 D 5 M 20 I 2 M 105 D 3 M 12 D 1 M 16 D 1 M 21 D 1 M 1 D 2 M 25 I 1 M 3 I 1 M 4")
def test_create_sam_from_guideAlignment(self):
read_name = "9e4d14b1-8167-44ef-9fdb-5c29dd0763fd"
correct_segment, n_aligned_segments, reference_name = \
get_aligned_segment_from_alignment_file(self.signal_file_guide_alignment, read_name=read_name)
self.assertEqual(reference_name, "gi_ecoli")
self.assertEqual(n_aligned_segments, 1)
self.assertEqual(correct_segment.tostring(), "9e4d14b1-8167-44ef-9fdb-5c29dd0763fd_Basecall_Alignment_template:1D_000:template\t16\tgi_ecoli\t1832931\t60\t4M1I3M1I25M2D1M1D21M1D16M1D12M3D105M2I20M5D9M1D24M1D2M1D17M1D18M1D9M1D13M1D9M1D18M1D4M1D42M1D15M1D3M1D20M1D2M1D25M1D22M4D34M2D42M1I15M1D13M1D2M1D8M2D5M1D28M2D13M1D10M1I9M1D3M1D11M1D6M1D13M2D12M5D6M1D2M2D2M3D24M2D7M1D32M1I7M2D5M1D14M2D16M1D8M1D16M1D29M1D10M1D25M1I20M1D21M1D6M2D6M1D17M2D22M1D2M1D6M2D28M1D6M1I30M1I3M1I26M1D7M1I14M1D24M3D17M1D12M7D6M1D27M1D8M1D9M1D3M1D22M1I14M2I6M1D2M1D28M3D13M1D39M1D11M1D33M1D9M1D10M1D8M1D30M1D4M1D15M1D21M1D7M1D10M1D25M1D23M3D14M1D43M1D1M1D12M3D26M2D5M1D20M2D23M1D6M1I47M1D17M1I5M1D2M1D6M3D13M1D34M1D6M1D4M1D14M1D7M3D1M1D31M1D48M1D5M1D8M1D6M2D11M1I25M2D29M1D30M1D18M3D14M1D19M1D6M4D3M1D19M1D6M1D7M1D23M1D4M1D3M2D7M1D8M1D8M1D38M1D3M1D41M1D8M1D54M1D69M3D1M2D28M2D35M1D14M2D15M1D19M1D6M3D11M1D57M4D20M2D17M3D29M3D25M1D10M2D11M1I25M2D8M2D46M1I13M1D8M1D17M1I13M2I5M2D24M1D6M1I12M1D13M2D3M1D3M1D30M2D25M1D6M2D21M3I13M1D37M1I18M1D12M1D2M9D16M1D36M1D8M2D41M1D4M1D5M1I13M1D30M1I40M1D1M1D17M4D17M1D12M1D19M1D7M2D44M2D7M2D20M1D8M2D41M1D50M1D35M7D70M1D4M2D37M1I20M1D21M1D47M3D20M2D41M1D13M4D13M1D22M1D4M1I10M3D10M3D11M2D85M2D11M1D14M1I24M1D46M1D7M1D46M1D7M1D32M1D2M1D5M1I29M1D36M1D11M1D22M4D18M1D16M2D1M1D1M1D1M1D32M1D11M1D11M1I7M7D21M1D17M1D29M1D36M5D109M1I18M2D23M3I19M1D24M1D38M1D54M1D7M1D26M2I16M2D8M2D9M1D16M1D2M2D19M1D4M1D5M1D8M1D5M1D21M1D16M3D36M3D1M1D40M3I13M1D37M1D26M1D7M1D1M1D1M1D29M1D28M1I19M1D6M1D4M1D5M1D15M4D3M1D24M3I19M1D3M1D5M1D2M1D24M2D11M1D5M3D27M2D7M1D20M2D39M1D24M1D29M1D17M2D21M1D4M3D63M1D6M1D90M1I37M2D13M2D28M3D16M1I13M1D37M1D19M1D9M1D15M1D14M2I38M1D41M1D5M2D42M2D5M1D28M1I41M1D32M1D8M1I27M1I11M2D2M1D18M2D28M1D4M1D32M1I13M1D63M2I25M1I25M2D6M2D19M1D7M1I12M2I1M1I27M1D10M1D13M3D5M3I21M1D12M2D11M1D20M1D8M1D19M1D81M1D30M1D10M1D54M2I8M1D3M1D19M1D26M1D41M2I16M5D58M1I10M2I32M1D28M1D26M2D28M1I2M1I31M1D46M2D9M1I70M3D7M1D28M1D6M3D12M3I30M1I29M1D5M3D16M1D4M1D14M1D20M1D15M2D93M1D51M1D18M1D11M1D18M1D15M5D1
5M1D10M2D8M2D16M2I39M1I16M1D8M4D3M2D10M1D19M1D5M2D1M1D38M3D12M2D10M2D47M4D7M2I12M2D27M3D6M2D12M1D57M1D20M1D31M1D15M1D33M1D31M1D9M1D31M1D9M1I2M1I4M1D3M1D28M1D9M4D7M1I12M2D16M1I17M1D7M1D14M2D33M1D3M1D22M1D4M3D3M1D3M2D29M1D2M1D5M1D14M1D20M1D2M3D5M7D3M2D14M3D1M1D31M2D1M1D10M1D1M1D40M1D13M1D12M1I25M1I28M2D7M2D21M3D10M1D7M1D43M3D5M1D2M1D10M1I25M2D5M1D4M1D36M2D21M2D5M2D6M2D1M1D6M1D22M1D7M1D24M2D13M2D37M3D18M1I4M1D17M1I24M4D1M1D12M1D1M1D9M1D36M1D17M1D15M1D15M1I29M2D8M2D6M1D37M2D1M1D16M1D6M2D18M1I3M1I8M1D10M1I4M2D48M2D10M3D2M1D6M1D25M2D10M1D16M1D8M3D22M2D5M1D5M1I13M1D6M3D2M6D7M2I11M1D4M1D18M1D40M2D16M1D37M1D38M1D8M1D12M1D4M1I26M1I8M3D69M1D21M2D2M1D47M1D27M1D6M1I13M1D9M1D24M1D4M1D38M4D10M2I14M3D2M1I5M2I5M1D24M1D6M1I17M1D11M1D33M2D1M1D8M1I24M1D18M2D37M3D28M2D5M1D5M1I15M1D28M3D25M3D8M2D14M1D27M2D21M5D8M3D16M1D60M4D19M2D12M2D10M3D32M3D3M1D17M1D14M1D29M3D30M1I9M1D5M1D59M1D22M1D2M1I11M1I11M2D14M1D71M1I16M1D17M1D10M1D15M1D9M1D37M2D8M1D24M2D23M2D8M1I23M1I19M2D10M1D47M2D20M2D18M1D13M1D10M3D26M1D14M1I6M3D54M1D10M4D22M1D4M1D17M1D15M1D9M2I11M5D50M1D2M1D9M5D49M3D13M3D7M3D18M1D2M1D6M1I12M2I5M1S\t*\t0\t0\tTAAGCCGCATTGCTTCAGGTCGGTAAATGCTCTTCGCCGGAAGAGCGCGAATGGATGACAGGCTGATGAGCTGGGGTTGGTCGACTTCCGCCATGCGAATCCGCAAACAGCAGATCGTTTCTCATGGTTTGATTACCGCTCAAAAGGTTTTGACGATAACCGTGGTCTGCGCATCGACTCGTTGCTCGCTGTAGCCAACCGCTGGCAGAATGTAGAAACCGCATCGACTATGAAATCCGCAGCAGGAAAACCGTCCGATCACGCCCCGTCTGGGCGACCTTCGCCGCTAATTAGCAGCTCTCCCGCTCAAACGGGTCAGGAGAATTAACCTGAGAAAATCAACAAACTGTCAGTAATGATTTGTTGCCTGTCGTCCTTGTTATACCGTCTCGCGTTTTAGTTGTTCGACCACTTTCATTATCAAGTTTGATATAGGAAACTCACGATGAACGCTGAGCGTAAATTTTTGCCTGTCTTATTTTTGCGCTGGTCATTTATTATCCACGCTTTCGGCTTATCTGATTCGTTCACCGATTTACTCCCACTTACAGACACCATCCGCCAGAGCGATTTTCGGCTAGCCCCATATTCTGTTATTCATCATTGCCACCCTTGCTGTTACCAGAAGCATACTGGGTGATCGCGGAGAATAGTTTTTGCCCGCCTTTAGGACACTACTCATTAACTGCCGCTGGCTTCCTCATTCCTGCTGGCGCGCTGGCTGACGTGATTACTGCTGAAATACGTTGGCCATAGCAATACTCTTCCAGCATTGAAAAGGCATTGCGCAACGGTATTGATTTTCTATTCTGACCGCTTAATCCCGTTGTTCTTTACAATATTCAAAATTACGCTTACGATTAACCACACCGCCTTCTGGCCTTATACCCTTATTTTCGGC
ACTCAAGACCCCACCGGTATTGTTATTTGCACAGGATGGCGTTATCCGCCACTGAAGGTATTATTGCGCTTTATTTGTTAACTGTTCGGCGGGTCGCGCTGTTTATTCTCGTCCAGCCTGCAAACTCATACGCCCGACACAAACATGTGGATTCGTCTGGCTCCCGCGCCGCAGCCCACTTACTCACCCAAAAATGCAAGGATAGAACGATATGCAACATTATTCAGTGTCATGGAAAGGACTGGCTGCACTTCGTTACTGGCTGGCCTATGGCCGCGATCAACAAGAGAATGCGGCGCAAAAGTGAATACGACGACTTCGAACAGCCAACCGCTGCGTGGCCGATGCCAATAAATTCATACGTACCATGCTGGTGCAAATTAATGGTCGTTTCACCGACGACACCGTCACGGTATTGTGTTTAAAGATGGTTCCAACGGACATAATCGCTGCTTGCGGTTATGCGACCCCGAAAGCATTTTATGAAGCCTGAAAGAGTAGGTGGCAGCCGGCCGAAACATGATGATGGATAATAAAGAAACGACGGCGCTCTGGCAGCAAACTGATATTTCGGTCAACTGGCAAGGGCGGCAAAGCGTATTCTTCGATGAAGTGATTGTTGACAGTACGGCAAGAAACTGGACATGCACGGCGGTAATTTAACGCAGCAGAAGAGAAGAAAACGGGTTGCCCGGTGTGTCTGGATACGTCTGGTCGGCATCAGCAATGCAACATACACTTATGGTGGTTGAAAACGGTGTGAAGTTAACTAAAGGCAATGCCTCAGTTCTCCCGCGGATAAACACGCTGGCAACGGTTACCTTTAAAATCGCCGAATAAAGCCGTGACAGCGATGATGATGATGATAATCAGAAAATCTACTACCGTATAACCTCATCATCCTGTTGTCTGCCATGCTGCTGGCAAGGCGCGCTTCCGGCGTCTGCGAGCTATCAACGCGTTGCGGCGTTTGCCGCCGTGGACCAACAGGTATAGAACGCTTTATTCAGTCTTACGGCGCACTGGCGGCAGTTGTCCGTTCTGTTGATGGTTTGCGTCATTGCTGCAATCGCTACCTGCGTTTTTGATCACCTGCCAATGCGTCGCTGTTTGGCGCGTTCTGGGCGGCTTGCTGTCGTGGACCAGTTCGATATCGGCGCGGCGCTGTGCTTTATCGCCAGAGTATGGGCCGCGAAGTGGTGGAAAATTGGCAAACCGTGCTTGACAGTATGACGGCTTTTCACCGCTACGGCAAACACACCATTCTGTCTACGTATTGCCTTTGTCCCTTCGATCCACCAGCTATGCTGCCGGTTTGACTTCAATACGTTTTCGTCGTTTTTATCGCCACCGGGCTTGGCCAGTTATCGGCAACTATTACTATTCCCGGCGGGCAGCATGTTAACAGGCGGTACTTTCTGGTTTGTCACCGGACTGTTTATCTGTTTGCCCTGACCGTGGTGATTTTTATGGCGAAGAAAATATGGGTTGAACGCCAGAAGAGGGCTGCACATATACCGCCGCTTAGCAAAATTCCTTATTTATTGTCCACAGGCGTGGCTGCATCGTCGCCACATGGCGAGGTGCTGCCCCATTCGCTGGTGGGGCGGATCCCTGCTATCTTTATCTGTCGATGTTTATGGCTGGCTGGAGCGCAAACGCCCACCGCTCGATCCGGTGGTACGATCGCTTGTCAGCGCATTGCGCAAATGTGCCTGTGAGTTTTGTGTGGACATCAGTATGAAAGTCGCCGAGCGCACCGGCAGCGATAAACTGCTGGCAGTGGCGACTGGCGGCAGCCCGCTCTTGGGGCGATGAAGAACGGCTGGCGCTGGCACGCCGGCTGCAAGCGTAACGCCGCCAACGGTCGATGATGCCCTGCGTACCCGGACTGGCTGCGCATTTGACGCCTGGCGCCCACCGAACTGGACGGCATTGATCTTGGCCTAAAATCTGTCAGCCCGTTTTAATTTGCCATATGACATTCCTGCCTAGGGCTG
TGCCATTCTGAAAACGTTCTTAAGGAGAGATGATGCGCCACAGGGTGGTTGCTGGGATTGTTATCATGTTTTCTGGCAACACATGCCAGTGACTTCTGGCAAGAAATTAAAATGAGGCCAAAGGGCAAACCGTCTGGTTTAACGCTCCGGGGCGGCGATACCGCACTAACCGCTATTCAGAGGCGAGATGAAAACCATTACGCTATAAACCTGAAGATTGTCCGTCTGGCGATGCCGCACGCGGTGAAGCGCATTCAGATCGAAGCCGCAGCCGGACGTAAACGGCGGTTTCGGTGGATCTGTTTGGGTGAACGGCGAAAACTTCCGCACCTGTGCAAGAGGCCAATTTATTACAAACGGGCTGGGCGGAGACCGCCTAACTGGCGCTATGCTCACAGCTGCCGGTGCGGAAGATTTTTCGTGCCGACACAAGGTGCGGACCGCCCGGGCGGCGAATAACTGACGTTTATCGCCCGCCGCGATGTTACGCCAGCCACCAAACGCCGCAAGCCTTACTGAGTTTGCCAGCCAACTTCGGCACGGCCACCTATCCGCGCCCACCGGACTTACCGGCACGGCGTTTCTTGAACAGTTGCTGATTATGCTGACGCCCGATCCGCCGCATTAAAAGAAGCGCCGGACGATGCGACTGTGTCACCGTTCCCTTGTGGCAATATCTTGATGTGCTGCATCCGTATTTGTGGCGCGAAGGAAAGGATTTCTGCTCACCTGCGCGGATGGATGCTCTGCTGAAAGCCGGCAGCATTGCGCCTGTCGCTGACCTTAACCCCGCGCATGTGCAGCAAAAATCGCCAGCGGCGATTTGCCTGCAAGCAGTTACAGTTTTGGCTGCGAGGGGATGATTGGCAACGCATTTCGTCACCATTCCTGCCAACGCGAATGCCAGTGCTGTATGAAGGTAGTTACTTCCTGCTCACCCGGATGCGCAACTGCGTAAAGTGATCCCCGCTGTCTGCGATCCTTCACTCGATCCGCAAACCGCCTGATGGGCAGCGCGAATCATTGCAATCAAGAATGCCGCAGGATCTGCCGCCGGTACTGGCTGAACCGCACGCAGGTTGCAAATGCGCTGAACAAGAATGGCTGCCACCGTTACGGCACGCATTAATCTTTTGCTGTGGGCGATGGTGGCGGTGATTTATGCACCGCTGATCCCGCAGCTCAATGCTGATATCGCCTGCTTTGTCGTTGACACACTGGCAGGCGTTATCGTCGACCACAGCTACCGCAGGCATTACTGGCAACGCTGTACGACAAACCATCGCGGCGGTCGGGGCATTGCTGACGCCCTGCTGGTGATTGTGGCGCTGTGGCTCGGGCCAAATGGCAGCGGCGTGCGCCCGTCTGCCGTGGCCGTCACACCCCATGTGGCTTTGCCACCAGCGCCCCGTTTTGCCGACGGAGGGCTGCTTTATGACTATTTCCGTATTTCACCTGCCAATGGACCCGATTTGTCTGGGCCTCACCCTTGCAGGAAAGAAAGCGCATTTCGCTGTGGATCTTAGCAGCAGTATTGAGCGAAAATGGCTGCTGCAGCAGGTCATTGTGCTGGATTCGCTACAGCCGCTGGCAATGCCTGAACTGGCTGCTGTTGCTCTCCGTCGCGCCTGCGCTGGCAATGGCGATGCTGGCGATTGTTGCCCGGTCGCTGTCGGTCGTGGATGTGGGTAATTATTCTCGGGCCGTAATCCCCTGACGCTGGCGGTAATTATTAGCTGGCAGTGGTTAACCTGTATGACATTGACCAACAAACAAAGGCGCGCTTGCCAGCCTGCTGTTGATGCTGTTACTGCCGCCTGCCGTTTGCTGAGCTATCTGCTATGGCGCAGCTGGCGGCGCACTATTCCCGCGTGATGGCGTTCGCAAGCCCGCCACGCCTTTTTATTGCCGGGCAATGCTGGCGATTTTTACCCCAACCGGTGTGCCGTGGTTCCTGCTGGCGATCCTCGCGATCGACGAGATCAATATGAAGGCTCATC
ACCAGCCCGACAATGGGCTGGTGGCGACATTCGCTTTGCTCCTGTTACTGCTGTGGCTGGAATGGGCCAGCGTCGCCAGTTGTGGCTATGGCTGCCCATTTTATTACCCGCTGCTCTGCCACTGTGGCGGGCCAGTACACGCTGGCGCCATGGCTGAAACCGATGGAAGCTGGACGGCGGTGGTCTGGGGCATTCGTGGGTGATGCCGTGGATGCTGTTTATCCGCAACCCGCCCCGCAGCGCATTGATTCATCGGTTAATTTTGATTGCGCAACACTGGCTGTCGCGGTCAAAATCTTCTTGTGAATGCCCACTCATGTTGCGCCCTGTGCTGCTGATTGCCTTCGCGGTGGATTGCAACATATTGCGCAGTATATGCCAACGCCAGCTGGGCGCGGGCGCCCGACGCTCACCACTGAGGCGGTGGCTAAGCAGTAAGGCAGCAACGGTATTCTTCGCCCGTGCTTTGCGGCAACCGTTATTACCGCTTATTATTTTGCCCCGACCGCGTTAGTCGCAAATGGGTAGGTTATGTCAGACAAGGACTCGCTAATGCTCTGCGTGAAATGTTTCGCTACGTTTACCGAAAGCTTGCTGACAAACGTTAACTTTACGGTGGATAAAGGTGACATTGTCACGTTAATGGGGCCGTTCATTGGGAAAATCCACTCTGTTTTCATGGATGATTGGTGCACTGGCCGAACAGTTTTCTTGTACAGGTGAGCTATGGCTCAATGAGCAACGGATTGGACATCCCACCCACCGCACAGCGTCAGATTGGCATTCTTTCAGGATGCACTAAGACGACCAGTTCAGTGTCGGGCAAATACTGCTGGCGCTACCTGGCGACACTTAAAGGAATGCCCGACGTAATGCCGTGAATGATGCACTTGACAACCAGGCCTTGAGGGAGTTTCCATCAGATCCCGCCACTTTGCTGGCGGTCAGCGAGCGCGCGCGCTGTTCTACTACGCGCCCTTCTCGCCCAACCAAAGCGTTACTCCCGGATGAGCCATTCAGCCGTCTTGATGTGCTCTGCGATAATTTTCGCCAGTGGGTGTTCAGCGAAGTTCGCGCCCCAGATCCCGTCGTTCAGGTAACGCACGATCTCCGTGGATGTTCCTGCTGATAGTTCTGTTCTGGATATGGCGCAGGGTCAGAAAATTACAACAAACTGCGATAACGCAAGTTTTTCCTCAATGCGTCAGTTCAGAAATATGCACCTCAAAACTACTGCCGGATTTTCGATGAAACGTTTCTCAAATGACCGCGCTGGCAATGGTTTAGGCTGGCTTGCGCTTCTTCGTGGGCCGCTGAATCCGGCGAAGCCTCTACACTGGACCAGCTTCAACAACAAAATGGCAAAGCGATAGATACTCGCCCCAGCGCGTTTTAGGCAACGGCTGGCCACAAACCTTGTGTCGGCCCTTCTGGTCATGAACTTGCCGTCAAACCTCTGCCAGCTGGCTTGACAAATGAGTAACCGAACAGTTAACGAACGCGTGGATCAAGCAACATAACCTGAAACCGATGCCCGGTGGCGCTGTGTCATGTCGACAAAGATGTCGACGCCGTCAAACGCGACTGCAAAGCAGGTTTACCCATATCTCCATCCTGAGGACGCGCTGTTGAACCTTCCTGCCTGCAAAACTGCCGCATTTTGAGCAGCTGGTTTATCCGCAATGGCTGCACGACCTGCAACAAGGTAAAGATAATACGGCGAAACCCATGGTGACTGGAAAGTCATTGAAGCGGCCCGGGCGCTCCAAGCTGCACTTTATCAGCCATATTCCCGGCGCTGACTACATCGATACCAACGAAGCGTGGAAAGGAACGCTGTGGAACAAAGTTTCGATGAACAACCGCAAGCGATGCTGGCAAACACGGCATTCGCCATGACACCACGGTCATTCTGTATGGGCGCGTGACGTATACGCTGCGTGTGGCGCAGATTATGCTTTATGCTGGCGTGAAAGATGTGCGCCTGCTGGATGGCGGGC
TGGCAAACCCCGGGTCCGACGCGGGACTGCCTGTTGAGCGCGAACGCCACCGAAAGTGAAAGCGGAACCGATTTCGGCGTGAAGATCCCGGCACACGCAGTTGATGCTCGATATGGAACAGGCTCCGGTGGACTGCTGCATCGCCGTGATGCATCGCTGCGAGCATCTGTTCGTGGCCAGAATTTATCGGTACGACCAGCGGTTTGTTATATTAAAACCAAAAGGTGAAATAGCCGGCGCACGCTGGGGACACGCTGGTAGCGACTCGACGCATATGGAAGATTACAACCCGATGGCACCATGCGCAGCGCCGATGATATACAGCGTGGAAAGCGCGCGTGGACATCAAACCAGAGCAGCAAGTTTCATTCCCATTGCGGTACTGGCTGGCGCGCGTCCGAACCTGGCGCGCACGCGCCATGGTTGAAGAAGGTTTCCGGCATGACGGCGGCTGGTACGACGGAGCAGCGATCCAAAATCCGGTAGCAACCGGTGAACGCGGCCCGGACAGTAGCAAATAACATTGCAATTACTGACGCTGGAGCGACTTCAGCGTCAGATACCCACCCACACCCGCGTAAAGGTCGTCATCCAGCACAACGCACCGAAGATCCACGCAACCACGCAAACCATGCAGGAATAAACATCCAGCACAAACAGTAAGACGTTTCTGTGCCTTCACACCCCCAGTTAGTAAACGACTTATGCATAACGGGTTATCAATCTGATGCGTTTTGCTGCCAGTGCAGCAAAGGCGAGAAAACTGCTGCTCTGCGCTGATAAACGCAACAACAACCGTCACAGCGCATTTGCTCCGGTGCAGCAAGACAAACAACGGCACCAGCGCGTAAAAGAGAAAATCGAGAGAAATAAGAAAGCCGCCCGTCGGTAAGCTCTGCGCCGCGCCAGCGCACCGTCCAGACCATCAAGCAGCCTGTTCAAATGACGTGATCAGCGCCGCGATACCAGCCCAGCGCCAGAAACGGTATCAGCACTGATGGCAACTTAATTAATGTTAAACCATCTGGCGTAATGCCCGGTTTATCAAGAACCCGCACGCACTACGCAACAACGGTTTAATCCGGGATGAAGATGGCGGTTCAGCACAGGCACTCTTAAATATAAAGCCTTCTGATTGAGCAACAGTGCGGATATTATGGCATTTTCGCTTATCTGCCCGTGTGTAATTTGCGAAATGATTGAGTTGTTAAGGCCATCATTGAACGTGATGGCAAATTTTACTTCTGCGCACGCCCGCCCAGAGCGATCAGGCGGGATTATGGAGTTTACGGTAAAGTTTGAGCTGGCCGAGCCAGCGGCAGGCGCGTTGTGCGTGAGTTACGCAAGAACTGGCATCGAAGCACCAGGGTGAATATGTTGCCAGCCATCAGCGAGAAGTTCGGGCGGATTATCCATCTTCATGTTCGGTATCGTACCACGGGACGTTACAGGCACATGAACACAGCGCTGTCTGGTGCTCACCGAAGAGGCGCTGCAATATCCATACCTGTCCCATTAGAGGCGTTTGTTTACGCGCCGCCAGACCAGTTGATTAGTGCAGGTTTTGTCACCGCTGGCATTGCAGCAGTATTCCTTCGGTTTTAATTACCGCCCTTCAGAATAATTTGATCCTGATAAAACGCAGCACTGAGTACAGGGTTGTAGCTGACCGCCCGCTCGAACTGAACACTTGGTGGTGTTTACCTCCACGTCCGGACGATGCGGGTTACCAGTGCACTAATGGAAATGCTAACATTACGGCGAACAATGCTCGACTCAGGAACCCTAACGTTATTGGTTCCCGCTACTGATAACGGTAGCCGCGGCAAACTTAGCGTCTGAGTTATCGCATTTGGTTATGAGATTACTCGTTATTAATTTGCTTTCCGGTCATTTTTCGTTACCGCTCATTCTTGATGGTATAGTCGAAACTGCAAAGCACATGACATAAACAACATACACAATCGTATTAATATAAGGGTTTTATATCTATGGATCAG
ACATATTCTGAGTCATTCCTCAACCATAGTCCAAAGCGCGACCTGAATCAAAACCGAGTTCGCGCAAGCCGTTCATAATGACCACACCGGCCTTTTCTGAACAAAATCCAAAATATCGCCAGATGTCATTACTGAGCGTCTGGTTGAACCGAGCGCGTGATCCAGCTCGCGTGGTATGGGCTGGATGATCGCAACCAGATACAGGTCAACTGCATGGCGTGCAGTCTGGCCTGCCATCGGCCTGCGCAAAGGCGGTATGCGTCATCCGTCAGTTAACCTTCCATTCAAATTCCTCGGCTTTGAAACAAAACCTTCAAAATGCCCTGGACTTCTGCCGATGGGCGGTGGTAAAGGCGGCAGCGATTTCGATCCGAAAGGAAAGCGAAGGAGCATGCGCTTTGCTGTGCGCTGATGACTGAACTACCGCCACCGGGCGCGGATACCGACACCTGGCAGATATCGGGGTTGGTGGTCGTGGCCGGTTTATGGGCGGGGATGATGAAAAGCCCACCGCCTGCGCGTCTTCACCGTAAGGCCTTTCATTTGGCGGCGTCTTATTCGCCCGGAAGCTACCGGCTACGGTCTGGTTTATCACAGAAGCAATGCTAAACGCCACGGTATGGGTTTTGAAGGGATGCGCGCTTCGTTTCTGGTTCCGGCAATGTCGCCCAGTACGCTATCGAAAAGCGAGGAATTTGGTGCCGTGCGGATCACTGCGTCAGACTCCAGCGGCAACAGTAGTTGACAGCGGATTCACGAAAGAGAAACTGGCACGTCTTATCGAAATCAAAGCCAGCCGCGATGGTCGAGTGCAGATTACGCCAAAGAATTTACCACCTATCTCGAAGGCCAACAGCCGTGGTCTCTACCGGTTGATATCGTCTGCCTTGCGCCACCCAGAATGAACTGATGTTGGACGCCGCGCATCGCTTATCGCCACGGCGTTGAAGCTGTCGCCGAAGGGCAATATGCCGACCACCATCGAAGCGACTGAACTGTTCCAGCAGGCGTACCATATTTGCACCGGGTGCTGGCTAACACGCTGCGGCGTCGCTACATCGGGCCCCGAATGGCGGCCAAACGCTGCGCGCCCGGCTGGAAAGCGAGAAAGTTGACGCACGTTTGCATCACATCATTGATATCCAGCCATGCCTGTGTTGAGCATGGTAGGAAGGTGAGCAAACCAACCATGCAGGGCGCGAACATTGCCGGTTTTGTGAAGGTTGATGCGATGCTGGCGCAGGGTGTGATTTGTTGTAATGCGCCTACGGCGCTACGCTATCAGGCCTACAAATGGGCACAATTCACAGTTACGCTCTAATGTAGGCCGCGTTGCAGCGCCCTGCAAAATTTCGGCGTTTATGAGTATTTAACGGATGATACCCCACGGAACATTTATAATAACGGCATTTACCGTAGTGCTCCCAAACTGCTTGTTGTAACGATAACACGCTTCAAGTTCAGTATCCGTTAACTTTCTGCGATACAGATATGCCAGTAAAGAACCCATTTGACTATTTTTGATAATCTTCGCTTTCGAACAACTCGTGCGCCTTTCGAGCAGCGTTATATAATGCCAGCCAGTTCTTCTTCATCGTCCTGTTTTGAAAAGCTGTGCTTGACGAGATCATCCATGATAATTCCGCCGCCCAGTATTAGCTTGCCGAGATTTACTGGAGCTATGATTAGCGCAATCAGAGATATAGTCTGAGGAAAATACAGCAAATTATTCAACAAGGCGATAAACTCTGCCGGGGCTTCCTTCCATGTTTACGGAAGGTATTGGCTCATGGTCGCCAGAAAGAAAATGCTCCATTAAGGCACAATAACTTTCGCTATCTTCGATACCCCATTGATCCTTCTAAAGACTCGCGTCTTTACTTATGACATCGACGAGTCAAAAGAAGCACATGATATTGAAGGTATCTTGCCGTGTTCAGGCTTTCGCGGCCAGAACTCCAAAGGCAGACCACGTTTATGATAGAATCGATAAGGTGATC
AATTGTAGCGCCTGTAACTTCGATACTGGAGCGGCCTAACACCTTTAGCCTGGATAATAATGCAGTTGTTCTTTTTGCTTTAAACCGGCCCGAACAATAAGCCCCATCATAATTAATAGATAAAGAAAAGCATCCCGCGGTAATCAGGTCCTTCATTCAAACCGTTGATGTTATCACAGGAACACAAACACAGCGACAACACATGCTAAATAAAACTCTACCTACAAAGAAAGTATCTTATTTTCTTTAATCATCCGTTCAAAATTACTATTAAATATTTCCCAGCCATTAAAGAATACTGCTCCCGTGATGGTTTTGTAATAAACTTTTTCATCACATTCCCGTTATTACATACTTTGAGGCGATAACACTTTATCCATAAAAATATCCAACGAATTTTCTTGATCCGCTGGATATTTGCGAATATGACCGACAGTACGATCACCTGCTTACGCTATCGCCCCGCTTCCGACTTCATCTGCTGGCGGACTTTTCGCACTACGATGCGGTGCCTTTTTCTTATCAGCAAGCAATACGTAACCGGAACGAAGCAATGT\t*\tNM:i:1442\tMD:Z:8G3G1G0T0A0C14^TC1^T21^G16^G12^TAC94C0T1C7C19^GTTGC9^G24^T2^A17^C18^C9^T13^T0G8^T18^T4^A36C5^T15^T3^T10C0T8^C2^T25^C22^TTTC34^CG0C15T4T0C3C0T1C27^T13^G2^T8^TA5^T1T26^CT13^G19^G3^G11^G6^T3A0G8^CT8T3^CGCCA6^C2^CC2^GTG24^GG7^T39^GC5^A14^GT16^T8^C16^T3C25^G10^A1T35C5T1^C15A0T2C1^T6^AA1C0G3^T5A6C4^CG0C12T0A0C4C1^G2^T6^CC1G22T0C2^A30C0T7T25^A21^G0T23^AAA17^C1G0T9^GTTGCTG3T2^G0C3T22^G8^G9^G3^T23T12C5^G2^A28^TCC13^T26C12^A8T2^A0T0G31^C9^G1C6T1^C5G2^A8C21^T0C0A0T1^T1A1A11^G21^G7^A10^C25^A1T19G1^TTT14^G28T14^G1^T1C1C8^TCG26^GC5^A6T0G10A1^TC23^G49A0G2^T0A1A0G13C4^G0C1^A6^TGG13^C17T0C2T12^T0G5^T4^C8C0A0T3^T0T6^TCG1^A31^G48^T5^T8^A0T5^AG1C10C23^TT29^G30^G0G0C16^TTT14^G19^A6^AACC3^A19^G6^T7^T23^G4^G0T2^GT7^T8^T8^A1T36^C3^T22T6C11^G0T0T6^T0G53^T46C17A0A3^CTG1^TG0G0G1T24^TA4T0A0C28^T0T1C11^AA15^G12G0T0T4^T6^TGG11^G0T22T16A16^GCGC20^TG17^TAT2C26^GCA25^T10^AA11T0A23^GA1T6^AA2C56^T8^T1A5T27^GC24^C6G8C2^T1A0G10^GT3^C3^A30^TT0G0T23^G0C5^TC34^A38T16^A1T10^C2^GACTGGGTG2C13^C36^G8^AG21C19^A4^G5C12^C1C29A0A37^T1^T3C13^TCGA1A15^G12^T19^A1T5^TG8C1C33^CA7^CA20^G8^TA0A6T0C0C8T0T21^T50^C35^TTCGCCC7T1C60^C1C2^CT5C51^T15C5^A47^TTC20^GT41^C0G0G0C10^GCCA1T9T1^A2C19^C0A13^GGG10^TGT0T10^AA4T6C73^GG1T9^G14A11T11^T46^G7^T0C1C43^T1T1C3^T6T25^G2^G0T29T3^T0T28C0T5^G11^T0A0T20^TGCT2C2T0T11^T16^TT1^T1^C1^C5T26^C11^T1C16^GGCATCG21^T17^T29^A10T25^GCTGG39C46T24C15^AG9C32^C2A1G0G0C17^A38^C7A1G0T43^C7^A17T24^AC8^TT9^T0T11T3^T2^GG1T17^G4^A1T3^C8^G
5^C8A5T6^G16^TCA36^GGC1^A24T28^G25T11^T0G25^G7^C1^G1^T29^T10T0G35^A6^G4^G5^G2C12^TTAC3^A43^G3^T5^G0T1^G0G23^TG0T0G9^G5^TTT0T26^AT7^C0G0G0C17^CG0C5A0G5A0T6T1C15^T8T15^A29^C17^AA21^A4^GCC63^C1G0G0C2^T97T29^TT13^TG1T1T0T0T22^ATT29^G37^G1G0T0T15^C9^G5T9^T21T2C27^A13T27^G5^GC42^TG0G0C3^C28A40^T32^A28T0G0G0C14^AA2^T1G16^GT28^C4^G33T11^T6T56T20A0A0A26^CC1T4^CT19^A7C9C29^A10^T13^ACG2A23^A12^AA11^A2G17^T8^A0A0G0C9C1T4^A66G0G0T12^T0G0C0C27^T0G9^T5T0T2C52^T3^C19^T10T0A0A13^A57^GCAGC70T29^G28^G26^AC48A0G11^G1T6T0C36^AC0A1C35T40^TTC1T5^G28^T3C2^TAT10T2A1T27T1C5C2C16^A5^TTA0T1T0A12^G4^G6T7^T1T18^A1T13^AA93^T51^A18^G11^C18^T15^GGTTA1T0C7A4^A10^TG8^CG56C1T2C9^A8^CCAG3^CC1G0C7^T19^A1T3^GC1^A38^ATC12^CG10^CT47^AACA9C0T8^CA25C1^GAG0C5^CG1C8A1^C0C0C1C53^G1T18^G17C0T12^C15^T33^T28A0T1^A9^A7G0C0C21^A15^A3^C28^G7G1^CGGT8C4C2A0T1^AA2T14G15^G7^G12A1^TG0T32^T3^G22^C0C1G1^CAC3^C0C2^AC1T27^T2^G5^G14^T20^G0C1^GGC0C4^CTGACAT3^AT0T13^ATG1^C20C0G9^TA1^G10^T1^A28C11^C13^T34C2C6T6C0T12^TC3C3^AC21^ATA10^G7^A1T15T1C23^CAG5^T2^T12C0T1T19^GG1T3^A4^T36^CT21^TG5^TT6^TT1^C6^T1A20^A7^A24^AG13^AT37^CTG22^A11C29^GTGA1^G12^T1^T9^T36^G17^G15^T0T15T27^CG8^GT6^T2A1C0T12C1T0A15^CT1^C16^T6^TC29^A14^AC48^AA10^TGA2^T0G5^T0T5C0A0G16^TG2T7^T16^G0T0T1C4^GGT22^AA1T3^C18^A6^TCT2^AACAAT18^G4^G18^A40^TT16^A34T2^C9C8C19^A8^T12^T4T27T5^TGA3A65^G21^GG0T1^T0G0G0T44^C0C26^G19^A9^T0A1T6A4C9^G4^A38^GCAG24^AAA8T3^G1T19T0G1^A6A1A14^T0G10^C33^GC1^G30G1^T18^TA1G35^GCC28^AA5^A6T0G1T10^T28^TTG23G1^CAA0G0C6^CC3G10^A27^GC0T15C1T2^GGGCC8^TTC4T11^A11C26C21^GCAG19^AT12^TT10^TCT32^AGA3^A3A13^G14^A2T4C21^TAT39^C5^G7C38G3A8^T19C2^G3T18G1^TT0T0A12^C87^T11T5^T10^G15^G9^T5A0G27G0C1^TT0T7^T0T1C21^GC23^TC15T0C2T30^TT10^A47^AG20^CC2T15^G9G1T1^C10^ATT16T9^C1C1C2T7C2C2^GGC54^A10^TCTC6A0G14^A4^T17^T15^A10T9^GCAAA39C0A9^T2^A1T2C4^AAGAT4T0C43^TTT13^TTT1C5^GCA18^C0T1^C2C0C3G0C5G1C6\tAS:i:8793\tXS:i:0")
if __name__ == '__main__':
unittest.main()
| 437.3 | 17,851 | 0.848578 |
3 D 1 M 19 I 3 M 24 D 1 M 3 D 4 M 15 D 1 M 5 D 1 M 4 D 1 M 6 D 1 M 19 I 1 M 28 D 1 M 29 D 1 M 1 D 1 M 1 D 1 M 7 D 1 M 26 D 1 M 37 D 1 M 13 I 3 M 40 D 1 M 1 D 3 M 36 D 3 M 16 D 1 M 21 D 1 M 5 D 1 M 8 D 1 M 5 D 1 M 4 D 1 M 19 D 2 M 2 D 1 M 16 D 1 M 9 D 2 M 8 D 2 M 16 I 2 M 26 D 1 M 7 D 1 M 54 D 1 M 38 D 1 M 24 D 1 M 19 I 3 M 23 D 2 M 18 I 1 M 109 D 5 M 36 D 1 M 29 D 1 M 17 D 1 M 21 D 7 M 7 I 1 M 11 D 1 M 11 D 1 M 32 D 1 M 1 D 1 M 1 D 1 M 1 D 2 M 16 D 1 M 18 D 4 M 22 D 1 M 11 D 1 M 36 D 1 M 29 I 1 M 5 D 1 M 2 D 1 M 32 D 1 M 7 D 1 M 46 D 1 M 7 D 1 M 46 D 1 M 24 I 1 M 14 D 1 M 11 D 2 M 85 D 2 M 11 D 3 M 10 D 3 M 10 I 1 M 4 D 1 M 22 D 1 M 13 D 4 M 13 D 1 M 41 D 2 M 20 D 3 M 47 D 1 M 21 D 1 M 20 I 1 M 37 D 2 M 4 D 1 M 70 D 7 M 35 D 1 M 50 D 1 M 41 D 2 M 8 D 1 M 20 D 2 M 7 D 2 M 44 D 2 M 7 D 1 M 19 D 1 M 12 D 1 M 17 D 4 M 17 D 1 M 1 D 1 M 40 I 1 M 30 D 1 M 13 I 1 M 5 D 1 M 4 D 1 M 41 D 2 M 8 D 1 M 36 D 1 M 16 D 9 M 2 D 1 M 12 D 1 M 18 I 1 M 37 D 1 M 13 I 3 M 21 D 2 M 6 D 1 M 25 D 2 M 30 D 1 M 3 D 1 M 3 D 2 M 13 D 1 M 12 I 1 M 6 D 1 M 24 D 2 M 5 I 2 M 13 I 1 M 17 D 1 M 8 D 1 M 13 I 1 M 46 D 2 M 8 D 2 M 25 I 1 M 11 D 2 M 10 D 1 M 25 D 3 M 29 D 3 M 17 D 2 M 20 D 4 M 57 D 1 M 11 D 3 M 6 D 1 M 19 D 1 M 15 D 2 M 14 D 1 M 35 D 2 M 28 D 2 M 1 D 3 M 69 D 1 M 54 D 1 M 8 D 1 M 41 D 1 M 3 D 1 M 38 D 1 M 8 D 1 M 8 D 1 M 7 D 2 M 3 D 1 M 4 D 1 M 23 D 1 M 7 D 1 M 6 D 1 M 19 D 1 M 3 D 4 M 6 D 1 M 19 D 1 M 14 D 3 M 18 D 1 M 30 D 1 M 29 D 2 M 25 I 1 M 11 D 2 M 6 D 1 M 8 D 1 M 5 D 1 M 48 D 1 M 31 D 1 M 1 D 3 M 7 D 1 M 14 D 1 M 4 D 1 M 6 D 1 M 34 D 1 M 13 D 3 M 6 D 1 M 2 D 1 M 5 I 1 M 17 D 1 M 47 I 1 M 6 D 1 M 23 D 2 M 20 D 1 M 5 D 2 M 26 D 3 M 12 D 1 M 1 D 1 M 43 D 1 M 14 D 3 M 23 D 1 M 25 D 1 M 10 D 1 M 7 D 1 M 21 D 1 M 15 D 1 M 4 D 1 M 30 D 1 M 8 D 1 M 10 D 1 M 9 D 1 M 33 D 1 M 11 D 1 M 39 D 1 M 13 D 3 M 28 D 1 M 2 D 1 M 6 I 2 M 14 I 1 M 22 D 1 M 3 D 1 M 9 D 1 M 8 D 1 M 27 D 1 M 6 D 7 M 12 D 1 M 17 D 3 M 24 D 1 M 14 I 1 M 7 D 1 M 26 I 1 M 3 I 1 M 30 I 1 M 6 D 1 M 28 D 2 M 6 D 1 M 2 D 1 M 22 
D 2 M 17 D 1 M 6 D 2 M 6 D 1 M 21 D 1 M 20 I 1 M 25 D 1 M 10 D 1 M 29 D 1 M 16 D 1 M 8 D 1 M 16 D 2 M 14 D 1 M 5 D 2 M 7 I 1 M 32 D 1 M 7 D 2 M 24 D 3 M 2 D 2 M 2 D 1 M 6 D 5 M 12 D 2 M 13 D 1 M 6 D 1 M 11 D 1 M 3 D 1 M 9 I 1 M 10 D 1 M 13 D 2 M 28 D 1 M 5 D 2 M 8 D 1 M 2 D 1 M 13 D 1 M 15 I 1 M 42 D 2 M 34 D 4 M 22 D 1 M 25 D 1 M 2 D 1 M 20 D 1 M 3 D 1 M 15 D 1 M 42 D 1 M 4 D 1 M 18 D 1 M 9 D 1 M 13 D 1 M 9 D 1 M 18 D 1 M 17 D 1 M 2 D 1 M 24 D 1 M 9 D 5 M 20 I 2 M 105 D 3 M 12 D 1 M 16 D 1 M 21 D 1 M 1 D 2 M 25 I 1 M 3 I 1 M 4")
def test_create_sam_from_guideAlignment(self):
read_name = "9e4d14b1-8167-44ef-9fdb-5c29dd0763fd"
correct_segment, n_aligned_segments, reference_name = \
get_aligned_segment_from_alignment_file(self.signal_file_guide_alignment, read_name=read_name)
self.assertEqual(reference_name, "gi_ecoli")
self.assertEqual(n_aligned_segments, 1)
self.assertEqual(correct_segment.tostring(), "9e4d14b1-8167-44ef-9fdb-5c29dd0763fd_Basecall_Alignment_template:1D_000:template\t16\tgi_ecoli\t1832931\t60\t4M1I3M1I25M2D1M1D21M1D16M1D12M3D105M2I20M5D9M1D24M1D2M1D17M1D18M1D9M1D13M1D9M1D18M1D4M1D42M1D15M1D3M1D20M1D2M1D25M1D22M4D34M2D42M1I15M1D13M1D2M1D8M2D5M1D28M2D13M1D10M1I9M1D3M1D11M1D6M1D13M2D12M5D6M1D2M2D2M3D24M2D7M1D32M1I7M2D5M1D14M2D16M1D8M1D16M1D29M1D10M1D25M1I20M1D21M1D6M2D6M1D17M2D22M1D2M1D6M2D28M1D6M1I30M1I3M1I26M1D7M1I14M1D24M3D17M1D12M7D6M1D27M1D8M1D9M1D3M1D22M1I14M2I6M1D2M1D28M3D13M1D39M1D11M1D33M1D9M1D10M1D8M1D30M1D4M1D15M1D21M1D7M1D10M1D25M1D23M3D14M1D43M1D1M1D12M3D26M2D5M1D20M2D23M1D6M1I47M1D17M1I5M1D2M1D6M3D13M1D34M1D6M1D4M1D14M1D7M3D1M1D31M1D48M1D5M1D8M1D6M2D11M1I25M2D29M1D30M1D18M3D14M1D19M1D6M4D3M1D19M1D6M1D7M1D23M1D4M1D3M2D7M1D8M1D8M1D38M1D3M1D41M1D8M1D54M1D69M3D1M2D28M2D35M1D14M2D15M1D19M1D6M3D11M1D57M4D20M2D17M3D29M3D25M1D10M2D11M1I25M2D8M2D46M1I13M1D8M1D17M1I13M2I5M2D24M1D6M1I12M1D13M2D3M1D3M1D30M2D25M1D6M2D21M3I13M1D37M1I18M1D12M1D2M9D16M1D36M1D8M2D41M1D4M1D5M1I13M1D30M1I40M1D1M1D17M4D17M1D12M1D19M1D7M2D44M2D7M2D20M1D8M2D41M1D50M1D35M7D70M1D4M2D37M1I20M1D21M1D47M3D20M2D41M1D13M4D13M1D22M1D4M1I10M3D10M3D11M2D85M2D11M1D14M1I24M1D46M1D7M1D46M1D7M1D32M1D2M1D5M1I29M1D36M1D11M1D22M4D18M1D16M2D1M1D1M1D1M1D32M1D11M1D11M1I7M7D21M1D17M1D29M1D36M5D109M1I18M2D23M3I19M1D24M1D38M1D54M1D7M1D26M2I16M2D8M2D9M1D16M1D2M2D19M1D4M1D5M1D8M1D5M1D21M1D16M3D36M3D1M1D40M3I13M1D37M1D26M1D7M1D1M1D1M1D29M1D28M1I19M1D6M1D4M1D5M1D15M4D3M1D24M3I19M1D3M1D5M1D2M1D24M2D11M1D5M3D27M2D7M1D20M2D39M1D24M1D29M1D17M2D21M1D4M3D63M1D6M1D90M1I37M2D13M2D28M3D16M1I13M1D37M1D19M1D9M1D15M1D14M2I38M1D41M1D5M2D42M2D5M1D28M1I41M1D32M1D8M1I27M1I11M2D2M1D18M2D28M1D4M1D32M1I13M1D63M2I25M1I25M2D6M2D19M1D7M1I12M2I1M1I27M1D10M1D13M3D5M3I21M1D12M2D11M1D20M1D8M1D19M1D81M1D30M1D10M1D54M2I8M1D3M1D19M1D26M1D41M2I16M5D58M1I10M2I32M1D28M1D26M2D28M1I2M1I31M1D46M2D9M1I70M3D7M1D28M1D6M3D12M3I30M1I29M1D5M3D16M1D4M1D14M1D20M1D15M2D93M1D51M1D18M1D11M1D18M1D15M5D1
5M1D10M2D8M2D16M2I39M1I16M1D8M4D3M2D10M1D19M1D5M2D1M1D38M3D12M2D10M2D47M4D7M2I12M2D27M3D6M2D12M1D57M1D20M1D31M1D15M1D33M1D31M1D9M1D31M1D9M1I2M1I4M1D3M1D28M1D9M4D7M1I12M2D16M1I17M1D7M1D14M2D33M1D3M1D22M1D4M3D3M1D3M2D29M1D2M1D5M1D14M1D20M1D2M3D5M7D3M2D14M3D1M1D31M2D1M1D10M1D1M1D40M1D13M1D12M1I25M1I28M2D7M2D21M3D10M1D7M1D43M3D5M1D2M1D10M1I25M2D5M1D4M1D36M2D21M2D5M2D6M2D1M1D6M1D22M1D7M1D24M2D13M2D37M3D18M1I4M1D17M1I24M4D1M1D12M1D1M1D9M1D36M1D17M1D15M1D15M1I29M2D8M2D6M1D37M2D1M1D16M1D6M2D18M1I3M1I8M1D10M1I4M2D48M2D10M3D2M1D6M1D25M2D10M1D16M1D8M3D22M2D5M1D5M1I13M1D6M3D2M6D7M2I11M1D4M1D18M1D40M2D16M1D37M1D38M1D8M1D12M1D4M1I26M1I8M3D69M1D21M2D2M1D47M1D27M1D6M1I13M1D9M1D24M1D4M1D38M4D10M2I14M3D2M1I5M2I5M1D24M1D6M1I17M1D11M1D33M2D1M1D8M1I24M1D18M2D37M3D28M2D5M1D5M1I15M1D28M3D25M3D8M2D14M1D27M2D21M5D8M3D16M1D60M4D19M2D12M2D10M3D32M3D3M1D17M1D14M1D29M3D30M1I9M1D5M1D59M1D22M1D2M1I11M1I11M2D14M1D71M1I16M1D17M1D10M1D15M1D9M1D37M2D8M1D24M2D23M2D8M1I23M1I19M2D10M1D47M2D20M2D18M1D13M1D10M3D26M1D14M1I6M3D54M1D10M4D22M1D4M1D17M1D15M1D9M2I11M5D50M1D2M1D9M5D49M3D13M3D7M3D18M1D2M1D6M1I12M2I5M1S\t*\t0\t0\tTAAGCCGCATTGCTTCAGGTCGGTAAATGCTCTTCGCCGGAAGAGCGCGAATGGATGACAGGCTGATGAGCTGGGGTTGGTCGACTTCCGCCATGCGAATCCGCAAACAGCAGATCGTTTCTCATGGTTTGATTACCGCTCAAAAGGTTTTGACGATAACCGTGGTCTGCGCATCGACTCGTTGCTCGCTGTAGCCAACCGCTGGCAGAATGTAGAAACCGCATCGACTATGAAATCCGCAGCAGGAAAACCGTCCGATCACGCCCCGTCTGGGCGACCTTCGCCGCTAATTAGCAGCTCTCCCGCTCAAACGGGTCAGGAGAATTAACCTGAGAAAATCAACAAACTGTCAGTAATGATTTGTTGCCTGTCGTCCTTGTTATACCGTCTCGCGTTTTAGTTGTTCGACCACTTTCATTATCAAGTTTGATATAGGAAACTCACGATGAACGCTGAGCGTAAATTTTTGCCTGTCTTATTTTTGCGCTGGTCATTTATTATCCACGCTTTCGGCTTATCTGATTCGTTCACCGATTTACTCCCACTTACAGACACCATCCGCCAGAGCGATTTTCGGCTAGCCCCATATTCTGTTATTCATCATTGCCACCCTTGCTGTTACCAGAAGCATACTGGGTGATCGCGGAGAATAGTTTTTGCCCGCCTTTAGGACACTACTCATTAACTGCCGCTGGCTTCCTCATTCCTGCTGGCGCGCTGGCTGACGTGATTACTGCTGAAATACGTTGGCCATAGCAATACTCTTCCAGCATTGAAAAGGCATTGCGCAACGGTATTGATTTTCTATTCTGACCGCTTAATCCCGTTGTTCTTTACAATATTCAAAATTACGCTTACGATTAACCACACCGCCTTCTGGCCTTATACCCTTATTTTCGGC
ACTCAAGACCCCACCGGTATTGTTATTTGCACAGGATGGCGTTATCCGCCACTGAAGGTATTATTGCGCTTTATTTGTTAACTGTTCGGCGGGTCGCGCTGTTTATTCTCGTCCAGCCTGCAAACTCATACGCCCGACACAAACATGTGGATTCGTCTGGCTCCCGCGCCGCAGCCCACTTACTCACCCAAAAATGCAAGGATAGAACGATATGCAACATTATTCAGTGTCATGGAAAGGACTGGCTGCACTTCGTTACTGGCTGGCCTATGGCCGCGATCAACAAGAGAATGCGGCGCAAAAGTGAATACGACGACTTCGAACAGCCAACCGCTGCGTGGCCGATGCCAATAAATTCATACGTACCATGCTGGTGCAAATTAATGGTCGTTTCACCGACGACACCGTCACGGTATTGTGTTTAAAGATGGTTCCAACGGACATAATCGCTGCTTGCGGTTATGCGACCCCGAAAGCATTTTATGAAGCCTGAAAGAGTAGGTGGCAGCCGGCCGAAACATGATGATGGATAATAAAGAAACGACGGCGCTCTGGCAGCAAACTGATATTTCGGTCAACTGGCAAGGGCGGCAAAGCGTATTCTTCGATGAAGTGATTGTTGACAGTACGGCAAGAAACTGGACATGCACGGCGGTAATTTAACGCAGCAGAAGAGAAGAAAACGGGTTGCCCGGTGTGTCTGGATACGTCTGGTCGGCATCAGCAATGCAACATACACTTATGGTGGTTGAAAACGGTGTGAAGTTAACTAAAGGCAATGCCTCAGTTCTCCCGCGGATAAACACGCTGGCAACGGTTACCTTTAAAATCGCCGAATAAAGCCGTGACAGCGATGATGATGATGATAATCAGAAAATCTACTACCGTATAACCTCATCATCCTGTTGTCTGCCATGCTGCTGGCAAGGCGCGCTTCCGGCGTCTGCGAGCTATCAACGCGTTGCGGCGTTTGCCGCCGTGGACCAACAGGTATAGAACGCTTTATTCAGTCTTACGGCGCACTGGCGGCAGTTGTCCGTTCTGTTGATGGTTTGCGTCATTGCTGCAATCGCTACCTGCGTTTTTGATCACCTGCCAATGCGTCGCTGTTTGGCGCGTTCTGGGCGGCTTGCTGTCGTGGACCAGTTCGATATCGGCGCGGCGCTGTGCTTTATCGCCAGAGTATGGGCCGCGAAGTGGTGGAAAATTGGCAAACCGTGCTTGACAGTATGACGGCTTTTCACCGCTACGGCAAACACACCATTCTGTCTACGTATTGCCTTTGTCCCTTCGATCCACCAGCTATGCTGCCGGTTTGACTTCAATACGTTTTCGTCGTTTTTATCGCCACCGGGCTTGGCCAGTTATCGGCAACTATTACTATTCCCGGCGGGCAGCATGTTAACAGGCGGTACTTTCTGGTTTGTCACCGGACTGTTTATCTGTTTGCCCTGACCGTGGTGATTTTTATGGCGAAGAAAATATGGGTTGAACGCCAGAAGAGGGCTGCACATATACCGCCGCTTAGCAAAATTCCTTATTTATTGTCCACAGGCGTGGCTGCATCGTCGCCACATGGCGAGGTGCTGCCCCATTCGCTGGTGGGGCGGATCCCTGCTATCTTTATCTGTCGATGTTTATGGCTGGCTGGAGCGCAAACGCCCACCGCTCGATCCGGTGGTACGATCGCTTGTCAGCGCATTGCGCAAATGTGCCTGTGAGTTTTGTGTGGACATCAGTATGAAAGTCGCCGAGCGCACCGGCAGCGATAAACTGCTGGCAGTGGCGACTGGCGGCAGCCCGCTCTTGGGGCGATGAAGAACGGCTGGCGCTGGCACGCCGGCTGCAAGCGTAACGCCGCCAACGGTCGATGATGCCCTGCGTACCCGGACTGGCTGCGCATTTGACGCCTGGCGCCCACCGAACTGGACGGCATTGATCTTGGCCTAAAATCTGTCAGCCCGTTTTAATTTGCCATATGACATTCCTGCCTAGGGCTG
TGCCATTCTGAAAACGTTCTTAAGGAGAGATGATGCGCCACAGGGTGGTTGCTGGGATTGTTATCATGTTTTCTGGCAACACATGCCAGTGACTTCTGGCAAGAAATTAAAATGAGGCCAAAGGGCAAACCGTCTGGTTTAACGCTCCGGGGCGGCGATACCGCACTAACCGCTATTCAGAGGCGAGATGAAAACCATTACGCTATAAACCTGAAGATTGTCCGTCTGGCGATGCCGCACGCGGTGAAGCGCATTCAGATCGAAGCCGCAGCCGGACGTAAACGGCGGTTTCGGTGGATCTGTTTGGGTGAACGGCGAAAACTTCCGCACCTGTGCAAGAGGCCAATTTATTACAAACGGGCTGGGCGGAGACCGCCTAACTGGCGCTATGCTCACAGCTGCCGGTGCGGAAGATTTTTCGTGCCGACACAAGGTGCGGACCGCCCGGGCGGCGAATAACTGACGTTTATCGCCCGCCGCGATGTTACGCCAGCCACCAAACGCCGCAAGCCTTACTGAGTTTGCCAGCCAACTTCGGCACGGCCACCTATCCGCGCCCACCGGACTTACCGGCACGGCGTTTCTTGAACAGTTGCTGATTATGCTGACGCCCGATCCGCCGCATTAAAAGAAGCGCCGGACGATGCGACTGTGTCACCGTTCCCTTGTGGCAATATCTTGATGTGCTGCATCCGTATTTGTGGCGCGAAGGAAAGGATTTCTGCTCACCTGCGCGGATGGATGCTCTGCTGAAAGCCGGCAGCATTGCGCCTGTCGCTGACCTTAACCCCGCGCATGTGCAGCAAAAATCGCCAGCGGCGATTTGCCTGCAAGCAGTTACAGTTTTGGCTGCGAGGGGATGATTGGCAACGCATTTCGTCACCATTCCTGCCAACGCGAATGCCAGTGCTGTATGAAGGTAGTTACTTCCTGCTCACCCGGATGCGCAACTGCGTAAAGTGATCCCCGCTGTCTGCGATCCTTCACTCGATCCGCAAACCGCCTGATGGGCAGCGCGAATCATTGCAATCAAGAATGCCGCAGGATCTGCCGCCGGTACTGGCTGAACCGCACGCAGGTTGCAAATGCGCTGAACAAGAATGGCTGCCACCGTTACGGCACGCATTAATCTTTTGCTGTGGGCGATGGTGGCGGTGATTTATGCACCGCTGATCCCGCAGCTCAATGCTGATATCGCCTGCTTTGTCGTTGACACACTGGCAGGCGTTATCGTCGACCACAGCTACCGCAGGCATTACTGGCAACGCTGTACGACAAACCATCGCGGCGGTCGGGGCATTGCTGACGCCCTGCTGGTGATTGTGGCGCTGTGGCTCGGGCCAAATGGCAGCGGCGTGCGCCCGTCTGCCGTGGCCGTCACACCCCATGTGGCTTTGCCACCAGCGCCCCGTTTTGCCGACGGAGGGCTGCTTTATGACTATTTCCGTATTTCACCTGCCAATGGACCCGATTTGTCTGGGCCTCACCCTTGCAGGAAAGAAAGCGCATTTCGCTGTGGATCTTAGCAGCAGTATTGAGCGAAAATGGCTGCTGCAGCAGGTCATTGTGCTGGATTCGCTACAGCCGCTGGCAATGCCTGAACTGGCTGCTGTTGCTCTCCGTCGCGCCTGCGCTGGCAATGGCGATGCTGGCGATTGTTGCCCGGTCGCTGTCGGTCGTGGATGTGGGTAATTATTCTCGGGCCGTAATCCCCTGACGCTGGCGGTAATTATTAGCTGGCAGTGGTTAACCTGTATGACATTGACCAACAAACAAAGGCGCGCTTGCCAGCCTGCTGTTGATGCTGTTACTGCCGCCTGCCGTTTGCTGAGCTATCTGCTATGGCGCAGCTGGCGGCGCACTATTCCCGCGTGATGGCGTTCGCAAGCCCGCCACGCCTTTTTATTGCCGGGCAATGCTGGCGATTTTTACCCCAACCGGTGTGCCGTGGTTCCTGCTGGCGATCCTCGCGATCGACGAGATCAATATGAAGGCTCATC
ACCAGCCCGACAATGGGCTGGTGGCGACATTCGCTTTGCTCCTGTTACTGCTGTGGCTGGAATGGGCCAGCGTCGCCAGTTGTGGCTATGGCTGCCCATTTTATTACCCGCTGCTCTGCCACTGTGGCGGGCCAGTACACGCTGGCGCCATGGCTGAAACCGATGGAAGCTGGACGGCGGTGGTCTGGGGCATTCGTGGGTGATGCCGTGGATGCTGTTTATCCGCAACCCGCCCCGCAGCGCATTGATTCATCGGTTAATTTTGATTGCGCAACACTGGCTGTCGCGGTCAAAATCTTCTTGTGAATGCCCACTCATGTTGCGCCCTGTGCTGCTGATTGCCTTCGCGGTGGATTGCAACATATTGCGCAGTATATGCCAACGCCAGCTGGGCGCGGGCGCCCGACGCTCACCACTGAGGCGGTGGCTAAGCAGTAAGGCAGCAACGGTATTCTTCGCCCGTGCTTTGCGGCAACCGTTATTACCGCTTATTATTTTGCCCCGACCGCGTTAGTCGCAAATGGGTAGGTTATGTCAGACAAGGACTCGCTAATGCTCTGCGTGAAATGTTTCGCTACGTTTACCGAAAGCTTGCTGACAAACGTTAACTTTACGGTGGATAAAGGTGACATTGTCACGTTAATGGGGCCGTTCATTGGGAAAATCCACTCTGTTTTCATGGATGATTGGTGCACTGGCCGAACAGTTTTCTTGTACAGGTGAGCTATGGCTCAATGAGCAACGGATTGGACATCCCACCCACCGCACAGCGTCAGATTGGCATTCTTTCAGGATGCACTAAGACGACCAGTTCAGTGTCGGGCAAATACTGCTGGCGCTACCTGGCGACACTTAAAGGAATGCCCGACGTAATGCCGTGAATGATGCACTTGACAACCAGGCCTTGAGGGAGTTTCCATCAGATCCCGCCACTTTGCTGGCGGTCAGCGAGCGCGCGCGCTGTTCTACTACGCGCCCTTCTCGCCCAACCAAAGCGTTACTCCCGGATGAGCCATTCAGCCGTCTTGATGTGCTCTGCGATAATTTTCGCCAGTGGGTGTTCAGCGAAGTTCGCGCCCCAGATCCCGTCGTTCAGGTAACGCACGATCTCCGTGGATGTTCCTGCTGATAGTTCTGTTCTGGATATGGCGCAGGGTCAGAAAATTACAACAAACTGCGATAACGCAAGTTTTTCCTCAATGCGTCAGTTCAGAAATATGCACCTCAAAACTACTGCCGGATTTTCGATGAAACGTTTCTCAAATGACCGCGCTGGCAATGGTTTAGGCTGGCTTGCGCTTCTTCGTGGGCCGCTGAATCCGGCGAAGCCTCTACACTGGACCAGCTTCAACAACAAAATGGCAAAGCGATAGATACTCGCCCCAGCGCGTTTTAGGCAACGGCTGGCCACAAACCTTGTGTCGGCCCTTCTGGTCATGAACTTGCCGTCAAACCTCTGCCAGCTGGCTTGACAAATGAGTAACCGAACAGTTAACGAACGCGTGGATCAAGCAACATAACCTGAAACCGATGCCCGGTGGCGCTGTGTCATGTCGACAAAGATGTCGACGCCGTCAAACGCGACTGCAAAGCAGGTTTACCCATATCTCCATCCTGAGGACGCGCTGTTGAACCTTCCTGCCTGCAAAACTGCCGCATTTTGAGCAGCTGGTTTATCCGCAATGGCTGCACGACCTGCAACAAGGTAAAGATAATACGGCGAAACCCATGGTGACTGGAAAGTCATTGAAGCGGCCCGGGCGCTCCAAGCTGCACTTTATCAGCCATATTCCCGGCGCTGACTACATCGATACCAACGAAGCGTGGAAAGGAACGCTGTGGAACAAAGTTTCGATGAACAACCGCAAGCGATGCTGGCAAACACGGCATTCGCCATGACACCACGGTCATTCTGTATGGGCGCGTGACGTATACGCTGCGTGTGGCGCAGATTATGCTTTATGCTGGCGTGAAAGATGTGCGCCTGCTGGATGGCGGGC
TGGCAAACCCCGGGTCCGACGCGGGACTGCCTGTTGAGCGCGAACGCCACCGAAAGTGAAAGCGGAACCGATTTCGGCGTGAAGATCCCGGCACACGCAGTTGATGCTCGATATGGAACAGGCTCCGGTGGACTGCTGCATCGCCGTGATGCATCGCTGCGAGCATCTGTTCGTGGCCAGAATTTATCGGTACGACCAGCGGTTTGTTATATTAAAACCAAAAGGTGAAATAGCCGGCGCACGCTGGGGACACGCTGGTAGCGACTCGACGCATATGGAAGATTACAACCCGATGGCACCATGCGCAGCGCCGATGATATACAGCGTGGAAAGCGCGCGTGGACATCAAACCAGAGCAGCAAGTTTCATTCCCATTGCGGTACTGGCTGGCGCGCGTCCGAACCTGGCGCGCACGCGCCATGGTTGAAGAAGGTTTCCGGCATGACGGCGGCTGGTACGACGGAGCAGCGATCCAAAATCCGGTAGCAACCGGTGAACGCGGCCCGGACAGTAGCAAATAACATTGCAATTACTGACGCTGGAGCGACTTCAGCGTCAGATACCCACCCACACCCGCGTAAAGGTCGTCATCCAGCACAACGCACCGAAGATCCACGCAACCACGCAAACCATGCAGGAATAAACATCCAGCACAAACAGTAAGACGTTTCTGTGCCTTCACACCCCCAGTTAGTAAACGACTTATGCATAACGGGTTATCAATCTGATGCGTTTTGCTGCCAGTGCAGCAAAGGCGAGAAAACTGCTGCTCTGCGCTGATAAACGCAACAACAACCGTCACAGCGCATTTGCTCCGGTGCAGCAAGACAAACAACGGCACCAGCGCGTAAAAGAGAAAATCGAGAGAAATAAGAAAGCCGCCCGTCGGTAAGCTCTGCGCCGCGCCAGCGCACCGTCCAGACCATCAAGCAGCCTGTTCAAATGACGTGATCAGCGCCGCGATACCAGCCCAGCGCCAGAAACGGTATCAGCACTGATGGCAACTTAATTAATGTTAAACCATCTGGCGTAATGCCCGGTTTATCAAGAACCCGCACGCACTACGCAACAACGGTTTAATCCGGGATGAAGATGGCGGTTCAGCACAGGCACTCTTAAATATAAAGCCTTCTGATTGAGCAACAGTGCGGATATTATGGCATTTTCGCTTATCTGCCCGTGTGTAATTTGCGAAATGATTGAGTTGTTAAGGCCATCATTGAACGTGATGGCAAATTTTACTTCTGCGCACGCCCGCCCAGAGCGATCAGGCGGGATTATGGAGTTTACGGTAAAGTTTGAGCTGGCCGAGCCAGCGGCAGGCGCGTTGTGCGTGAGTTACGCAAGAACTGGCATCGAAGCACCAGGGTGAATATGTTGCCAGCCATCAGCGAGAAGTTCGGGCGGATTATCCATCTTCATGTTCGGTATCGTACCACGGGACGTTACAGGCACATGAACACAGCGCTGTCTGGTGCTCACCGAAGAGGCGCTGCAATATCCATACCTGTCCCATTAGAGGCGTTTGTTTACGCGCCGCCAGACCAGTTGATTAGTGCAGGTTTTGTCACCGCTGGCATTGCAGCAGTATTCCTTCGGTTTTAATTACCGCCCTTCAGAATAATTTGATCCTGATAAAACGCAGCACTGAGTACAGGGTTGTAGCTGACCGCCCGCTCGAACTGAACACTTGGTGGTGTTTACCTCCACGTCCGGACGATGCGGGTTACCAGTGCACTAATGGAAATGCTAACATTACGGCGAACAATGCTCGACTCAGGAACCCTAACGTTATTGGTTCCCGCTACTGATAACGGTAGCCGCGGCAAACTTAGCGTCTGAGTTATCGCATTTGGTTATGAGATTACTCGTTATTAATTTGCTTTCCGGTCATTTTTCGTTACCGCTCATTCTTGATGGTATAGTCGAAACTGCAAAGCACATGACATAAACAACATACACAATCGTATTAATATAAGGGTTTTATATCTATGGATCAG
ACATATTCTGAGTCATTCCTCAACCATAGTCCAAAGCGCGACCTGAATCAAAACCGAGTTCGCGCAAGCCGTTCATAATGACCACACCGGCCTTTTCTGAACAAAATCCAAAATATCGCCAGATGTCATTACTGAGCGTCTGGTTGAACCGAGCGCGTGATCCAGCTCGCGTGGTATGGGCTGGATGATCGCAACCAGATACAGGTCAACTGCATGGCGTGCAGTCTGGCCTGCCATCGGCCTGCGCAAAGGCGGTATGCGTCATCCGTCAGTTAACCTTCCATTCAAATTCCTCGGCTTTGAAACAAAACCTTCAAAATGCCCTGGACTTCTGCCGATGGGCGGTGGTAAAGGCGGCAGCGATTTCGATCCGAAAGGAAAGCGAAGGAGCATGCGCTTTGCTGTGCGCTGATGACTGAACTACCGCCACCGGGCGCGGATACCGACACCTGGCAGATATCGGGGTTGGTGGTCGTGGCCGGTTTATGGGCGGGGATGATGAAAAGCCCACCGCCTGCGCGTCTTCACCGTAAGGCCTTTCATTTGGCGGCGTCTTATTCGCCCGGAAGCTACCGGCTACGGTCTGGTTTATCACAGAAGCAATGCTAAACGCCACGGTATGGGTTTTGAAGGGATGCGCGCTTCGTTTCTGGTTCCGGCAATGTCGCCCAGTACGCTATCGAAAAGCGAGGAATTTGGTGCCGTGCGGATCACTGCGTCAGACTCCAGCGGCAACAGTAGTTGACAGCGGATTCACGAAAGAGAAACTGGCACGTCTTATCGAAATCAAAGCCAGCCGCGATGGTCGAGTGCAGATTACGCCAAAGAATTTACCACCTATCTCGAAGGCCAACAGCCGTGGTCTCTACCGGTTGATATCGTCTGCCTTGCGCCACCCAGAATGAACTGATGTTGGACGCCGCGCATCGCTTATCGCCACGGCGTTGAAGCTGTCGCCGAAGGGCAATATGCCGACCACCATCGAAGCGACTGAACTGTTCCAGCAGGCGTACCATATTTGCACCGGGTGCTGGCTAACACGCTGCGGCGTCGCTACATCGGGCCCCGAATGGCGGCCAAACGCTGCGCGCCCGGCTGGAAAGCGAGAAAGTTGACGCACGTTTGCATCACATCATTGATATCCAGCCATGCCTGTGTTGAGCATGGTAGGAAGGTGAGCAAACCAACCATGCAGGGCGCGAACATTGCCGGTTTTGTGAAGGTTGATGCGATGCTGGCGCAGGGTGTGATTTGTTGTAATGCGCCTACGGCGCTACGCTATCAGGCCTACAAATGGGCACAATTCACAGTTACGCTCTAATGTAGGCCGCGTTGCAGCGCCCTGCAAAATTTCGGCGTTTATGAGTATTTAACGGATGATACCCCACGGAACATTTATAATAACGGCATTTACCGTAGTGCTCCCAAACTGCTTGTTGTAACGATAACACGCTTCAAGTTCAGTATCCGTTAACTTTCTGCGATACAGATATGCCAGTAAAGAACCCATTTGACTATTTTTGATAATCTTCGCTTTCGAACAACTCGTGCGCCTTTCGAGCAGCGTTATATAATGCCAGCCAGTTCTTCTTCATCGTCCTGTTTTGAAAAGCTGTGCTTGACGAGATCATCCATGATAATTCCGCCGCCCAGTATTAGCTTGCCGAGATTTACTGGAGCTATGATTAGCGCAATCAGAGATATAGTCTGAGGAAAATACAGCAAATTATTCAACAAGGCGATAAACTCTGCCGGGGCTTCCTTCCATGTTTACGGAAGGTATTGGCTCATGGTCGCCAGAAAGAAAATGCTCCATTAAGGCACAATAACTTTCGCTATCTTCGATACCCCATTGATCCTTCTAAAGACTCGCGTCTTTACTTATGACATCGACGAGTCAAAAGAAGCACATGATATTGAAGGTATCTTGCCGTGTTCAGGCTTTCGCGGCCAGAACTCCAAAGGCAGACCACGTTTATGATAGAATCGATAAGGTGATC
AATTGTAGCGCCTGTAACTTCGATACTGGAGCGGCCTAACACCTTTAGCCTGGATAATAATGCAGTTGTTCTTTTTGCTTTAAACCGGCCCGAACAATAAGCCCCATCATAATTAATAGATAAAGAAAAGCATCCCGCGGTAATCAGGTCCTTCATTCAAACCGTTGATGTTATCACAGGAACACAAACACAGCGACAACACATGCTAAATAAAACTCTACCTACAAAGAAAGTATCTTATTTTCTTTAATCATCCGTTCAAAATTACTATTAAATATTTCCCAGCCATTAAAGAATACTGCTCCCGTGATGGTTTTGTAATAAACTTTTTCATCACATTCCCGTTATTACATACTTTGAGGCGATAACACTTTATCCATAAAAATATCCAACGAATTTTCTTGATCCGCTGGATATTTGCGAATATGACCGACAGTACGATCACCTGCTTACGCTATCGCCCCGCTTCCGACTTCATCTGCTGGCGGACTTTTCGCACTACGATGCGGTGCCTTTTTCTTATCAGCAAGCAATACGTAACCGGAACGAAGCAATGT\t*\tNM:i:1442\tMD:Z:8G3G1G0T0A0C14^TC1^T21^G16^G12^TAC94C0T1C7C19^GTTGC9^G24^T2^A17^C18^C9^T13^T0G8^T18^T4^A36C5^T15^T3^T10C0T8^C2^T25^C22^TTTC34^CG0C15T4T0C3C0T1C27^T13^G2^T8^TA5^T1T26^CT13^G19^G3^G11^G6^T3A0G8^CT8T3^CGCCA6^C2^CC2^GTG24^GG7^T39^GC5^A14^GT16^T8^C16^T3C25^G10^A1T35C5T1^C15A0T2C1^T6^AA1C0G3^T5A6C4^CG0C12T0A0C4C1^G2^T6^CC1G22T0C2^A30C0T7T25^A21^G0T23^AAA17^C1G0T9^GTTGCTG3T2^G0C3T22^G8^G9^G3^T23T12C5^G2^A28^TCC13^T26C12^A8T2^A0T0G31^C9^G1C6T1^C5G2^A8C21^T0C0A0T1^T1A1A11^G21^G7^A10^C25^A1T19G1^TTT14^G28T14^G1^T1C1C8^TCG26^GC5^A6T0G10A1^TC23^G49A0G2^T0A1A0G13C4^G0C1^A6^TGG13^C17T0C2T12^T0G5^T4^C8C0A0T3^T0T6^TCG1^A31^G48^T5^T8^A0T5^AG1C10C23^TT29^G30^G0G0C16^TTT14^G19^A6^AACC3^A19^G6^T7^T23^G4^G0T2^GT7^T8^T8^A1T36^C3^T22T6C11^G0T0T6^T0G53^T46C17A0A3^CTG1^TG0G0G1T24^TA4T0A0C28^T0T1C11^AA15^G12G0T0T4^T6^TGG11^G0T22T16A16^GCGC20^TG17^TAT2C26^GCA25^T10^AA11T0A23^GA1T6^AA2C56^T8^T1A5T27^GC24^C6G8C2^T1A0G10^GT3^C3^A30^TT0G0T23^G0C5^TC34^A38T16^A1T10^C2^GACTGGGTG2C13^C36^G8^AG21C19^A4^G5C12^C1C29A0A37^T1^T3C13^TCGA1A15^G12^T19^A1T5^TG8C1C33^CA7^CA20^G8^TA0A6T0C0C8T0T21^T50^C35^TTCGCCC7T1C60^C1C2^CT5C51^T15C5^A47^TTC20^GT41^C0G0G0C10^GCCA1T9T1^A2C19^C0A13^GGG10^TGT0T10^AA4T6C73^GG1T9^G14A11T11^T46^G7^T0C1C43^T1T1C3^T6T25^G2^G0T29T3^T0T28C0T5^G11^T0A0T20^TGCT2C2T0T11^T16^TT1^T1^C1^C5T26^C11^T1C16^GGCATCG21^T17^T29^A10T25^GCTGG39C46T24C15^AG9C32^C2A1G0G0C17^A38^C7A1G0T43^C7^A17T24^AC8^TT9^T0T11T3^T2^GG1T17^G4^A1T3^C8^G
5^C8A5T6^G16^TCA36^GGC1^A24T28^G25T11^T0G25^G7^C1^G1^T29^T10T0G35^A6^G4^G5^G2C12^TTAC3^A43^G3^T5^G0T1^G0G23^TG0T0G9^G5^TTT0T26^AT7^C0G0G0C17^CG0C5A0G5A0T6T1C15^T8T15^A29^C17^AA21^A4^GCC63^C1G0G0C2^T97T29^TT13^TG1T1T0T0T22^ATT29^G37^G1G0T0T15^C9^G5T9^T21T2C27^A13T27^G5^GC42^TG0G0C3^C28A40^T32^A28T0G0G0C14^AA2^T1G16^GT28^C4^G33T11^T6T56T20A0A0A26^CC1T4^CT19^A7C9C29^A10^T13^ACG2A23^A12^AA11^A2G17^T8^A0A0G0C9C1T4^A66G0G0T12^T0G0C0C27^T0G9^T5T0T2C52^T3^C19^T10T0A0A13^A57^GCAGC70T29^G28^G26^AC48A0G11^G1T6T0C36^AC0A1C35T40^TTC1T5^G28^T3C2^TAT10T2A1T27T1C5C2C16^A5^TTA0T1T0A12^G4^G6T7^T1T18^A1T13^AA93^T51^A18^G11^C18^T15^GGTTA1T0C7A4^A10^TG8^CG56C1T2C9^A8^CCAG3^CC1G0C7^T19^A1T3^GC1^A38^ATC12^CG10^CT47^AACA9C0T8^CA25C1^GAG0C5^CG1C8A1^C0C0C1C53^G1T18^G17C0T12^C15^T33^T28A0T1^A9^A7G0C0C21^A15^A3^C28^G7G1^CGGT8C4C2A0T1^AA2T14G15^G7^G12A1^TG0T32^T3^G22^C0C1G1^CAC3^C0C2^AC1T27^T2^G5^G14^T20^G0C1^GGC0C4^CTGACAT3^AT0T13^ATG1^C20C0G9^TA1^G10^T1^A28C11^C13^T34C2C6T6C0T12^TC3C3^AC21^ATA10^G7^A1T15T1C23^CAG5^T2^T12C0T1T19^GG1T3^A4^T36^CT21^TG5^TT6^TT1^C6^T1A20^A7^A24^AG13^AT37^CTG22^A11C29^GTGA1^G12^T1^T9^T36^G17^G15^T0T15T27^CG8^GT6^T2A1C0T12C1T0A15^CT1^C16^T6^TC29^A14^AC48^AA10^TGA2^T0G5^T0T5C0A0G16^TG2T7^T16^G0T0T1C4^GGT22^AA1T3^C18^A6^TCT2^AACAAT18^G4^G18^A40^TT16^A34T2^C9C8C19^A8^T12^T4T27T5^TGA3A65^G21^GG0T1^T0G0G0T44^C0C26^G19^A9^T0A1T6A4C9^G4^A38^GCAG24^AAA8T3^G1T19T0G1^A6A1A14^T0G10^C33^GC1^G30G1^T18^TA1G35^GCC28^AA5^A6T0G1T10^T28^TTG23G1^CAA0G0C6^CC3G10^A27^GC0T15C1T2^GGGCC8^TTC4T11^A11C26C21^GCAG19^AT12^TT10^TCT32^AGA3^A3A13^G14^A2T4C21^TAT39^C5^G7C38G3A8^T19C2^G3T18G1^TT0T0A12^C87^T11T5^T10^G15^G9^T5A0G27G0C1^TT0T7^T0T1C21^GC23^TC15T0C2T30^TT10^A47^AG20^CC2T15^G9G1T1^C10^ATT16T9^C1C1C2T7C2C2^GGC54^A10^TCTC6A0G14^A4^T17^T15^A10T9^GCAAA39C0A9^T2^A1T2C4^AAGAT4T0C43^TTT13^TTT1C5^GCA18^C0T1^C2C0C3G0C5G1C6\tAS:i:8793\tXS:i:0")
if __name__ == '__main__':
    # Discover and run all unittest test cases defined in this module.
    unittest.main()
| true | true |
f71e13789cc4145d2f9206efb237dec6960ee37f | 172 | py | Python | tests/test.py | f-stop-lang/f-stop-rply | 6b058200a4e1045749edd8f4b9ac40637d3ede5b | [
"MIT"
] | 7 | 2021-06-12T15:02:25.000Z | 2021-06-25T18:42:57.000Z | tests/test.py | f-stop-lang/f-stop-rply | 6b058200a4e1045749edd8f4b9ac40637d3ede5b | [
"MIT"
] | 2 | 2021-06-14T16:59:20.000Z | 2021-06-28T14:42:49.000Z | tests/test.py | f-stop-lang/f-stop-rply | 6b058200a4e1045749edd8f4b9ac40637d3ede5b | [
"MIT"
] | 3 | 2021-06-25T17:52:05.000Z | 2021-07-13T15:24:20.000Z |
from fstop import Runner

if __name__ == '__main__':
    # Load the sample f-stop program shipped with the test suite, then
    # execute it through the interpreter and print whatever it returns.
    with open("tests/test.fstop") as source_file:
        program_text = source_file.read()
    interpreter = Runner()
    print(interpreter.execute(program_text))
from fstop import Runner

if __name__ == '__main__':
    # Read the sample program first (matching the original order of
    # operations), then hand it to a fresh Runner and show the result.
    with open("tests/test.fstop") as script:
        code_text = script.read()
    print(Runner().execute(code_text))
f71e13a9cb20186b85b883192d55616ea28d6f17 | 60 | py | Python | dekorateur.py | krautware/python | d0f0b2f8812e2053cee9def2ba08d7cf4b443eb7 | [
"MIT"
] | null | null | null | dekorateur.py | krautware/python | d0f0b2f8812e2053cee9def2ba08d7cf4b443eb7 | [
"MIT"
] | null | null | null | dekorateur.py | krautware/python | d0f0b2f8812e2053cee9def2ba08d7cf4b443eb7 | [
"MIT"
] | null | null | null | def is_prime(n):
return all(n % i for i in range(2,n))
| 15 | 41 | 0.6 | def is_prime(n):
return all(n % i for i in range(2,n))
| true | true |
f71e13ae465530be69ec19e17957f4de88433aba | 1,412 | py | Python | test.py | MananSoni42/fantasy-predictions | f511c46b5f5896fa3ba709d5de517116ab8a30ce | [
"MIT"
] | null | null | null | test.py | MananSoni42/fantasy-predictions | f511c46b5f5896fa3ba709d5de517116ab8a30ce | [
"MIT"
] | null | null | null | test.py | MananSoni42/fantasy-predictions | f511c46b5f5896fa3ba709d5de517116ab8a30ce | [
"MIT"
] | null | null | null | import csv
import json
import time
import re
import requests
from pprint import pprint
from bs4 import BeautifulSoup
def clean(x):
    """Normalize a scraped table cell: lowercase and drop spaces/newlines."""
    # PEP 8 (E731): a named def is preferred over an assigned lambda.
    return x.lower().replace(' ', '').replace('\n', '')
def averages_100(avg_url):
    """Scrape every <table> on the IPL stats page at ``avg_url``.

    Returns a dict ``{'stats': [...]}`` where each entry is the list of
    cleaned <td> texts for one <tr>. Rows with no <td> cells (e.g. header
    rows made of <th>) contribute empty lists, which callers filter out.
    Performs a network request; requires ``requests`` and ``bs4``.
    """
    response = requests.get(avg_url)
    soup = BeautifulSoup(response.content, 'lxml')
    avg = {
        'stats': [],
    }
    for table in soup('table'):
        for row in table('tr'):
            curr_row = []
            # enumerate replaces the original hand-rolled `i = 0; i += 1`.
            for i, col in enumerate(row('td')):
                if i == 1:
                    # NOTE(review): the player-name column (index 1) is
                    # hard-coded to 'KL' instead of clean(col.text) -- this
                    # looks like a debug placeholder; confirm intent before
                    # trusting the 'player' column of the exported CSV.
                    curr_row.append('KL')
                else:
                    curr_row.append(clean(col.text))
            avg['stats'].append(curr_row)
    return avg
# Header row for the exported all-time best-batting-average CSV.
header_player_ave_100 = ['sr.no', 'player', 'matches', 'innings', 'not-outs',
                         'Runs scored', 'Highest Score', 'Average', 'BF',
                         'Strike-rate', '100s', '50s', '4s', '6s']

url_ave = "https://www.iplt20.com/stats/all-time/best-batting-average"
records_backup_ave = averages_100(url_ave)

# Header <tr>s yield empty lists; keep only rows that carried <td> data.
avg_records_100 = [record for record in records_backup_ave['stats'] if record]

# Dump the filtered records, preceded by the header, to a CSV file.
with open('avg_player_100.csv', 'w') as f:
    writer = csv.writer(f)
    writer.writerow(header_player_ave_100)
    writer.writerows(avg_records_100)
| 28.24 | 153 | 0.588527 | import csv
import json
import time
import re
import requests
from pprint import pprint
from bs4 import BeautifulSoup
def clean(x):
    """Normalize a scraped table cell: lowercase and drop spaces/newlines."""
    # PEP 8 (E731): a named def is preferred over an assigned lambda.
    return x.lower().replace(' ', '').replace('\n', '')
def averages_100(avg_url):
    """Scrape every <table> on the IPL stats page at ``avg_url``.

    Returns a dict ``{'stats': [...]}`` where each entry is the list of
    cleaned <td> texts for one <tr>; rows without <td> cells (header rows)
    contribute empty lists. Performs a live network request.
    """
    r = requests.get(avg_url)
    soup = BeautifulSoup(r.content, 'lxml')
    avg={
        'stats':[],
    }
    for table in soup('table'):
        for row in table('tr'):
            # i tracks the column index within the current row.
            i=0
            curr_row = []
            for col in row('td'):
                # NOTE(review): column 1 (player name) is hard-coded to
                # 'KL' instead of clean(col.text) -- looks like a debug
                # placeholder; confirm before trusting the output.
                if(i!=1):
                    curr_row.append(clean(col.text))
                if(i==1):
                    curr_row.append('KL')
                i=i+1
            avg['stats'].append(curr_row)
    return avg
# CSV header for the all-time best-batting-average export.
header_player_ave_100=['sr.no','player','matches','innings','not-outs','Runs scored','Highest Score','Average','BF','Strike-rate','100s','50s','4s','6s']
avg_records_100=[]
url_ave="https://www.iplt20.com/stats/all-time/best-batting-average"
# Fetch and parse the live stats page (network call).
records_backup_ave=averages_100(url_ave)
# Keep only non-empty rows (header <tr>s produce empty lists).
for record in records_backup_ave['stats']:
    if record:
        avg_records_100.append(record)
# Write the header plus the filtered records to a CSV file.
with open('avg_player_100.csv', 'w') as f:
    writer = csv.writer(f)
    writer.writerow(header_player_ave_100)
    writer.writerows(avg_records_100)
| true | true |
f71e13bdfe32a4ad3f1a533ca707312e71907da0 | 227,723 | py | Python | pysnmp-with-texts/VMWARE-NSX-MANAGER-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 8 | 2019-05-09T17:04:00.000Z | 2021-06-09T06:50:51.000Z | pysnmp-with-texts/VMWARE-NSX-MANAGER-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 4 | 2019-05-31T16:42:59.000Z | 2020-01-31T21:57:17.000Z | pysnmp-with-texts/VMWARE-NSX-MANAGER-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 10 | 2019-04-30T05:51:36.000Z | 2022-02-16T03:33:41.000Z | #
# PySNMP MIB module VMWARE-NSX-MANAGER-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/VMWARE-NSX-MANAGER-MIB
# Produced by pysmi-0.3.4 at Wed May 1 15:34:54 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, Integer, OctetString = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "Integer", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueRangeConstraint, SingleValueConstraint, ValueSizeConstraint, ConstraintsUnion, ConstraintsIntersection = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueRangeConstraint", "SingleValueConstraint", "ValueSizeConstraint", "ConstraintsUnion", "ConstraintsIntersection")
NotificationGroup, ObjectGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ObjectGroup", "ModuleCompliance")
MibIdentifier, Bits, Integer32, MibScalar, MibTable, MibTableRow, MibTableColumn, TimeTicks, Unsigned32, iso, NotificationType, Counter64, ObjectIdentity, Counter32, ModuleIdentity, Gauge32, IpAddress = mibBuilder.importSymbols("SNMPv2-SMI", "MibIdentifier", "Bits", "Integer32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "TimeTicks", "Unsigned32", "iso", "NotificationType", "Counter64", "ObjectIdentity", "Counter32", "ModuleIdentity", "Gauge32", "IpAddress")
TextualConvention, DisplayString, DateAndTime = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString", "DateAndTime")
UUID, = mibBuilder.importSymbols("UUID-TC-MIB", "UUID")
vmwNsxManager, = mibBuilder.importSymbols("VMWARE-ROOT-MIB", "vmwNsxManager")
# MODULE-IDENTITY node for VMWARE-NSX-MANAGER-MIB, rooted under
# vmwNsxManager at OID .1.3.6.1.4.1.6876.90.1 (pysmi-generated code).
vmwNsxManagerMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1))
vmwNsxManagerMIB.setRevisions(('2016-06-02 00:00',))
# Revision descriptions are only attached on builder versions newer than
# 4.4.0 (presumably older builders lack this API -- guard kept as generated).
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    if mibBuilder.loadTexts: vmwNsxManagerMIB.setRevisionsDescriptions(('This is the initial version of the NSX Manager MIB. It describes all the notifications sent from the NSX Manager appliance. WARNING: This mib module will not be backward compatible with next version. ',))
# Human-readable module metadata; only loaded when loadTexts is enabled.
if mibBuilder.loadTexts: vmwNsxManagerMIB.setLastUpdated('201606020000Z')
if mibBuilder.loadTexts: vmwNsxManagerMIB.setOrganization('VMware, Inc')
if mibBuilder.loadTexts: vmwNsxManagerMIB.setContactInfo('VMware, Inc 3401 Hillview Ave Palo Alto, CA 94304 Tel: 1-877-486-9273 or 650-427-5000 Fax: 650-427-5001 Web: http://communities.vmware.com/community/developer/forums/managementapi ')
if mibBuilder.loadTexts: vmwNsxManagerMIB.setDescription('This MIB file contains the information that the receiving party needs in order to interpret SNMP traps sent by NSX Manager. VMware NSX for vSphere is a key product in the SDDC architecture. With NSX, virtualization delivers for networking what it has already delivered for compute and storage. In much the same way that server virtualization programmatically creates, snapshots, deletes and restores software-based virtual machines (VMs), NSX network virtualization programmatically creates, snapshots, deletes, and restores software-based virtual networks. The result is a completely transformative approach to networking that not only enables data center managers to achieve orders of magnitude better agility and economics, but also allows for a vastly simplified operational model for the underlying physical network. With the ability to be deployed on any IP network, including both existing traditional networking models and next-generation fabric architectures from any vendor, NSX is a completely non-disruptive solution. In fact, with NSX, the physical network infrastructure you already have is all you need to deploy a software-defined data center. The NSX Manager provides the graphical user interface (GUI) and the REST APIs for creating, configuring, and monitoring NSX components, such as controllers, logical switches, and edge services gateways. The NSX Manager provides an aggregated system view and is the centralized network management component of NSX. NSX Manager is installed as a virtual appliance on any ESX host in your vCenter environment. Support requests can be filed with VMware using KB article: http://kb.vmware.com/kb/2006985 To reach NSX Manager Service Composer UI, login to vSphere UI(https://<vsphere-ip>)->Networking & Security->Service Composer')
class VmwNsxManagerTypeSeverity(TextualConvention, Integer32):
    """SMI textual convention for NSX Manager event severity.

    An Integer32 restricted to the six enumerated severity codes used by
    NSX Manager notifications (see ``namedValues`` below). Generated from
    the TEXTUAL-CONVENTION in VMWARE-NSX-MANAGER-MIB; the class attributes
    (``description``, ``status``, ``subtypeSpec``, ``namedValues``) are the
    fields pysnmp reads, so their names must not change.
    """
    description = 'Severity enumeration definition of NSX Manager events'
    status = 'current'
    # Constrain the base Integer32 to exactly the six enumerated values.
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))
    # Label <-> code mapping for the severity enumeration.
    namedValues = NamedValues(("informational", 1), ("low", 2), ("medium", 3), ("major", 4), ("critical", 5), ("high", 6))
# OID subtree roots under the module identity (1.3.6.1.4.1.6876.90.1):
#   .1 -> varbind data objects carried in every alert
#   .2 -> prefix for all NSX Manager notification (trap) OIDs
vmwNsxMAlertData = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 1))
if mibBuilder.loadTexts: vmwNsxMAlertData.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMAlertData.setDescription('This members of this group are the OIDs for VarBinds that contain data for ALL Alerts.')
vmwNsxMNotification = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2))
if mibBuilder.loadTexts: vmwNsxMNotification.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMNotification.setDescription('All notifications for NSX Manager use this oid prefix.')
# Varbind payload objects (vmwNsxMAlertData.1 .. .7). Each is a MibScalar
# with max-access "accessible-for-notify": it can only appear as a varbind
# inside a notification, never be read via GET. The setStatus/setDescription
# calls below each definition attach SMI metadata when loadTexts is enabled.
vmwNsxMEventCode = MibScalar((1, 3, 6, 1, 4, 1, 6876, 90, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: vmwNsxMEventCode.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEventCode.setDescription('The event code of the alert that was generated. To fetch a list of all the events with their code, severity and description please invoke the nsx-manager url https://<nsx-manager-host>/api/2.0/systemevent/eventcode . The event code specifically identifies each individual event type. This event code is uniquely assigned only once to a particular event type.')
vmwNsxMEventTimestamp = MibScalar((1, 3, 6, 1, 4, 1, 6876, 90, 1, 1, 2), DateAndTime()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: vmwNsxMEventTimestamp.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEventTimestamp.setDescription('The timestamp when the event was raised in the NSX Manager.')
vmwNsxMEventMessage = MibScalar((1, 3, 6, 1, 4, 1, 6876, 90, 1, 1, 3), OctetString()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: vmwNsxMEventMessage.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEventMessage.setDescription('This object provides a human readable description of the event or group of events')
# Severity uses the TEXTUAL-CONVENTION class defined above.
vmwNsxMEventSeverity = MibScalar((1, 3, 6, 1, 4, 1, 6876, 90, 1, 1, 4), VmwNsxManagerTypeSeverity()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: vmwNsxMEventSeverity.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEventSeverity.setDescription('The severity for the event that was generated. The severity is pre-defined and can only be changed from the NSX Manager section of vsphere web client if the administrator so wishes.')
vmwNsxMEventComponent = MibScalar((1, 3, 6, 1, 4, 1, 6876, 90, 1, 1, 5), OctetString()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: vmwNsxMEventComponent.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEventComponent.setDescription('The NSX manager component where this event was generated.')
# UUID type is imported from another MIB module earlier in the file.
vmwNsxMUuid = MibScalar((1, 3, 6, 1, 4, 1, 6876, 90, 1, 1, 6), UUID()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: vmwNsxMUuid.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMUuid.setDescription('The NSX manager UUID where this event was generated.')
vmwNsxMCount = MibScalar((1, 3, 6, 1, 4, 1, 6876, 90, 1, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: vmwNsxMCount.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMCount.setDescription('The count of the number of events for a particular group raised in the last 5 minute interval.')
# Grouped-notification branch (vmwNsxMNotification.0.1.0). The extra zero
# sub-identifiers exist so the notification OIDs conform to the SMIv2 trap
# OID layout (zero in the penultimate position, per the description below).
vmwNsxMBranch = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 0))
if mibBuilder.loadTexts: vmwNsxMBranch.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMBranch.setDescription('Branch segregated out for various groups and other future requirements.')
vmwNsxMGroupsBranch = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 0, 1))
if mibBuilder.loadTexts: vmwNsxMGroupsBranch.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMGroupsBranch.setDescription('Grouped Notifications will have this OID prefix.')
vmwNsxMGroupsPrefix = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 0, 1, 0))
if mibBuilder.loadTexts: vmwNsxMGroupsPrefix.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMGroupsPrefix.setDescription('Prefix added to place zero in penultimate sub-identifier of group oids.')
# Note: grouped notifications carry only timestamp, count, message and uuid
# varbinds (no event code/severity/component, unlike the per-event traps).
vmwNsxMConfigGroup = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 0, 1, 0, 1)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMCount"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMConfigGroup.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMConfigGroup.setDescription('Configuration notifications that are grouped will have this OID prefix.')
# SNMP-module notifications (vmwNsxMNotification.1.0.*). Each NotificationType
# lists the six standard varbinds defined above (event code, timestamp,
# message, severity, component, uuid).
vmwNsxMSnmp = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 1))
if mibBuilder.loadTexts: vmwNsxMSnmp.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMSnmp.setDescription('Notifications that are Snmp related will have this OID prefix.')
vmwNsxMSnmpPrefix = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 1, 0))
if mibBuilder.loadTexts: vmwNsxMSnmpPrefix.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMSnmpPrefix.setDescription("This group is actually the prefix one uses when creating vmware NSX manager specific trap OID's for Snmp module.")
vmwNsxMSnmpDisabled = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 1, 0, 1)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMSnmpDisabled.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMSnmpDisabled.setDescription('This notification is sent when the sending out of Snmp traps is disabled. This would most likely be the last Snmp trap the snmp manager receives. You may some times not receive it in case of high volume of traps. In those cases you can rely on the heartbeat traps not being sent out. Action required: None. If the sending of Snmp traps is enabled a warmStart trap is received. Frequency of traps: Once, whenever the sending snmp traps is disabled.')
vmwNsxMSnmpManagerConfigUpdated = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 1, 0, 2)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMSnmpManagerConfigUpdated.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMSnmpManagerConfigUpdated.setDescription("This notification is sent when the snmp manager configuration has been updated. The event message will carry the semicolon separated new snmp managers' details. Action required: None Frequency of traps: Once, whenever the Snmp manager configuration is updated.")
# Security-module notifications (vmwNsxMNotification.2.0.*): blacklist events,
# SSO/lookup-service configuration, role assignment, and vCenter connectivity.
# All carry the same six standard varbinds.
vmwNsxMSecurity = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 2))
if mibBuilder.loadTexts: vmwNsxMSecurity.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMSecurity.setDescription('Notifications that are security related will have this OID prefix.')
vmwNsxMSecurityPrefix = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 2, 0))
if mibBuilder.loadTexts: vmwNsxMSecurityPrefix.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMSecurityPrefix.setDescription("This group is actually the prefix one uses when creating vmware NSX manager specific trap OID's for security module.")
vmwNsxMIpAddedBlackList = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 2, 0, 1)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMIpAddedBlackList.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMIpAddedBlackList.setDescription('Whenever user authentication fails for number of times that user is blacklisted and further login attempts are disabled for that user from given IP address for some time. Action required: None Frequency of traps: Whenever user authentication fails consecutively within some time.')
vmwNsxMIpRemovedBlackList = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 2, 0, 2)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMIpRemovedBlackList.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMIpRemovedBlackList.setDescription('After user is blacklisted, after blacklist duration expires, user is removed from blacklist. Action required: None Frequency of traps: Whenever blacklist duration expires for any user.')
vmwNsxMSsoConfigFailure = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 2, 0, 3)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMSsoConfigFailure.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMSsoConfigFailure.setDescription('Whenever configuration of lookup service / SSO fails due to various reasons like invalid credentials, invalid configuration, time sync problem etc. Action required: Check the event message and reconfigure lookup service with correct details. Frequency of traps: Once per failed configuration of lookup service.')
vmwNsxMSsoUnconfigured = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 2, 0, 4)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMSsoUnconfigured.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMSsoUnconfigured.setDescription('Whenever user unconfigures lookup service. Action required: None Frequency of traps: Once per unconfiguration event of lookup service.')
vmwNsxMUserRoleAssigned = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 2, 0, 5)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMUserRoleAssigned.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMUserRoleAssigned.setDescription('When role is assigned on NSX manager for vCenter user. Action required: None Frequency of traps: Once for each user who is assigned role.')
vmwNsxMUserRoleUnassigned = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 2, 0, 6)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMUserRoleUnassigned.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMUserRoleUnassigned.setDescription('When role is unassigned on NSX manager for vCenter user. Action: None Frequency of traps: Once for each user where role is removed.')
vmwNsxMGroupRoleAssigned = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 2, 0, 7)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMGroupRoleAssigned.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMGroupRoleAssigned.setDescription('When role is assigned on NSX manager for vCenter group. Action required: None Frequency of traps: Once for each group who is assigned role.')
vmwNsxMGroupRoleUnassigned = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 2, 0, 8)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMGroupRoleUnassigned.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMGroupRoleUnassigned.setDescription('When role is unassigned on NSX manager for vCenter group. Action required: None Frequency of traps: Once for each group where role is removed.')
vmwNsxMVcLoginFailed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 2, 0, 9)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMVcLoginFailed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMVcLoginFailed.setDescription('Whenever Connection with vCenter starts failing due to invalid credentials. Action required: Reconfigure NSX Manager vCenter configuration with correct credentials.')
vmwNsxMVcDisconnected = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 2, 0, 10)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMVcDisconnected.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMVcDisconnected.setDescription('Whenever there is disconnectivity for default VCenter Connection maintained by NSX. Action required: Administrator needs to check the connectivity with vCenter for network problems or any other reasons.')
vmwNsxMLostVcConnectivity = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 2, 0, 11)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMLostVcConnectivity.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMLostVcConnectivity.setDescription('Whenever there is disconnectivity for default VCenter Connection maintained by NSX. Action required: Administrator needs to check the connectivity with vCenter for network problems or any other reasons.')
vmwNsxMSsoDisconnected = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 2, 0, 12)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMSsoDisconnected.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMSsoDisconnected.setDescription('Whenever there is disconnection with SSO lookup service. Action required: Please check the configuration for possible disconnection reasons like Invalid Credentials, Time sync issues, Network connectivity problems etc. Navigate to Appliance management Web UI in browser (https://<nsx-manager-host>/) traverse to Manage vCenter Registration tab and verify the configuration for SSO Lookupservice. Frequency of traps: Once per disconnect event, default frequency to check SSO connection state is 1 hour.')
# Firewall-module notifications (vmwNsxMNotification.3.0.*): distributed
# firewall (DFW) filter/rule/container/spoofguard configuration results and
# resource-threshold alarms. All carry the same six standard varbinds.
# NOTE: the setStatus/setDescription lines for the final NotificationType in
# this branch continue past this chunk of the file.
vmwNsxMFirewall = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3))
if mibBuilder.loadTexts: vmwNsxMFirewall.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFirewall.setDescription('Notifications that are firewall related will have this OID prefix.')
vmwNsxMFirewallPrefix = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0))
if mibBuilder.loadTexts: vmwNsxMFirewallPrefix.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFirewallPrefix.setDescription("This group is actually the prefix one uses when creating vmware NSX manager specific trap OID's for firewall module.")
vmwNsxMFltrCnfgUpdateFailed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 1)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFltrCnfgUpdateFailed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFltrCnfgUpdateFailed.setDescription('NSX Manager failed to enforce DFW. VMs on this host may not be protected by the DFW. Contextual data provided with this event may indicate the cause of this failure. This could happen if the VIB version mismatches on the NSX Manager and ESX host. This may happen during an upgrade. Please collect ESX and NSX Manager tech support bundle and open a SR with Vmware techsupport. See http://kb.vmware.com/kb/2074678 and http://kb.vmware.com/kb/1010705')
vmwNsxMFltrCnfgNotAppliedToVnic = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 2)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFltrCnfgNotAppliedToVnic.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFltrCnfgNotAppliedToVnic.setDescription('NSX Manager failed to enforce DFW configuration on a vnic. This particular VM may not be protected by the DFW. Contextual data provided with this event may indicate the cause of this failure.This could happen if the VIB version mismatches on the NSX Manager and ESX host. This may happen during an upgrade. Please collect ESX and NSX Manager tech support bundle and open a SR with Vmware techsupport. See http://kb.vmware.com/kb/2074678 and http://kb.vmware.com/kb/1010705')
vmwNsxMFltrCnfgAppliedToVnic = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 3)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFltrCnfgAppliedToVnic.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFltrCnfgAppliedToVnic.setDescription('Successfully updated filter config. Action required: None')
vmwNsxMFltrCreatedForVnic = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 4)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFltrCreatedForVnic.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFltrCreatedForVnic.setDescription('Filter created. DFW is enforced in the datapath for the vnic. Action required: None')
vmwNsxMFltrDeletedForVnic = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 5)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFltrDeletedForVnic.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFltrDeletedForVnic.setDescription('Filter deleted. DFW is removed from the vnic. Action required: None')
vmwNsxMFirewallConfigUpdateFailed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 6)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFirewallConfigUpdateFailed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFirewallConfigUpdateFailed.setDescription('Firewall rule Configuration between the NSX Manager and the host is not in sync. Contextual data provided with this event may indicate the cause of this failure. Verify that the host in question was properly prepared by NSX Manager. Collect error logs (vsfwd.log) when the host received firewall config. Force sync firewall config using ForceSync API/UI. See kb.vmware.com/kb/2125437 . If the issue persists, please collect ESX and NSX Manager tech support bundle and open a SR with Vmware techsupport. See http://kb.vmware.com/kb/2074678 and http://kb.vmware.com/kb/1010705')
vmwNsxMFirewallRuleFailedVnic = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 7)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFirewallRuleFailedVnic.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFirewallRuleFailedVnic.setDescription('Failed to apply Distributed Firewall configuration. Contextual data provided with this event may indicate the cause of this failure. Collect error logs (vmkernel.log) when the firewall configuration was applied to the vnic. vsip kernel heaps may not have enough free memory. Check VSFWD logs . See kb.vmware.com/kb/2125437. If the issue persists, please collect the ESX and NSX Manager tech support bundle and open a SR with Vmware techsupport. See http://kb.vmware.com/kb/2074678 and http://kb.vmware.com/kb/1010705')
vmwNsxMFirewallRuleAppliedVnic = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 8)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFirewallRuleAppliedVnic.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFirewallRuleAppliedVnic.setDescription('Applied firewall config. Key value will have context info like generation number and also other debugging info. Action required: None')
vmwNsxMCntnrCnfgUpdateFailed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 9)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMCntnrCnfgUpdateFailed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMCntnrCnfgUpdateFailed.setDescription('Failed receive, parse or update the container configuration. Contextual data provided with this event may indicate the cause of this failure. Collect error logs (vmkernel.log) when firewall configuration was applied to the vnic. Verify that vsip kernel heaps have enough free memory. Check VSFWD logs. See kb.vmware.com/kb/2125437 . If the issue persists, please collect ESX and NSX Manager tech support bundle and open a SR with Vmware techsupport. See http://kb.vmware.com/kb/2074678 and http://kb.vmware.com/kb/1010705')
vmwNsxMFlowMissed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 10)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFlowMissed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFlowMissed.setDescription('Flow missed. Contextual data provided with this event may indicate the cause of this failure. Collect error logs (vmkernel.log) when firewall configuration was applied to the vnic. Verify that vsip kernel heaps have enough free memory and vsfwd memory consumption is within resource limits. Check VSFWD logs. See kb.vmware.com/kb/2125437. If the issue persists, please collect ESX and NSX Manager tech support bundle and open a SR with Vmware techsupport. See http://kb.vmware.com/kb/2074678 and http://kb.vmware.com/kb/1010705')
vmwNsxMSpoofGuardCnfgUpdateFailed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 11)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMSpoofGuardCnfgUpdateFailed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMSpoofGuardCnfgUpdateFailed.setDescription('Failed to receive, parse or Update the spoofguard configuration. Contextual data provided with this event may indicate the cause of this failure. Verify that the host in question was properly prepared by NSX Manager. Collect error logs (vmkernel.log) when the spoofguard configuration was applied to the host. For Sync the firewall configuration . See kb.vmware.com/kb/2125437.')
vmwNsxMSpoofGuardFailed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 12)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMSpoofGuardFailed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMSpoofGuardFailed.setDescription('Failed to apply spoofguard to the vnic. Contextual data provided with this event may indicate the cause of this failure. Verify that vsip kernel heaps have enough free memory. Please collect ESX and NSX Manager tech support bundle and open a SR with Vmware techsupport. See http://kb.vmware.com/kb/2074678 and http://kb.vmware.com/kb/1010705')
vmwNsxMSpoofGuardApplied = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 13)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMSpoofGuardApplied.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMSpoofGuardApplied.setDescription('Enabled spoofguard for vnic. Action required: None')
vmwNsxMSpoofGuardDisableFail = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 14)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMSpoofGuardDisableFail.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMSpoofGuardDisableFail.setDescription('Failed to disable spoofguard on the vnic. Please collect ESX and NSX Manager tech support bundle and open a SR with Vmware techsupport. See http://kb.vmware.com/kb/2074678 and http://kb.vmware.com/kb/1010705')
vmwNsxMSpoofGuardDisabled = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 15)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMSpoofGuardDisabled.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMSpoofGuardDisabled.setDescription('Disabled spoofguard for vnic. Action required: None')
vmwNsxMLegacyAppServiceDeletionFailed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 16)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMLegacyAppServiceDeletionFailed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMLegacyAppServiceDeletionFailed.setDescription('A notification generated when legacy application service VM deletion failed.')
vmwNsxMFirewallCpuThresholdCrossed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 17)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFirewallCpuThresholdCrossed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFirewallCpuThresholdCrossed.setDescription('vsfwd CPU usage threshold was exceeded. Reduce the amount of traffic of VMs on the host in question.')
vmwNsxMFirewallMemThresholdCrossed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 18)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFirewallMemThresholdCrossed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFirewallMemThresholdCrossed.setDescription('vsfwd memory threshold exceeded. Reduce the number of of VMs on the host in question, reduce the number of rules or containers in firewall config. Use appliedTo feature to limit the number of rules for the current cluster.')
vmwNsxMConnPerSecThrshldCrossed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 19)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMConnPerSecThrshldCrossed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMConnPerSecThrshldCrossed.setDescription('vsfwd Connectons Per Second (CPS) threshold exceeded. Reduce the amount of new connections of VMs on the host in question.')
vmwNsxMFirewallCnfgUpdateTimedOut = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 20)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFirewallCnfgUpdateTimedOut.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFirewallCnfgUpdateTimedOut.setDescription('NSX Manager waits for 2 minutes after publishing the Firewall configuration to each host in the cluster. If a host takes more than 2 minutes to process the data, it times out. Please check the Host in question. See if VSFWD is functioning or not. Also use CLI commands to verify if the rule realization is working properly or not. See kb.vmware.com/kb/2125437. Please collect ESX and NSX Manager tech support bundle and open a SR with Vmware techsupport. See http://kb.vmware.com/kb/2074678 and http://kb.vmware.com/kb/1010705')
vmwNsxMSpoofGuardCnfgUpdateTmOut = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 21)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMSpoofGuardCnfgUpdateTmOut.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMSpoofGuardCnfgUpdateTmOut.setDescription('NSX Manager waits for 2 minutes after publishing the Spoofguard configuration to each host in the cluster. If a host takes more than 2 minutes to process the data, it times out. Please check the Host in question. See if VSFWD is functioning or not. Also use CLI commands to verify if the rule realization is working properly or not. See kb.vmware.com/kb/2125437. Please collect ESX and NSX Manager tech support bundle and open a SR with Vmware techsupport. See http://kb.vmware.com/kb/2074678 and http://kb.vmware.com/kb/1010705')
vmwNsxMFirewallPublishFailed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 22)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFirewallPublishFailed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFirewallPublishFailed.setDescription('Firewall Configuration Publishing has failed for a given cluster/host. Please collect ESX and NSX Manager tech support bundle and open a SR with Vmware techsupport. See http://kb.vmware.com/kb/2074678 and http://kb.vmware.com/kb/1010705')
vmwNsxMCntnrUpdatePublishFailed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 23)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMCntnrUpdatePublishFailed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMCntnrUpdatePublishFailed.setDescription('Publishing of container (IP/MAC/vNIC) update pdate failed for a given host/cluster object. Please collect ESX and NSX Manager tech support bundle and open a SR with Vmware techsupport. See http://kb.vmware.com/kb/2074678 and http://kb.vmware.com/kb/1010705')
vmwNsxMSpoofGuardUpdatePublishFailed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 24)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMSpoofGuardUpdatePublishFailed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMSpoofGuardUpdatePublishFailed.setDescription('The publishing of the spoofguard updates on this host has failed. Please collect ESX and NSX Manager tech support bundle and open a SR with Vmware techsupport. See http://kb.vmware.com/kb/2074678 and http://kb.vmware.com/kb/1010705')
vmwNsxMExcludeListPublishFailed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 25)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMExcludeListPublishFailed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMExcludeListPublishFailed.setDescription('The publishing of the exclude list or updates to the exclude list on this host has failed. Please collect ESX and NSX Manager tech support bundle and open a SR with Vmware techsupport. See http://kb.vmware.com/kb/2074678 and http://kb.vmware.com/kb/1010705')
vmwNsxMFirewallCnfgUpdateOnDltCntnr = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 26)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFirewallCnfgUpdateOnDltCntnr.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFirewallCnfgUpdateOnDltCntnr.setDescription('Deletion of the object referenced in firewall rules. Action required: Go to the NSX manager DFW UI. All the invalid reference are marked invalid on the UI as well. Please remove the orphaned referenced and update the firewall rules.')
vmwNsxMHostSyncFailed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 27)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMHostSyncFailed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMHostSyncFailed.setDescription('Host-level force synchronization has failed. Please collect ESX and NSX Manager tech support bundle and open a SR with Vmware techsupport. See http://kb.vmware.com/kb/2074678 and http://kb.vmware.com/kb/1010705')
vmwNsxMHostSynced = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 28)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMHostSynced.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMHostSynced.setDescription('Force Sync operation for host succeeded. Action required: None')
vmwNsxMFirewallInstalled = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 29)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFirewallInstalled.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFirewallInstalled.setDescription('The Distributed Firewall was successfully Installed on the host.')
vmwNsxMFirewallInstallFailed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 30)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFirewallInstallFailed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFirewallInstallFailed.setDescription('The Distributed Firewall Installation has failed. Please collect ESX and NSX Manager tech support bundle and open a SR with Vmware techsupport. See http://kb.vmware.com/kb/2074678 and http://kb.vmware.com/kb/1010705')
vmwNsxMFirewallClusterInstalled = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 31)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFirewallClusterInstalled.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFirewallClusterInstalled.setDescription('The Distributed Firewall has been installed at the request of a user.')
vmwNsxMFirewallClusterUninstalled = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 32)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFirewallClusterUninstalled.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFirewallClusterUninstalled.setDescription('The Distributed Firewall has been uninstalled at the request of a user.')
vmwNsxMFirewallClusterDisabled = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 33)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFirewallClusterDisabled.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFirewallClusterDisabled.setDescription('The Distributed Firewall has been disabeld on the cluster at the request of a user.')
vmwNsxMFirewallForceSyncClusterFailed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 34)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFirewallForceSyncClusterFailed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFirewallForceSyncClusterFailed.setDescription('Force Sync operation for the cluster has failed. Use CLI commands to look at the logs and verify if any error messages appeared during the operation. See kb.vmware.com/kb/2125437. Please collect ESX and NSX Manager tech support bundle and open a SR with Vmware techsupport. See http://kb.vmware.com/kb/2074678 and http://kb.vmware.com/kb/1010705')
vmwNsxMFirewallForceSyncClusterSuccess = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 35)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFirewallForceSyncClusterSuccess.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFirewallForceSyncClusterSuccess.setDescription('Force Sync operation for cluster succeeded. Action required: None')
vmwNsxMFirewallVsfwdProcessStarted = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 36)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFirewallVsfwdProcessStarted.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFirewallVsfwdProcessStarted.setDescription('vsfwd process started on host. Action required: None')
vmwNsxMEdge = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4))
if mibBuilder.loadTexts: vmwNsxMEdge.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEdge.setDescription('Notifications that are edge related will have this OID prefix.')
vmwNsxMEdgePrefix = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4, 0))
if mibBuilder.loadTexts: vmwNsxMEdgePrefix.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEdgePrefix.setDescription("This group is actually the prefix one uses when creating vmware NSX manager specific trap OID's for edge module.")
vmwNsxMEdgeNoVmServing = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4, 0, 1)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMEdgeNoVmServing.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEdgeNoVmServing.setDescription('None of the Edge VMs found in serving state. There is a possibility of network disruption. Action required: System auto recovers from this state today. Event should be followed by traps with event code 30202 or 30203')
vmwNsxMEdgeGatewayCreated = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4, 0, 2)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMEdgeGatewayCreated.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEdgeGatewayCreated.setDescription('Edge Gateway created. Action required: None')
vmwNsxMEdgeVmBadState = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4, 0, 3)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMEdgeVmBadState.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEdgeVmBadState.setDescription('Edge VM in bad state. Needs a force sync. Action required: System auto triggres force sync but if problem is sustained then manual force sync should be triggered. For ESG force sync is disruptive and will reboot edge VMs.')
vmwNsxMEdgeVmCommFailed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4, 0, 4)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMEdgeVmCommFailed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEdgeVmCommFailed.setDescription('Failed to communicate with the Edge VM. Action required: Need investigation depending upon comunication channel. Log needs to be checked for VIX error code for futher action.')
vmwNsxMEdgeVmCnfgChanged = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4, 0, 5)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMEdgeVmCnfgChanged.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEdgeVmCnfgChanged.setDescription('A notification generated when NSX Edge VM configuration is changed. Action required: None')
vmwNsxMEdgeGatewayDeleted = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4, 0, 6)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMEdgeGatewayDeleted.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEdgeGatewayDeleted.setDescription('A notification generated when Edge Gateway is deleted. Action required: None')
vmwNsxMEdgeGatewayReDeployed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4, 0, 7)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMEdgeGatewayReDeployed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEdgeGatewayReDeployed.setDescription('A notification generated when Edge Gateway is redeployed. Action required: None')
vmwNsxMEdgeVmPowerOff = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4, 0, 8)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMEdgeVmPowerOff.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEdgeVmPowerOff.setDescription('A notification generated when NSX Edge VM is powered off. Action required: None')
vmwNsxMEdgeApplianceSizeChanged = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4, 0, 9)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMEdgeApplianceSizeChanged.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEdgeApplianceSizeChanged.setDescription('A notification generated when Edge appliance size has changed. Action required: None')
vmwNsxMEdgeUpgrade51x = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4, 0, 10)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMEdgeUpgrade51x.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEdgeUpgrade51x.setDescription('A notification generated when Edge Gateway is upgraded to 5.1.x. Action required: None')
vmwNsxMEdgeLicenseChanged = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4, 0, 11)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMEdgeLicenseChanged.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEdgeLicenseChanged.setDescription('A notification generated when Edge licensing changed on vCenter Server. Action required: None')
vmwNsxMEdgeApplianceMoved = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4, 0, 12)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMEdgeApplianceMoved.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEdgeApplianceMoved.setDescription('A notification generated when Edge appliance is moved in the vCenter inventory.')
vmwNsxMEdgeApplianceNotFound = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4, 0, 13)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMEdgeApplianceNotFound.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEdgeApplianceNotFound.setDescription('A notification generated when Edge appliance not found in the vCenter inventory. Action required: If VM is accidentally deleted, redeploy edge.')
vmwNsxMEdgeVMHealthCheckMiss = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4, 0, 14)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMEdgeVMHealthCheckMiss.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEdgeVMHealthCheckMiss.setDescription('A notification generated when Edge VM is not responding to health check. Action required: Communicaiton issues between manager and edge. Log analysis required to root cause issue.')
vmwNsxMEdgeHealthCheckMiss = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4, 0, 15)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMEdgeHealthCheckMiss.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEdgeHealthCheckMiss.setDescription('A notification generated when none of the Edge VMs are found in serving state. There is a possibility of network disruption. Action required: Commnunicaiton issues between manager and edge. Log analysis required to root cause issue.')
vmwNsxMEdgeCommAgentNotConnected = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4, 0, 16)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMEdgeCommAgentNotConnected.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEdgeCommAgentNotConnected.setDescription('A notification generated when Edge Communication Agent is not connected to vCenter Server. Action required: Check VSM and VC connectivity. Try registering VSM to VC')
vmwNsxMApplianceWithDifferentId = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4, 0, 17)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMApplianceWithDifferentId.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMApplianceWithDifferentId.setDescription('A notification generated when Edge VM is discovered with a different vmId. Action required: None')
vmwNsxMFirewallRuleModified = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4, 0, 18)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFirewallRuleModified.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFirewallRuleModified.setDescription('A notification generated when Edge firewall rule is modified. Action required: Revisit firewall rule and perform required updates')
vmwNsxMEdgeAntiAffinityRuleViolated = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4, 0, 19)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMEdgeAntiAffinityRuleViolated.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEdgeAntiAffinityRuleViolated.setDescription('A notification generated when powering on NSX Edge appliance violates a virtual machine anti-affinity rule. Action required: Anti affinity rules removed from cluster. Both HA VM may run on same host. Go to VC and please revisit anti affinity rules on Cluster')
vmwNsxMEdgeHaEnabled = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4, 0, 20)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMEdgeHaEnabled.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEdgeHaEnabled.setDescription('A notification generated when NSX Edge HighAvailability is enabled. Action required: None')
vmwNsxMEdgeHaDisabled = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4, 0, 21)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMEdgeHaDisabled.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEdgeHaDisabled.setDescription('A notification generated when NSX Edge HighAvailability is disabled. Action required: None')
vmwNsxMEdgeGatewayRecovered = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4, 0, 22)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMEdgeGatewayRecovered.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEdgeGatewayRecovered.setDescription('A notification generated when NSX Edge Gateway has recovered and now responding to health check. Action required: None')
vmwNsxMEdgeVmRecovered = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4, 0, 23)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMEdgeVmRecovered.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEdgeVmRecovered.setDescription('A notification generated when NSX Edge VM has recovered and now responding to health check. Actione required: None')
# ---------------------------------------------------------------------------
# NSX Edge notification (trap) definitions, registered under the OID prefix
# 1.3.6.1.4.1.6876.90.1.2.4.0 with trap ids 24..64.
#
# Every notification in this MIB carries the same six event varbinds, so the
# individual NotificationType objects are generated from a
# (symbol name, trap id, description) table instead of being written out
# one statement at a time.  The description text (and the 'current' status)
# is only attached when the MIB builder was asked to load texts.
# ---------------------------------------------------------------------------
_EDGE_VARBINDS = (
    ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"),
    ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"),
    ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"),
    ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"),
    ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"),
    ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"),
)
for _symbol, _trap_id, _text in (
    ("vmwNsxMEdgeGatewayUpgraded", 24, 'A notification generated when Edge Gateway is upgraded. Action required: None'),
    ("vmwNsxMEdgeVmHlthChkDisabled", 25, 'A notification generated when Edge VM health check is disabled on consecutive critical vix errors. Please redeploy or force sync vm to resume health check. Action required: This points to environmental issues that lead to repeated failure over vix. Log analysis needs to be done to identify root cause. Post resoving issues force sync edge vm to resume health check. Force sync and redeploy are disruptive operation.'),
    ("vmwNsxMEdgePrePublishFailed", 26, 'A notification generated when Pre Publish has failed on Edge VM. Action required: Firewall rules might be out of sync. System auto recovers but if problem persists then trigger force sync.'),
    ("vmwNsxMEdgeForcedSync", 27, 'A notification generated when Edge VM was force synced. Action required: None'),
    ("vmwNsxMEdgeVmBooted", 28, 'A notification generated when Edge VM was booted. Action required: None'),
    ("vmwNsxMEdgeVmInBadState", 29, 'A notification generated when Edge VM is in Bad State. Needs a force sync. Action required: Force sync required.'),
    ("vmwNsxMEdgeVmCpuUsageIncreased", 30, 'A notification generated when Edge VM CPU usage has increased. Action required: Spikes are normal but collect tech support logs for further analysis if high CPU sustained for longer duration.'),
    ("vmwNsxMEdgeVmMemUsageIncreased", 31, 'A notification generated when Edge VM Memory usage has increased. Action required: System recovers but collect tech support logs for further analysis.'),
    ("vmwNsxMEdgeVmProcessFailure", 32, 'A notification generated when Edge VM process monitor detects a process failure. Action required: System recovers but collect tech support logs for further analysis.'),
    ("vmwNsxMEdgeVmSysTimeBad", 33, 'A notification generated when Edge VM system time is bad. Action required: System recovers. Check NTP setting on hosts.'),
    ("vmwNsxMEdgeVmSysTimeSync", 34, 'A notification generated when Edge VM system time sync up happens. Action required: None'),
    ("vmwNsxMEdgeAesniCryptoEngineUp", 35, 'A notification generated when AESNI crypto engine is up. Action required: None'),
    ("vmwNsxMEdgeAesniCryptoEngineDown", 36, 'A notification generated when AESNI crypto engine is down. Action required: None'),
    ("vmwNsxMEdgeVmOom", 37, 'A notification generated when Edge VM is out of memory. The Edge is rebooting in 3 seconds. Action required: Collect tech support for further analysis.'),
    ("vmwNsxMEdgeFileSysRo", 38, 'A notification generated when Edge file system is read only. Action required: Check datastore issues, once resolved force sync is required.'),
    ("vmwNsxMEdgeHaCommDisconnected", 39, 'A notification generated when Edge HighAvailability communication channel is disconnected from peer node. Action required: None'),
    ("vmwNsxMEdgeHaSwitchOverSelf", 40, "A notification generated when High Availability is disabled for NSX Edge. The primary NSX Edge VM has its state transitioned from ACTIVE to SELF. High Availability (HA) ensures that NSX Edge services are always available, by deploying an additional Edge VM for failover. The primary NSX Edge VM is the ACTIVE node and the secondary VM is the STANDBY node. Whenever the ACTIVE VM is unreachable on account of VM powered off or network connectivity issues, the STANDBY VM takes over the ACTIVE vm's role. In the event NSX Edge High Availability is disabled, the STANDBY VM is deleted and the ACTIVE VM continues to function with its ACTIVE state transitioned to SELF. Action required: None"),
    ("vmwNsxMEdgeHaSwitchOverActive", 41, "A notification generated when High Availability switch over has happened for NSX Edge. The secondary NSX Edge VM has its state transitioned from STANDBY to ACTIVE. High Availability (HA) ensures that NSX Edge services are always available, by deploying an additional Edge VM for failover. The primary NSX Edge VM is the ACTIVE node and the secondary VM is the STANDBY node. Whenever the ACTIVE VM is unreachable on account of VM powered off or network connectivity issues, the STANDBY VM takes over the ACTIVE vm's role. Action required: None"),
    ("vmwNsxMEdgeHaSwitchOverStandby", 42, "A notification generated when High Availability switch over has happened for NSX Edge. The primary NSX Edge VM has its state transitioned from ACTIVE to STANDBY. High Availability (HA) ensures that NSX Edge services are always available, by deploying an additional Edge VM for failover. The primary NSX Edge VM is the ACTIVE node and the secondary VM is the STANDBY node. Whenever the ACTIVE VM is unreachable on account of VM powered off or network connectivity issues, the STANDBY VM takes over the ACTIVE vm's role. When connectivity is re-established between the NSX Edge VM's, one of the VM's state is transitioned from ACTIVE to STANDBY. Action required: None"),
    ("vmwNsxMEdgeMonitorProcessFailure", 43, 'A notification generated when Edge process monitor detected a process failure. Action required: Collect tech support logs for further analysis.'),
    ("vmwNsxMLbVirtualServerPoolUp", 44, 'A notification generated when LoadBalancer virtualServer/pool is up. Action required: None'),
    ("vmwNsxMLbVirtualServerPoolDown", 45, 'A notification generated when LoadBalancer virtualServer/pool is down.'),
    ("vmwNsxMLbVirtualServerPoolWrong", 46, 'A notification generated when LoadBalancer virtualServer/pool state is wrong.'),
    ("vmwNsxMLbPoolWarning", 47, 'A notification generated when LoadBalancer pool changed to a warning state.'),
    ("vmwNsxMIpsecChannelUp", 48, 'A notification generated when IPsec Channel is up. Action required: None'),
    ("vmwNsxMIpsecChannelDown", 49, 'A notification generated when IPsec Channel is down. Action required: Collect tech support logs for further analysis.'),
    ("vmwNsxMIpsecTunnelUp", 50, 'A notification generated when IPsec Tunnel is up. Action required: None'),
    ("vmwNsxMIpsecTunnelDown", 51, 'A notification generated when IPsec Tunnel is down. Action required: Collect tech support logs for further analysis.'),
    ("vmwNsxMIpsecChannelUnknown", 52, 'A notification generated when IPsec Channel status is unknown. Action required: Collect tech support logs for further analysis.'),
    ("vmwNsxMIpsecTunnelUnknown", 53, 'A notification generated when IPsec Tunnel status is unknown. Action required: Collect tech support logs for further analysis.'),
    ("vmwNsxMGlobalLbMemberUp", 54, 'A notification generated when Global Loadbalancer member status is up. Action required: None'),
    ("vmwNsxMGlobalLbMemberWarning", 55, 'A notification generated when Global Loadbalancer member status is warning.'),
    ("vmwNsxMGlobalLbMemberDown", 56, 'A notification generated when Global Loadbalancer member status is down.'),
    ("vmwNsxMGlobalLbMemberUnknown", 57, 'A notification generated when Global Loadbalancer member status is unknown.'),
    ("vmwNsxMGlobalLbPeerUp", 58, 'A notification generated when Global Loadbalancer peer status is up. Action required: None'),
    ("vmwNsxMGlobalLbPeerDown", 59, 'A notification generated when Global Loadbalancer peer status is down.'),
    ("vmwNsxMDhcpServiceDisabled", 60, 'A notification generated when DHCP Relay Service is disabled.'),
    ("vmwNsxMEdgeResourceReservationFailure", 61, 'Insufficient CPU and/or Memory Resources available on Host or Resource Pool, during resource reservation at the time of NSX Edge deployment. Resources are explicitly reserved to ensure sufficient resources are available for NSX Edge to service High Availability. User can view the available resources vs reserved resources by navigating to the page Home > Hosts and Clusters > [Cluster-name] > Monitor > Resource Reservation. Action required: After checking available resources, re-specify the resources as part of appliance configuration so that resource reservation succeeds.'),
    ("vmwNsxMEdgeSplitBrainDetected", 62, "Split Brain detected for NSX Edge with HighAvailability. NSX Edge VM's configured for High Availability are unable to determine if the other VM is alive due to network failure. In such scenario, both the VM's think the other is not alive and take on the ACTIVE state. This may cause network disruption. Action required: User will need to check network infrastructure (virtual and physical) to look for any failures, specially on the interfaces and the path configured for HA."),
    ("vmwNsxMEdgeSplitBrainRecovered", 63, "Resolved Split Brain for NSX Edge with HighAvailability. The network path used by the NSX Edge VM's High Availability has been re-established. NSX Edge VM's are able to communicate with each other, and one of the VM has taken the STANDBY role, resolving the ACTIVE-ACTIVE split brain scenario. Action required: None"),
    ("vmwNsxMEdgeSplitBrainRecoveryAttempt", 64, 'Attempted Split Brain resolution for NSX Edge. Split Brain recovery will be attempted on NSX Edge versions prior to 6.2.3, which are not based on BFD. Action required: None'),
):
    _notification = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4, 0, _trap_id)).setObjects(*_EDGE_VARBINDS)
    if mibBuilder.loadTexts:
        _notification.setStatus('current')
        _notification.setDescription(_text)
    # Bind the notification under its MIB symbol name at module level, exactly
    # as the unrolled per-symbol assignments did.
    globals()[_symbol] = _notification
# OID subtree (1.3.6.1.4.1.6876.90.1.2.5) for Endpoint-related notifications,
# plus the zero-suffixed prefix node the individual trap OIDs hang off.
vmwNsxMEndpoint = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 5))
vmwNsxMEndpointPrefix = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 5, 0))
if mibBuilder.loadTexts:
    vmwNsxMEndpoint.setStatus('current')
    vmwNsxMEndpoint.setDescription('Notifications that are Endpoint related will have this OID prefix.')
    vmwNsxMEndpointPrefix.setStatus('current')
    vmwNsxMEndpointPrefix.setDescription("This group is actually the prefix one uses when creating vmware NSX manager specific trap OID's for Endpoint module.")
# ---------------------------------------------------------------------------
# Endpoint / Guest Introspection notification (trap) definitions, registered
# under the OID prefix 1.3.6.1.4.1.6876.90.1.2.5.0 with trap ids 1..9.
#
# Same pattern as the Edge notifications above: all traps share the six
# standard event varbinds, so they are generated from a
# (symbol name, trap id, description) table.  Status/description are only
# attached when the MIB builder was asked to load texts.
# ---------------------------------------------------------------------------
_ENDPOINT_VARBINDS = (
    ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"),
    ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"),
    ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"),
    ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"),
    ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"),
    ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"),
)
for _symbol, _trap_id, _text in (
    ("vmwNsxMEndpointThinAgentEnabled", 1, 'A notification generated when Thin agent is enabled.'),
    ("vmwNsxMGuestIntrspctnEnabled", 2, 'A notification generated when Guest Introspection solution is enabled.'),
    ("vmwNsxMGuestIntrspctnIncompatibleEsx", 3, 'A notification generated when Guest Introspection solution was contacted by an incompatible version of the ESX module.'),
    ("vmwNsxMGuestIntrspctnEsxConnFailed", 4, 'A notification generated when connection between the ESX module and the Guest Introspection solution failed.'),
    ("vmwNsxMGuestIntrspctnStatusRcvFailed", 5, 'A notification generated when failed to receive status from Guest Introspection solution.'),
    ("vmwNsxMEsxModuleEnabled", 6, 'A notification generated when ESX module is enabled.'),
    ("vmwNsxMEsxModuleUninstalled", 7, 'A notification generated when ESX module is uninstalled.'),
    ("vmwNsxMGuestIntrspctnHstMxMssngRep", 8, 'A notification generated when Guest Introspection host MUX is missing report.'),
    ("vmwNsxMEndpointUndefined", 9, 'A notification generated when Endpoint is undefined.'),
):
    _notification = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 5, 0, _trap_id)).setObjects(*_ENDPOINT_VARBINDS)
    if mibBuilder.loadTexts:
        _notification.setStatus('current')
        _notification.setDescription(_text)
    # Publish the notification under its MIB symbol name at module level.
    globals()[_symbol] = _notification
vmwNsxMEam = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 6))
if mibBuilder.loadTexts: vmwNsxMEam.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEam.setDescription('Notifications that are Eam related will have this OID prefix.')
vmwNsxMEamPrefix = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 6, 0))
if mibBuilder.loadTexts: vmwNsxMEamPrefix.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEamPrefix.setDescription("This group is actually the prefix one uses when creating vmware NSX manager specific trap OID's for Eam module.")
vmwNsxMEamGenericAlarm = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 6, 0, 1)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMEamGenericAlarm.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEamGenericAlarm.setDescription('EAM reports problems to NSX during vib/service VM install/upgrade as these traps. Action required: Use resolve API to resolve the Alarm. Frequency of traps: N times per cluster per user action, where N is number of hosts in a cluster.')
vmwNsxMFabric = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 7))
if mibBuilder.loadTexts: vmwNsxMFabric.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFabric.setDescription('Notifications that are Fabric related will have this OID prefix.')
vmwNsxMFabricPrefix = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 7, 0))
if mibBuilder.loadTexts: vmwNsxMFabricPrefix.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFabricPrefix.setDescription("This group is actually the prefix one uses when creating vmware NSX manager specific trap OID's for Fabric module.")
vmwNsxMFabricDplymntStatusChanged = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 7, 0, 1)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFabricDplymntStatusChanged.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFabricDplymntStatusChanged.setDescription('The status of a service on a cluster has changed. It can change to RED(Failure), GREEN(Success), YELLOW(in-progress). Action required: RED state would be accompanied with an EAM Alarm/Event/Trap, that indicates root cause. Use resolver API to fix it. Frequency of traps: Once per state change. State could change 2-3 times per user operation [Deploy/Undeploy/Update]')
vmwNsxMFabricDplymntUnitCreated = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 7, 0, 2)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFabricDplymntUnitCreated.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFabricDplymntUnitCreated.setDescription('NSX Manager has created the required objects for deploying a service on a cluster. This would be followed by deployment of the service on all hosts in the cluster. Action required: None Frequency: Once per cluster')
vmwNsxMFabricDplymntUnitUpdated = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 7, 0, 3)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFabricDplymntUnitUpdated.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFabricDplymntUnitUpdated.setDescription('NSX Manager has made changes in the objects required for deploying a service on a cluster. This would be followed by updation of the service on all hosts in the cluster. Action required: None Frequency of traps: Once per cluster per user operation [Update]')
vmwNsxMFabricDplymntUnitDestroyed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 7, 0, 4)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFabricDplymntUnitDestroyed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFabricDplymntUnitDestroyed.setDescription('A service has been removed from all hosts in a cluster. NSX Manager has deleted the objects for the service on the cluster. Action required: None Frequency of traps: Once per cluster')
vmwNsxMDataStoreNotCnfgrdOnHost = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 7, 0, 5)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMDataStoreNotCnfgrdOnHost.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMDataStoreNotCnfgrdOnHost.setDescription('Datastore could not be configured on host, probably its not connected. Action required: Ensure that datastore is connected to the host. Use resolve API to resolve the Alarm. Service will be deployed. Frequency of traps: Once per cluster per user operation [Deploy].')
vmwNsxMFabricDplymntInstallationFailed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 7, 0, 6)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFabricDplymntInstallationFailed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFabricDplymntInstallationFailed.setDescription('Installation of service failed, please check if ovf/vib urls are accessible, in correct format and all the properties in ovf environment have been configured in service attributes. Please check logs for details. Action required: Ensure that ovf/vib urls accessible from VC and are in correct format. Use resolve API to resolve the Alarm. Service will be deployed. Frequency of traps: Once per cluster per user operation [Deploy].')
vmwNsxMFabricAgentCreated = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 7, 0, 7)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFabricAgentCreated.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFabricAgentCreated.setDescription('The service has been successfully installed on a host. Action required: None Frequency of traps: N times per cluster, where N is number of hosts in a cluster.')
vmwNsxMFabricAgentDestroyed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 7, 0, 8)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFabricAgentDestroyed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFabricAgentDestroyed.setDescription('The service has been successfully removed from a host. Action required: None Frequency of traps: N times per cluster, where N is number of hosts in a cluster.')
vmwNsxMFabricSrvceNeedsRedplymnt = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 7, 0, 9)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFabricSrvceNeedsRedplymnt.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFabricSrvceNeedsRedplymnt.setDescription('Service will need to be redeployed as the location of the OVF / VIB bundles to be deployed has changed. Action required: Use resolve API to resolve the Alarm. Service will be redeployed. Frequency of traps: N times per NSX Manager IP change, where N is number of cluster and service combinations deployed.')
vmwNsxMUpgradeOfDplymntFailed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 7, 0, 10)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMUpgradeOfDplymntFailed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMUpgradeOfDplymntFailed.setDescription('Upgrade of deployment unit failed, please check if ovf/vib urls are accessible, in correct format and all the properties in ovf environment have been configured in service attributes. Please check logs for details. Action required: Ensure that ovf/vib urls accessible from VC and are in correct format. Use resolve API to resolve the Alarm. Service will be redeployed. Frequency of traps: Once per cluster per user operation [Upgrade]')
vmwNsxMFabricDependenciesNotInstalled = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 7, 0, 11)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFabricDependenciesNotInstalled.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFabricDependenciesNotInstalled.setDescription('The service being installed is dependent on another service that has not yet been installed. Action required: Deploy the required service on the cluster. Frequency of traps: Once per cluster per user operation [Deploy]')
vmwNsxMFabricErrorNotifSecBfrUpgrade = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 7, 0, 12)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFabricErrorNotifSecBfrUpgrade.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFabricErrorNotifSecBfrUpgrade.setDescription('Error while notifying security solution before upgrade. The solution may not be reachable/responding. Action required: Ensure that solution urls are accessible from NSX. Use resolve API to resolve the Alarm. Service will be redeployed. Frequency of traps: Once per cluster per user operation [Upgrade]')
vmwNsxMFabricErrCallbackNtRcvdUpgrade = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 7, 0, 13)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFabricErrCallbackNtRcvdUpgrade.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFabricErrCallbackNtRcvdUpgrade.setDescription('Did not receive callback from security solution for upgrade notification even after timeout. Action required: Ensure that solution urls are accessible from NSX, and NSX is reachable from the solution. Use resolve API to resolve the Alarm. Service will be redeployed. Frequency : Once per cluster per user operation [Upgrade]')
vmwNsxMFabricErrCallbackNtRcvdUninstall = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 7, 0, 14)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFabricErrCallbackNtRcvdUninstall.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFabricErrCallbackNtRcvdUninstall.setDescription('Uninstallation of service failed. Action required: Ensure that solution urls are accessible from NSX, and NSX is reachable from the solution. Use resolve API to resolve the Alarm. Service will be removed. Frequency of traps: Once per cluster per user operation [Uninstall]')
vmwNsxMFabricUninstallServiceFailed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 7, 0, 15)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFabricUninstallServiceFailed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFabricUninstallServiceFailed.setDescription('Error while notifying security solution before uninstall. Resolve to notify once again, or delete to uninstall without notification. Action required: Ensure that solution urls are accessible from NSX, and NSX is reachable from the solution. Use resolve API to resolve the Alarm. Service will be removed. Frequency of traps: Once per cluster per user operation [Uninstall]')
vmwNsxMFabricErrorNotifSecBfrUninstall = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 7, 0, 16)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFabricErrorNotifSecBfrUninstall.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFabricErrorNotifSecBfrUninstall.setDescription('Error while notifying security solution before uninstall. Resolve to notify once again, or delete to uninstall without notification. Action required: Ensure that solution urls are accessible from NSX, and NSX is reachable from the solution. Use resolve API to resolve the Alarm. Service will be removed. Frequency of traps: Once per cluster per user operation [Uninstall]')
vmwNsxMFabricServerRebootUninstall = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 7, 0, 17)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFabricServerRebootUninstall.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFabricServerRebootUninstall.setDescription('Server rebooted while security solution notification for uninstall was going on. Action required: Ensure that solution urls are accessible from NSX. Use resolve API to resolve the Alarm. Service will be uninstalled. Frequency of traps: Once per cluster per user operation [Uninstall]')
vmwNsxMFabricServerRebootUpgrade = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 7, 0, 18)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFabricServerRebootUpgrade.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFabricServerRebootUpgrade.setDescription('Server rebooted while security solution notification for upgrade was going on. Action required: Ensure that solution urls are accessible from NSX. Use resolve API to resolve the Alarm. Service will be redeployed. Frequency of traps: Once per cluster per user operation [Upgrade]')
vmwNsxMFabricConnEamFailed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 7, 0, 19)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFabricConnEamFailed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFabricConnEamFailed.setDescription('NSX Manager relies on the ESX Agent Manager service in VC for deploying/monitoring NSX vibs on ESX. The connection to this EAM service has gone down. This could be due to EAM service or VC restart/stop or an issue in the EAM service. Action required: In the NSX UI, traverse to Manage, then NSX Management Service. Verify that the status of VC connection on this page is Green. Use the VC IP to verify that EAM is UP by visiting https://<vc ip>/eam/mob. Frequency of traps: Once per switch from success to failed EAM connection')
vmwNsxMFabricConnEamRestored = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 7, 0, 20)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFabricConnEamRestored.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFabricConnEamRestored.setDescription('NSX Manager relies on the EAM service in VC for deploying/monitoring NSX vibs on ESX. The connection of NSX to this EAM service was re-established successfully. Action required: None Frequency of traps: Once per switch from failed to success EAM connection')
vmwNsxMFabricPreUninstallCleanUpFailed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 7, 0, 21)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFabricPreUninstallCleanUpFailed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFabricPreUninstallCleanUpFailed.setDescription('Pre Uninstall cleanup failed. Action required: Use resolve API to resolve the Alarm. Service will be removed. Frequency of traps: Once per cluster per user operation [Uninstall]')
vmwNsxMFabricBackingEamNotFound = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 7, 0, 22)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFabricBackingEamNotFound.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFabricBackingEamNotFound.setDescription('The backing EAM agency for this deployment could not be found. It is possible that the VC services may still be initializing. Please try to resolve the alarm to check existence of the agency. In case you have deleted the agency manually, please delete the deployment entry from NSX. Action required: Use resolve API to check existence of the agency, if backing agency exists in EAM, else delete the deployment entry from NSX. Frequency of traps: Once per cluster.')
vmwNsxMDepPlugin = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 8))
if mibBuilder.loadTexts: vmwNsxMDepPlugin.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMDepPlugin.setDescription('Notifications that are DeploymentPlugin related will have this OID prefix.')
vmwNsxMDepPluginPrefix = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 8, 0))
if mibBuilder.loadTexts: vmwNsxMDepPluginPrefix.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMDepPluginPrefix.setDescription("This group is actually the prefix one uses when creating vmware NSX manager specific trap OID's for DeploymentPlugin module.")
vmwNsxMDepPluginIpPoolExhausted = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 8, 0, 1)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMDepPluginIpPoolExhausted.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMDepPluginIpPoolExhausted.setDescription('When deploying Guest Introspection or other VM based service with static IP, NSX Manager needs to have a IP pool, for IP assignment to the VM. This pool has been exhausted, and new service VMs cannot be provisioned. Action required: Traverse to the Networking & Security page on VMWare vSphere Web Client, then go to Installation, followed by Service Deployments. Note the IP pool name for the failed service. Now traverse to NSX Managers, then go to Manage tab, followed by Grouping Objects sub-tab. Click on IP Pools, and add more Ips to the static IP pool. Use resolve API to resolve the Alarm. Service will be deployed. Frequency of traps: N times per cluster, where N is number of hosts in the cluster.')
vmwNsxMDepPluginGenericAlarm = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 8, 0, 2)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMDepPluginGenericAlarm.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMDepPluginGenericAlarm.setDescription('Deployment plugin generic alarm. Action required: Use resolve API to resolve the Alarm. Service will be deployed. Frequency of traps: N times per cluster, where N is number of hosts in the cluster.')
vmwNsxMDepPluginGenericException = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 8, 0, 3)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMDepPluginGenericException.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMDepPluginGenericException.setDescription('Deployment plugin generic exception alarm. Action required: Use resolve API to resolve the Alarm. Service will be deployed. Frequency of traps: N times per cluster, where N is number of hosts in the cluster.')
vmwNsxMDepPluginVmReboot = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 8, 0, 4)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMDepPluginVmReboot.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMDepPluginVmReboot.setDescription('VM needs to be rebooted for some changes to be made/take effect. Action required: Use resolve API to resolve the Alarm. Frequency of traps: N times per cluster, where N is number of hosts in the cluster.')
vmwNsxMMessaging = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 9))
if mibBuilder.loadTexts: vmwNsxMMessaging.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMMessaging.setDescription('Notifications that are Messaging related will have this OID prefix.')
vmwNsxMMessagingPrefix = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 9, 0))
if mibBuilder.loadTexts: vmwNsxMMessagingPrefix.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMMessagingPrefix.setDescription("This group is actually the prefix one uses when creating vmware NSX manager specific trap OID's for Messaging module.")
vmwNsxMMessagingConfigFailed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 9, 0, 1)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMMessagingConfigFailed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMMessagingConfigFailed.setDescription('A notification generated when host messaging configuration failed.')
vmwNsxMMessagingReconfigFailed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 9, 0, 2)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMMessagingReconfigFailed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMMessagingReconfigFailed.setDescription('A notification generated when host messaging connection reconfiguration failed.')
vmwNsxMMessagingConfigFailedNotifSkip = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 9, 0, 3)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMMessagingConfigFailedNotifSkip.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMMessagingConfigFailedNotifSkip.setDescription('A notification generated when host messaging configuration failed and notifications were skipped.')
vmwNsxMMessagingInfraUp = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 9, 0, 4)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMMessagingInfraUp.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMMessagingInfraUp.setDescription('Manager runs a heartbeat with all hosts it manages. Missing heartbeat responses from a host indicate a communication issue between manager and the host. Such instances are indicated by event code 391002. When the communication is restored after such an instance, it is indicated by this event/trap. Action required: Refer to KB article https://kb.vmware.com/selfservice/microsites/search.do?language=en_US&cmd=displayKC&externalId=2133897 Frequency of traps: Will be seen within 3 minutes of communication being restored between manager and a host. URL: https://kb.vmware.com/selfservice/microsites/search.do?language=en_US&cmd=displayKC&externalId=2133897')
vmwNsxMMessagingInfraDown = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 9, 0, 5)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMMessagingInfraDown.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMMessagingInfraDown.setDescription('Manager runs a heartbeat with all hosts it manages. Missing heartbeat responses from a host indicate a communication issue between manager and the host. In the case of such a communication issue, this trap will be sent. Action required: Refer to KB article https://kb.vmware.com/selfservice/microsites/search.do?language=en_US&cmd=displayKC&externalId=2133897 Frequency of traps: Will be seen within 6 minutes of a communication failure between manager and a host. URL: https://kb.vmware.com/selfservice/microsites/search.do?language=en_US&cmd=displayKC&externalId=2133897')
vmwNsxMMessagingDisabled = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 9, 0, 6)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMMessagingDisabled.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMMessagingDisabled.setDescription("A messaging client such as a Host, an Edge appliance or a USVM appliance is expected to change its password within 2 hours of being prepped or deployed. If the password isn't changed in this duration, the messaging account for the client is disabled. Action required: This event will indicate communication issue between the manager and the client. Verify if the client is running. If running, in case of a Host, re-sync messaging. In case of an Edge or a USVM, redeploy. Frequency of traps: Will be seen 2 hours after prep, host re-sync or deployment of appliance. URL: https://kb.vmware.com/selfservice/microsites/search.do?language=en_US&cmd=displayKC&externalId=2133897")
vmwNsxMServiceComposer = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 10))
if mibBuilder.loadTexts: vmwNsxMServiceComposer.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMServiceComposer.setDescription('Notifications that are ServiceComposer related will have this OID prefix.')
vmwNsxMServiceComposerPrefix = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 10, 0))
if mibBuilder.loadTexts: vmwNsxMServiceComposerPrefix.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMServiceComposerPrefix.setDescription("This group is actually the prefix one uses when creating vmware NSX manager specific trap OID's for ServiceComposer module.")
vmwNsxMServiceComposerPolicyOutOfSync = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 10, 0, 1)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMServiceComposerPolicyOutOfSync.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMServiceComposerPolicyOutOfSync.setDescription("Service Composer encountered an error while attempting to enforce rules on this Policy. Action required: Administrator needs to check the rules on the given Policy for any errors, as reported in the message. After fixing the rules in the Policy, user would need to resolve the alarm to bring this Policy back in sync. Policy's alarm can either be resolved from NSX Manager Service Composer UI or by using alarms API. Frequency of traps: This trap is generated only once, if an error is encountered while enforcing the Policy.")
vmwNsxMServiceComposerPolicyDeleted = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 10, 0, 2)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMServiceComposerPolicyDeleted.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMServiceComposerPolicyDeleted.setDescription('A Policy got deleted as a result of the internal SecurityGroup, over which the Policy was created, got deleted. Frequency of traps: This event is generated once every time any internal SecurityGroup, that is being consumed by a policy, gets deleted.')
vmwNsxMServiceComposerFirewallPolicyOutOfSync = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 10, 0, 3)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMServiceComposerFirewallPolicyOutOfSync.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMServiceComposerFirewallPolicyOutOfSync.setDescription("Service Composer encountered an error while attempting to enforce Firewall rules on this Policy. Firewall related changes on this Policy will not take effect, until this alarm is resolved. Action required: Administrator needs to check the rules on the given Policy for any errors, as reported in the message. After fixing the rules in the Policy, user would need to resolve the alarm to bring this Policy back in sync. Policy's alarm can either be resolved from NSX Manager Service Composer UI or by using alarms API. Frequency of traps: This trap is generated only once, if an error is encountered while enforcing the Policy.")
vmwNsxMServiceComposerNetworkPolicyOutOfSync = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 10, 0, 4)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMServiceComposerNetworkPolicyOutOfSync.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMServiceComposerNetworkPolicyOutOfSync.setDescription("Service Composer encountered an error while attempting to enforce Network Introspection rules on this Policy. Network Introspection related changes on this Policy will not take effect, until this alarm is resolved. Action required: Administrator needs to check the rules on the given Policy for any errors, as reported in the message. After fixing the rules in the Policy, user would need to resolve the alarm to bring this Policy back in sync. Policy's alarm can either be resolved from NSX Manager Service Composer UI or by using alarms API. Frequency of traps: This trap is generated only once, if an error is encountered while enforcing the Policy.")
vmwNsxMServiceComposerGuestPolicyOutOfSync = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 10, 0, 5)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMServiceComposerGuestPolicyOutOfSync.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMServiceComposerGuestPolicyOutOfSync.setDescription("Service Composer encountered an error while attempting to enforce Guest Introspection rules on this Policy. Guest Introspection related changes on this Policy will not take effect, until this alarm is resolved. Action required: Administrator needs to check the rules on the given Policy for any errors, as reported in the message. After fixing the rules in the Policy, user would need to resolve the alarm to bring this Policy back in sync. Policy's alarm can either be resolved from NSX Manager Service Composer UI or by using alarms API. Frequency of traps: This trap is generated only once, if an error is encountered while enforcing the Policy.")
vmwNsxMServiceComposerOutOfSync = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 10, 0, 6)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMServiceComposerOutOfSync.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMServiceComposerOutOfSync.setDescription('Service Composer encountered an error synchronizing Policies. Any changes on Service Composer will not be pushed to Firewall/Network Introspection Services, until this alarm is resolved. Action required: Administrator needs to check Policies and/or Firewall sections for any errors, as reported in the message. After fixing the errors, user would need to resolve the alarm to bring Service Composer back in sync. Alarm can either be resolved from NSX Manager Service Composer UI or by using alarms API. Frequency of traps: This trap is generated only once, whenever an error is encountered.')
vmwNsxMServiceComposerOutOfSyncRebootFailure = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 10, 0, 7)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMServiceComposerOutOfSyncRebootFailure.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMServiceComposerOutOfSyncRebootFailure.setDescription('Service Composer encountered an error while synchronizing Policies on reboot. Action required: Administrator needs to check Policies and/or Firewall config for any errors, as reported in the message. After fixing the errors, user would need to resolve the alarm to bring Service Composer back in sync. Alarm can either be resolved from NSX Manager Service Composer UI or by using alarms API. Frequency of traps: This trap is generated only once on NSX Manager reboot, if an error is encountered.')
vmwNsxMServiceComposerOutOfSyncDraftRollback = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 10, 0, 8)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMServiceComposerOutOfSyncDraftRollback.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMServiceComposerOutOfSyncDraftRollback.setDescription('Service Composer went out of sync due to rollback of drafts from Firewall. Any changes on Service Composer will not be pushed to Firewall/Network Introspection Services, until this alarm is resolved. Action required: Administrator needs to resolve the alarm to bring Service Composer back in sync. Alarm can either be resolved from NSX Manager Service Composer UI or by using alarms API. Frequency of traps: This trap is generated only once, whenever Firewall config is reverted to an older version of drafts.')
vmwNsxMServiceComposerOutOfSyncSectionDeletionFailure = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 10, 0, 9)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMServiceComposerOutOfSyncSectionDeletionFailure.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMServiceComposerOutOfSyncSectionDeletionFailure.setDescription("Service Composer encountered an error while deleting the section corresponding to the Policy. This generally happens if third party(NetX) service's Manager is not reachable. Action required: Administrator needs to check connectivity with third party(NetX) service's Manager. Once the connectivity is restored, user would need to resolve the alarm. Alarm can either be resolved from Service Composer UI or by using alarms API. Frequency of traps: This trap is generated only once if a failure is encountered while deleting a Policy's section on Policy deletion.")
vmwNsxMServiceComposerOutOfSyncPrecedenceChangeFailure = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 10, 0, 10)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMServiceComposerOutOfSyncPrecedenceChangeFailure.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMServiceComposerOutOfSyncPrecedenceChangeFailure.setDescription("Service Composer encountered an error reordering sections to reflect Policy's precedence change. This generally happens if there are Alarms on any other Policy. Action required: Administrator needs to check Policies and/or Firewall sections for any errors, as reported in the message. After fixing the errors, user would need to resolve the alarm. Alarm can either be resolved from NSX Manager Service Composer UI or by using alarms API. Frequency of traps: This trap is generated only once if a failure is encountered while reordering section to reflect precedence change.")
vmwNsxMServiceComposerOutOfSyncDraftSettingFailure = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 10, 0, 11)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMServiceComposerOutOfSyncDraftSettingFailure.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMServiceComposerOutOfSyncDraftSettingFailure.setDescription('Service Composer encountered an error while initializing auto save drafts setting. Action required: Administrator needs to check Policies and/or Firewall sections for any errors, as reported in the message. After fixing the errors, user would need to resolve the alarm. Alarm can either be resolved from NSX Manager Service Composer UI or by using alarms API. Frequency of traps: This trap is generated only once if a failure is encountered while initializing auto save drafts setting.')
vmwNsxMSvmOperations = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 11))
if mibBuilder.loadTexts: vmwNsxMSvmOperations.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMSvmOperations.setDescription('Notifications that are SvmOperations related will have this OID prefix.')
vmwNsxMSvmOperationsPrefix = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 11, 0))
if mibBuilder.loadTexts: vmwNsxMSvmOperationsPrefix.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMSvmOperationsPrefix.setDescription("This group is actually the prefix one uses when creating vmware NSX manager specific trap OID's for SvmOperations module.")
vmwNsxMInconsistentSvmAlarm = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 11, 0, 1)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMInconsistentSvmAlarm.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMInconsistentSvmAlarm.setDescription('Service VMs are deployed per ESX host, to provide functionality like guest introspection and McAfee/Trend virus checking in VMs on the host. An issue is detected with the state of the deployed Service VM. Follow instructions in http://kb.vmware.com/kb/2125482 to analyze the logs further. Warning: Resolving this alarm will delete the VM. After deletion you will see a different alarm saying VM is deleted. If you resolve same, it will reinstall the VM. If redeployment of the VM does not fix the original issue, the original alarm will be added back immediately. Action required: Use resolve API to resolve the Alarm. Frequency of traps: Once per host.')
vmwNsxMSvmRestartAlarm = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 11, 0, 2)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMSvmRestartAlarm.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMSvmRestartAlarm.setDescription('Service VMs are deployed per ESX host, to provide functionality like guest introspection and McAfee/Trend virus checking in VMs on the host. An issue is detected with the state of the deployed Service VM. Follow instructions in http://kb.vmware.com/kb/2125482 to analyze the logs further. Warning: Resolving this alarm will restart the VM. If the root cause here is not solved, the same alarm will be added back immediately. Action required: Use resolve API to resolve the Alarm. Frequency of traps: Once per host.')
vmwNsxMSvmAgentUnavailable = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 11, 0, 3)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMSvmAgentUnavailable.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMSvmAgentUnavailable.setDescription('An issue is detected while marking agent as available. Kindly check the logs. Resolving this alarm will attempt to mark the agent as available. Action required: Use resolve API to resolve the Alarm. Frequency of traps: Once per host.')
vmwNsxMTranslation = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 12))
if mibBuilder.loadTexts: vmwNsxMTranslation.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMTranslation.setDescription('Notifications that are Translation related will have this OID prefix.')
vmwNsxMTranslationPrefix = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 12, 0))
if mibBuilder.loadTexts: vmwNsxMTranslationPrefix.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMTranslationPrefix.setDescription("This group is actually the prefix one uses when creating vmware NSX manager specific trap OID's for Translation module.")
vmwNsxMVmAddedToSg = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 12, 0, 1)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMVmAddedToSg.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMVmAddedToSg.setDescription('A VM has got added to the SecurityGroup. Frequency of traps: Once for every VM getting added to any SecurityGroup.')
vmwNsxMVmRemovedFromSg = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 12, 0, 2)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMVmRemovedFromSg.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMVmRemovedFromSg.setDescription('A VM has got removed from the SecurityGroup. Frequency of traps: Once for every VM getting removed from any SecurityGroup.')
vmwNsxMUniversalSync = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 13))
if mibBuilder.loadTexts: vmwNsxMUniversalSync.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMUniversalSync.setDescription('Notifications that are UniversalSync related will have this OID prefix.')
vmwNsxMUniversalSyncPrefix = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 13, 0))
if mibBuilder.loadTexts: vmwNsxMUniversalSyncPrefix.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMUniversalSyncPrefix.setDescription("This group is actually the prefix one uses when creating vmware NSX manager specific trap OID's for UniversalSync module.")
vmwNsxMFullUniversalSyncFailed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 13, 0, 1)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFullUniversalSyncFailed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFullUniversalSyncFailed.setDescription("A failure is encountered when doing full sync of universal objects on a secondary NSX manager. IP address of the secondary NSX manager is present in event's message variable. Action required: Kindly check NSX manager logs on the secondary NSX manager on which the full sync has failed. Frequency of traps: This trap is generated once per NSX manager on which full sync failure is seen.")
vmwNsxMSecondaryDown = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 13, 0, 2)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMSecondaryDown.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMSecondaryDown.setDescription("Secondary NSX manager is unreachable. Action required: Kindly check if NSX manager is running and is reachable from primary NSX manager. IP address of the secondary NSX manager is present in event's message variable. Frequency of traps: This trap is generated once per NSX manager for which connection issue is seen.")
vmwNsxMUniversalSyncFailedForEntity = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 13, 0, 3)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMUniversalSyncFailedForEntity.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMUniversalSyncFailedForEntity.setDescription("A failure is encountered when doing sync of universal object on a secondary NSX manager. IP address of the secondary NSX manager is present in event's message variable. Action required: Kindly check NSX manager logs on the secondary NSX manager on which the sync has failed. Frequency of traps: This trap is generated once per universal object on a NSX manager on which sync failure is seen.")
vmwNsxMAsyncRest = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 14))
if mibBuilder.loadTexts: vmwNsxMAsyncRest.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMAsyncRest.setDescription('Notifications that are AsyncRest related will have this OID prefix.')
vmwNsxMAsyncRestPrefix = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 14, 0))
if mibBuilder.loadTexts: vmwNsxMAsyncRestPrefix.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMAsyncRestPrefix.setDescription("This group is actually the prefix one uses when creating vmware NSX manager specific trap OID's for AsyncRest module.")
vmwNsxMServerUp = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 14, 0, 1)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMServerUp.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMServerUp.setDescription('Denotes that NSX manager server is up and in running state, Informs clients of NSX Manager of the current state. Action required: None Frequency of traps: Once for every query')
vmwNsxMExtensionRegistration = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 15))
if mibBuilder.loadTexts: vmwNsxMExtensionRegistration.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMExtensionRegistration.setDescription('Notifications that are ExtensionRegistration related will have this OID prefix.')
vmwNsxMExtensionRegistrationPrefix = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 15, 0))
if mibBuilder.loadTexts: vmwNsxMExtensionRegistrationPrefix.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMExtensionRegistrationPrefix.setDescription("This group is actually the prefix one uses when creating vmware NSX manager specific trap OID's for ExtensionRegistration module.")
vmwNsxMExtensionRegistered = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 15, 0, 1)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMExtensionRegistered.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMExtensionRegistered.setDescription('Registers NSX manager as a vCenter extenstion. This is applicable when no other NSX Manager is registered with vCenter and the current NSX manager is the one registering with vCenter. Action required: None Frequency of traps: Only once when the extension is registered for the very first time.')
vmwNsxMExtensionUpdated = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 15, 0, 2)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMExtensionUpdated.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMExtensionUpdated.setDescription('Updates the vCenter extension registration with the new NSX Manager. This is applicable when there already exists another NSX manager that is registered as a vCenter extension and the current one overwrites it. Action required: None Frequency of traps: Every time a NSX Manager registers as a vCenter extension when there already exists another NSX manager registered with vCenter')
vmwNsxMDlp = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 16))
if mibBuilder.loadTexts: vmwNsxMDlp.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMDlp.setDescription('Notifications that are Dlp related will have this OID prefix.')
vmwNsxMDlpPrefix = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 16, 0))
if mibBuilder.loadTexts: vmwNsxMDlpPrefix.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMDlpPrefix.setDescription("This group is actually the prefix one uses when creating vmware NSX manager specific trap OID's for Dlp module.")
vmwNsxMDataSecScanStarted = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 16, 0, 1)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMDataSecScanStarted.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMDataSecScanStarted.setDescription('A notification generated when NSX Data Security scan started on VirtualMachine.')
vmwNsxMDataSecScanEnded = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 16, 0, 2)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMDataSecScanEnded.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMDataSecScanEnded.setDescription('A notification generated when NSX Data Security scan ended on VirtualMachine.')
vmwNsxMSamSystem = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 17))
if mibBuilder.loadTexts: vmwNsxMSamSystem.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMSamSystem.setDescription('Notifications that are SamSystem related will have this OID prefix.')
vmwNsxMSamSystemPrefix = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 17, 0))
if mibBuilder.loadTexts: vmwNsxMSamSystemPrefix.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMSamSystemPrefix.setDescription("This group is actually the prefix one uses when creating vmware NSX manager specific trap OID's for SamSystem module.")
vmwNsxMSamDataCollectionEnabled = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 17, 0, 1)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMSamDataCollectionEnabled.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMSamDataCollectionEnabled.setDescription('Service Activity Monitoring will start collecting data. Action required: None Frequency of traps: Event is triggered when SAM data collection state is toggled.')
vmwNsxMSamDataCollectionDisabled = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 17, 0, 2)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMSamDataCollectionDisabled.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMSamDataCollectionDisabled.setDescription('Service Activity Monitoring will stop collecting data. Action required: SAM data collection can be enabled to start collectiing data. Frequency of traps: Event is triggered when SAM data collection state is toggled')
vmwNsxMSamDataStoppedFlowing = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 17, 0, 3)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMSamDataStoppedFlowing.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMSamDataStoppedFlowing.setDescription('Service Activity Monitoring data stopped flowing from USVM Action required: Check the following - USVM log to see if heartbeats are recieved and sent - is the USVM running - is the Mux - USVM connection healthy - is the USVM - RMQ connection healthy - does the VM have endpoint driver installed Frequency of traps: Event is triggered when NSX Manager does not receives SAM data from USVM')
vmwNsxMSamDataResumedFlowing = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 17, 0, 4)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMSamDataResumedFlowing.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMSamDataResumedFlowing.setDescription('Service Activity Monitoring data resumes flowing from USVM Action required: None Frequency of traps: Event is triggered when SAM data is received from USVM.')
# --- USVM (service VM) notification subtree: 1.3.6.1.4.1.6876.90.1.2.18 ---
# The ".0" child is the conventional trap-prefix node under which the
# individual NotificationType OIDs are registered.
vmwNsxMUsvm = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 18))
if mibBuilder.loadTexts: vmwNsxMUsvm.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMUsvm.setDescription('Notifications that are Usvm related will have this OID prefix.')
vmwNsxMUsvmPrefix = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 18, 0))
if mibBuilder.loadTexts: vmwNsxMUsvmPrefix.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMUsvmPrefix.setDescription("This group is actually the prefix one uses when creating vmware NSX manager specific trap OID's for Usvm module.")
# Trap 18.0.1: USVM stopped sending heartbeats to the management plane.
vmwNsxMUsvmHeartbeatStopped = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 18, 0, 1)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMUsvmHeartbeatStopped.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMUsvmHeartbeatStopped.setDescription("USVM stopped sending heartbeats to management plane. Action required: Connection to NSX Manager was lost. Check why the Manager didn't send a heartbeat. Frequency of traps: Event is triggered when NSX Manager does not receives heartbeats from USVM")
# Trap 18.0.2: USVM heartbeats to the management plane resumed.
vmwNsxMUsvmHeartbeatResumed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 18, 0, 2)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMUsvmHeartbeatResumed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMUsvmHeartbeatResumed.setDescription('USVM will start sending heartbeats to management plane. Action required: None Frequency of traps: Event is triggered when NSX Manager receives heartbeats from USVM')
# Trap 18.0.3: Epsec Mux received a HELLO message from the USVM during
# initial connection establishment.
vmwNsxMUsvmReceivedHello = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 18, 0, 3)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMUsvmReceivedHello.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMUsvmReceivedHello.setDescription('USVM sent a HELLO message to Mux Action: None Frequency of traps: Event is triggered when Epsec Mux receives HELLO message from USVM during initial connection establishement.')
# --- VsmCore notification subtree: 1.3.6.1.4.1.6876.90.1.2.19 ---
vmwNsxMVsmCore = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 19))
if mibBuilder.loadTexts: vmwNsxMVsmCore.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMVsmCore.setDescription('Notifications that are VsmCore related will have this OID prefix.')
vmwNsxMVsmCorePrefix = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 19, 0))
if mibBuilder.loadTexts: vmwNsxMVsmCorePrefix.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMVsmCorePrefix.setDescription("This group is actually the prefix one uses when creating vmware NSX manager specific trap OID's for VsmCore module.")
# Trap 19.0.1: NSX Manager upgrade completed successfully.
vmwNsxMUpgradeSuccess = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 19, 0, 1)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMUpgradeSuccess.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMUpgradeSuccess.setDescription('A notification generated when NSX Manager upgraded successfully.')
# Trap 19.0.2: NSX Manager restore (from backup) completed successfully.
vmwNsxMRestoreSuccess = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 19, 0, 2)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMRestoreSuccess.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMRestoreSuccess.setDescription('A notification generated when NSX Manager restored successfully.')
# Trap 19.0.3: another machine on the network is using the NSX Manager's IP.
vmwNsxMDuplicateIp = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 19, 0, 3)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMDuplicateIp.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMDuplicateIp.setDescription('The NSX Manager IP has been assigned to another machine Action: None Frequency: This is triggered whenever NSX Manager detects that its IP address is being used by another machine in the same network')
# --- VXLAN notification subtree: 1.3.6.1.4.1.6876.90.1.2.20 ---
vmwNsxMVxlan = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 20))
if mibBuilder.loadTexts: vmwNsxMVxlan.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMVxlan.setDescription('Notifications that are Vxlan related will have this OID prefix.')
vmwNsxMVxlanPrefix = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 20, 0))
if mibBuilder.loadTexts: vmwNsxMVxlanPrefix.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMVxlanPrefix.setDescription("This group is actually the prefix one uses when creating vmware NSX manager specific trap OID's for Vxlan module.")
# Trap 20.0.1: backing dvPortgroups of a Logical Switch were modified/removed,
# or Control Plane mode migration failed.
vmwNsxMVxlanLogicalSwitchImproperlyCnfg = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 20, 0, 1)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMVxlanLogicalSwitchImproperlyCnfg.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMVxlanLogicalSwitchImproperlyCnfg.setDescription('This event is triggered if one or more distributed virtual port groups backing a certain Logical Switch were modified and/or removed. Or if migration of Control plane mode for a Logical Switch/Transport Zone failed. Action required: (1) If the event was triggered due to deletion/modification of backing distributed virtual port groups, then the error will be visible on Logical Switch UI page. Resolve from there will try and create missing distributed virtual port groups for the Logical Switch. (2) If event was triggered due to failure of Control plan mode migration, redo the migration for that Logical Switch or Transport Zone. Frequency of traps: Event is triggered due to user actions as explained in description. Affects: Logical Switch network traffic.')
# Trap 20.0.2: Logical Switch status is good again after error resolution.
vmwNsxMVxlanLogicalSwitchProperlyCnfg = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 20, 0, 2)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMVxlanLogicalSwitchProperlyCnfg.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMVxlanLogicalSwitchProperlyCnfg.setDescription('Logical Switch status has been marked good, most probably as result of resolving any errors on it. Action required: None Frequency of traps: Event is triggered when user resolves the Logical Switch error and as a result missing backing distributed virtual port groups are recreated.')
# Trap 20.0.3: failed to configure a vmknic as a VTEP.
vmwNsxMVxlanInitFailed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 20, 0, 3)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMVxlanInitFailed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMVxlanInitFailed.setDescription("Failed to configure vmknic as a VTEP, VXLAN traffic through this interface will be dropped until this is resolved. Action required: Check the host's vmkernel.log for more details. Frequency of traps: Every time a VTEP vmknic tries to connect to it's Distributed Virtual Port. Affects: VXLAN traffic on the affected Host.")
# Trap 20.0.4: failed to configure VXLAN on a Distributed Virtual Port.
vmwNsxMVxlanPortInitFailed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 20, 0, 4)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMVxlanPortInitFailed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMVxlanPortInitFailed.setDescription("Failed to configure VXLAN on the Distributed Virtual Port, the port will be disconnected. Action required: Check the host's vmkernel.log for more details. Frequency of traps: Every time a VXLAN vNic tries to connect to it's Distributed Virtual Port on the host. Affects: VXLAN traffic on the affected Host.")
# Trap 20.0.5: VXLAN config received but host has not enabled VXLAN on the VDS.
vmwNsxMVxlanInstanceDoesNotExist = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 20, 0, 5)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMVxlanInstanceDoesNotExist.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMVxlanInstanceDoesNotExist.setDescription("VXLAN configuration was received for a Distributed Virtual Port, but the host has not yet enabled VXLAN on the vSphere Distributed Switch. VXLAN ports on affected Host will fail to connect until resolved. Action required: See KB 2107951 (https://kb.vmware.com/selfservice/microsites/search.do?cmd=displayKC&docType=kc&externalId=2107951&sliceId=1&docTypeID=DT_KB_1_1&dialogID=40732862&stateId=0%200%2040754197) Frequency of traps: Every time any VXLAN related port (vNic or vmknic) tries to connect to it's Distributed Virtual Port on the host. Affects: VXLAN Traffic on that Host.")
# Trap 20.0.6: VTEP could not join its multicast group; host retries join.
vmwNsxMVxlanLogicalSwitchWrkngImproperly = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 20, 0, 6)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMVxlanLogicalSwitchWrkngImproperly.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMVxlanLogicalSwitchWrkngImproperly.setDescription("VTEP interface was unable to join the specified multicast address, the VTEP will be unable to receive some traffic from other hosts until this is resolved. The host will periodically retry joining the group until it is successful. Action required: Check the host's vmkernel.log for more details. Frequency of traps: NSX retries joining failed mcast groups every 5 seconds. Affects: Logical Switch associated with problem VTEP interface won't work properly.")
# Trap 20.0.7: the IP address of a VTEP vmknic changed (informational).
vmwNsxMVxlanTransportZoneIncorrectlyWrkng = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 20, 0, 7)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMVxlanTransportZoneIncorrectlyWrkng.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMVxlanTransportZoneIncorrectlyWrkng.setDescription('The IP address of a VTEP vmknic has changed. Action required: None. Frequency of traps: Every time a VTEP IP changes')
# Trap 20.0.8: VTEP vmknic lost its valid IP address; its VXLAN traffic drops.
vmwNsxMVxlanTransportZoneNotUsed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 20, 0, 8)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMVxlanTransportZoneNotUsed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMVxlanTransportZoneNotUsed.setDescription("VTEP vmknic does not have a valid IP address assigned, all VXLAN traffic through this vmknic will be dropped. Action required: Verify the IP configuration for the interface, and the DHCP server if DHCP is used. Frequency of traps: Once per VTEP loosing it's IP address.")
# Trap 20.0.9: NSX packages missing before VXLAN DVS configuration (overlay
# class absent); all VXLAN ports on the host fail to connect.
vmwNsxMVxlanOverlayClassMissingOnDvs = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 20, 0, 9)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMVxlanOverlayClassMissingOnDvs.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMVxlanOverlayClassMissingOnDvs.setDescription('NSX packages where not installed prior to DVS configuration for VXLAN. All VXLAN ports will fail to connect until resolved. Action required: See KB 2107951 https://kb.vmware.com/selfservice/microsites/search.do?cmd=displayKC&docType=kc&externalId=2107951&sliceId=1&docTypeID=DT_KB_1_1&dialogID=40732862&stateId=0%200%2040754197 Frequency of traps: Once per setting of the com.vmware.netoverlay.layer0=vxlan opaque property or whenver the host is configured for vxlan or Host reconnects to VCEnter and host has some problem. Affects: VXLAN Traffic for that Host will be affected.')
# Trap 20.0.10: VXLAN Controller removed because its connection could not be
# established.
vmwNsxMVxlanControllerRemoved = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 20, 0, 10)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMVxlanControllerRemoved.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMVxlanControllerRemoved.setDescription('A notification generated when VXLAN Controller has been removed due to the connection cant be built, please check controller IP configuration and deploy again.')
# Trap 20.0.11: connection between two controller nodes detected broken.
vmwNsxMVxlanControllerConnProblem = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 20, 0, 11)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMVxlanControllerConnProblem.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMVxlanControllerConnProblem.setDescription('NSX manager detected the connection between two controller nodes is broken. Action required: It is a warning event, users need to check the controller cluster for the further steps. Check following KB 2127655 https://kb.vmware.com/selfservice/microsites/search.do?cmd=displayKC&docType=kc&externalId=2127655&sliceId=1&docTypeID=DT_KB_1_1&dialogID=40732913&stateId=0%200%2040754965 to see if issue matches. Frequency of traps: Whenever the controller reports the issue. Affects: Networking might get affected.')
# Trap 20.0.12: host certification info could not be sent to NSX Controllers.
vmwNsxMVxlanControllerInactive = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 20, 0, 12)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMVxlanControllerInactive.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMVxlanControllerInactive.setDescription("Host Certification information couldn't be sent to NSX Controllers. Action required: Ensure that NSX Controller cluster is in healthy state before preparing a new Host. Invoke Controller Sync API to try and rectify this error. Frequency of traps: When a new host is prepared for NSX networking. Affects: Newly prepared Host. Communication channel between Host and NSX Controllers might have issues.")
# Trap 20.0.13: controller cluster active again; auto-sync in progress.
vmwNsxMVxlanControllerActive = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 20, 0, 13)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMVxlanControllerActive.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMVxlanControllerActive.setDescription('A notification generated when Controller cluster state is now active. Controller Synchronization job is in progress. Frequency of traps: Controller cluster becomes active again from a previous inactive state. Action required: User doesnt have to take any corrective action. NSX will auto-sync the controllers.')
# Trap 20.0.14: VXLAN vmknic missing or deleted from the host.
vmwNsxMVxlanVmknicMissingOrDeleted = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 20, 0, 14)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMVxlanVmknicMissingOrDeleted.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMVxlanVmknicMissingOrDeleted.setDescription('VXLAN vmknic is missing or deleted from host. Action required: Issue can be resolved from Logical Network Preparation - VXLAN Transport UI section. Clicking on resolve will try to rectify the issue. Frequency of traps: First time NSX Manager finds that VXLAN vmknic is missing or deleted from Host. Affects: VXLAN Traffic to/from the mentioned Host will be affected.')
# Trap 20.0.15: informational — a manager/host agent/controller connection
# was established or re-established.
vmwNsxMVxlanInfo = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 20, 0, 15)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMVxlanInfo.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMVxlanInfo.setDescription('NSX Manager will raise this event when connection between either of the following component is established/re-established (i) connection between NSX Manager and Host Firewall agent. (ii) connection between NSX Manager and Control Plane Agent. (iii) connection between Control Plane Agent to Controllers. Action required: None Frequency of traps: NSX Manager will raise this event when connection between either of the following component is established/re-established (i) connection between NSX Manager and Host Firewall agent. (ii) connection between NSX Manager and Control Plane Agent (iii) connection between Control Plane Agent to Controllers.')
# Trap 20.0.16: a VXLAN vmknic is missing on VC; remediate API can recreate it.
vmwNsxMVxlanVmknicPortGrpMissing = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 20, 0, 16)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMVxlanVmknicPortGrpMissing.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMVxlanVmknicPortGrpMissing.setDescription('NSX manager detected one vxlan vmknic is missing on VC. Action required: Check the host, if that vmknic is deleted, click on the resolve button on UI, or call the remediate API (POST /api/2.0/vdn/config/host/{hostId}/vxlan/vteps?action=remediate) to recreate the vxlan vmknic. Frequency of traps: First time when vxlan vmknic is detected missing (manually deleted by user or inventory report the incorrect information) Affects: The VXLAN traffic on that host may be interrupted.')
# Trap 20.0.17: a previously-missing VXLAN vmknic reappeared on VC.
vmwNsxMVxlanVmknicPortGrpAppears = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 20, 0, 17)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMVxlanVmknicPortGrpAppears.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMVxlanVmknicPortGrpAppears.setDescription('NSX manager detected one vxlan vmknic that was marked as missing has now reappeared on VC. Action required: None Frequency of traps: When that missing vmknic re-appears again. Affects: The VXLAN traffic on that host may be resumed.')
# Trap 20.0.18: a manager/host agent/controller connection detected down.
vmwNsxMVxlanConnDown = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 20, 0, 18)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMVxlanConnDown.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMVxlanConnDown.setDescription('This event is triggered when either of the following connections are detected down by NSX Manager: (i) connection between NSX Manager and Host Firewall agent. (ii) connection between NSX Manager and Control Plane Agent. (iii) connection between Control Plane Agent to Controllers. Action required: (i) If NSX Manager to Host Firewall Agent connection is down, check NSX Manager and Firewall Agent logs to get error details. You can try Fabric Synchronize API to try and retificy this issue. (ii) If NSX Manager to Control Plane Agent connection is down, please check NSX Manager and Control Plane Agent logs to get the error detail, check whether the Control Plane Agent process is down. (iii) If Control Plane Agent to Controllers connection is down, please go to UI Installation page to check the connection status for crossponding Host. Frequency of traps: When (i) NSX Manager looses connection with Firewall agent on host or (ii) NSX Manager losses connection with Control plane agent on host or (iii) Control plane agent on Host looses connection with NSX Controllers. Affects: VMs on that Host might get affected.')
# Trap 20.0.19: a Logical Switch backing portgroup is missing on vCenter.
vmwNsxMBackingPortgroupMissing = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 20, 0, 19)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMBackingPortgroupMissing.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMBackingPortgroupMissing.setDescription('NSX manager detected one backing portgroup of a logical switch is missing on VCenter. Action required: Click on the resolve button on UI or call the API (POST https://<nsx-manager-host>/api/2.0/vdn/virtualwires/<virtualwireId>/backing?action=remediate) to recreate that backing portgroup. Frequency of traps: Whenever logical switch backing portgroup is missing on VC. Affects: VMs cannot be connected to this Logical Switch.')
# Trap 20.0.20: a previously-missing backing portgroup reappeared on VC.
vmwNsxMBackingPortgroupReappears = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 20, 0, 20)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMBackingPortgroupReappears.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMBackingPortgroupReappears.setDescription('NSX manager detected one backing portgroup of a logical switch that was missing reappears on VC. Action required: None Frequency of traps: Whenever user triggered remediate API on Logical Switch which has missing backing portgroup.')
# Trap 20.0.21: Managed Object id of a backing portgroup changed (typically
# after a backup restore).
vmwNsxMManagedObjectIdChanged = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 20, 0, 21)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMManagedObjectIdChanged.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMManagedObjectIdChanged.setDescription('NSX manager detected the Managed Objectid of one backing portgroup of a logical switch changed. Action required: None Frequnecy of traps: This typically happens when user restores a backup of Logical Switch backing portgroup.')
# Trap 20.0.22: high latency detected on a disk of an NSX Controller.
vmwNsxMHighLatencyOnDisk = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 20, 0, 22)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMHighLatencyOnDisk.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMHighLatencyOnDisk.setDescription('NSX manager detected some disk on a NSX Controller has high latency. Action required: Rectify the issue on specified device and controller. Frequency of traps: First time NSX detected this issue as reported by Controller. When this issue gets resolved another Informational event will be raised by NSX Manager indicating the same. Affects: NSX Controller.')
# Trap 20.0.23: the controller disk high-latency alert has been resolved.
vmwNsxMHighLatencyOnDiskResolved = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 20, 0, 23)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMHighLatencyOnDiskResolved.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMHighLatencyOnDiskResolved.setDescription('NSX manager detected the disk high latency alert on a some disk on a NSX Controller has been resolved. Frequency of traps: First time NSX detected, previously raised disk latency issue has been resolved.')
# Trap 20.0.24: a Controller VM was powered off from vCenter.
vmwNsxMControllerVmPoweredOff = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 20, 0, 24)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMControllerVmPoweredOff.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMControllerVmPoweredOff.setDescription("NSX manager detected a Controller Virtual Machine is powered off from vCenter. Action required: Click on the 'Resolve' button on Controller page on UI or call the API (POST https://<nsx-manager-host>/api/2.0/vdn/controller/{controllerId}?action=remediate) to power on the Controller Virtual Machine. Frequency of traps: This event wil be raised when controller Virtual Machine is powered off from vCenter. Affects: Controller cluster status might go to disconnected if a controller Virtual Machine is powered off. Any operation that requires an active Controller Cluster may be affected.")
# Trap 20.0.25: a Controller VM was deleted from vCenter.
vmwNsxMControllerVmDeleted = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 20, 0, 25)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMControllerVmDeleted.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMControllerVmDeleted.setDescription("NSX manager detected a Controller Virtual Machine is deleted from vCenter. Action required: Click on the Resolve button on Controller page on UI or call the API (POST https://<nsx-manager-host>/api/2.0/vdn/controller/{controllerId}?action=remediate) to clean up NSX manager's database state. Frequency of traps: This event will be raised when Controller Virtual Machine is deleted from vCenter. Affects: Controller cluster status might go to disconnected if a controller Virtual Machine is powered off. Any operation that requires an active Controller Cluster may be affected.")
# Trap 20.0.26: VXLAN configuration not set on the host (would-block issue);
# NSX Manager resent the configuration.
vmwNsxMVxlanConfigNotSet = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 20, 0, 26)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMVxlanConfigNotSet.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMVxlanConfigNotSet.setDescription('NSX manager detected the VXLAN configuration is not set on the host (would-block issue). And this event indicates NSX Manager tried to rectify this issue by resending the VXLAN configuration on Host. Action required: See KB 2107951 https://kb.vmware.com/selfservice/microsites/search.do?cmd=displayKC&docType=kc&externalId=2107951&sliceId=1&docTypeID=DT_KB_1_1&dialogID=40732862&stateId=0%200%2040754197 for more information. Frequency of traps: This event will generate when host preparation task is triggered for a host and Host encounters would-block issue. Affects: It is a notification, no specific guide for the next step.')
# Conformance subtree (…6876.90.1.99): parent node for the compliance
# statements (.1) and object/notification groups (.2) that follow.
vmwNsxManagerMIBConformance = MibIdentifier(
    (1, 3, 6, 1, 4, 1, 6876, 90, 1, 99)
)
vmwNsxManagerMIBCompliances = MibIdentifier(
    (1, 3, 6, 1, 4, 1, 6876, 90, 1, 99, 1)
)
vmwNsxManagerMIBGroups = MibIdentifier(
    (1, 3, 6, 1, 4, 1, 6876, 90, 1, 99, 2)
)
# Compliance statement tying the notification-info and notification groups
# together.  The getattr() guard is the standard pysmi idiom: on pysnmp
# builders newer than 4.4.0, setStatus() returns the object and must be
# re-assigned; older builders mutate in place.
vmwNsxManagerMIBBasicCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 6876, 90, 1, 99, 1, 3)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxManagerNotificationInfoGroup1"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxManagerNotificationGroup1"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    vmwNsxManagerMIBBasicCompliance = vmwNsxManagerMIBBasicCompliance.setStatus('current')
if mibBuilder.loadTexts: vmwNsxManagerMIBBasicCompliance.setDescription('The compliance statement for entities which implement VMWARE-NSX-MANAGER-MIB.')
# Object group listing the varbind objects carried by NSX Manager
# notifications (event code/timestamp/message/severity/component/uuid/count).
# The getattr() version guard is the standard pysmi idiom for the pysnmp
# 4.4.1+ setStatus() return-value change.
vmwNsxManagerNotificationInfoGroup1 = ObjectGroup((1, 3, 6, 1, 4, 1, 6876, 90, 1, 99, 2, 2)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMCount"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    vmwNsxManagerNotificationInfoGroup1 = vmwNsxManagerNotificationInfoGroup1.setStatus('current')
if mibBuilder.loadTexts: vmwNsxManagerNotificationInfoGroup1.setDescription('These objects provide details in NSX Manager notifications.')
# NOTIFICATION-GROUP enumerating every trap/inform type this MIB defines.
# NOTE(review): a few members (e.g. vmwNsxMSamDataCollectionDisabled,
# vmwNsxMVxlanControllerRemoved/ConnProblem/Inactive) appear twice in this
# compiler-generated list — apparently redundant duplicates; confirm against
# the source MIB before deduplicating.
vmwNsxManagerNotificationGroup1 = NotificationGroup((1, 3, 6, 1, 4, 1, 6876, 90, 1, 99, 2, 3)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMConfigGroup"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMIpAddedBlackList"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMIpRemovedBlackList"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMSsoConfigFailure"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMSsoUnconfigured"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUserRoleAssigned"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUserRoleUnassigned"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMGroupRoleAssigned"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMGroupRoleUnassigned"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMVcLoginFailed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMVcDisconnected"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMLostVcConnectivity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFltrCnfgUpdateFailed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFltrCnfgNotAppliedToVnic"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFltrCnfgAppliedToVnic"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFltrCreatedForVnic"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFltrDeletedForVnic"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFirewallConfigUpdateFailed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFirewallRuleFailedVnic"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFirewallRuleAppliedVnic"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMCntnrCnfgUpdateFailed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFlowMissed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMSpoofGuardCnfgUpdateFailed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMSpoofGuardFailed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMSpoofGuardApplied"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMSpoofGuardDisableFail"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMSpoofGuardDisabled"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMLegacyAppServiceDeletionFailed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFirewallCpuThresholdCrossed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFirewallMemThresholdCrossed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMConnPerSecThrshldCrossed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFirewallCnfgUpdateTimedOut"), 
("VMWARE-NSX-MANAGER-MIB", "vmwNsxMSpoofGuardCnfgUpdateTmOut"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFirewallPublishFailed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMCntnrUpdatePublishFailed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMSpoofGuardUpdatePublishFailed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMExcludeListPublishFailed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFirewallCnfgUpdateOnDltCntnr"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMHostSyncFailed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMHostSynced"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFirewallInstalled"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFirewallClusterInstalled"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFirewallClusterUninstalled"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFirewallClusterDisabled"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeNoVmServing"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeGatewayCreated"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeVmBadState"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeVmCommFailed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeVmCnfgChanged"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeGatewayDeleted"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeGatewayReDeployed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeVmPowerOff"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeApplianceSizeChanged"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeUpgrade51x"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeLicenseChanged"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeApplianceMoved"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeApplianceNotFound"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeVMHealthCheckMiss"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeHealthCheckMiss"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeCommAgentNotConnected"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMApplianceWithDifferentId"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFirewallRuleModified"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeAntiAffinityRuleViolated"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeHaEnabled"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeHaDisabled"), ("VMWARE-NSX-MANAGER-MIB", 
"vmwNsxMEdgeGatewayRecovered"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeVmRecovered"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeGatewayUpgraded"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeVmHlthChkDisabled"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgePrePublishFailed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeForcedSync"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeVmBooted"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeVmInBadState"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeVmCpuUsageIncreased"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeVmMemUsageIncreased"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeVmProcessFailure"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeVmSysTimeBad"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeVmSysTimeSync"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeAesniCryptoEngineUp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeAesniCryptoEngineDown"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeVmOom"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeFileSysRo"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeHaCommDisconnected"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeHaSwitchOverSelf"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeHaSwitchOverActive"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeHaSwitchOverStandby"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeMonitorProcessFailure"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMLbVirtualServerPoolUp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMLbVirtualServerPoolDown"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMLbVirtualServerPoolWrong"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMLbPoolWarning"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMIpsecChannelUp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMIpsecChannelDown"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMIpsecTunnelUp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMIpsecTunnelDown"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMIpsecChannelUnknown"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMIpsecTunnelUnknown"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMGlobalLbMemberUp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMGlobalLbMemberWarning"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMGlobalLbMemberDown"), 
("VMWARE-NSX-MANAGER-MIB", "vmwNsxMGlobalLbMemberUnknown"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMGlobalLbPeerUp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMGlobalLbPeerDown"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMDhcpServiceDisabled"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEndpointThinAgentEnabled"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMGuestIntrspctnEnabled"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMGuestIntrspctnIncompatibleEsx"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMGuestIntrspctnEsxConnFailed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMGuestIntrspctnStatusRcvFailed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEsxModuleEnabled"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEsxModuleUninstalled"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMGuestIntrspctnHstMxMssngRep"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEndpointUndefined"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEamGenericAlarm"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFabricDplymntStatusChanged"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFabricDplymntUnitCreated"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFabricDplymntUnitUpdated"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFabricDplymntUnitDestroyed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMDataStoreNotCnfgrdOnHost"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFabricDplymntInstallationFailed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFabricAgentCreated"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFabricAgentDestroyed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFabricSrvceNeedsRedplymnt"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUpgradeOfDplymntFailed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFabricDependenciesNotInstalled"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFabricErrorNotifSecBfrUpgrade"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFabricErrCallbackNtRcvdUpgrade"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFabricErrCallbackNtRcvdUninstall"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFabricUninstallServiceFailed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFabricErrorNotifSecBfrUninstall"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFabricServerRebootUninstall"), ("VMWARE-NSX-MANAGER-MIB", 
"vmwNsxMFabricServerRebootUpgrade"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFabricConnEamFailed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFabricConnEamRestored"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFabricPreUninstallCleanUpFailed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFabricBackingEamNotFound"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMDepPluginIpPoolExhausted"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMDepPluginGenericAlarm"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMDepPluginGenericException"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMDepPluginVmReboot"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMMessagingConfigFailed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMMessagingReconfigFailed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMMessagingConfigFailedNotifSkip"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMMessagingInfraUp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMMessagingInfraDown"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMMessagingDisabled"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMServiceComposerPolicyOutOfSync"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMServiceComposerPolicyDeleted"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMInconsistentSvmAlarm"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMSvmRestartAlarm"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMSvmAgentUnavailable"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMVmAddedToSg"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMVmRemovedFromSg"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFullUniversalSyncFailed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMSecondaryDown"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUniversalSyncFailedForEntity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMServerUp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMExtensionRegistered"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMExtensionUpdated"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMDataSecScanStarted"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMDataSecScanEnded"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMSamDataCollectionEnabled"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMSamDataCollectionDisabled"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMSamDataStoppedFlowing"), ("VMWARE-NSX-MANAGER-MIB", 
"vmwNsxMSamDataResumedFlowing"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUsvmHeartbeatStopped"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUsvmHeartbeatResumed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUsvmReceivedHello"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUpgradeSuccess"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMRestoreSuccess"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMVxlanLogicalSwitchImproperlyCnfg"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMVxlanLogicalSwitchProperlyCnfg"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMVxlanInitFailed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMVxlanPortInitFailed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMVxlanInstanceDoesNotExist"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMVxlanLogicalSwitchWrkngImproperly"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMVxlanTransportZoneIncorrectlyWrkng"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMVxlanTransportZoneNotUsed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMVxlanOverlayClassMissingOnDvs"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMVxlanControllerRemoved"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMVxlanControllerConnProblem"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMVxlanControllerInactive"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMVxlanControllerActive"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMVxlanVmknicMissingOrDeleted"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMVxlanInfo"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMVxlanVmknicPortGrpMissing"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMVxlanVmknicPortGrpAppears"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMVxlanConnDown"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMSamDataCollectionDisabled"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMSamDataStoppedFlowing"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMSamDataResumedFlowing"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMVxlanOverlayClassMissingOnDvs"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMVxlanControllerRemoved"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMVxlanControllerConnProblem"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMVxlanControllerInactive"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMSsoDisconnected"), ("VMWARE-NSX-MANAGER-MIB", 
"vmwNsxMFirewallInstallFailed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFirewallForceSyncClusterFailed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFirewallForceSyncClusterSuccess"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFirewallVsfwdProcessStarted"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeResourceReservationFailure"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeSplitBrainDetected"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeSplitBrainRecovered"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeSplitBrainRecoveryAttempt"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMServiceComposerFirewallPolicyOutOfSync"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMServiceComposerNetworkPolicyOutOfSync"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMServiceComposerGuestPolicyOutOfSync"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMServiceComposerOutOfSync"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMServiceComposerOutOfSyncRebootFailure"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMServiceComposerOutOfSyncDraftRollback"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMServiceComposerOutOfSyncSectionDeletionFailure"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMServiceComposerOutOfSyncPrecedenceChangeFailure"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMServiceComposerOutOfSyncDraftSettingFailure"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMBackingPortgroupMissing"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMBackingPortgroupReappears"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMManagedObjectIdChanged"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMHighLatencyOnDisk"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMHighLatencyOnDiskResolved"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMControllerVmPoweredOff"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMControllerVmDeleted"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMVxlanConfigNotSet"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMSnmpDisabled"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMSnmpManagerConfigUpdated"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMDuplicateIp"))
# Compiler-generated pysnmp version gate, as above.
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    vmwNsxManagerNotificationGroup1 = vmwNsxManagerNotificationGroup1.setStatus('current')
if mibBuilder.loadTexts: vmwNsxManagerNotificationGroup1.setDescription('Group of objects describing notifications (traps, informs).')
# Register every symbol defined by this module with the MIB builder so other
# modules (and the SNMP engine) can resolve them by name. Split across two
# exportSymbols() calls by the MIB compiler, presumably to bound call size —
# TODO confirm against pysmi codegen behavior.
mibBuilder.exportSymbols("VMWARE-NSX-MANAGER-MIB", vmwNsxManagerNotificationGroup1=vmwNsxManagerNotificationGroup1, vmwNsxMIpsecChannelUp=vmwNsxMIpsecChannelUp, vmwNsxMIpsecChannelUnknown=vmwNsxMIpsecChannelUnknown, vmwNsxMGlobalLbMemberUp=vmwNsxMGlobalLbMemberUp, vmwNsxMMessagingInfraDown=vmwNsxMMessagingInfraDown, vmwNsxMFabric=vmwNsxMFabric, vmwNsxMEdgeSplitBrainRecovered=vmwNsxMEdgeSplitBrainRecovered, vmwNsxMFirewallForceSyncClusterSuccess=vmwNsxMFirewallForceSyncClusterSuccess, vmwNsxMFabricDplymntUnitUpdated=vmwNsxMFabricDplymntUnitUpdated, vmwNsxMMessaging=vmwNsxMMessaging, vmwNsxMLbPoolWarning=vmwNsxMLbPoolWarning, vmwNsxMHostSynced=vmwNsxMHostSynced, vmwNsxMVcLoginFailed=vmwNsxMVcLoginFailed, vmwNsxMVxlanConnDown=vmwNsxMVxlanConnDown, vmwNsxManagerMIBBasicCompliance=vmwNsxManagerMIBBasicCompliance, vmwNsxMEdgeVmBadState=vmwNsxMEdgeVmBadState, vmwNsxMEventMessage=vmwNsxMEventMessage, vmwNsxMFirewallMemThresholdCrossed=vmwNsxMFirewallMemThresholdCrossed, vmwNsxMFabricErrorNotifSecBfrUninstall=vmwNsxMFabricErrorNotifSecBfrUninstall, vmwNsxMServiceComposerNetworkPolicyOutOfSync=vmwNsxMServiceComposerNetworkPolicyOutOfSync, vmwNsxMDepPluginGenericException=vmwNsxMDepPluginGenericException, vmwNsxMFabricServerRebootUpgrade=vmwNsxMFabricServerRebootUpgrade, vmwNsxMEdgeVmCnfgChanged=vmwNsxMEdgeVmCnfgChanged, vmwNsxMExcludeListPublishFailed=vmwNsxMExcludeListPublishFailed, vmwNsxMVxlanInitFailed=vmwNsxMVxlanInitFailed, vmwNsxMEdgeGatewayUpgraded=vmwNsxMEdgeGatewayUpgraded, vmwNsxMVxlanVmknicPortGrpAppears=vmwNsxMVxlanVmknicPortGrpAppears, vmwNsxMTranslationPrefix=vmwNsxMTranslationPrefix, vmwNsxMEamGenericAlarm=vmwNsxMEamGenericAlarm, vmwNsxManagerMIBConformance=vmwNsxManagerMIBConformance, vmwNsxMDuplicateIp=vmwNsxMDuplicateIp, vmwNsxMBranch=vmwNsxMBranch, vmwNsxMVxlanConfigNotSet=vmwNsxMVxlanConfigNotSet, vmwNsxMLegacyAppServiceDeletionFailed=vmwNsxMLegacyAppServiceDeletionFailed, vmwNsxMEdgeFileSysRo=vmwNsxMEdgeFileSysRo, 
vmwNsxMGuestIntrspctnStatusRcvFailed=vmwNsxMGuestIntrspctnStatusRcvFailed, vmwNsxMFlowMissed=vmwNsxMFlowMissed, vmwNsxMEventCode=vmwNsxMEventCode, vmwNsxMSecurityPrefix=vmwNsxMSecurityPrefix, vmwNsxMEdgeApplianceNotFound=vmwNsxMEdgeApplianceNotFound, vmwNsxMGlobalLbPeerUp=vmwNsxMGlobalLbPeerUp, vmwNsxMFabricErrorNotifSecBfrUpgrade=vmwNsxMFabricErrorNotifSecBfrUpgrade, vmwNsxMMessagingConfigFailed=vmwNsxMMessagingConfigFailed, vmwNsxMVsmCorePrefix=vmwNsxMVsmCorePrefix, vmwNsxMVxlanControllerConnProblem=vmwNsxMVxlanControllerConnProblem, vmwNsxManagerMIBCompliances=vmwNsxManagerMIBCompliances, vmwNsxMFirewallClusterDisabled=vmwNsxMFirewallClusterDisabled, vmwNsxMSpoofGuardApplied=vmwNsxMSpoofGuardApplied, vmwNsxMFirewallCnfgUpdateTimedOut=vmwNsxMFirewallCnfgUpdateTimedOut, vmwNsxManagerMIB=vmwNsxManagerMIB, vmwNsxMEdgeHaCommDisconnected=vmwNsxMEdgeHaCommDisconnected, vmwNsxMVxlan=vmwNsxMVxlan, vmwNsxMFabricAgentDestroyed=vmwNsxMFabricAgentDestroyed, vmwNsxMVxlanInfo=vmwNsxMVxlanInfo, vmwNsxMFirewallRuleFailedVnic=vmwNsxMFirewallRuleFailedVnic, vmwNsxMServiceComposerFirewallPolicyOutOfSync=vmwNsxMServiceComposerFirewallPolicyOutOfSync, vmwNsxMEdgePrePublishFailed=vmwNsxMEdgePrePublishFailed, vmwNsxMLbVirtualServerPoolDown=vmwNsxMLbVirtualServerPoolDown, VmwNsxManagerTypeSeverity=VmwNsxManagerTypeSeverity, vmwNsxMSvmOperations=vmwNsxMSvmOperations, vmwNsxMSvmAgentUnavailable=vmwNsxMSvmAgentUnavailable, vmwNsxMEdgeAesniCryptoEngineUp=vmwNsxMEdgeAesniCryptoEngineUp, vmwNsxMSecurity=vmwNsxMSecurity, vmwNsxMEamPrefix=vmwNsxMEamPrefix, vmwNsxMEndpoint=vmwNsxMEndpoint, vmwNsxMEam=vmwNsxMEam, vmwNsxMGlobalLbPeerDown=vmwNsxMGlobalLbPeerDown, vmwNsxMEdgeAntiAffinityRuleViolated=vmwNsxMEdgeAntiAffinityRuleViolated, vmwNsxMDlpPrefix=vmwNsxMDlpPrefix, vmwNsxMEdgeApplianceMoved=vmwNsxMEdgeApplianceMoved, vmwNsxMApplianceWithDifferentId=vmwNsxMApplianceWithDifferentId, vmwNsxMGuestIntrspctnEnabled=vmwNsxMGuestIntrspctnEnabled, 
vmwNsxMEdgeApplianceSizeChanged=vmwNsxMEdgeApplianceSizeChanged, vmwNsxMFabricErrCallbackNtRcvdUninstall=vmwNsxMFabricErrCallbackNtRcvdUninstall, vmwNsxMEndpointUndefined=vmwNsxMEndpointUndefined, vmwNsxMCntnrCnfgUpdateFailed=vmwNsxMCntnrCnfgUpdateFailed, vmwNsxMEdgeVmOom=vmwNsxMEdgeVmOom, vmwNsxMHostSyncFailed=vmwNsxMHostSyncFailed, vmwNsxMMessagingDisabled=vmwNsxMMessagingDisabled, vmwNsxMControllerVmPoweredOff=vmwNsxMControllerVmPoweredOff, vmwNsxMFirewallCpuThresholdCrossed=vmwNsxMFirewallCpuThresholdCrossed, vmwNsxMUsvmReceivedHello=vmwNsxMUsvmReceivedHello, vmwNsxMBackingPortgroupMissing=vmwNsxMBackingPortgroupMissing, vmwNsxManagerMIBGroups=vmwNsxManagerMIBGroups, vmwNsxMEdgeVmRecovered=vmwNsxMEdgeVmRecovered, vmwNsxMEdgeHaSwitchOverSelf=vmwNsxMEdgeHaSwitchOverSelf, vmwNsxMUniversalSync=vmwNsxMUniversalSync, vmwNsxMGroupsPrefix=vmwNsxMGroupsPrefix, vmwNsxMManagedObjectIdChanged=vmwNsxMManagedObjectIdChanged, vmwNsxMServiceComposerPolicyDeleted=vmwNsxMServiceComposerPolicyDeleted, vmwNsxMSvmRestartAlarm=vmwNsxMSvmRestartAlarm, vmwNsxMFabricServerRebootUninstall=vmwNsxMFabricServerRebootUninstall, vmwNsxMSpoofGuardFailed=vmwNsxMSpoofGuardFailed, vmwNsxMEdgeMonitorProcessFailure=vmwNsxMEdgeMonitorProcessFailure, vmwNsxMFabricSrvceNeedsRedplymnt=vmwNsxMFabricSrvceNeedsRedplymnt, vmwNsxMRestoreSuccess=vmwNsxMRestoreSuccess, vmwNsxMSnmp=vmwNsxMSnmp, vmwNsxMSecondaryDown=vmwNsxMSecondaryDown, vmwNsxMGroupRoleAssigned=vmwNsxMGroupRoleAssigned, vmwNsxMEdgeAesniCryptoEngineDown=vmwNsxMEdgeAesniCryptoEngineDown, vmwNsxMEsxModuleEnabled=vmwNsxMEsxModuleEnabled, vmwNsxMServiceComposerOutOfSyncRebootFailure=vmwNsxMServiceComposerOutOfSyncRebootFailure, vmwNsxMFabricUninstallServiceFailed=vmwNsxMFabricUninstallServiceFailed, vmwNsxMIpRemovedBlackList=vmwNsxMIpRemovedBlackList, vmwNsxMEdgeVmSysTimeBad=vmwNsxMEdgeVmSysTimeBad, vmwNsxMEdgeHaDisabled=vmwNsxMEdgeHaDisabled, vmwNsxMUserRoleAssigned=vmwNsxMUserRoleAssigned, vmwNsxMLostVcConnectivity=vmwNsxMLostVcConnectivity, 
vmwNsxMFltrCnfgNotAppliedToVnic=vmwNsxMFltrCnfgNotAppliedToVnic, vmwNsxMFabricErrCallbackNtRcvdUpgrade=vmwNsxMFabricErrCallbackNtRcvdUpgrade, vmwNsxMVxlanTransportZoneNotUsed=vmwNsxMVxlanTransportZoneNotUsed, vmwNsxMVxlanVmknicPortGrpMissing=vmwNsxMVxlanVmknicPortGrpMissing, vmwNsxMEdgeGatewayReDeployed=vmwNsxMEdgeGatewayReDeployed, vmwNsxMEdgeHaSwitchOverStandby=vmwNsxMEdgeHaSwitchOverStandby, vmwNsxMGroupsBranch=vmwNsxMGroupsBranch, vmwNsxMSpoofGuardUpdatePublishFailed=vmwNsxMSpoofGuardUpdatePublishFailed, vmwNsxMExtensionRegistrationPrefix=vmwNsxMExtensionRegistrationPrefix, vmwNsxMHighLatencyOnDisk=vmwNsxMHighLatencyOnDisk, vmwNsxMServiceComposerOutOfSync=vmwNsxMServiceComposerOutOfSync, vmwNsxMUpgradeSuccess=vmwNsxMUpgradeSuccess, vmwNsxMServiceComposerOutOfSyncDraftRollback=vmwNsxMServiceComposerOutOfSyncDraftRollback, vmwNsxMTranslation=vmwNsxMTranslation, vmwNsxMEdgeLicenseChanged=vmwNsxMEdgeLicenseChanged, vmwNsxMVxlanControllerRemoved=vmwNsxMVxlanControllerRemoved, vmwNsxMDhcpServiceDisabled=vmwNsxMDhcpServiceDisabled, vmwNsxMFabricConnEamFailed=vmwNsxMFabricConnEamFailed, vmwNsxMVxlanOverlayClassMissingOnDvs=vmwNsxMVxlanOverlayClassMissingOnDvs, PYSNMP_MODULE_ID=vmwNsxManagerMIB, vmwNsxMEdgeSplitBrainRecoveryAttempt=vmwNsxMEdgeSplitBrainRecoveryAttempt, vmwNsxMEventComponent=vmwNsxMEventComponent, vmwNsxMVxlanPrefix=vmwNsxMVxlanPrefix, vmwNsxMEdgeForcedSync=vmwNsxMEdgeForcedSync, vmwNsxMIpsecTunnelUnknown=vmwNsxMIpsecTunnelUnknown, vmwNsxMUpgradeOfDplymntFailed=vmwNsxMUpgradeOfDplymntFailed, vmwNsxMUserRoleUnassigned=vmwNsxMUserRoleUnassigned, vmwNsxMSamDataCollectionDisabled=vmwNsxMSamDataCollectionDisabled, vmwNsxMEventTimestamp=vmwNsxMEventTimestamp, vmwNsxMGuestIntrspctnHstMxMssngRep=vmwNsxMGuestIntrspctnHstMxMssngRep, vmwNsxMVxlanPortInitFailed=vmwNsxMVxlanPortInitFailed, vmwNsxMSsoDisconnected=vmwNsxMSsoDisconnected, vmwNsxMSamSystem=vmwNsxMSamSystem, vmwNsxMUsvm=vmwNsxMUsvm, vmwNsxMSnmpManagerConfigUpdated=vmwNsxMSnmpManagerConfigUpdated, 
vmwNsxMExtensionUpdated=vmwNsxMExtensionUpdated, vmwNsxMBackingPortgroupReappears=vmwNsxMBackingPortgroupReappears, vmwNsxMMessagingInfraUp=vmwNsxMMessagingInfraUp, vmwNsxMEdgeVmProcessFailure=vmwNsxMEdgeVmProcessFailure, vmwNsxMEndpointThinAgentEnabled=vmwNsxMEndpointThinAgentEnabled, vmwNsxMEdgeVmMemUsageIncreased=vmwNsxMEdgeVmMemUsageIncreased, vmwNsxMDlp=vmwNsxMDlp, vmwNsxMAlertData=vmwNsxMAlertData, vmwNsxMDepPluginPrefix=vmwNsxMDepPluginPrefix, vmwNsxMDepPluginIpPoolExhausted=vmwNsxMDepPluginIpPoolExhausted, vmwNsxMFirewallInstalled=vmwNsxMFirewallInstalled, vmwNsxMFabricPreUninstallCleanUpFailed=vmwNsxMFabricPreUninstallCleanUpFailed, vmwNsxMFabricDplymntStatusChanged=vmwNsxMFabricDplymntStatusChanged, vmwNsxMVxlanLogicalSwitchWrkngImproperly=vmwNsxMVxlanLogicalSwitchWrkngImproperly, vmwNsxMFirewallClusterUninstalled=vmwNsxMFirewallClusterUninstalled, vmwNsxMEdgeGatewayDeleted=vmwNsxMEdgeGatewayDeleted, vmwNsxMFabricDplymntInstallationFailed=vmwNsxMFabricDplymntInstallationFailed, vmwNsxMEdgeVmCommFailed=vmwNsxMEdgeVmCommFailed, vmwNsxMInconsistentSvmAlarm=vmwNsxMInconsistentSvmAlarm, vmwNsxMFltrCnfgAppliedToVnic=vmwNsxMFltrCnfgAppliedToVnic, vmwNsxMControllerVmDeleted=vmwNsxMControllerVmDeleted, vmwNsxMUsvmPrefix=vmwNsxMUsvmPrefix, vmwNsxMDepPlugin=vmwNsxMDepPlugin, vmwNsxMServiceComposerPolicyOutOfSync=vmwNsxMServiceComposerPolicyOutOfSync, vmwNsxMNotification=vmwNsxMNotification, vmwNsxMSsoConfigFailure=vmwNsxMSsoConfigFailure, vmwNsxMMessagingReconfigFailed=vmwNsxMMessagingReconfigFailed, vmwNsxMEdgeVmCpuUsageIncreased=vmwNsxMEdgeVmCpuUsageIncreased, vmwNsxMUniversalSyncPrefix=vmwNsxMUniversalSyncPrefix, vmwNsxMIpsecTunnelDown=vmwNsxMIpsecTunnelDown, vmwNsxMFirewallInstallFailed=vmwNsxMFirewallInstallFailed, vmwNsxMDepPluginGenericAlarm=vmwNsxMDepPluginGenericAlarm, vmwNsxMEdgeVmPowerOff=vmwNsxMEdgeVmPowerOff, vmwNsxMSamDataStoppedFlowing=vmwNsxMSamDataStoppedFlowing, vmwNsxMEdgeHaEnabled=vmwNsxMEdgeHaEnabled, 
vmwNsxMIpsecChannelDown=vmwNsxMIpsecChannelDown, vmwNsxMFabricPrefix=vmwNsxMFabricPrefix, vmwNsxMFullUniversalSyncFailed=vmwNsxMFullUniversalSyncFailed, vmwNsxMFabricDplymntUnitCreated=vmwNsxMFabricDplymntUnitCreated, vmwNsxMGroupRoleUnassigned=vmwNsxMGroupRoleUnassigned, vmwNsxMUuid=vmwNsxMUuid, vmwNsxMServiceComposerOutOfSyncPrecedenceChangeFailure=vmwNsxMServiceComposerOutOfSyncPrecedenceChangeFailure, vmwNsxMEdgeGatewayRecovered=vmwNsxMEdgeGatewayRecovered, vmwNsxMSamSystemPrefix=vmwNsxMSamSystemPrefix, vmwNsxMEdgeVmHlthChkDisabled=vmwNsxMEdgeVmHlthChkDisabled, vmwNsxMFabricAgentCreated=vmwNsxMFabricAgentCreated, vmwNsxMServiceComposerPrefix=vmwNsxMServiceComposerPrefix, vmwNsxMEdgeVMHealthCheckMiss=vmwNsxMEdgeVMHealthCheckMiss, vmwNsxMSpoofGuardCnfgUpdateFailed=vmwNsxMSpoofGuardCnfgUpdateFailed, vmwNsxMSpoofGuardCnfgUpdateTmOut=vmwNsxMSpoofGuardCnfgUpdateTmOut, vmwNsxMUsvmHeartbeatStopped=vmwNsxMUsvmHeartbeatStopped, vmwNsxMHighLatencyOnDiskResolved=vmwNsxMHighLatencyOnDiskResolved, vmwNsxMEdgeVmSysTimeSync=vmwNsxMEdgeVmSysTimeSync, vmwNsxMLbVirtualServerPoolWrong=vmwNsxMLbVirtualServerPoolWrong, vmwNsxMConfigGroup=vmwNsxMConfigGroup, vmwNsxMGuestIntrspctnEsxConnFailed=vmwNsxMGuestIntrspctnEsxConnFailed, vmwNsxMCntnrUpdatePublishFailed=vmwNsxMCntnrUpdatePublishFailed, vmwNsxMSnmpDisabled=vmwNsxMSnmpDisabled, vmwNsxMFirewallClusterInstalled=vmwNsxMFirewallClusterInstalled, vmwNsxMFabricDependenciesNotInstalled=vmwNsxMFabricDependenciesNotInstalled, vmwNsxMMessagingConfigFailedNotifSkip=vmwNsxMMessagingConfigFailedNotifSkip, vmwNsxMAsyncRest=vmwNsxMAsyncRest, vmwNsxMFirewallRuleModified=vmwNsxMFirewallRuleModified, vmwNsxMAsyncRestPrefix=vmwNsxMAsyncRestPrefix, vmwNsxMExtensionRegistration=vmwNsxMExtensionRegistration, vmwNsxMEdgePrefix=vmwNsxMEdgePrefix, vmwNsxMServiceComposerGuestPolicyOutOfSync=vmwNsxMServiceComposerGuestPolicyOutOfSync, vmwNsxMVxlanLogicalSwitchImproperlyCnfg=vmwNsxMVxlanLogicalSwitchImproperlyCnfg, 
vmwNsxMEdgeCommAgentNotConnected=vmwNsxMEdgeCommAgentNotConnected, vmwNsxMFirewallForceSyncClusterFailed=vmwNsxMFirewallForceSyncClusterFailed, vmwNsxMEdgeVmBooted=vmwNsxMEdgeVmBooted, vmwNsxMDepPluginVmReboot=vmwNsxMDepPluginVmReboot, vmwNsxMDataSecScanStarted=vmwNsxMDataSecScanStarted, vmwNsxMVxlanVmknicMissingOrDeleted=vmwNsxMVxlanVmknicMissingOrDeleted, vmwNsxMVmRemovedFromSg=vmwNsxMVmRemovedFromSg, vmwNsxMEdge=vmwNsxMEdge, vmwNsxMVmAddedToSg=vmwNsxMVmAddedToSg, vmwNsxMUsvmHeartbeatResumed=vmwNsxMUsvmHeartbeatResumed, vmwNsxManagerNotificationInfoGroup1=vmwNsxManagerNotificationInfoGroup1, vmwNsxMVxlanInstanceDoesNotExist=vmwNsxMVxlanInstanceDoesNotExist, vmwNsxMSpoofGuardDisableFail=vmwNsxMSpoofGuardDisableFail, vmwNsxMConnPerSecThrshldCrossed=vmwNsxMConnPerSecThrshldCrossed, vmwNsxMSamDataCollectionEnabled=vmwNsxMSamDataCollectionEnabled, vmwNsxMGlobalLbMemberDown=vmwNsxMGlobalLbMemberDown, vmwNsxMVxlanTransportZoneIncorrectlyWrkng=vmwNsxMVxlanTransportZoneIncorrectlyWrkng, vmwNsxMFirewallConfigUpdateFailed=vmwNsxMFirewallConfigUpdateFailed, vmwNsxMGuestIntrspctnIncompatibleEsx=vmwNsxMGuestIntrspctnIncompatibleEsx, vmwNsxMGlobalLbMemberWarning=vmwNsxMGlobalLbMemberWarning, vmwNsxMEdgeVmInBadState=vmwNsxMEdgeVmInBadState, vmwNsxMFltrCreatedForVnic=vmwNsxMFltrCreatedForVnic, vmwNsxMMessagingPrefix=vmwNsxMMessagingPrefix, vmwNsxMFirewallCnfgUpdateOnDltCntnr=vmwNsxMFirewallCnfgUpdateOnDltCntnr, vmwNsxMFirewall=vmwNsxMFirewall, vmwNsxMEdgeSplitBrainDetected=vmwNsxMEdgeSplitBrainDetected, vmwNsxMSamDataResumedFlowing=vmwNsxMSamDataResumedFlowing, vmwNsxMGlobalLbMemberUnknown=vmwNsxMGlobalLbMemberUnknown, vmwNsxMServerUp=vmwNsxMServerUp, vmwNsxMFltrCnfgUpdateFailed=vmwNsxMFltrCnfgUpdateFailed, vmwNsxMServiceComposer=vmwNsxMServiceComposer, vmwNsxMIpAddedBlackList=vmwNsxMIpAddedBlackList, vmwNsxMEventSeverity=vmwNsxMEventSeverity, vmwNsxMFirewallPrefix=vmwNsxMFirewallPrefix, vmwNsxMEdgeGatewayCreated=vmwNsxMEdgeGatewayCreated, 
vmwNsxMEndpointPrefix=vmwNsxMEndpointPrefix, vmwNsxMFirewallPublishFailed=vmwNsxMFirewallPublishFailed, vmwNsxMVxlanControllerInactive=vmwNsxMVxlanControllerInactive, vmwNsxMSpoofGuardDisabled=vmwNsxMSpoofGuardDisabled, vmwNsxMLbVirtualServerPoolUp=vmwNsxMLbVirtualServerPoolUp, vmwNsxMVcDisconnected=vmwNsxMVcDisconnected, vmwNsxMSvmOperationsPrefix=vmwNsxMSvmOperationsPrefix, vmwNsxMSsoUnconfigured=vmwNsxMSsoUnconfigured)
# Second batch of exported symbols (continuation of the registration above).
mibBuilder.exportSymbols("VMWARE-NSX-MANAGER-MIB", vmwNsxMEsxModuleUninstalled=vmwNsxMEsxModuleUninstalled, vmwNsxMDataSecScanEnded=vmwNsxMDataSecScanEnded, vmwNsxMVxlanLogicalSwitchProperlyCnfg=vmwNsxMVxlanLogicalSwitchProperlyCnfg, vmwNsxMDataStoreNotCnfgrdOnHost=vmwNsxMDataStoreNotCnfgrdOnHost, vmwNsxMVxlanControllerActive=vmwNsxMVxlanControllerActive, vmwNsxMFabricConnEamRestored=vmwNsxMFabricConnEamRestored, vmwNsxMExtensionRegistered=vmwNsxMExtensionRegistered, vmwNsxMEdgeNoVmServing=vmwNsxMEdgeNoVmServing, vmwNsxMServiceComposerOutOfSyncDraftSettingFailure=vmwNsxMServiceComposerOutOfSyncDraftSettingFailure, vmwNsxMServiceComposerOutOfSyncSectionDeletionFailure=vmwNsxMServiceComposerOutOfSyncSectionDeletionFailure, vmwNsxMFltrDeletedForVnic=vmwNsxMFltrDeletedForVnic, vmwNsxMEdgeUpgrade51x=vmwNsxMEdgeUpgrade51x, vmwNsxMFirewallRuleAppliedVnic=vmwNsxMFirewallRuleAppliedVnic, vmwNsxMEdgeHealthCheckMiss=vmwNsxMEdgeHealthCheckMiss, vmwNsxMEdgeHaSwitchOverActive=vmwNsxMEdgeHaSwitchOverActive, vmwNsxMFabricDplymntUnitDestroyed=vmwNsxMFabricDplymntUnitDestroyed, vmwNsxMFabricBackingEamNotFound=vmwNsxMFabricBackingEamNotFound, vmwNsxMFirewallVsfwdProcessStarted=vmwNsxMFirewallVsfwdProcessStarted, vmwNsxMCount=vmwNsxMCount, vmwNsxMEdgeResourceReservationFailure=vmwNsxMEdgeResourceReservationFailure, vmwNsxMUniversalSyncFailedForEntity=vmwNsxMUniversalSyncFailedForEntity, vmwNsxMVsmCore=vmwNsxMVsmCore, vmwNsxMIpsecTunnelUp=vmwNsxMIpsecTunnelUp, vmwNsxMSnmpPrefix=vmwNsxMSnmpPrefix)
# (extraction artifact: stray dataset-statistics row "| 266.031542 | 14,186 | 0.793095 |" is not part of this module)
# Resolve the SMI/TC base types this module builds on from the MIB builder.
# ASN.1 base types and constraint helpers.
ObjectIdentifier, Integer, OctetString = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "Integer", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueRangeConstraint, SingleValueConstraint, ValueSizeConstraint, ConstraintsUnion, ConstraintsIntersection = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueRangeConstraint", "SingleValueConstraint", "ValueSizeConstraint", "ConstraintsUnion", "ConstraintsIntersection")
# SMIv2 conformance macros (used for the compliance/group statements below).
NotificationGroup, ObjectGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ObjectGroup", "ModuleCompliance")
# Core SMI node and scalar types.
MibIdentifier, Bits, Integer32, MibScalar, MibTable, MibTableRow, MibTableColumn, TimeTicks, Unsigned32, iso, NotificationType, Counter64, ObjectIdentity, Counter32, ModuleIdentity, Gauge32, IpAddress = mibBuilder.importSymbols("SNMPv2-SMI", "MibIdentifier", "Bits", "Integer32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "TimeTicks", "Unsigned32", "iso", "NotificationType", "Counter64", "ObjectIdentity", "Counter32", "ModuleIdentity", "Gauge32", "IpAddress")
TextualConvention, DisplayString, DateAndTime = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString", "DateAndTime")
UUID, = mibBuilder.importSymbols("UUID-TC-MIB", "UUID")
# Root OID for NSX Manager objects, defined in VMware's root MIB.
vmwNsxManager, = mibBuilder.importSymbols("VMWARE-ROOT-MIB", "vmwNsxManager")
# MODULE-IDENTITY for VMWARE-NSX-MANAGER-MIB, rooted at
# enterprises.vmware(6876).90.1; revision/contact/description text below is
# carried verbatim from the compiled MIB.
vmwNsxManagerMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1))
vmwNsxManagerMIB.setRevisions(('2016-06-02 00:00',))
# Compiler-generated pysnmp version gate around setRevisionsDescriptions().
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    if mibBuilder.loadTexts: vmwNsxManagerMIB.setRevisionsDescriptions(('This is the initial version of the NSX Manager MIB. It describes all the notifications sent from the NSX Manager appliance. WARNING: This mib module will not be backward compatible with next version. ',))
if mibBuilder.loadTexts: vmwNsxManagerMIB.setLastUpdated('201606020000Z')
if mibBuilder.loadTexts: vmwNsxManagerMIB.setOrganization('VMware, Inc')
if mibBuilder.loadTexts: vmwNsxManagerMIB.setContactInfo('VMware, Inc 3401 Hillview Ave Palo Alto, CA 94304 Tel: 1-877-486-9273 or 650-427-5000 Fax: 650-427-5001 Web: http://communities.vmware.com/community/developer/forums/managementapi ')
if mibBuilder.loadTexts: vmwNsxManagerMIB.setDescription('This MIB file contains the information that the receiving party needs in order to interpret SNMP traps sent by NSX Manager. VMware NSX for vSphere is a key product in the SDDC architecture. With NSX, virtualization delivers for networking what it has already delivered for compute and storage. In much the same way that server virtualization programmatically creates, snapshots, deletes and restores software-based virtual machines (VMs), NSX network virtualization programmatically creates, snapshots, deletes, and restores software-based virtual networks. The result is a completely transformative approach to networking that not only enables data center managers to achieve orders of magnitude better agility and economics, but also allows for a vastly simplified operational model for the underlying physical network. With the ability to be deployed on any IP network, including both existing traditional networking models and next-generation fabric architectures from any vendor, NSX is a completely non-disruptive solution. In fact, with NSX, the physical network infrastructure you already have is all you need to deploy a software-defined data center. The NSX Manager provides the graphical user interface (GUI) and the REST APIs for creating, configuring, and monitoring NSX components, such as controllers, logical switches, and edge services gateways. The NSX Manager provides an aggregated system view and is the centralized network management component of NSX. NSX Manager is installed as a virtual appliance on any ESX host in your vCenter environment. Support requests can be filed with VMware using KB article: http://kb.vmware.com/kb/2006985 To reach NSX Manager Service Composer UI, login to vSphere UI(https://<vsphere-ip>)->Networking & Security->Service Composer')
class VmwNsxManagerTypeSeverity(TextualConvention, Integer32):
    """SMIv2 textual convention enumerating NSX Manager event severities.

    Used as the SYNTAX of vmwNsxMEventSeverity below. The class attributes
    (description/status/subtypeSpec/namedValues) are declarative metadata
    consumed by the pysnmp SMI machinery, not ordinary instance state.
    """
    description = 'Severity enumeration definition of NSX Manager events'
    status = 'current'
    # Value set is restricted to exactly 1..6.
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))
    # NOTE(review): 'high'(6) is numerically above 'critical'(5) in this MIB's
    # enumeration; do not assume the integer value orders by urgency.
    namedValues = NamedValues(("informational", 1), ("low", 2), ("medium", 3), ("major", 4), ("critical", 5), ("high", 6))
# --- Alert payload subtree: vmwNsxMAlertData (1.3.6.1.4.1.6876.90.1.1) ---
# The MibScalar objects under this branch are the VarBinds carried inside
# every NSX Manager trap. Each is registered "accessiblefornotify": it can
# only appear in notifications, never be polled with GET/GETNEXT.
# The `if mibBuilder.loadTexts:` guards attach DESCRIPTION/STATUS text only
# when the MIB builder was configured to load texts (saves memory otherwise).
vmwNsxMAlertData = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 1))
if mibBuilder.loadTexts: vmwNsxMAlertData.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMAlertData.setDescription('This members of this group are the OIDs for VarBinds that contain data for ALL Alerts.')
# Root for all NSX Manager notification OIDs (sibling branch .90.1.2).
vmwNsxMNotification = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2))
if mibBuilder.loadTexts: vmwNsxMNotification.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMNotification.setDescription('All notifications for NSX Manager use this oid prefix.')
# .1.1: event code (positive Integer32), uniquely identifies the event type.
vmwNsxMEventCode = MibScalar((1, 3, 6, 1, 4, 1, 6876, 90, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: vmwNsxMEventCode.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEventCode.setDescription('The event code of the alert that was generated. To fetch a list of all the events with their code, severity and description please invoke the nsx-manager url https://<nsx-manager-host>/api/2.0/systemevent/eventcode . The event code specifically identifies each individual event type. This event code is uniquely assigned only once to a particular event type.')
# .1.2: time the event was raised on the manager (SNMPv2-TC DateAndTime).
vmwNsxMEventTimestamp = MibScalar((1, 3, 6, 1, 4, 1, 6876, 90, 1, 1, 2), DateAndTime()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: vmwNsxMEventTimestamp.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEventTimestamp.setDescription('The timestamp when the event was raised in the NSX Manager.')
# .1.3: free-form human readable message text.
vmwNsxMEventMessage = MibScalar((1, 3, 6, 1, 4, 1, 6876, 90, 1, 1, 3), OctetString()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: vmwNsxMEventMessage.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEventMessage.setDescription('This object provides a human readable description of the event or group of events')
# .1.4: severity, typed by the VmwNsxManagerTypeSeverity TC defined above.
vmwNsxMEventSeverity = MibScalar((1, 3, 6, 1, 4, 1, 6876, 90, 1, 1, 4), VmwNsxManagerTypeSeverity()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: vmwNsxMEventSeverity.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEventSeverity.setDescription('The severity for the event that was generated. The severity is pre-defined and can only be changed from the NSX Manager section of vsphere web client if the administrator so wishes.')
# .1.5: name of the NSX Manager component that raised the event.
vmwNsxMEventComponent = MibScalar((1, 3, 6, 1, 4, 1, 6876, 90, 1, 1, 5), OctetString()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: vmwNsxMEventComponent.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEventComponent.setDescription('The NSX manager component where this event was generated.')
# .1.6: UUID of the originating NSX Manager (UUID TC imported earlier in file).
vmwNsxMUuid = MibScalar((1, 3, 6, 1, 4, 1, 6876, 90, 1, 1, 6), UUID()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: vmwNsxMUuid.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMUuid.setDescription('The NSX manager UUID where this event was generated.')
# .1.7: per-group event count over a 5-minute interval (used by grouped traps).
vmwNsxMCount = MibScalar((1, 3, 6, 1, 4, 1, 6876, 90, 1, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: vmwNsxMCount.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMCount.setDescription('The count of the number of events for a particular group raised in the last 5 minute interval.')
# --- Grouped-notification subtree: .90.1.2.0 ---
# SMIv2 convention: a zero is placed in the penultimate sub-identifier of a
# notification OID, which is why the .0 prefix objects below exist.
vmwNsxMBranch = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 0))
if mibBuilder.loadTexts: vmwNsxMBranch.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMBranch.setDescription('Branch segregated out for various groups and other future requirements.')
vmwNsxMGroupsBranch = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 0, 1))
if mibBuilder.loadTexts: vmwNsxMGroupsBranch.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMGroupsBranch.setDescription('Grouped Notifications will have this OID prefix.')
vmwNsxMGroupsPrefix = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 0, 1, 0))
if mibBuilder.loadTexts: vmwNsxMGroupsPrefix.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMGroupsPrefix.setDescription('Prefix added to place zero in penultimate sub-identifier of group oids.')
# Grouped configuration trap. Unlike the per-event notifications below, its
# varbinds carry a count (vmwNsxMCount) instead of event code/severity.
vmwNsxMConfigGroup = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 0, 1, 0, 1)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMCount"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMConfigGroup.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMConfigGroup.setDescription('Configuration notifications that are grouped will have this OID prefix.')
# --- SNMP module subtree: .90.1.2.1 ---
# Traps about the trap subsystem itself. Every notification in this file
# carries the same six varbinds from the vmwNsxMAlertData branch
# (event code, timestamp, message, severity, component, manager UUID).
vmwNsxMSnmp = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 1))
if mibBuilder.loadTexts: vmwNsxMSnmp.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMSnmp.setDescription('Notifications that are Snmp related will have this OID prefix.')
vmwNsxMSnmpPrefix = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 1, 0))
if mibBuilder.loadTexts: vmwNsxMSnmpPrefix.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMSnmpPrefix.setDescription("This group is actually the prefix one uses when creating vmware NSX manager specific trap OID's for Snmp module.")
# .1.0.1: trap sending was disabled — likely the last trap a manager receives.
vmwNsxMSnmpDisabled = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 1, 0, 1)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMSnmpDisabled.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMSnmpDisabled.setDescription('This notification is sent when the sending out of Snmp traps is disabled. This would most likely be the last Snmp trap the snmp manager receives. You may some times not receive it in case of high volume of traps. In those cases you can rely on the heartbeat traps not being sent out. Action required: None. If the sending of Snmp traps is enabled a warmStart trap is received. Frequency of traps: Once, whenever the sending snmp traps is disabled.')
# .1.0.2: SNMP manager (trap receiver) configuration changed.
vmwNsxMSnmpManagerConfigUpdated = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 1, 0, 2)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMSnmpManagerConfigUpdated.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMSnmpManagerConfigUpdated.setDescription("This notification is sent when the snmp manager configuration has been updated. The event message will carry the semicolon separated new snmp managers' details. Action required: None Frequency of traps: Once, whenever the Snmp manager configuration is updated.")
# --- Security module subtree: .90.1.2.2 ---
# Authentication, blacklist, SSO/lookup-service, RBAC and vCenter-connectivity
# traps. All carry the standard six alert-data varbinds.
vmwNsxMSecurity = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 2))
if mibBuilder.loadTexts: vmwNsxMSecurity.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMSecurity.setDescription('Notifications that are security related will have this OID prefix.')
vmwNsxMSecurityPrefix = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 2, 0))
if mibBuilder.loadTexts: vmwNsxMSecurityPrefix.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMSecurityPrefix.setDescription("This group is actually the prefix one uses when creating vmware NSX manager specific trap OID's for security module.")
# .2.0.1 / .2.0.2: user blacklisted after repeated auth failures / un-blacklisted.
vmwNsxMIpAddedBlackList = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 2, 0, 1)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMIpAddedBlackList.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMIpAddedBlackList.setDescription('Whenever user authentication fails for number of times that user is blacklisted and further login attempts are disabled for that user from given IP address for some time. Action required: None Frequency of traps: Whenever user authentication fails consecutively within some time.')
vmwNsxMIpRemovedBlackList = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 2, 0, 2)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMIpRemovedBlackList.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMIpRemovedBlackList.setDescription('After user is blacklisted, after blacklist duration expires, user is removed from blacklist. Action required: None Frequency of traps: Whenever blacklist duration expires for any user.')
# .2.0.3 / .2.0.4: SSO lookup-service configuration failed / was removed.
vmwNsxMSsoConfigFailure = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 2, 0, 3)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMSsoConfigFailure.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMSsoConfigFailure.setDescription('Whenever configuration of lookup service / SSO fails due to various reasons like invalid credentials, invalid configuration, time sync problem etc. Action required: Check the event message and reconfigure lookup service with correct details. Frequency of traps: Once per failed configuration of lookup service.')
vmwNsxMSsoUnconfigured = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 2, 0, 4)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMSsoUnconfigured.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMSsoUnconfigured.setDescription('Whenever user unconfigures lookup service. Action required: None Frequency of traps: Once per unconfiguration event of lookup service.')
# .2.0.5 - .2.0.8: RBAC role assigned/unassigned for vCenter users and groups.
vmwNsxMUserRoleAssigned = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 2, 0, 5)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMUserRoleAssigned.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMUserRoleAssigned.setDescription('When role is assigned on NSX manager for vCenter user. Action required: None Frequency of traps: Once for each user who is assigned role.')
vmwNsxMUserRoleUnassigned = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 2, 0, 6)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMUserRoleUnassigned.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMUserRoleUnassigned.setDescription('When role is unassigned on NSX manager for vCenter user. Action: None Frequency of traps: Once for each user where role is removed.')
vmwNsxMGroupRoleAssigned = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 2, 0, 7)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMGroupRoleAssigned.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMGroupRoleAssigned.setDescription('When role is assigned on NSX manager for vCenter group. Action required: None Frequency of traps: Once for each group who is assigned role.')
vmwNsxMGroupRoleUnassigned = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 2, 0, 8)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMGroupRoleUnassigned.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMGroupRoleUnassigned.setDescription('When role is unassigned on NSX manager for vCenter group. Action required: None Frequency of traps: Once for each group where role is removed.')
# .2.0.9 - .2.0.12: vCenter / SSO connectivity failures. Note .10 and .11
# carry identical description text in the source MIB.
vmwNsxMVcLoginFailed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 2, 0, 9)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMVcLoginFailed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMVcLoginFailed.setDescription('Whenever Connection with vCenter starts failing due to invalid credentials. Action required: Reconfigure NSX Manager vCenter configuration with correct credentials.')
vmwNsxMVcDisconnected = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 2, 0, 10)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMVcDisconnected.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMVcDisconnected.setDescription('Whenever there is disconnectivity for default VCenter Connection maintained by NSX. Action required: Administrator needs to check the connectivity with vCenter for network problems or any other reasons.')
vmwNsxMLostVcConnectivity = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 2, 0, 11)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMLostVcConnectivity.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMLostVcConnectivity.setDescription('Whenever there is disconnectivity for default VCenter Connection maintained by NSX. Action required: Administrator needs to check the connectivity with vCenter for network problems or any other reasons.')
vmwNsxMSsoDisconnected = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 2, 0, 12)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMSsoDisconnected.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMSsoDisconnected.setDescription('Whenever there is disconnection with SSO lookup service. Action required: Please check the configuration for possible disconnection reasons like Invalid Credentials, Time sync issues, Network connectivity problems etc. Navigate to Appliance management Web UI in browser (https://<nsx-manager-host>/) traverse to Manage vCenter Registration tab and verify the configuration for SSO Lookupservice. Frequency of traps: Once per disconnect event, default frequency to check SSO connection state is 1 hour.')
# --- Firewall (DFW) module subtree: .90.1.2.3 ---
# Distributed Firewall enforcement, spoofguard and resource-threshold traps.
# All carry the standard six alert-data varbinds.
vmwNsxMFirewall = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3))
if mibBuilder.loadTexts: vmwNsxMFirewall.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFirewall.setDescription('Notifications that are firewall related will have this OID prefix.')
vmwNsxMFirewallPrefix = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0))
if mibBuilder.loadTexts: vmwNsxMFirewallPrefix.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFirewallPrefix.setDescription("This group is actually the prefix one uses when creating vmware NSX manager specific trap OID's for firewall module.")
# .3.0.1 - .3.0.5: filter (vnic-level DFW) config lifecycle.
vmwNsxMFltrCnfgUpdateFailed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 1)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFltrCnfgUpdateFailed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFltrCnfgUpdateFailed.setDescription('NSX Manager failed to enforce DFW. VMs on this host may not be protected by the DFW. Contextual data provided with this event may indicate the cause of this failure. This could happen if the VIB version mismatches on the NSX Manager and ESX host. This may happen during an upgrade. Please collect ESX and NSX Manager tech support bundle and open a SR with Vmware techsupport. See http://kb.vmware.com/kb/2074678 and http://kb.vmware.com/kb/1010705')
vmwNsxMFltrCnfgNotAppliedToVnic = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 2)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFltrCnfgNotAppliedToVnic.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFltrCnfgNotAppliedToVnic.setDescription('NSX Manager failed to enforce DFW configuration on a vnic. This particular VM may not be protected by the DFW. Contextual data provided with this event may indicate the cause of this failure.This could happen if the VIB version mismatches on the NSX Manager and ESX host. This may happen during an upgrade. Please collect ESX and NSX Manager tech support bundle and open a SR with Vmware techsupport. See http://kb.vmware.com/kb/2074678 and http://kb.vmware.com/kb/1010705')
vmwNsxMFltrCnfgAppliedToVnic = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 3)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFltrCnfgAppliedToVnic.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFltrCnfgAppliedToVnic.setDescription('Successfully updated filter config. Action required: None')
vmwNsxMFltrCreatedForVnic = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 4)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFltrCreatedForVnic.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFltrCreatedForVnic.setDescription('Filter created. DFW is enforced in the datapath for the vnic. Action required: None')
vmwNsxMFltrDeletedForVnic = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 5)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFltrDeletedForVnic.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFltrDeletedForVnic.setDescription('Filter deleted. DFW is removed from the vnic. Action required: None')
# .3.0.6 - .3.0.10: firewall-rule and container config enforcement results.
vmwNsxMFirewallConfigUpdateFailed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 6)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFirewallConfigUpdateFailed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFirewallConfigUpdateFailed.setDescription('Firewall rule Configuration between the NSX Manager and the host is not in sync. Contextual data provided with this event may indicate the cause of this failure. Verify that the host in question was properly prepared by NSX Manager. Collect error logs (vsfwd.log) when the host received firewall config. Force sync firewall config using ForceSync API/UI. See kb.vmware.com/kb/2125437 . If the issue persists, please collect ESX and NSX Manager tech support bundle and open a SR with Vmware techsupport. See http://kb.vmware.com/kb/2074678 and http://kb.vmware.com/kb/1010705')
vmwNsxMFirewallRuleFailedVnic = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 7)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFirewallRuleFailedVnic.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFirewallRuleFailedVnic.setDescription('Failed to apply Distributed Firewall configuration. Contextual data provided with this event may indicate the cause of this failure. Collect error logs (vmkernel.log) when the firewall configuration was applied to the vnic. vsip kernel heaps may not have enough free memory. Check VSFWD logs . See kb.vmware.com/kb/2125437. If the issue persists, please collect the ESX and NSX Manager tech support bundle and open a SR with Vmware techsupport. See http://kb.vmware.com/kb/2074678 and http://kb.vmware.com/kb/1010705')
vmwNsxMFirewallRuleAppliedVnic = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 8)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFirewallRuleAppliedVnic.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFirewallRuleAppliedVnic.setDescription('Applied firewall config. Key value will have context info like generation number and also other debugging info. Action required: None')
vmwNsxMCntnrCnfgUpdateFailed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 9)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMCntnrCnfgUpdateFailed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMCntnrCnfgUpdateFailed.setDescription('Failed receive, parse or update the container configuration. Contextual data provided with this event may indicate the cause of this failure. Collect error logs (vmkernel.log) when firewall configuration was applied to the vnic. Verify that vsip kernel heaps have enough free memory. Check VSFWD logs. See kb.vmware.com/kb/2125437 . If the issue persists, please collect ESX and NSX Manager tech support bundle and open a SR with Vmware techsupport. See http://kb.vmware.com/kb/2074678 and http://kb.vmware.com/kb/1010705')
vmwNsxMFlowMissed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 10)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFlowMissed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFlowMissed.setDescription('Flow missed. Contextual data provided with this event may indicate the cause of this failure. Collect error logs (vmkernel.log) when firewall configuration was applied to the vnic. Verify that vsip kernel heaps have enough free memory and vsfwd memory consumption is within resource limits. Check VSFWD logs. See kb.vmware.com/kb/2125437. If the issue persists, please collect ESX and NSX Manager tech support bundle and open a SR with Vmware techsupport. See http://kb.vmware.com/kb/2074678 and http://kb.vmware.com/kb/1010705')
# .3.0.11 - .3.0.16: spoofguard config/apply/disable results and legacy
# application-service VM deletion failure.
vmwNsxMSpoofGuardCnfgUpdateFailed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 11)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMSpoofGuardCnfgUpdateFailed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMSpoofGuardCnfgUpdateFailed.setDescription('Failed to receive, parse or Update the spoofguard configuration. Contextual data provided with this event may indicate the cause of this failure. Verify that the host in question was properly prepared by NSX Manager. Collect error logs (vmkernel.log) when the spoofguard configuration was applied to the host. For Sync the firewall configuration . See kb.vmware.com/kb/2125437.')
vmwNsxMSpoofGuardFailed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 12)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMSpoofGuardFailed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMSpoofGuardFailed.setDescription('Failed to apply spoofguard to the vnic. Contextual data provided with this event may indicate the cause of this failure. Verify that vsip kernel heaps have enough free memory. Please collect ESX and NSX Manager tech support bundle and open a SR with Vmware techsupport. See http://kb.vmware.com/kb/2074678 and http://kb.vmware.com/kb/1010705')
vmwNsxMSpoofGuardApplied = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 13)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMSpoofGuardApplied.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMSpoofGuardApplied.setDescription('Enabled spoofguard for vnic. Action required: None')
vmwNsxMSpoofGuardDisableFail = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 14)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMSpoofGuardDisableFail.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMSpoofGuardDisableFail.setDescription('Failed to disable spoofguard on the vnic. Please collect ESX and NSX Manager tech support bundle and open a SR with Vmware techsupport. See http://kb.vmware.com/kb/2074678 and http://kb.vmware.com/kb/1010705')
vmwNsxMSpoofGuardDisabled = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 15)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMSpoofGuardDisabled.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMSpoofGuardDisabled.setDescription('Disabled spoofguard for vnic. Action required: None')
vmwNsxMLegacyAppServiceDeletionFailed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 16)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMLegacyAppServiceDeletionFailed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMLegacyAppServiceDeletionFailed.setDescription('A notification generated when legacy application service VM deletion failed.')
# .3.0.17 - .3.0.20: vsfwd resource-threshold and publish-timeout traps.
vmwNsxMFirewallCpuThresholdCrossed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 17)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFirewallCpuThresholdCrossed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFirewallCpuThresholdCrossed.setDescription('vsfwd CPU usage threshold was exceeded. Reduce the amount of traffic of VMs on the host in question.')
vmwNsxMFirewallMemThresholdCrossed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 18)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFirewallMemThresholdCrossed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFirewallMemThresholdCrossed.setDescription('vsfwd memory threshold exceeded. Reduce the number of of VMs on the host in question, reduce the number of rules or containers in firewall config. Use appliedTo feature to limit the number of rules for the current cluster.')
vmwNsxMConnPerSecThrshldCrossed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 19)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMConnPerSecThrshldCrossed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMConnPerSecThrshldCrossed.setDescription('vsfwd Connectons Per Second (CPS) threshold exceeded. Reduce the amount of new connections of VMs on the host in question.')
vmwNsxMFirewallCnfgUpdateTimedOut = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 20)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFirewallCnfgUpdateTimedOut.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFirewallCnfgUpdateTimedOut.setDescription('NSX Manager waits for 2 minutes after publishing the Firewall configuration to each host in the cluster. If a host takes more than 2 minutes to process the data, it times out. Please check the Host in question. See if VSFWD is functioning or not. Also use CLI commands to verify if the rule realization is working properly or not. See kb.vmware.com/kb/2125437. Please collect ESX and NSX Manager tech support bundle and open a SR with Vmware techsupport. See http://kb.vmware.com/kb/2074678 and http://kb.vmware.com/kb/1010705')
vmwNsxMSpoofGuardCnfgUpdateTmOut = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 21)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMSpoofGuardCnfgUpdateTmOut.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMSpoofGuardCnfgUpdateTmOut.setDescription('NSX Manager waits for 2 minutes after publishing the Spoofguard configuration to each host in the cluster. If a host takes more than 2 minutes to process the data, it times out. Please check the Host in question. See if VSFWD is functioning or not. Also use CLI commands to verify if the rule realization is working properly or not. See kb.vmware.com/kb/2125437. Please collect ESX and NSX Manager tech support bundle and open a SR with Vmware techsupport. See http://kb.vmware.com/kb/2074678 and http://kb.vmware.com/kb/1010705')
vmwNsxMFirewallPublishFailed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 22)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFirewallPublishFailed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFirewallPublishFailed.setDescription('Firewall Configuration Publishing has failed for a given cluster/host. Please collect ESX and NSX Manager tech support bundle and open a SR with Vmware techsupport. See http://kb.vmware.com/kb/2074678 and http://kb.vmware.com/kb/1010705')
vmwNsxMCntnrUpdatePublishFailed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 23)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMCntnrUpdatePublishFailed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMCntnrUpdatePublishFailed.setDescription('Publishing of container (IP/MAC/vNIC) update pdate failed for a given host/cluster object. Please collect ESX and NSX Manager tech support bundle and open a SR with Vmware techsupport. See http://kb.vmware.com/kb/2074678 and http://kb.vmware.com/kb/1010705')
vmwNsxMSpoofGuardUpdatePublishFailed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 24)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMSpoofGuardUpdatePublishFailed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMSpoofGuardUpdatePublishFailed.setDescription('The publishing of the spoofguard updates on this host has failed. Please collect ESX and NSX Manager tech support bundle and open a SR with Vmware techsupport. See http://kb.vmware.com/kb/2074678 and http://kb.vmware.com/kb/1010705')
vmwNsxMExcludeListPublishFailed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 25)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMExcludeListPublishFailed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMExcludeListPublishFailed.setDescription('The publishing of the exclude list or updates to the exclude list on this host has failed. Please collect ESX and NSX Manager tech support bundle and open a SR with Vmware techsupport. See http://kb.vmware.com/kb/2074678 and http://kb.vmware.com/kb/1010705')
vmwNsxMFirewallCnfgUpdateOnDltCntnr = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 26)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFirewallCnfgUpdateOnDltCntnr.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFirewallCnfgUpdateOnDltCntnr.setDescription('Deletion of the object referenced in firewall rules. Action required: Go to the NSX manager DFW UI. All the invalid reference are marked invalid on the UI as well. Please remove the orphaned referenced and update the firewall rules.')
vmwNsxMHostSyncFailed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 27)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMHostSyncFailed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMHostSyncFailed.setDescription('Host-level force synchronization has failed. Please collect ESX and NSX Manager tech support bundle and open a SR with Vmware techsupport. See http://kb.vmware.com/kb/2074678 and http://kb.vmware.com/kb/1010705')
vmwNsxMHostSynced = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 28)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMHostSynced.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMHostSynced.setDescription('Force Sync operation for host succeeded. Action required: None')
vmwNsxMFirewallInstalled = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 29)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFirewallInstalled.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFirewallInstalled.setDescription('The Distributed Firewall was successfully Installed on the host.')
vmwNsxMFirewallInstallFailed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 30)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFirewallInstallFailed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFirewallInstallFailed.setDescription('The Distributed Firewall Installation has failed. Please collect ESX and NSX Manager tech support bundle and open a SR with Vmware techsupport. See http://kb.vmware.com/kb/2074678 and http://kb.vmware.com/kb/1010705')
vmwNsxMFirewallClusterInstalled = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 31)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFirewallClusterInstalled.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFirewallClusterInstalled.setDescription('The Distributed Firewall has been installed at the request of a user.')
vmwNsxMFirewallClusterUninstalled = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 32)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFirewallClusterUninstalled.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFirewallClusterUninstalled.setDescription('The Distributed Firewall has been uninstalled at the request of a user.')
vmwNsxMFirewallClusterDisabled = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 33)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFirewallClusterDisabled.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFirewallClusterDisabled.setDescription('The Distributed Firewall has been disabeld on the cluster at the request of a user.')
vmwNsxMFirewallForceSyncClusterFailed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 34)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFirewallForceSyncClusterFailed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFirewallForceSyncClusterFailed.setDescription('Force Sync operation for the cluster has failed. Use CLI commands to look at the logs and verify if any error messages appeared during the operation. See kb.vmware.com/kb/2125437. Please collect ESX and NSX Manager tech support bundle and open a SR with Vmware techsupport. See http://kb.vmware.com/kb/2074678 and http://kb.vmware.com/kb/1010705')
vmwNsxMFirewallForceSyncClusterSuccess = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 35)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFirewallForceSyncClusterSuccess.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFirewallForceSyncClusterSuccess.setDescription('Force Sync operation for cluster succeeded. Action required: None')
vmwNsxMFirewallVsfwdProcessStarted = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 3, 0, 36)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFirewallVsfwdProcessStarted.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFirewallVsfwdProcessStarted.setDescription('vsfwd process started on host. Action required: None')
# Root OID nodes for the Edge notification subtree.
# vmwNsxMEdge (…90.1.2.4) groups all edge-related notifications;
# vmwNsxMEdgePrefix (…90.1.2.4.0) is the prefix under which the individual
# edge trap OIDs are allocated.
vmwNsxMEdge = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4))
vmwNsxMEdgePrefix = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4, 0))
if mibBuilder.loadTexts:
    vmwNsxMEdge.setStatus('current')
    vmwNsxMEdge.setDescription('Notifications that are edge related will have this OID prefix.')
    vmwNsxMEdgePrefix.setStatus('current')
    vmwNsxMEdgePrefix.setDescription("This group is actually the prefix one uses when creating vmware NSX manager specific trap OID's for edge module.")
# Edge notification types, trap ids 1-29 under the OID prefix
# 1.3.6.1.4.1.6876.90.1.2.4.0.  As with the other notification groups in
# this module, every trap carries the same six varbinds, so the definitions
# are generated from a (trap id, symbol name, description) table.
# NOTE: description text is reproduced byte-for-byte from the published
# VMware MIB, including its original typos (e.g. "comunication",
# "Commnunicaiton", "triggres", "Actione") -- do not correct them here.
_vmwNsxMEdgeTrapVarBinds = (
    ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"),
    ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"),
    ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"),
    ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"),
    ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"),
    ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"),
)
_vmwNsxMEdgeTraps = (
    (1, 'vmwNsxMEdgeNoVmServing', 'None of the Edge VMs found in serving state. There is a possibility of network disruption. Action required: System auto recovers from this state today. Event should be followed by traps with event code 30202 or 30203'),
    (2, 'vmwNsxMEdgeGatewayCreated', 'Edge Gateway created. Action required: None'),
    (3, 'vmwNsxMEdgeVmBadState', 'Edge VM in bad state. Needs a force sync. Action required: System auto triggres force sync but if problem is sustained then manual force sync should be triggered. For ESG force sync is disruptive and will reboot edge VMs.'),
    (4, 'vmwNsxMEdgeVmCommFailed', 'Failed to communicate with the Edge VM. Action required: Need investigation depending upon comunication channel. Log needs to be checked for VIX error code for futher action.'),
    (5, 'vmwNsxMEdgeVmCnfgChanged', 'A notification generated when NSX Edge VM configuration is changed. Action required: None'),
    (6, 'vmwNsxMEdgeGatewayDeleted', 'A notification generated when Edge Gateway is deleted. Action required: None'),
    (7, 'vmwNsxMEdgeGatewayReDeployed', 'A notification generated when Edge Gateway is redeployed. Action required: None'),
    (8, 'vmwNsxMEdgeVmPowerOff', 'A notification generated when NSX Edge VM is powered off. Action required: None'),
    (9, 'vmwNsxMEdgeApplianceSizeChanged', 'A notification generated when Edge appliance size has changed. Action required: None'),
    (10, 'vmwNsxMEdgeUpgrade51x', 'A notification generated when Edge Gateway is upgraded to 5.1.x. Action required: None'),
    (11, 'vmwNsxMEdgeLicenseChanged', 'A notification generated when Edge licensing changed on vCenter Server. Action required: None'),
    (12, 'vmwNsxMEdgeApplianceMoved', 'A notification generated when Edge appliance is moved in the vCenter inventory.'),
    (13, 'vmwNsxMEdgeApplianceNotFound', 'A notification generated when Edge appliance not found in the vCenter inventory. Action required: If VM is accidentally deleted, redeploy edge.'),
    (14, 'vmwNsxMEdgeVMHealthCheckMiss', 'A notification generated when Edge VM is not responding to health check. Action required: Communicaiton issues between manager and edge. Log analysis required to root cause issue.'),
    (15, 'vmwNsxMEdgeHealthCheckMiss', 'A notification generated when none of the Edge VMs are found in serving state. There is a possibility of network disruption. Action required: Commnunicaiton issues between manager and edge. Log analysis required to root cause issue.'),
    (16, 'vmwNsxMEdgeCommAgentNotConnected', 'A notification generated when Edge Communication Agent is not connected to vCenter Server. Action required: Check VSM and VC connectivity. Try registering VSM to VC'),
    (17, 'vmwNsxMApplianceWithDifferentId', 'A notification generated when Edge VM is discovered with a different vmId. Action required: None'),
    (18, 'vmwNsxMFirewallRuleModified', 'A notification generated when Edge firewall rule is modified. Action required: Revisit firewall rule and perform required updates'),
    (19, 'vmwNsxMEdgeAntiAffinityRuleViolated', 'A notification generated when powering on NSX Edge appliance violates a virtual machine anti-affinity rule. Action required: Anti affinity rules removed from cluster. Both HA VM may run on same host. Go to VC and please revisit anti affinity rules on Cluster'),
    (20, 'vmwNsxMEdgeHaEnabled', 'A notification generated when NSX Edge HighAvailability is enabled. Action required: None'),
    (21, 'vmwNsxMEdgeHaDisabled', 'A notification generated when NSX Edge HighAvailability is disabled. Action required: None'),
    (22, 'vmwNsxMEdgeGatewayRecovered', 'A notification generated when NSX Edge Gateway has recovered and now responding to health check. Action required: None'),
    (23, 'vmwNsxMEdgeVmRecovered', 'A notification generated when NSX Edge VM has recovered and now responding to health check. Actione required: None'),
    (24, 'vmwNsxMEdgeGatewayUpgraded', 'A notification generated when Edge Gateway is upgraded. Action required: None'),
    (25, 'vmwNsxMEdgeVmHlthChkDisabled', 'A notification generated when Edge VM health check is disabled on consecutive critical vix errors. Please redeploy or force sync vm to resume health check. Action required: This points to environmental issues that lead to repeated failure over vix. Log analysis needs to be done to identify root cause. Post resoving issues force sync edge vm to resume health check. Force sync and redeploy are disruptive operation.'),
    (26, 'vmwNsxMEdgePrePublishFailed', 'A notification generated when Pre Publish has failed on Edge VM. Action required: Firewall rules might be out of sync. System auto recovers but if problem persists then trigger force sync.'),
    (27, 'vmwNsxMEdgeForcedSync', 'A notification generated when Edge VM was force synced. Action required: None'),
    (28, 'vmwNsxMEdgeVmBooted', 'A notification generated when Edge VM was booted. Action required: None'),
    (29, 'vmwNsxMEdgeVmInBadState', 'A notification generated when Edge VM is in Bad State. Needs a force sync. Action required: Force sync required.'),
)
for _edgeTrapId, _edgeTrapName, _edgeTrapDesc in _vmwNsxMEdgeTraps:
    _edgeNotif = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4, 0, _edgeTrapId)).setObjects(*_vmwNsxMEdgeTrapVarBinds)
    # loadTexts is checked per call, mirroring the usual PySMI code shape.
    if mibBuilder.loadTexts: _edgeNotif.setStatus('current')
    if mibBuilder.loadTexts: _edgeNotif.setDescription(_edgeTrapDesc)
    # Bind the notification under its MIB symbol name at module level.
    globals()[_edgeTrapName] = _edgeNotif
del _edgeTrapId, _edgeTrapName, _edgeTrapDesc, _edgeNotif
vmwNsxMEdgeVmCpuUsageIncreased = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4, 0, 30)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMEdgeVmCpuUsageIncreased.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEdgeVmCpuUsageIncreased.setDescription('A notification generated when Edge VM CPU usage has increased. Action required: Spikes are normal but collect tech support logs for further analysis if high CPU sustained for longer duration.')
vmwNsxMEdgeVmMemUsageIncreased = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4, 0, 31)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMEdgeVmMemUsageIncreased.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEdgeVmMemUsageIncreased.setDescription('A notification generated when Edge VM Memory usage has increased. Action required: System recovers but collect tech support logs for further analysis.')
vmwNsxMEdgeVmProcessFailure = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4, 0, 32)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMEdgeVmProcessFailure.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEdgeVmProcessFailure.setDescription('A notification generated when Edge VM process monitor detects a process failure. Action required: System recovers but collect tech support logs for further analysis.')
vmwNsxMEdgeVmSysTimeBad = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4, 0, 33)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMEdgeVmSysTimeBad.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEdgeVmSysTimeBad.setDescription('A notification generated when Edge VM system time is bad. Action required: System recovers. Check NTP setting on hosts.')
vmwNsxMEdgeVmSysTimeSync = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4, 0, 34)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMEdgeVmSysTimeSync.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEdgeVmSysTimeSync.setDescription('A notification generated when Edge VM system time sync up happens. Action required: None')
vmwNsxMEdgeAesniCryptoEngineUp = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4, 0, 35)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMEdgeAesniCryptoEngineUp.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEdgeAesniCryptoEngineUp.setDescription('A notification generated when AESNI crypto engine is up. Action required: None')
vmwNsxMEdgeAesniCryptoEngineDown = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4, 0, 36)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMEdgeAesniCryptoEngineDown.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEdgeAesniCryptoEngineDown.setDescription('A notification generated when AESNI crypto engine is down. Action required: None')
vmwNsxMEdgeVmOom = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4, 0, 37)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMEdgeVmOom.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEdgeVmOom.setDescription('A notification generated when Edge VM is out of memory. The Edge is rebooting in 3 seconds. Action required: Collect tech support for further analysis.')
vmwNsxMEdgeFileSysRo = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4, 0, 38)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMEdgeFileSysRo.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEdgeFileSysRo.setDescription('A notification generated when Edge file system is read only. Action required: Check datastore issues, once resolved force sync is required.')
vmwNsxMEdgeHaCommDisconnected = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4, 0, 39)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMEdgeHaCommDisconnected.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEdgeHaCommDisconnected.setDescription('A notification generated when Edge HighAvailability communication channel is disconnected from peer node. Action required: None')
vmwNsxMEdgeHaSwitchOverSelf = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4, 0, 40)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMEdgeHaSwitchOverSelf.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEdgeHaSwitchOverSelf.setDescription("A notification generated when High Availability is disabled for NSX Edge. The primary NSX Edge VM has its state transitioned from ACTIVE to SELF. High Availability (HA) ensures that NSX Edge services are always available, by deploying an additional Edge VM for failover. The primary NSX Edge VM is the ACTIVE node and the secondary VM is the STANDBY node. Whenever the ACTIVE VM is unreachable on account of VM powered off or network connectivity issues, the STANDBY VM takes over the ACTIVE vm's role. In the event NSX Edge High Availability is disabled, the STANDBY VM is deleted and the ACTIVE VM continues to function with its ACTIVE state transitioned to SELF. Action required: None")
vmwNsxMEdgeHaSwitchOverActive = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4, 0, 41)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMEdgeHaSwitchOverActive.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEdgeHaSwitchOverActive.setDescription("A notification generated when High Availability switch over has happened for NSX Edge. The secondary NSX Edge VM has its state transitioned from STANDBY to ACTIVE. High Availability (HA) ensures that NSX Edge services are always available, by deploying an additional Edge VM for failover. The primary NSX Edge VM is the ACTIVE node and the secondary VM is the STANDBY node. Whenever the ACTIVE VM is unreachable on account of VM powered off or network connectivity issues, the STANDBY VM takes over the ACTIVE vm's role. Action required: None")
vmwNsxMEdgeHaSwitchOverStandby = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4, 0, 42)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMEdgeHaSwitchOverStandby.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEdgeHaSwitchOverStandby.setDescription("A notification generated when High Availability switch over has happened for NSX Edge. The primary NSX Edge VM has its state transitioned from ACTIVE to STANDBY. High Availability (HA) ensures that NSX Edge services are always available, by deploying an additional Edge VM for failover. The primary NSX Edge VM is the ACTIVE node and the secondary VM is the STANDBY node. Whenever the ACTIVE VM is unreachable on account of VM powered off or network connectivity issues, the STANDBY VM takes over the ACTIVE vm's role. When connectivity is re-established between the NSX Edge VM's, one of the VM's state is transitioned from ACTIVE to STANDBY. Action required: None")
vmwNsxMEdgeMonitorProcessFailure = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4, 0, 43)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMEdgeMonitorProcessFailure.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEdgeMonitorProcessFailure.setDescription('A notification generated when Edge process monitor detected a process failure. Action required: Collect tech support logs for further analysis.')
vmwNsxMLbVirtualServerPoolUp = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4, 0, 44)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMLbVirtualServerPoolUp.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMLbVirtualServerPoolUp.setDescription('A notification generated when LoadBalancer virtualServer/pool is up. Action required: None')
vmwNsxMLbVirtualServerPoolDown = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4, 0, 45)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMLbVirtualServerPoolDown.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMLbVirtualServerPoolDown.setDescription('A notification generated when LoadBalancer virtualServer/pool is down.')
vmwNsxMLbVirtualServerPoolWrong = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4, 0, 46)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMLbVirtualServerPoolWrong.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMLbVirtualServerPoolWrong.setDescription('A notification generated when LoadBalancer virtualServer/pool state is wrong.')
vmwNsxMLbPoolWarning = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4, 0, 47)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMLbPoolWarning.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMLbPoolWarning.setDescription('A notification generated when LoadBalancer pool changed to a warning state.')
vmwNsxMIpsecChannelUp = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4, 0, 48)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMIpsecChannelUp.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMIpsecChannelUp.setDescription('A notification generated when IPsec Channel is up. Action required: None')
vmwNsxMIpsecChannelDown = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4, 0, 49)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMIpsecChannelDown.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMIpsecChannelDown.setDescription('A notification generated when IPsec Channel is down. Action required: Collect tech support logs for further analysis.')
vmwNsxMIpsecTunnelUp = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4, 0, 50)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMIpsecTunnelUp.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMIpsecTunnelUp.setDescription('A notification generated when IPsec Tunnel is up. Action required: None')
vmwNsxMIpsecTunnelDown = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4, 0, 51)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMIpsecTunnelDown.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMIpsecTunnelDown.setDescription('A notification generated when IPsec Tunnel is down. Action required: Collect tech support logs for further analysis.')
vmwNsxMIpsecChannelUnknown = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4, 0, 52)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMIpsecChannelUnknown.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMIpsecChannelUnknown.setDescription('A notification generated when IPsec Channel status is unknown. Action required: Collect tech support logs for further analysis.')
vmwNsxMIpsecTunnelUnknown = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4, 0, 53)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMIpsecTunnelUnknown.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMIpsecTunnelUnknown.setDescription('A notification generated when IPsec Tunnel status is unknown. Action required: Collect tech support logs for further analysis.')
vmwNsxMGlobalLbMemberUp = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4, 0, 54)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMGlobalLbMemberUp.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMGlobalLbMemberUp.setDescription('A notification generated when Global Loadbalancer member status is up. Action required: None')
vmwNsxMGlobalLbMemberWarning = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4, 0, 55)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMGlobalLbMemberWarning.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMGlobalLbMemberWarning.setDescription('A notification generated when Global Loadbalancer member status is warning.')
vmwNsxMGlobalLbMemberDown = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4, 0, 56)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMGlobalLbMemberDown.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMGlobalLbMemberDown.setDescription('A notification generated when Global Loadbalancer member status is down.')
vmwNsxMGlobalLbMemberUnknown = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4, 0, 57)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMGlobalLbMemberUnknown.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMGlobalLbMemberUnknown.setDescription('A notification generated when Global Loadbalancer member status is unknown.')
vmwNsxMGlobalLbPeerUp = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4, 0, 58)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMGlobalLbPeerUp.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMGlobalLbPeerUp.setDescription('A notification generated when Global Loadbalancer peer status is up. Action required: None')
vmwNsxMGlobalLbPeerDown = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4, 0, 59)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMGlobalLbPeerDown.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMGlobalLbPeerDown.setDescription('A notification generated when Global Loadbalancer peer status is down.')
vmwNsxMDhcpServiceDisabled = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4, 0, 60)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMDhcpServiceDisabled.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMDhcpServiceDisabled.setDescription('A notification generated when DHCP Relay Service is disabled.')
vmwNsxMEdgeResourceReservationFailure = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4, 0, 61)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMEdgeResourceReservationFailure.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEdgeResourceReservationFailure.setDescription('Insufficient CPU and/or Memory Resources available on Host or Resource Pool, during resource reservation at the time of NSX Edge deployment. Resources are explicitly reserved to ensure sufficient resources are available for NSX Edge to service High Availability. User can view the available resources vs reserved resources by navigating to the page Home > Hosts and Clusters > [Cluster-name] > Monitor > Resource Reservation. Action required: After checking available resources, re-specify the resources as part of appliance configuration so that resource reservation succeeds.')
vmwNsxMEdgeSplitBrainDetected = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4, 0, 62)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMEdgeSplitBrainDetected.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEdgeSplitBrainDetected.setDescription("Split Brain detected for NSX Edge with HighAvailability. NSX Edge VM's configured for High Availability are unable to determine if the other VM is alive due to network failure. In such scenario, both the VM's think the other is not alive and take on the ACTIVE state. This may cause network disruption. Action required: User will need to check network infrastructure (virtual and physical) to look for any failures, specially on the interfaces and the path configured for HA.")
vmwNsxMEdgeSplitBrainRecovered = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4, 0, 63)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMEdgeSplitBrainRecovered.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEdgeSplitBrainRecovered.setDescription("Resolved Split Brain for NSX Edge with HighAvailability. The network path used by the NSX Edge VM's High Availability has been re-established. NSX Edge VM's are able to communicate with each other, and one of the VM has taken the STANDBY role, resolving the ACTIVE-ACTIVE split brain scenario. Action required: None")
vmwNsxMEdgeSplitBrainRecoveryAttempt = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 4, 0, 64)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMEdgeSplitBrainRecoveryAttempt.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEdgeSplitBrainRecoveryAttempt.setDescription('Attempted Split Brain resolution for NSX Edge. Split Brain recovery will be attempted on NSX Edge versions prior to 6.2.3, which are not based on BFD. Action required: None')
# --- Endpoint (Guest Introspection) notifications (OID prefix 1.3.6.1.4.1.6876.90.1.2.5.0) ---
# vmwNsxMEndpoint is the ObjectIdentity anchor; vmwNsxMEndpointPrefix (the .0 arc)
# is the trap-OID prefix under which the Endpoint NotificationTypes below are defined.
# Each notification binds the same six NSX event varbinds as the other trap groups.
vmwNsxMEndpoint = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 5))
if mibBuilder.loadTexts: vmwNsxMEndpoint.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEndpoint.setDescription('Notifications that are Endpoint related will have this OID prefix.')
vmwNsxMEndpointPrefix = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 5, 0))
if mibBuilder.loadTexts: vmwNsxMEndpointPrefix.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEndpointPrefix.setDescription("This group is actually the prefix one uses when creating vmware NSX manager specific trap OID's for Endpoint module.")
vmwNsxMEndpointThinAgentEnabled = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 5, 0, 1)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMEndpointThinAgentEnabled.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEndpointThinAgentEnabled.setDescription('A notification generated when Thin agent is enabled.')
vmwNsxMGuestIntrspctnEnabled = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 5, 0, 2)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMGuestIntrspctnEnabled.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMGuestIntrspctnEnabled.setDescription('A notification generated when Guest Introspection solution is enabled.')
vmwNsxMGuestIntrspctnIncompatibleEsx = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 5, 0, 3)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMGuestIntrspctnIncompatibleEsx.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMGuestIntrspctnIncompatibleEsx.setDescription('A notification generated when Guest Introspection solution was contacted by an incompatible version of the ESX module.')
vmwNsxMGuestIntrspctnEsxConnFailed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 5, 0, 4)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMGuestIntrspctnEsxConnFailed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMGuestIntrspctnEsxConnFailed.setDescription('A notification generated when connection between the ESX module and the Guest Introspection solution failed.')
vmwNsxMGuestIntrspctnStatusRcvFailed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 5, 0, 5)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMGuestIntrspctnStatusRcvFailed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMGuestIntrspctnStatusRcvFailed.setDescription('A notification generated when failed to receive status from Guest Introspection solution.')
vmwNsxMEsxModuleEnabled = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 5, 0, 6)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMEsxModuleEnabled.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEsxModuleEnabled.setDescription('A notification generated when ESX module is enabled.')
vmwNsxMEsxModuleUninstalled = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 5, 0, 7)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMEsxModuleUninstalled.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEsxModuleUninstalled.setDescription('A notification generated when ESX module is uninstalled.')
vmwNsxMGuestIntrspctnHstMxMssngRep = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 5, 0, 8)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMGuestIntrspctnHstMxMssngRep.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMGuestIntrspctnHstMxMssngRep.setDescription('A notification generated when Guest Introspection host MUX is missing report.')
vmwNsxMEndpointUndefined = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 5, 0, 9)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMEndpointUndefined.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEndpointUndefined.setDescription('A notification generated when Endpoint is undefined.')
vmwNsxMEam = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 6))
if mibBuilder.loadTexts: vmwNsxMEam.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEam.setDescription('Notifications that are Eam related will have this OID prefix.')
vmwNsxMEamPrefix = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 6, 0))
if mibBuilder.loadTexts: vmwNsxMEamPrefix.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEamPrefix.setDescription("This group is actually the prefix one uses when creating vmware NSX manager specific trap OID's for Eam module.")
vmwNsxMEamGenericAlarm = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 6, 0, 1)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMEamGenericAlarm.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMEamGenericAlarm.setDescription('EAM reports problems to NSX during vib/service VM install/upgrade as these traps. Action required: Use resolve API to resolve the Alarm. Frequency of traps: N times per cluster per user action, where N is number of hosts in a cluster.')
vmwNsxMFabric = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 7))
if mibBuilder.loadTexts: vmwNsxMFabric.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFabric.setDescription('Notifications that are Fabric related will have this OID prefix.')
vmwNsxMFabricPrefix = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 7, 0))
if mibBuilder.loadTexts: vmwNsxMFabricPrefix.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFabricPrefix.setDescription("This group is actually the prefix one uses when creating vmware NSX manager specific trap OID's for Fabric module.")
vmwNsxMFabricDplymntStatusChanged = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 7, 0, 1)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFabricDplymntStatusChanged.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFabricDplymntStatusChanged.setDescription('The status of a service on a cluster has changed. It can change to RED(Failure), GREEN(Success), YELLOW(in-progress). Action required: RED state would be accompanied with an EAM Alarm/Event/Trap, that indicates root cause. Use resolver API to fix it. Frequency of traps: Once per state change. State could change 2-3 times per user operation [Deploy/Undeploy/Update]')
vmwNsxMFabricDplymntUnitCreated = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 7, 0, 2)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFabricDplymntUnitCreated.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFabricDplymntUnitCreated.setDescription('NSX Manager has created the required objects for deploying a service on a cluster. This would be followed by deployment of the service on all hosts in the cluster. Action required: None Frequency: Once per cluster')
vmwNsxMFabricDplymntUnitUpdated = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 7, 0, 3)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFabricDplymntUnitUpdated.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFabricDplymntUnitUpdated.setDescription('NSX Manager has made changes in the objects required for deploying a service on a cluster. This would be followed by updation of the service on all hosts in the cluster. Action required: None Frequency of traps: Once per cluster per user operation [Update]')
vmwNsxMFabricDplymntUnitDestroyed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 7, 0, 4)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFabricDplymntUnitDestroyed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFabricDplymntUnitDestroyed.setDescription('A service has been removed from all hosts in a cluster. NSX Manager has deleted the objects for the service on the cluster. Action required: None Frequency of traps: Once per cluster')
vmwNsxMDataStoreNotCnfgrdOnHost = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 7, 0, 5)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMDataStoreNotCnfgrdOnHost.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMDataStoreNotCnfgrdOnHost.setDescription('Datastore could not be configured on host, probably its not connected. Action required: Ensure that datastore is connected to the host. Use resolve API to resolve the Alarm. Service will be deployed. Frequency of traps: Once per cluster per user operation [Deploy].')
vmwNsxMFabricDplymntInstallationFailed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 7, 0, 6)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFabricDplymntInstallationFailed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFabricDplymntInstallationFailed.setDescription('Installation of service failed, please check if ovf/vib urls are accessible, in correct format and all the properties in ovf environment have been configured in service attributes. Please check logs for details. Action required: Ensure that ovf/vib urls accessible from VC and are in correct format. Use resolve API to resolve the Alarm. Service will be deployed. Frequency of traps: Once per cluster per user operation [Deploy].')
vmwNsxMFabricAgentCreated = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 7, 0, 7)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFabricAgentCreated.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFabricAgentCreated.setDescription('The service has been successfully installed on a host. Action required: None Frequency of traps: N times per cluster, where N is number of hosts in a cluster.')
vmwNsxMFabricAgentDestroyed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 7, 0, 8)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFabricAgentDestroyed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFabricAgentDestroyed.setDescription('The service has been successfully removed from a host. Action required: None Frequency of traps: N times per cluster, where N is number of hosts in a cluster.')
vmwNsxMFabricSrvceNeedsRedplymnt = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 7, 0, 9)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFabricSrvceNeedsRedplymnt.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFabricSrvceNeedsRedplymnt.setDescription('Service will need to be redeployed as the location of the OVF / VIB bundles to be deployed has changed. Action required: Use resolve API to resolve the Alarm. Service will be redeployed. Frequency of traps: N times per NSX Manager IP change, where N is number of cluster and service combinations deployed.')
vmwNsxMUpgradeOfDplymntFailed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 7, 0, 10)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMUpgradeOfDplymntFailed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMUpgradeOfDplymntFailed.setDescription('Upgrade of deployment unit failed, please check if ovf/vib urls are accessible, in correct format and all the properties in ovf environment have been configured in service attributes. Please check logs for details. Action required: Ensure that ovf/vib urls accessible from VC and are in correct format. Use resolve API to resolve the Alarm. Service will be redeployed. Frequency of traps: Once per cluster per user operation [Upgrade]')
vmwNsxMFabricDependenciesNotInstalled = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 7, 0, 11)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFabricDependenciesNotInstalled.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFabricDependenciesNotInstalled.setDescription('The service being installed is dependent on another service that has not yet been installed. Action required: Deploy the required service on the cluster. Frequency of traps: Once per cluster per user operation [Deploy]')
vmwNsxMFabricErrorNotifSecBfrUpgrade = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 7, 0, 12)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFabricErrorNotifSecBfrUpgrade.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFabricErrorNotifSecBfrUpgrade.setDescription('Error while notifying security solution before upgrade. The solution may not be reachable/responding. Action required: Ensure that solution urls are accessible from NSX. Use resolve API to resolve the Alarm. Service will be redeployed. Frequency of traps: Once per cluster per user operation [Upgrade]')
vmwNsxMFabricErrCallbackNtRcvdUpgrade = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 7, 0, 13)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFabricErrCallbackNtRcvdUpgrade.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFabricErrCallbackNtRcvdUpgrade.setDescription('Did not receive callback from security solution for upgrade notification even after timeout. Action required: Ensure that solution urls are accessible from NSX, and NSX is reachable from the solution. Use resolve API to resolve the Alarm. Service will be redeployed. Frequency : Once per cluster per user operation [Upgrade]')
vmwNsxMFabricErrCallbackNtRcvdUninstall = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 7, 0, 14)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFabricErrCallbackNtRcvdUninstall.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFabricErrCallbackNtRcvdUninstall.setDescription('Uninstallation of service failed. Action required: Ensure that solution urls are accessible from NSX, and NSX is reachable from the solution. Use resolve API to resolve the Alarm. Service will be removed. Frequency of traps: Once per cluster per user operation [Uninstall]')
vmwNsxMFabricUninstallServiceFailed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 7, 0, 15)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFabricUninstallServiceFailed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFabricUninstallServiceFailed.setDescription('Error while notifying security solution before uninstall. Resolve to notify once again, or delete to uninstall without notification. Action required: Ensure that solution urls are accessible from NSX, and NSX is reachable from the solution. Use resolve API to resolve the Alarm. Service will be removed. Frequency of traps: Once per cluster per user operation [Uninstall]')
vmwNsxMFabricErrorNotifSecBfrUninstall = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 7, 0, 16)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFabricErrorNotifSecBfrUninstall.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFabricErrorNotifSecBfrUninstall.setDescription('Error while notifying security solution before uninstall. Resolve to notify once again, or delete to uninstall without notification. Action required: Ensure that solution urls are accessible from NSX, and NSX is reachable from the solution. Use resolve API to resolve the Alarm. Service will be removed. Frequency of traps: Once per cluster per user operation [Uninstall]')
vmwNsxMFabricServerRebootUninstall = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 7, 0, 17)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFabricServerRebootUninstall.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFabricServerRebootUninstall.setDescription('Server rebooted while security solution notification for uninstall was going on. Action required: Ensure that solution urls are accessible from NSX. Use resolve API to resolve the Alarm. Service will be uninstalled. Frequency of traps: Once per cluster per user operation [Uninstall]')
vmwNsxMFabricServerRebootUpgrade = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 7, 0, 18)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFabricServerRebootUpgrade.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFabricServerRebootUpgrade.setDescription('Server rebooted while security solution notification for upgrade was going on. Action required: Ensure that solution urls are accessible from NSX. Use resolve API to resolve the Alarm. Service will be redeployed. Frequency of traps: Once per cluster per user operation [Upgrade]')
vmwNsxMFabricConnEamFailed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 7, 0, 19)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFabricConnEamFailed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFabricConnEamFailed.setDescription('NSX Manager relies on the ESX Agent Manager service in VC for deploying/monitoring NSX vibs on ESX. The connection to this EAM service has gone down. This could be due to EAM service or VC restart/stop or an issue in the EAM service. Action required: In the NSX UI, traverse to Manage, then NSX Management Service. Verify that the status of VC connection on this page is Green. Use the VC IP to verify that EAM is UP by visiting https://<vc ip>/eam/mob. Frequency of traps: Once per switch from success to failed EAM connection')
vmwNsxMFabricConnEamRestored = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 7, 0, 20)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFabricConnEamRestored.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFabricConnEamRestored.setDescription('NSX Manager relies on the EAM service in VC for deploying/monitoring NSX vibs on ESX. The connection of NSX to this EAM service was re-established successfully. Action required: None Frequency of traps: Once per switch from failed to success EAM connection')
vmwNsxMFabricPreUninstallCleanUpFailed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 7, 0, 21)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFabricPreUninstallCleanUpFailed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFabricPreUninstallCleanUpFailed.setDescription('Pre Uninstall cleanup failed. Action required: Use resolve API to resolve the Alarm. Service will be removed. Frequency of traps: Once per cluster per user operation [Uninstall]')
vmwNsxMFabricBackingEamNotFound = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 7, 0, 22)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFabricBackingEamNotFound.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFabricBackingEamNotFound.setDescription('The backing EAM agency for this deployment could not be found. It is possible that the VC services may still be initializing. Please try to resolve the alarm to check existence of the agency. In case you have deleted the agency manually, please delete the deployment entry from NSX. Action required: Use resolve API to check existence of the agency, if backing agency exists in EAM, else delete the deployment entry from NSX. Frequency of traps: Once per cluster.')
vmwNsxMDepPlugin = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 8))
if mibBuilder.loadTexts: vmwNsxMDepPlugin.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMDepPlugin.setDescription('Notifications that are DeploymentPlugin related will have this OID prefix.')
vmwNsxMDepPluginPrefix = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 8, 0))
if mibBuilder.loadTexts: vmwNsxMDepPluginPrefix.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMDepPluginPrefix.setDescription("This group is actually the prefix one uses when creating vmware NSX manager specific trap OID's for DeploymentPlugin module.")
vmwNsxMDepPluginIpPoolExhausted = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 8, 0, 1)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMDepPluginIpPoolExhausted.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMDepPluginIpPoolExhausted.setDescription('When deploying Guest Introspection or other VM based service with static IP, NSX Manager needs to have a IP pool, for IP assignment to the VM. This pool has been exhausted, and new service VMs cannot be provisioned. Action required: Traverse to the Networking & Security page on VMWare vSphere Web Client, then go to Installation, followed by Service Deployments. Note the IP pool name for the failed service. Now traverse to NSX Managers, then go to Manage tab, followed by Grouping Objects sub-tab. Click on IP Pools, and add more Ips to the static IP pool. Use resolve API to resolve the Alarm. Service will be deployed. Frequency of traps: N times per cluster, where N is number of hosts in the cluster.')
vmwNsxMDepPluginGenericAlarm = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 8, 0, 2)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMDepPluginGenericAlarm.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMDepPluginGenericAlarm.setDescription('Deployment plugin generic alarm. Action required: Use resolve API to resolve the Alarm. Service will be deployed. Frequency of traps: N times per cluster, where N is number of hosts in the cluster.')
vmwNsxMDepPluginGenericException = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 8, 0, 3)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMDepPluginGenericException.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMDepPluginGenericException.setDescription('Deployment plugin generic exception alarm. Action required: Use resolve API to resolve the Alarm. Service will be deployed. Frequency of traps: N times per cluster, where N is number of hosts in the cluster.')
vmwNsxMDepPluginVmReboot = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 8, 0, 4)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMDepPluginVmReboot.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMDepPluginVmReboot.setDescription('VM needs to be rebooted for some changes to be made/take effect. Action required: Use resolve API to resolve the Alarm. Frequency of traps: N times per cluster, where N is number of hosts in the cluster.')
vmwNsxMMessaging = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 9))
if mibBuilder.loadTexts: vmwNsxMMessaging.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMMessaging.setDescription('Notifications that are Messaging related will have this OID prefix.')
vmwNsxMMessagingPrefix = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 9, 0))
if mibBuilder.loadTexts: vmwNsxMMessagingPrefix.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMMessagingPrefix.setDescription("This group is actually the prefix one uses when creating vmware NSX manager specific trap OID's for Messaging module.")
vmwNsxMMessagingConfigFailed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 9, 0, 1)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMMessagingConfigFailed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMMessagingConfigFailed.setDescription('A notification generated when host messaging configuration failed.')
vmwNsxMMessagingReconfigFailed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 9, 0, 2)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMMessagingReconfigFailed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMMessagingReconfigFailed.setDescription('A notification generated when host messaging connection reconfiguration failed.')
vmwNsxMMessagingConfigFailedNotifSkip = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 9, 0, 3)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMMessagingConfigFailedNotifSkip.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMMessagingConfigFailedNotifSkip.setDescription('A notification generated when host messaging configuration failed and notifications were skipped.')
vmwNsxMMessagingInfraUp = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 9, 0, 4)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMMessagingInfraUp.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMMessagingInfraUp.setDescription('Manager runs a heartbeat with all hosts it manages. Missing heartbeat responses from a host indicate a communication issue between manager and the host. Such instances are indicated by event code 391002. When the communication is restored after such an instance, it is indicated by this event/trap. Action required: Refer to KB article https://kb.vmware.com/selfservice/microsites/search.do?language=en_US&cmd=displayKC&externalId=2133897 Frequency of traps: Will be seen within 3 minutes of communication being restored between manager and a host. URL: https://kb.vmware.com/selfservice/microsites/search.do?language=en_US&cmd=displayKC&externalId=2133897')
vmwNsxMMessagingInfraDown = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 9, 0, 5)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMMessagingInfraDown.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMMessagingInfraDown.setDescription('Manager runs a heartbeat with all hosts it manages. Missing heartbeat responses from a host indicate a communication issue between manager and the host. In the case of such a communication issue, this trap will be sent. Action required: Refer to KB article https://kb.vmware.com/selfservice/microsites/search.do?language=en_US&cmd=displayKC&externalId=2133897 Frequency of traps: Will be seen within 6 minutes of a communication failure between manager and a host. URL: https://kb.vmware.com/selfservice/microsites/search.do?language=en_US&cmd=displayKC&externalId=2133897')
vmwNsxMMessagingDisabled = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 9, 0, 6)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMMessagingDisabled.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMMessagingDisabled.setDescription("A messaging client such as a Host, an Edge appliance or a USVM appliance is expected to change its password within 2 hours of being prepped or deployed. If the password isn't changed in this duration, the messaging account for the client is disabled. Action required: This event will indicate communication issue between the manager and the client. Verify if the client is running. If running, in case of a Host, re-sync messaging. In case of an Edge or a USVM, redeploy. Frequency of traps: Will be seen 2 hours after prep, host re-sync or deployment of appliance. URL: https://kb.vmware.com/selfservice/microsites/search.do?language=en_US&cmd=displayKC&externalId=2133897")
# --- ServiceComposer subtree (...6876.90.1.2.10): auto-generated pysnmp MIB
# objects. The ObjectIdentity pair defines the subtree root and the 0-branch
# trap prefix; each NotificationType is one out-of-sync/deletion trap carrying
# the standard six NSX event varbinds. Code lines are generated — edit the MIB
# source, not this file.
vmwNsxMServiceComposer = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 10))
if mibBuilder.loadTexts: vmwNsxMServiceComposer.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMServiceComposer.setDescription('Notifications that are ServiceComposer related will have this OID prefix.')
vmwNsxMServiceComposerPrefix = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 10, 0))
if mibBuilder.loadTexts: vmwNsxMServiceComposerPrefix.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMServiceComposerPrefix.setDescription("This group is actually the prefix one uses when creating vmware NSX manager specific trap OID's for ServiceComposer module.")
# .1: policy rule enforcement failed (alarm; resolve via UI/API).
vmwNsxMServiceComposerPolicyOutOfSync = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 10, 0, 1)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMServiceComposerPolicyOutOfSync.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMServiceComposerPolicyOutOfSync.setDescription("Service Composer encountered an error while attempting to enforce rules on this Policy. Action required: Administrator needs to check the rules on the given Policy for any errors, as reported in the message. After fixing the rules in the Policy, user would need to resolve the alarm to bring this Policy back in sync. Policy's alarm can either be resolved from NSX Manager Service Composer UI or by using alarms API. Frequency of traps: This trap is generated only once, if an error is encountered while enforcing the Policy.")
# .2: policy deleted because its backing internal SecurityGroup was deleted.
vmwNsxMServiceComposerPolicyDeleted = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 10, 0, 2)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMServiceComposerPolicyDeleted.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMServiceComposerPolicyDeleted.setDescription('A Policy got deleted as a result of the internal SecurityGroup, over which the Policy was created, got deleted. Frequency of traps: This event is generated once every time any internal SecurityGroup, that is being consumed by a policy, gets deleted.')
# .3/.4/.5: per-rule-category (Firewall / Network Introspection / Guest
# Introspection) enforcement failures for a Policy.
vmwNsxMServiceComposerFirewallPolicyOutOfSync = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 10, 0, 3)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMServiceComposerFirewallPolicyOutOfSync.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMServiceComposerFirewallPolicyOutOfSync.setDescription("Service Composer encountered an error while attempting to enforce Firewall rules on this Policy. Firewall related changes on this Policy will not take effect, until this alarm is resolved. Action required: Administrator needs to check the rules on the given Policy for any errors, as reported in the message. After fixing the rules in the Policy, user would need to resolve the alarm to bring this Policy back in sync. Policy's alarm can either be resolved from NSX Manager Service Composer UI or by using alarms API. Frequency of traps: This trap is generated only once, if an error is encountered while enforcing the Policy.")
vmwNsxMServiceComposerNetworkPolicyOutOfSync = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 10, 0, 4)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMServiceComposerNetworkPolicyOutOfSync.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMServiceComposerNetworkPolicyOutOfSync.setDescription("Service Composer encountered an error while attempting to enforce Network Introspection rules on this Policy. Network Introspection related changes on this Policy will not take effect, until this alarm is resolved. Action required: Administrator needs to check the rules on the given Policy for any errors, as reported in the message. After fixing the rules in the Policy, user would need to resolve the alarm to bring this Policy back in sync. Policy's alarm can either be resolved from NSX Manager Service Composer UI or by using alarms API. Frequency of traps: This trap is generated only once, if an error is encountered while enforcing the Policy.")
vmwNsxMServiceComposerGuestPolicyOutOfSync = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 10, 0, 5)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMServiceComposerGuestPolicyOutOfSync.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMServiceComposerGuestPolicyOutOfSync.setDescription("Service Composer encountered an error while attempting to enforce Guest Introspection rules on this Policy. Guest Introspection related changes on this Policy will not take effect, until this alarm is resolved. Action required: Administrator needs to check the rules on the given Policy for any errors, as reported in the message. After fixing the rules in the Policy, user would need to resolve the alarm to bring this Policy back in sync. Policy's alarm can either be resolved from NSX Manager Service Composer UI or by using alarms API. Frequency of traps: This trap is generated only once, if an error is encountered while enforcing the Policy.")
# .6 - .11: Service-Composer-wide out-of-sync conditions (general, reboot,
# draft rollback, section deletion, precedence change, draft-setting init).
vmwNsxMServiceComposerOutOfSync = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 10, 0, 6)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMServiceComposerOutOfSync.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMServiceComposerOutOfSync.setDescription('Service Composer encountered an error synchronizing Policies. Any changes on Service Composer will not be pushed to Firewall/Network Introspection Services, until this alarm is resolved. Action required: Administrator needs to check Policies and/or Firewall sections for any errors, as reported in the message. After fixing the errors, user would need to resolve the alarm to bring Service Composer back in sync. Alarm can either be resolved from NSX Manager Service Composer UI or by using alarms API. Frequency of traps: This trap is generated only once, whenever an error is encountered.')
vmwNsxMServiceComposerOutOfSyncRebootFailure = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 10, 0, 7)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMServiceComposerOutOfSyncRebootFailure.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMServiceComposerOutOfSyncRebootFailure.setDescription('Service Composer encountered an error while synchronizing Policies on reboot. Action required: Administrator needs to check Policies and/or Firewall config for any errors, as reported in the message. After fixing the errors, user would need to resolve the alarm to bring Service Composer back in sync. Alarm can either be resolved from NSX Manager Service Composer UI or by using alarms API. Frequency of traps: This trap is generated only once on NSX Manager reboot, if an error is encountered.')
vmwNsxMServiceComposerOutOfSyncDraftRollback = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 10, 0, 8)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMServiceComposerOutOfSyncDraftRollback.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMServiceComposerOutOfSyncDraftRollback.setDescription('Service Composer went out of sync due to rollback of drafts from Firewall. Any changes on Service Composer will not be pushed to Firewall/Network Introspection Services, until this alarm is resolved. Action required: Administrator needs to resolve the alarm to bring Service Composer back in sync. Alarm can either be resolved from NSX Manager Service Composer UI or by using alarms API. Frequency of traps: This trap is generated only once, whenever Firewall config is reverted to an older version of drafts.')
vmwNsxMServiceComposerOutOfSyncSectionDeletionFailure = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 10, 0, 9)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMServiceComposerOutOfSyncSectionDeletionFailure.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMServiceComposerOutOfSyncSectionDeletionFailure.setDescription("Service Composer encountered an error while deleting the section corresponding to the Policy. This generally happens if third party(NetX) service's Manager is not reachable. Action required: Administrator needs to check connectivity with third party(NetX) service's Manager. Once the connectivity is restored, user would need to resolve the alarm. Alarm can either be resolved from Service Composer UI or by using alarms API. Frequency of traps: This trap is generated only once if a failure is encountered while deleting a Policy's section on Policy deletion.")
vmwNsxMServiceComposerOutOfSyncPrecedenceChangeFailure = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 10, 0, 10)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMServiceComposerOutOfSyncPrecedenceChangeFailure.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMServiceComposerOutOfSyncPrecedenceChangeFailure.setDescription("Service Composer encountered an error reordering sections to reflect Policy's precedence change. This generally happens if there are Alarms on any other Policy. Action required: Administrator needs to check Policies and/or Firewall sections for any errors, as reported in the message. After fixing the errors, user would need to resolve the alarm. Alarm can either be resolved from NSX Manager Service Composer UI or by using alarms API. Frequency of traps: This trap is generated only once if a failure is encountered while reordering section to reflect precedence change.")
vmwNsxMServiceComposerOutOfSyncDraftSettingFailure = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 10, 0, 11)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMServiceComposerOutOfSyncDraftSettingFailure.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMServiceComposerOutOfSyncDraftSettingFailure.setDescription('Service Composer encountered an error while initializing auto save drafts setting. Action required: Administrator needs to check Policies and/or Firewall sections for any errors, as reported in the message. After fixing the errors, user would need to resolve the alarm. Alarm can either be resolved from NSX Manager Service Composer UI or by using alarms API. Frequency of traps: This trap is generated only once if a failure is encountered while initializing auto save drafts setting.')
# --- SvmOperations subtree (...6876.90.1.2.11): auto-generated pysnmp MIB
# objects for Service-VM lifecycle alarms (inconsistent SVM, restart needed,
# agent unavailable). Generated code — regenerate from the MIB, do not edit.
vmwNsxMSvmOperations = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 11))
if mibBuilder.loadTexts: vmwNsxMSvmOperations.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMSvmOperations.setDescription('Notifications that are SvmOperations related will have this OID prefix.')
vmwNsxMSvmOperationsPrefix = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 11, 0))
if mibBuilder.loadTexts: vmwNsxMSvmOperationsPrefix.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMSvmOperationsPrefix.setDescription("This group is actually the prefix one uses when creating vmware NSX manager specific trap OID's for SvmOperations module.")
# .1: deployed Service VM found in inconsistent state; resolving deletes it.
vmwNsxMInconsistentSvmAlarm = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 11, 0, 1)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMInconsistentSvmAlarm.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMInconsistentSvmAlarm.setDescription('Service VMs are deployed per ESX host, to provide functionality like guest introspection and McAfee/Trend virus checking in VMs on the host. An issue is detected with the state of the deployed Service VM. Follow instructions in http://kb.vmware.com/kb/2125482 to analyze the logs further. Warning: Resolving this alarm will delete the VM. After deletion you will see a different alarm saying VM is deleted. If you resolve same, it will reinstall the VM. If redeployment of the VM does not fix the original issue, the original alarm will be added back immediately. Action required: Use resolve API to resolve the Alarm. Frequency of traps: Once per host.')
# .2: Service VM issue where resolving the alarm restarts the VM.
vmwNsxMSvmRestartAlarm = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 11, 0, 2)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMSvmRestartAlarm.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMSvmRestartAlarm.setDescription('Service VMs are deployed per ESX host, to provide functionality like guest introspection and McAfee/Trend virus checking in VMs on the host. An issue is detected with the state of the deployed Service VM. Follow instructions in http://kb.vmware.com/kb/2125482 to analyze the logs further. Warning: Resolving this alarm will restart the VM. If the root cause here is not solved, the same alarm will be added back immediately. Action required: Use resolve API to resolve the Alarm. Frequency of traps: Once per host.')
# .3: failure marking an agent as available.
vmwNsxMSvmAgentUnavailable = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 11, 0, 3)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMSvmAgentUnavailable.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMSvmAgentUnavailable.setDescription('An issue is detected while marking agent as available. Kindly check the logs. Resolving this alarm will attempt to mark the agent as available. Action required: Use resolve API to resolve the Alarm. Frequency of traps: Once per host.')
# --- Translation subtree (...6876.90.1.2.12): auto-generated pysnmp MIB
# objects for SecurityGroup membership change traps. Generated code — do not
# hand-edit OIDs or strings.
vmwNsxMTranslation = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 12))
if mibBuilder.loadTexts: vmwNsxMTranslation.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMTranslation.setDescription('Notifications that are Translation related will have this OID prefix.')
vmwNsxMTranslationPrefix = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 12, 0))
if mibBuilder.loadTexts: vmwNsxMTranslationPrefix.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMTranslationPrefix.setDescription("This group is actually the prefix one uses when creating vmware NSX manager specific trap OID's for Translation module.")
# .1: VM added to a SecurityGroup.
vmwNsxMVmAddedToSg = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 12, 0, 1)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMVmAddedToSg.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMVmAddedToSg.setDescription('A VM has got added to the SecurityGroup. Frequency of traps: Once for every VM getting added to any SecurityGroup.')
# .2: VM removed from a SecurityGroup.
vmwNsxMVmRemovedFromSg = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 12, 0, 2)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMVmRemovedFromSg.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMVmRemovedFromSg.setDescription('A VM has got removed from the SecurityGroup. Frequency of traps: Once for every VM getting removed from any SecurityGroup.')
# --- UniversalSync subtree (...6876.90.1.2.13): auto-generated pysnmp MIB
# objects for cross-vCenter universal-object sync failures on secondary NSX
# managers. Generated code — regenerate from the MIB instead of editing.
vmwNsxMUniversalSync = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 13))
if mibBuilder.loadTexts: vmwNsxMUniversalSync.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMUniversalSync.setDescription('Notifications that are UniversalSync related will have this OID prefix.')
vmwNsxMUniversalSyncPrefix = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 13, 0))
if mibBuilder.loadTexts: vmwNsxMUniversalSyncPrefix.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMUniversalSyncPrefix.setDescription("This group is actually the prefix one uses when creating vmware NSX manager specific trap OID's for UniversalSync module.")
# .1: full universal sync failed on a secondary manager.
vmwNsxMFullUniversalSyncFailed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 13, 0, 1)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMFullUniversalSyncFailed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMFullUniversalSyncFailed.setDescription("A failure is encountered when doing full sync of universal objects on a secondary NSX manager. IP address of the secondary NSX manager is present in event's message variable. Action required: Kindly check NSX manager logs on the secondary NSX manager on which the full sync has failed. Frequency of traps: This trap is generated once per NSX manager on which full sync failure is seen.")
# .2: secondary manager unreachable from primary.
vmwNsxMSecondaryDown = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 13, 0, 2)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMSecondaryDown.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMSecondaryDown.setDescription("Secondary NSX manager is unreachable. Action required: Kindly check if NSX manager is running and is reachable from primary NSX manager. IP address of the secondary NSX manager is present in event's message variable. Frequency of traps: This trap is generated once per NSX manager for which connection issue is seen.")
# .3: per-object universal sync failure on a secondary manager.
vmwNsxMUniversalSyncFailedForEntity = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 13, 0, 3)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMUniversalSyncFailedForEntity.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMUniversalSyncFailedForEntity.setDescription("A failure is encountered when doing sync of universal object on a secondary NSX manager. IP address of the secondary NSX manager is present in event's message variable. Action required: Kindly check NSX manager logs on the secondary NSX manager on which the sync has failed. Frequency of traps: This trap is generated once per universal object on a NSX manager on which sync failure is seen.")
# --- AsyncRest subtree (...6876.90.1.2.14): auto-generated pysnmp MIB objects;
# single trap announcing the manager server is up. Generated code.
vmwNsxMAsyncRest = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 14))
if mibBuilder.loadTexts: vmwNsxMAsyncRest.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMAsyncRest.setDescription('Notifications that are AsyncRest related will have this OID prefix.')
vmwNsxMAsyncRestPrefix = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 14, 0))
if mibBuilder.loadTexts: vmwNsxMAsyncRestPrefix.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMAsyncRestPrefix.setDescription("This group is actually the prefix one uses when creating vmware NSX manager specific trap OID's for AsyncRest module.")
# .1: NSX manager server up-and-running notification.
vmwNsxMServerUp = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 14, 0, 1)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMServerUp.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMServerUp.setDescription('Denotes that NSX manager server is up and in running state, Informs clients of NSX Manager of the current state. Action required: None Frequency of traps: Once for every query')
# --- ExtensionRegistration subtree (...6876.90.1.2.15): auto-generated pysnmp
# MIB objects for vCenter extension registration traps. Generated code.
vmwNsxMExtensionRegistration = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 15))
if mibBuilder.loadTexts: vmwNsxMExtensionRegistration.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMExtensionRegistration.setDescription('Notifications that are ExtensionRegistration related will have this OID prefix.')
vmwNsxMExtensionRegistrationPrefix = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 15, 0))
if mibBuilder.loadTexts: vmwNsxMExtensionRegistrationPrefix.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMExtensionRegistrationPrefix.setDescription("This group is actually the prefix one uses when creating vmware NSX manager specific trap OID's for ExtensionRegistration module.")
# .1: first-time registration of this manager as a vCenter extension.
vmwNsxMExtensionRegistered = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 15, 0, 1)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMExtensionRegistered.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMExtensionRegistered.setDescription('Registers NSX manager as a vCenter extenstion. This is applicable when no other NSX Manager is registered with vCenter and the current NSX manager is the one registering with vCenter. Action required: None Frequency of traps: Only once when the extension is registered for the very first time.')
# .2: registration overwritten by a different NSX manager.
vmwNsxMExtensionUpdated = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 15, 0, 2)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMExtensionUpdated.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMExtensionUpdated.setDescription('Updates the vCenter extension registration with the new NSX Manager. This is applicable when there already exists another NSX manager that is registered as a vCenter extension and the current one overwrites it. Action required: None Frequency of traps: Every time a NSX Manager registers as a vCenter extension when there already exists another NSX manager registered with vCenter')
# --- Dlp subtree (...6876.90.1.2.16): auto-generated pysnmp MIB objects for
# NSX Data Security scan start/end traps. Generated code.
vmwNsxMDlp = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 16))
if mibBuilder.loadTexts: vmwNsxMDlp.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMDlp.setDescription('Notifications that are Dlp related will have this OID prefix.')
vmwNsxMDlpPrefix = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 16, 0))
if mibBuilder.loadTexts: vmwNsxMDlpPrefix.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMDlpPrefix.setDescription("This group is actually the prefix one uses when creating vmware NSX manager specific trap OID's for Dlp module.")
# .1: data security scan started on a VM.
vmwNsxMDataSecScanStarted = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 16, 0, 1)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMDataSecScanStarted.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMDataSecScanStarted.setDescription('A notification generated when NSX Data Security scan started on VirtualMachine.')
# .2: data security scan ended on a VM.
vmwNsxMDataSecScanEnded = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 16, 0, 2)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMDataSecScanEnded.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMDataSecScanEnded.setDescription('A notification generated when NSX Data Security scan ended on VirtualMachine.')
# --- SamSystem subtree (...6876.90.1.2.17): auto-generated pysnmp MIB objects
# for Service Activity Monitoring state/flow traps. Generated code.
vmwNsxMSamSystem = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 17))
if mibBuilder.loadTexts: vmwNsxMSamSystem.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMSamSystem.setDescription('Notifications that are SamSystem related will have this OID prefix.')
vmwNsxMSamSystemPrefix = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 17, 0))
if mibBuilder.loadTexts: vmwNsxMSamSystemPrefix.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMSamSystemPrefix.setDescription("This group is actually the prefix one uses when creating vmware NSX manager specific trap OID's for SamSystem module.")
# .1/.2: SAM data collection toggled on/off.
vmwNsxMSamDataCollectionEnabled = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 17, 0, 1)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMSamDataCollectionEnabled.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMSamDataCollectionEnabled.setDescription('Service Activity Monitoring will start collecting data. Action required: None Frequency of traps: Event is triggered when SAM data collection state is toggled.')
vmwNsxMSamDataCollectionDisabled = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 17, 0, 2)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMSamDataCollectionDisabled.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMSamDataCollectionDisabled.setDescription('Service Activity Monitoring will stop collecting data. Action required: SAM data collection can be enabled to start collectiing data. Frequency of traps: Event is triggered when SAM data collection state is toggled')
# .3/.4: SAM data flow from USVM stopped / resumed.
vmwNsxMSamDataStoppedFlowing = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 17, 0, 3)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMSamDataStoppedFlowing.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMSamDataStoppedFlowing.setDescription('Service Activity Monitoring data stopped flowing from USVM Action required: Check the following - USVM log to see if heartbeats are recieved and sent - is the USVM running - is the Mux - USVM connection healthy - is the USVM - RMQ connection healthy - does the VM have endpoint driver installed Frequency of traps: Event is triggered when NSX Manager does not receives SAM data from USVM')
vmwNsxMSamDataResumedFlowing = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 17, 0, 4)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMSamDataResumedFlowing.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMSamDataResumedFlowing.setDescription('Service Activity Monitoring data resumes flowing from USVM Action required: None Frequency of traps: Event is triggered when SAM data is received from USVM.')
# --- Usvm subtree (...6876.90.1.2.18): auto-generated pysnmp MIB objects for
# USVM heartbeat/hello traps. Generated code — do not hand-edit.
vmwNsxMUsvm = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 18))
if mibBuilder.loadTexts: vmwNsxMUsvm.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMUsvm.setDescription('Notifications that are Usvm related will have this OID prefix.')
vmwNsxMUsvmPrefix = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 18, 0))
if mibBuilder.loadTexts: vmwNsxMUsvmPrefix.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMUsvmPrefix.setDescription("This group is actually the prefix one uses when creating vmware NSX manager specific trap OID's for Usvm module.")
# .1/.2: USVM heartbeats to management plane stopped / resumed.
vmwNsxMUsvmHeartbeatStopped = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 18, 0, 1)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMUsvmHeartbeatStopped.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMUsvmHeartbeatStopped.setDescription("USVM stopped sending heartbeats to management plane. Action required: Connection to NSX Manager was lost. Check why the Manager didn't send a heartbeat. Frequency of traps: Event is triggered when NSX Manager does not receives heartbeats from USVM")
vmwNsxMUsvmHeartbeatResumed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 18, 0, 2)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMUsvmHeartbeatResumed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMUsvmHeartbeatResumed.setDescription('USVM will start sending heartbeats to management plane. Action required: None Frequency of traps: Event is triggered when NSX Manager receives heartbeats from USVM')
# .3: Epsec Mux received HELLO from USVM during connection establishment.
vmwNsxMUsvmReceivedHello = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 18, 0, 3)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMUsvmReceivedHello.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMUsvmReceivedHello.setDescription('USVM sent a HELLO message to Mux Action: None Frequency of traps: Event is triggered when Epsec Mux receives HELLO message from USVM during initial connection establishement.')
# --- VsmCore notification subtree (OID ...6876.90.1.2.19) ---
# Subtree identity, the ".0" trap-prefix node, and three core-manager
# NOTIFICATION-TYPEs (upgrade success, restore success, duplicate IP).
# All traps carry the same six event varbinds. Generated by pysmi.
vmwNsxMVsmCore = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 19))
if mibBuilder.loadTexts: vmwNsxMVsmCore.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMVsmCore.setDescription('Notifications that are VsmCore related will have this OID prefix.')
vmwNsxMVsmCorePrefix = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 19, 0))
if mibBuilder.loadTexts: vmwNsxMVsmCorePrefix.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMVsmCorePrefix.setDescription("This group is actually the prefix one uses when creating vmware NSX manager specific trap OID's for VsmCore module.")
vmwNsxMUpgradeSuccess = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 19, 0, 1)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMUpgradeSuccess.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMUpgradeSuccess.setDescription('A notification generated when NSX Manager upgraded successfully.')
vmwNsxMRestoreSuccess = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 19, 0, 2)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMRestoreSuccess.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMRestoreSuccess.setDescription('A notification generated when NSX Manager restored successfully.')
vmwNsxMDuplicateIp = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 19, 0, 3)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMDuplicateIp.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMDuplicateIp.setDescription('The NSX Manager IP has been assigned to another machine Action: None Frequency: This is triggered whenever NSX Manager detects that its IP address is being used by another machine in the same network')
# --- VXLAN notification subtree (OID ...6876.90.1.2.20) ---
# Subtree identity, the ".0" trap-prefix node, and 26 VXLAN-related
# NOTIFICATION-TYPEs (.0.1 through .0.26) covering logical-switch config,
# VTEP/vmknic state, controller health, backing-portgroup state, and
# VXLAN host configuration. Every trap carries the same six event varbinds
# (event code/timestamp/message/severity/component plus UUID).
# Generated by pysmi; OID tuples and description strings come verbatim
# from the MIB's DESCRIPTION clauses (including their original typos) and
# must not be edited by hand.
vmwNsxMVxlan = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 20))
if mibBuilder.loadTexts: vmwNsxMVxlan.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMVxlan.setDescription('Notifications that are Vxlan related will have this OID prefix.')
vmwNsxMVxlanPrefix = ObjectIdentity((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 20, 0))
if mibBuilder.loadTexts: vmwNsxMVxlanPrefix.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMVxlanPrefix.setDescription("This group is actually the prefix one uses when creating vmware NSX manager specific trap OID's for Vxlan module.")
vmwNsxMVxlanLogicalSwitchImproperlyCnfg = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 20, 0, 1)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMVxlanLogicalSwitchImproperlyCnfg.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMVxlanLogicalSwitchImproperlyCnfg.setDescription('This event is triggered if one or more distributed virtual port groups backing a certain Logical Switch were modified and/or removed. Or if migration of Control plane mode for a Logical Switch/Transport Zone failed. Action required: (1) If the event was triggered due to deletion/modification of backing distributed virtual port groups, then the error will be visible on Logical Switch UI page. Resolve from there will try and create missing distributed virtual port groups for the Logical Switch. (2) If event was triggered due to failure of Control plan mode migration, redo the migration for that Logical Switch or Transport Zone. Frequency of traps: Event is triggered due to user actions as explained in description. Affects: Logical Switch network traffic.')
vmwNsxMVxlanLogicalSwitchProperlyCnfg = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 20, 0, 2)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMVxlanLogicalSwitchProperlyCnfg.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMVxlanLogicalSwitchProperlyCnfg.setDescription('Logical Switch status has been marked good, most probably as result of resolving any errors on it. Action required: None Frequency of traps: Event is triggered when user resolves the Logical Switch error and as a result missing backing distributed virtual port groups are recreated.')
vmwNsxMVxlanInitFailed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 20, 0, 3)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMVxlanInitFailed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMVxlanInitFailed.setDescription("Failed to configure vmknic as a VTEP, VXLAN traffic through this interface will be dropped until this is resolved. Action required: Check the host's vmkernel.log for more details. Frequency of traps: Every time a VTEP vmknic tries to connect to it's Distributed Virtual Port. Affects: VXLAN traffic on the affected Host.")
vmwNsxMVxlanPortInitFailed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 20, 0, 4)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMVxlanPortInitFailed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMVxlanPortInitFailed.setDescription("Failed to configure VXLAN on the Distributed Virtual Port, the port will be disconnected. Action required: Check the host's vmkernel.log for more details. Frequency of traps: Every time a VXLAN vNic tries to connect to it's Distributed Virtual Port on the host. Affects: VXLAN traffic on the affected Host.")
vmwNsxMVxlanInstanceDoesNotExist = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 20, 0, 5)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMVxlanInstanceDoesNotExist.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMVxlanInstanceDoesNotExist.setDescription("VXLAN configuration was received for a Distributed Virtual Port, but the host has not yet enabled VXLAN on the vSphere Distributed Switch. VXLAN ports on affected Host will fail to connect until resolved. Action required: See KB 2107951 (https://kb.vmware.com/selfservice/microsites/search.do?cmd=displayKC&docType=kc&externalId=2107951&sliceId=1&docTypeID=DT_KB_1_1&dialogID=40732862&stateId=0%200%2040754197) Frequency of traps: Every time any VXLAN related port (vNic or vmknic) tries to connect to it's Distributed Virtual Port on the host. Affects: VXLAN Traffic on that Host.")
vmwNsxMVxlanLogicalSwitchWrkngImproperly = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 20, 0, 6)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMVxlanLogicalSwitchWrkngImproperly.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMVxlanLogicalSwitchWrkngImproperly.setDescription("VTEP interface was unable to join the specified multicast address, the VTEP will be unable to receive some traffic from other hosts until this is resolved. The host will periodically retry joining the group until it is successful. Action required: Check the host's vmkernel.log for more details. Frequency of traps: NSX retries joining failed mcast groups every 5 seconds. Affects: Logical Switch associated with problem VTEP interface won't work properly.")
vmwNsxMVxlanTransportZoneIncorrectlyWrkng = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 20, 0, 7)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMVxlanTransportZoneIncorrectlyWrkng.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMVxlanTransportZoneIncorrectlyWrkng.setDescription('The IP address of a VTEP vmknic has changed. Action required: None. Frequency of traps: Every time a VTEP IP changes')
vmwNsxMVxlanTransportZoneNotUsed = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 20, 0, 8)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMVxlanTransportZoneNotUsed.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMVxlanTransportZoneNotUsed.setDescription("VTEP vmknic does not have a valid IP address assigned, all VXLAN traffic through this vmknic will be dropped. Action required: Verify the IP configuration for the interface, and the DHCP server if DHCP is used. Frequency of traps: Once per VTEP loosing it's IP address.")
vmwNsxMVxlanOverlayClassMissingOnDvs = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 20, 0, 9)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMVxlanOverlayClassMissingOnDvs.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMVxlanOverlayClassMissingOnDvs.setDescription('NSX packages where not installed prior to DVS configuration for VXLAN. All VXLAN ports will fail to connect until resolved. Action required: See KB 2107951 https://kb.vmware.com/selfservice/microsites/search.do?cmd=displayKC&docType=kc&externalId=2107951&sliceId=1&docTypeID=DT_KB_1_1&dialogID=40732862&stateId=0%200%2040754197 Frequency of traps: Once per setting of the com.vmware.netoverlay.layer0=vxlan opaque property or whenver the host is configured for vxlan or Host reconnects to VCEnter and host has some problem. Affects: VXLAN Traffic for that Host will be affected.')
vmwNsxMVxlanControllerRemoved = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 20, 0, 10)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMVxlanControllerRemoved.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMVxlanControllerRemoved.setDescription('A notification generated when VXLAN Controller has been removed due to the connection cant be built, please check controller IP configuration and deploy again.')
vmwNsxMVxlanControllerConnProblem = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 20, 0, 11)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMVxlanControllerConnProblem.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMVxlanControllerConnProblem.setDescription('NSX manager detected the connection between two controller nodes is broken. Action required: It is a warning event, users need to check the controller cluster for the further steps. Check following KB 2127655 https://kb.vmware.com/selfservice/microsites/search.do?cmd=displayKC&docType=kc&externalId=2127655&sliceId=1&docTypeID=DT_KB_1_1&dialogID=40732913&stateId=0%200%2040754965 to see if issue matches. Frequency of traps: Whenever the controller reports the issue. Affects: Networking might get affected.')
vmwNsxMVxlanControllerInactive = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 20, 0, 12)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMVxlanControllerInactive.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMVxlanControllerInactive.setDescription("Host Certification information couldn't be sent to NSX Controllers. Action required: Ensure that NSX Controller cluster is in healthy state before preparing a new Host. Invoke Controller Sync API to try and rectify this error. Frequency of traps: When a new host is prepared for NSX networking. Affects: Newly prepared Host. Communication channel between Host and NSX Controllers might have issues.")
vmwNsxMVxlanControllerActive = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 20, 0, 13)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMVxlanControllerActive.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMVxlanControllerActive.setDescription('A notification generated when Controller cluster state is now active. Controller Synchronization job is in progress. Frequency of traps: Controller cluster becomes active again from a previous inactive state. Action required: User doesnt have to take any corrective action. NSX will auto-sync the controllers.')
vmwNsxMVxlanVmknicMissingOrDeleted = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 20, 0, 14)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMVxlanVmknicMissingOrDeleted.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMVxlanVmknicMissingOrDeleted.setDescription('VXLAN vmknic is missing or deleted from host. Action required: Issue can be resolved from Logical Network Preparation - VXLAN Transport UI section. Clicking on resolve will try to rectify the issue. Frequency of traps: First time NSX Manager finds that VXLAN vmknic is missing or deleted from Host. Affects: VXLAN Traffic to/from the mentioned Host will be affected.')
vmwNsxMVxlanInfo = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 20, 0, 15)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMVxlanInfo.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMVxlanInfo.setDescription('NSX Manager will raise this event when connection between either of the following component is established/re-established (i) connection between NSX Manager and Host Firewall agent. (ii) connection between NSX Manager and Control Plane Agent. (iii) connection between Control Plane Agent to Controllers. Action required: None Frequency of traps: NSX Manager will raise this event when connection between either of the following component is established/re-established (i) connection between NSX Manager and Host Firewall agent. (ii) connection between NSX Manager and Control Plane Agent (iii) connection between Control Plane Agent to Controllers.')
vmwNsxMVxlanVmknicPortGrpMissing = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 20, 0, 16)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMVxlanVmknicPortGrpMissing.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMVxlanVmknicPortGrpMissing.setDescription('NSX manager detected one vxlan vmknic is missing on VC. Action required: Check the host, if that vmknic is deleted, click on the resolve button on UI, or call the remediate API (POST /api/2.0/vdn/config/host/{hostId}/vxlan/vteps?action=remediate) to recreate the vxlan vmknic. Frequency of traps: First time when vxlan vmknic is detected missing (manually deleted by user or inventory report the incorrect information) Affects: The VXLAN traffic on that host may be interrupted.')
vmwNsxMVxlanVmknicPortGrpAppears = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 20, 0, 17)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMVxlanVmknicPortGrpAppears.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMVxlanVmknicPortGrpAppears.setDescription('NSX manager detected one vxlan vmknic that was marked as missing has now reappeared on VC. Action required: None Frequency of traps: When that missing vmknic re-appears again. Affects: The VXLAN traffic on that host may be resumed.')
vmwNsxMVxlanConnDown = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 20, 0, 18)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMVxlanConnDown.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMVxlanConnDown.setDescription('This event is triggered when either of the following connections are detected down by NSX Manager: (i) connection between NSX Manager and Host Firewall agent. (ii) connection between NSX Manager and Control Plane Agent. (iii) connection between Control Plane Agent to Controllers. Action required: (i) If NSX Manager to Host Firewall Agent connection is down, check NSX Manager and Firewall Agent logs to get error details. You can try Fabric Synchronize API to try and retificy this issue. (ii) If NSX Manager to Control Plane Agent connection is down, please check NSX Manager and Control Plane Agent logs to get the error detail, check whether the Control Plane Agent process is down. (iii) If Control Plane Agent to Controllers connection is down, please go to UI Installation page to check the connection status for crossponding Host. Frequency of traps: When (i) NSX Manager looses connection with Firewall agent on host or (ii) NSX Manager losses connection with Control plane agent on host or (iii) Control plane agent on Host looses connection with NSX Controllers. Affects: VMs on that Host might get affected.')
vmwNsxMBackingPortgroupMissing = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 20, 0, 19)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMBackingPortgroupMissing.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMBackingPortgroupMissing.setDescription('NSX manager detected one backing portgroup of a logical switch is missing on VCenter. Action required: Click on the resolve button on UI or call the API (POST https://<nsx-manager-host>/api/2.0/vdn/virtualwires/<virtualwireId>/backing?action=remediate) to recreate that backing portgroup. Frequency of traps: Whenever logical switch backing portgroup is missing on VC. Affects: VMs cannot be connected to this Logical Switch.')
vmwNsxMBackingPortgroupReappears = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 20, 0, 20)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMBackingPortgroupReappears.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMBackingPortgroupReappears.setDescription('NSX manager detected one backing portgroup of a logical switch that was missing reappears on VC. Action required: None Frequency of traps: Whenever user triggered remediate API on Logical Switch which has missing backing portgroup.')
vmwNsxMManagedObjectIdChanged = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 20, 0, 21)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMManagedObjectIdChanged.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMManagedObjectIdChanged.setDescription('NSX manager detected the Managed Objectid of one backing portgroup of a logical switch changed. Action required: None Frequnecy of traps: This typically happens when user restores a backup of Logical Switch backing portgroup.')
vmwNsxMHighLatencyOnDisk = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 20, 0, 22)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMHighLatencyOnDisk.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMHighLatencyOnDisk.setDescription('NSX manager detected some disk on a NSX Controller has high latency. Action required: Rectify the issue on specified device and controller. Frequency of traps: First time NSX detected this issue as reported by Controller. When this issue gets resolved another Informational event will be raised by NSX Manager indicating the same. Affects: NSX Controller.')
vmwNsxMHighLatencyOnDiskResolved = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 20, 0, 23)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMHighLatencyOnDiskResolved.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMHighLatencyOnDiskResolved.setDescription('NSX manager detected the disk high latency alert on a some disk on a NSX Controller has been resolved. Frequency of traps: First time NSX detected, previously raised disk latency issue has been resolved.')
vmwNsxMControllerVmPoweredOff = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 20, 0, 24)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMControllerVmPoweredOff.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMControllerVmPoweredOff.setDescription("NSX manager detected a Controller Virtual Machine is powered off from vCenter. Action required: Click on the 'Resolve' button on Controller page on UI or call the API (POST https://<nsx-manager-host>/api/2.0/vdn/controller/{controllerId}?action=remediate) to power on the Controller Virtual Machine. Frequency of traps: This event wil be raised when controller Virtual Machine is powered off from vCenter. Affects: Controller cluster status might go to disconnected if a controller Virtual Machine is powered off. Any operation that requires an active Controller Cluster may be affected.")
vmwNsxMControllerVmDeleted = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 20, 0, 25)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMControllerVmDeleted.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMControllerVmDeleted.setDescription("NSX manager detected a Controller Virtual Machine is deleted from vCenter. Action required: Click on the Resolve button on Controller page on UI or call the API (POST https://<nsx-manager-host>/api/2.0/vdn/controller/{controllerId}?action=remediate) to clean up NSX manager's database state. Frequency of traps: This event will be raised when Controller Virtual Machine is deleted from vCenter. Affects: Controller cluster status might go to disconnected if a controller Virtual Machine is powered off. Any operation that requires an active Controller Cluster may be affected.")
vmwNsxMVxlanConfigNotSet = NotificationType((1, 3, 6, 1, 4, 1, 6876, 90, 1, 2, 20, 0, 26)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"))
if mibBuilder.loadTexts: vmwNsxMVxlanConfigNotSet.setStatus('current')
if mibBuilder.loadTexts: vmwNsxMVxlanConfigNotSet.setDescription('NSX manager detected the VXLAN configuration is not set on the host (would-block issue). And this event indicates NSX Manager tried to rectify this issue by resending the VXLAN configuration on Host. Action required: See KB 2107951 https://kb.vmware.com/selfservice/microsites/search.do?cmd=displayKC&docType=kc&externalId=2107951&sliceId=1&docTypeID=DT_KB_1_1&dialogID=40732862&stateId=0%200%2040754197 for more information. Frequency of traps: This event will generate when host preparation task is triggered for a host and Host encounters would-block issue. Affects: It is a notification, no specific guide for the next step.')
# --- MIB conformance section (OID ...6876.90.1.99) ---
# Conformance/compliance identifiers, the MODULE-COMPLIANCE statement, and the
# OBJECT-GROUP listing the varbind objects carried by the notifications above.
# The getattr(mibBuilder, 'version', ...) checks emit setStatus() only on
# pysnmp SMI builders newer than 4.4.0, where these objects are immutable and
# setStatus returns a new instance that must be rebound. Generated by pysmi.
vmwNsxManagerMIBConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 6876, 90, 1, 99))
vmwNsxManagerMIBCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 6876, 90, 1, 99, 1))
vmwNsxManagerMIBGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 6876, 90, 1, 99, 2))
vmwNsxManagerMIBBasicCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 6876, 90, 1, 99, 1, 3)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxManagerNotificationInfoGroup1"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxManagerNotificationGroup1"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    vmwNsxManagerMIBBasicCompliance = vmwNsxManagerMIBBasicCompliance.setStatus('current')
if mibBuilder.loadTexts: vmwNsxManagerMIBBasicCompliance.setDescription('The compliance statement for entities which implement VMWARE-NSX-MANAGER-MIB.')
vmwNsxManagerNotificationInfoGroup1 = ObjectGroup((1, 3, 6, 1, 4, 1, 6876, 90, 1, 99, 2, 2)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventCode"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventTimestamp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventMessage"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventSeverity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEventComponent"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUuid"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMCount"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    vmwNsxManagerNotificationInfoGroup1 = vmwNsxManagerNotificationInfoGroup1.setStatus('current')
if mibBuilder.loadTexts: vmwNsxManagerNotificationInfoGroup1.setDescription('These objects provide details in NSX Manager notifications.')
vmwNsxManagerNotificationGroup1 = NotificationGroup((1, 3, 6, 1, 4, 1, 6876, 90, 1, 99, 2, 3)).setObjects(("VMWARE-NSX-MANAGER-MIB", "vmwNsxMConfigGroup"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMIpAddedBlackList"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMIpRemovedBlackList"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMSsoConfigFailure"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMSsoUnconfigured"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUserRoleAssigned"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUserRoleUnassigned"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMGroupRoleAssigned"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMGroupRoleUnassigned"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMVcLoginFailed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMVcDisconnected"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMLostVcConnectivity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFltrCnfgUpdateFailed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFltrCnfgNotAppliedToVnic"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFltrCnfgAppliedToVnic"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFltrCreatedForVnic"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFltrDeletedForVnic"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFirewallConfigUpdateFailed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFirewallRuleFailedVnic"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFirewallRuleAppliedVnic"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMCntnrCnfgUpdateFailed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFlowMissed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMSpoofGuardCnfgUpdateFailed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMSpoofGuardFailed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMSpoofGuardApplied"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMSpoofGuardDisableFail"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMSpoofGuardDisabled"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMLegacyAppServiceDeletionFailed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFirewallCpuThresholdCrossed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFirewallMemThresholdCrossed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMConnPerSecThrshldCrossed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFirewallCnfgUpdateTimedOut"), 
("VMWARE-NSX-MANAGER-MIB", "vmwNsxMSpoofGuardCnfgUpdateTmOut"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFirewallPublishFailed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMCntnrUpdatePublishFailed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMSpoofGuardUpdatePublishFailed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMExcludeListPublishFailed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFirewallCnfgUpdateOnDltCntnr"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMHostSyncFailed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMHostSynced"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFirewallInstalled"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFirewallClusterInstalled"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFirewallClusterUninstalled"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFirewallClusterDisabled"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeNoVmServing"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeGatewayCreated"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeVmBadState"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeVmCommFailed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeVmCnfgChanged"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeGatewayDeleted"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeGatewayReDeployed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeVmPowerOff"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeApplianceSizeChanged"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeUpgrade51x"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeLicenseChanged"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeApplianceMoved"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeApplianceNotFound"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeVMHealthCheckMiss"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeHealthCheckMiss"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeCommAgentNotConnected"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMApplianceWithDifferentId"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFirewallRuleModified"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeAntiAffinityRuleViolated"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeHaEnabled"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeHaDisabled"), ("VMWARE-NSX-MANAGER-MIB", 
"vmwNsxMEdgeGatewayRecovered"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeVmRecovered"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeGatewayUpgraded"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeVmHlthChkDisabled"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgePrePublishFailed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeForcedSync"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeVmBooted"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeVmInBadState"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeVmCpuUsageIncreased"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeVmMemUsageIncreased"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeVmProcessFailure"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeVmSysTimeBad"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeVmSysTimeSync"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeAesniCryptoEngineUp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeAesniCryptoEngineDown"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeVmOom"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeFileSysRo"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeHaCommDisconnected"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeHaSwitchOverSelf"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeHaSwitchOverActive"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeHaSwitchOverStandby"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeMonitorProcessFailure"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMLbVirtualServerPoolUp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMLbVirtualServerPoolDown"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMLbVirtualServerPoolWrong"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMLbPoolWarning"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMIpsecChannelUp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMIpsecChannelDown"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMIpsecTunnelUp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMIpsecTunnelDown"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMIpsecChannelUnknown"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMIpsecTunnelUnknown"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMGlobalLbMemberUp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMGlobalLbMemberWarning"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMGlobalLbMemberDown"), 
("VMWARE-NSX-MANAGER-MIB", "vmwNsxMGlobalLbMemberUnknown"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMGlobalLbPeerUp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMGlobalLbPeerDown"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMDhcpServiceDisabled"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEndpointThinAgentEnabled"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMGuestIntrspctnEnabled"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMGuestIntrspctnIncompatibleEsx"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMGuestIntrspctnEsxConnFailed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMGuestIntrspctnStatusRcvFailed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEsxModuleEnabled"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEsxModuleUninstalled"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMGuestIntrspctnHstMxMssngRep"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEndpointUndefined"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEamGenericAlarm"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFabricDplymntStatusChanged"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFabricDplymntUnitCreated"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFabricDplymntUnitUpdated"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFabricDplymntUnitDestroyed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMDataStoreNotCnfgrdOnHost"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFabricDplymntInstallationFailed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFabricAgentCreated"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFabricAgentDestroyed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFabricSrvceNeedsRedplymnt"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUpgradeOfDplymntFailed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFabricDependenciesNotInstalled"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFabricErrorNotifSecBfrUpgrade"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFabricErrCallbackNtRcvdUpgrade"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFabricErrCallbackNtRcvdUninstall"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFabricUninstallServiceFailed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFabricErrorNotifSecBfrUninstall"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFabricServerRebootUninstall"), ("VMWARE-NSX-MANAGER-MIB", 
"vmwNsxMFabricServerRebootUpgrade"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFabricConnEamFailed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFabricConnEamRestored"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFabricPreUninstallCleanUpFailed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFabricBackingEamNotFound"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMDepPluginIpPoolExhausted"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMDepPluginGenericAlarm"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMDepPluginGenericException"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMDepPluginVmReboot"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMMessagingConfigFailed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMMessagingReconfigFailed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMMessagingConfigFailedNotifSkip"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMMessagingInfraUp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMMessagingInfraDown"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMMessagingDisabled"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMServiceComposerPolicyOutOfSync"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMServiceComposerPolicyDeleted"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMInconsistentSvmAlarm"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMSvmRestartAlarm"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMSvmAgentUnavailable"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMVmAddedToSg"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMVmRemovedFromSg"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFullUniversalSyncFailed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMSecondaryDown"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUniversalSyncFailedForEntity"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMServerUp"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMExtensionRegistered"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMExtensionUpdated"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMDataSecScanStarted"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMDataSecScanEnded"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMSamDataCollectionEnabled"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMSamDataCollectionDisabled"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMSamDataStoppedFlowing"), ("VMWARE-NSX-MANAGER-MIB", 
"vmwNsxMSamDataResumedFlowing"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUsvmHeartbeatStopped"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUsvmHeartbeatResumed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUsvmReceivedHello"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMUpgradeSuccess"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMRestoreSuccess"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMVxlanLogicalSwitchImproperlyCnfg"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMVxlanLogicalSwitchProperlyCnfg"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMVxlanInitFailed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMVxlanPortInitFailed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMVxlanInstanceDoesNotExist"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMVxlanLogicalSwitchWrkngImproperly"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMVxlanTransportZoneIncorrectlyWrkng"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMVxlanTransportZoneNotUsed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMVxlanOverlayClassMissingOnDvs"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMVxlanControllerRemoved"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMVxlanControllerConnProblem"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMVxlanControllerInactive"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMVxlanControllerActive"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMVxlanVmknicMissingOrDeleted"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMVxlanInfo"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMVxlanVmknicPortGrpMissing"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMVxlanVmknicPortGrpAppears"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMVxlanConnDown"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMSamDataCollectionDisabled"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMSamDataStoppedFlowing"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMSamDataResumedFlowing"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMVxlanOverlayClassMissingOnDvs"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMVxlanControllerRemoved"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMVxlanControllerConnProblem"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMVxlanControllerInactive"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMSsoDisconnected"), ("VMWARE-NSX-MANAGER-MIB", 
"vmwNsxMFirewallInstallFailed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFirewallForceSyncClusterFailed"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFirewallForceSyncClusterSuccess"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMFirewallVsfwdProcessStarted"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeResourceReservationFailure"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeSplitBrainDetected"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeSplitBrainRecovered"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMEdgeSplitBrainRecoveryAttempt"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMServiceComposerFirewallPolicyOutOfSync"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMServiceComposerNetworkPolicyOutOfSync"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMServiceComposerGuestPolicyOutOfSync"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMServiceComposerOutOfSync"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMServiceComposerOutOfSyncRebootFailure"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMServiceComposerOutOfSyncDraftRollback"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMServiceComposerOutOfSyncSectionDeletionFailure"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMServiceComposerOutOfSyncPrecedenceChangeFailure"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMServiceComposerOutOfSyncDraftSettingFailure"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMBackingPortgroupMissing"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMBackingPortgroupReappears"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMManagedObjectIdChanged"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMHighLatencyOnDisk"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMHighLatencyOnDiskResolved"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMControllerVmPoweredOff"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMControllerVmDeleted"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMVxlanConfigNotSet"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMSnmpDisabled"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMSnmpManagerConfigUpdated"), ("VMWARE-NSX-MANAGER-MIB", "vmwNsxMDuplicateIp"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
vmwNsxManagerNotificationGroup1 = vmwNsxManagerNotificationGroup1.setStatus('current')
if mibBuilder.loadTexts: vmwNsxManagerNotificationGroup1.setDescription('Group of objects describing notifications (traps, informs).')
mibBuilder.exportSymbols("VMWARE-NSX-MANAGER-MIB", vmwNsxManagerNotificationGroup1=vmwNsxManagerNotificationGroup1, vmwNsxMIpsecChannelUp=vmwNsxMIpsecChannelUp, vmwNsxMIpsecChannelUnknown=vmwNsxMIpsecChannelUnknown, vmwNsxMGlobalLbMemberUp=vmwNsxMGlobalLbMemberUp, vmwNsxMMessagingInfraDown=vmwNsxMMessagingInfraDown, vmwNsxMFabric=vmwNsxMFabric, vmwNsxMEdgeSplitBrainRecovered=vmwNsxMEdgeSplitBrainRecovered, vmwNsxMFirewallForceSyncClusterSuccess=vmwNsxMFirewallForceSyncClusterSuccess, vmwNsxMFabricDplymntUnitUpdated=vmwNsxMFabricDplymntUnitUpdated, vmwNsxMMessaging=vmwNsxMMessaging, vmwNsxMLbPoolWarning=vmwNsxMLbPoolWarning, vmwNsxMHostSynced=vmwNsxMHostSynced, vmwNsxMVcLoginFailed=vmwNsxMVcLoginFailed, vmwNsxMVxlanConnDown=vmwNsxMVxlanConnDown, vmwNsxManagerMIBBasicCompliance=vmwNsxManagerMIBBasicCompliance, vmwNsxMEdgeVmBadState=vmwNsxMEdgeVmBadState, vmwNsxMEventMessage=vmwNsxMEventMessage, vmwNsxMFirewallMemThresholdCrossed=vmwNsxMFirewallMemThresholdCrossed, vmwNsxMFabricErrorNotifSecBfrUninstall=vmwNsxMFabricErrorNotifSecBfrUninstall, vmwNsxMServiceComposerNetworkPolicyOutOfSync=vmwNsxMServiceComposerNetworkPolicyOutOfSync, vmwNsxMDepPluginGenericException=vmwNsxMDepPluginGenericException, vmwNsxMFabricServerRebootUpgrade=vmwNsxMFabricServerRebootUpgrade, vmwNsxMEdgeVmCnfgChanged=vmwNsxMEdgeVmCnfgChanged, vmwNsxMExcludeListPublishFailed=vmwNsxMExcludeListPublishFailed, vmwNsxMVxlanInitFailed=vmwNsxMVxlanInitFailed, vmwNsxMEdgeGatewayUpgraded=vmwNsxMEdgeGatewayUpgraded, vmwNsxMVxlanVmknicPortGrpAppears=vmwNsxMVxlanVmknicPortGrpAppears, vmwNsxMTranslationPrefix=vmwNsxMTranslationPrefix, vmwNsxMEamGenericAlarm=vmwNsxMEamGenericAlarm, vmwNsxManagerMIBConformance=vmwNsxManagerMIBConformance, vmwNsxMDuplicateIp=vmwNsxMDuplicateIp, vmwNsxMBranch=vmwNsxMBranch, vmwNsxMVxlanConfigNotSet=vmwNsxMVxlanConfigNotSet, vmwNsxMLegacyAppServiceDeletionFailed=vmwNsxMLegacyAppServiceDeletionFailed, vmwNsxMEdgeFileSysRo=vmwNsxMEdgeFileSysRo, 
vmwNsxMGuestIntrspctnStatusRcvFailed=vmwNsxMGuestIntrspctnStatusRcvFailed, vmwNsxMFlowMissed=vmwNsxMFlowMissed, vmwNsxMEventCode=vmwNsxMEventCode, vmwNsxMSecurityPrefix=vmwNsxMSecurityPrefix, vmwNsxMEdgeApplianceNotFound=vmwNsxMEdgeApplianceNotFound, vmwNsxMGlobalLbPeerUp=vmwNsxMGlobalLbPeerUp, vmwNsxMFabricErrorNotifSecBfrUpgrade=vmwNsxMFabricErrorNotifSecBfrUpgrade, vmwNsxMMessagingConfigFailed=vmwNsxMMessagingConfigFailed, vmwNsxMVsmCorePrefix=vmwNsxMVsmCorePrefix, vmwNsxMVxlanControllerConnProblem=vmwNsxMVxlanControllerConnProblem, vmwNsxManagerMIBCompliances=vmwNsxManagerMIBCompliances, vmwNsxMFirewallClusterDisabled=vmwNsxMFirewallClusterDisabled, vmwNsxMSpoofGuardApplied=vmwNsxMSpoofGuardApplied, vmwNsxMFirewallCnfgUpdateTimedOut=vmwNsxMFirewallCnfgUpdateTimedOut, vmwNsxManagerMIB=vmwNsxManagerMIB, vmwNsxMEdgeHaCommDisconnected=vmwNsxMEdgeHaCommDisconnected, vmwNsxMVxlan=vmwNsxMVxlan, vmwNsxMFabricAgentDestroyed=vmwNsxMFabricAgentDestroyed, vmwNsxMVxlanInfo=vmwNsxMVxlanInfo, vmwNsxMFirewallRuleFailedVnic=vmwNsxMFirewallRuleFailedVnic, vmwNsxMServiceComposerFirewallPolicyOutOfSync=vmwNsxMServiceComposerFirewallPolicyOutOfSync, vmwNsxMEdgePrePublishFailed=vmwNsxMEdgePrePublishFailed, vmwNsxMLbVirtualServerPoolDown=vmwNsxMLbVirtualServerPoolDown, VmwNsxManagerTypeSeverity=VmwNsxManagerTypeSeverity, vmwNsxMSvmOperations=vmwNsxMSvmOperations, vmwNsxMSvmAgentUnavailable=vmwNsxMSvmAgentUnavailable, vmwNsxMEdgeAesniCryptoEngineUp=vmwNsxMEdgeAesniCryptoEngineUp, vmwNsxMSecurity=vmwNsxMSecurity, vmwNsxMEamPrefix=vmwNsxMEamPrefix, vmwNsxMEndpoint=vmwNsxMEndpoint, vmwNsxMEam=vmwNsxMEam, vmwNsxMGlobalLbPeerDown=vmwNsxMGlobalLbPeerDown, vmwNsxMEdgeAntiAffinityRuleViolated=vmwNsxMEdgeAntiAffinityRuleViolated, vmwNsxMDlpPrefix=vmwNsxMDlpPrefix, vmwNsxMEdgeApplianceMoved=vmwNsxMEdgeApplianceMoved, vmwNsxMApplianceWithDifferentId=vmwNsxMApplianceWithDifferentId, vmwNsxMGuestIntrspctnEnabled=vmwNsxMGuestIntrspctnEnabled, 
vmwNsxMEdgeApplianceSizeChanged=vmwNsxMEdgeApplianceSizeChanged, vmwNsxMFabricErrCallbackNtRcvdUninstall=vmwNsxMFabricErrCallbackNtRcvdUninstall, vmwNsxMEndpointUndefined=vmwNsxMEndpointUndefined, vmwNsxMCntnrCnfgUpdateFailed=vmwNsxMCntnrCnfgUpdateFailed, vmwNsxMEdgeVmOom=vmwNsxMEdgeVmOom, vmwNsxMHostSyncFailed=vmwNsxMHostSyncFailed, vmwNsxMMessagingDisabled=vmwNsxMMessagingDisabled, vmwNsxMControllerVmPoweredOff=vmwNsxMControllerVmPoweredOff, vmwNsxMFirewallCpuThresholdCrossed=vmwNsxMFirewallCpuThresholdCrossed, vmwNsxMUsvmReceivedHello=vmwNsxMUsvmReceivedHello, vmwNsxMBackingPortgroupMissing=vmwNsxMBackingPortgroupMissing, vmwNsxManagerMIBGroups=vmwNsxManagerMIBGroups, vmwNsxMEdgeVmRecovered=vmwNsxMEdgeVmRecovered, vmwNsxMEdgeHaSwitchOverSelf=vmwNsxMEdgeHaSwitchOverSelf, vmwNsxMUniversalSync=vmwNsxMUniversalSync, vmwNsxMGroupsPrefix=vmwNsxMGroupsPrefix, vmwNsxMManagedObjectIdChanged=vmwNsxMManagedObjectIdChanged, vmwNsxMServiceComposerPolicyDeleted=vmwNsxMServiceComposerPolicyDeleted, vmwNsxMSvmRestartAlarm=vmwNsxMSvmRestartAlarm, vmwNsxMFabricServerRebootUninstall=vmwNsxMFabricServerRebootUninstall, vmwNsxMSpoofGuardFailed=vmwNsxMSpoofGuardFailed, vmwNsxMEdgeMonitorProcessFailure=vmwNsxMEdgeMonitorProcessFailure, vmwNsxMFabricSrvceNeedsRedplymnt=vmwNsxMFabricSrvceNeedsRedplymnt, vmwNsxMRestoreSuccess=vmwNsxMRestoreSuccess, vmwNsxMSnmp=vmwNsxMSnmp, vmwNsxMSecondaryDown=vmwNsxMSecondaryDown, vmwNsxMGroupRoleAssigned=vmwNsxMGroupRoleAssigned, vmwNsxMEdgeAesniCryptoEngineDown=vmwNsxMEdgeAesniCryptoEngineDown, vmwNsxMEsxModuleEnabled=vmwNsxMEsxModuleEnabled, vmwNsxMServiceComposerOutOfSyncRebootFailure=vmwNsxMServiceComposerOutOfSyncRebootFailure, vmwNsxMFabricUninstallServiceFailed=vmwNsxMFabricUninstallServiceFailed, vmwNsxMIpRemovedBlackList=vmwNsxMIpRemovedBlackList, vmwNsxMEdgeVmSysTimeBad=vmwNsxMEdgeVmSysTimeBad, vmwNsxMEdgeHaDisabled=vmwNsxMEdgeHaDisabled, vmwNsxMUserRoleAssigned=vmwNsxMUserRoleAssigned, vmwNsxMLostVcConnectivity=vmwNsxMLostVcConnectivity, 
vmwNsxMFltrCnfgNotAppliedToVnic=vmwNsxMFltrCnfgNotAppliedToVnic, vmwNsxMFabricErrCallbackNtRcvdUpgrade=vmwNsxMFabricErrCallbackNtRcvdUpgrade, vmwNsxMVxlanTransportZoneNotUsed=vmwNsxMVxlanTransportZoneNotUsed, vmwNsxMVxlanVmknicPortGrpMissing=vmwNsxMVxlanVmknicPortGrpMissing, vmwNsxMEdgeGatewayReDeployed=vmwNsxMEdgeGatewayReDeployed, vmwNsxMEdgeHaSwitchOverStandby=vmwNsxMEdgeHaSwitchOverStandby, vmwNsxMGroupsBranch=vmwNsxMGroupsBranch, vmwNsxMSpoofGuardUpdatePublishFailed=vmwNsxMSpoofGuardUpdatePublishFailed, vmwNsxMExtensionRegistrationPrefix=vmwNsxMExtensionRegistrationPrefix, vmwNsxMHighLatencyOnDisk=vmwNsxMHighLatencyOnDisk, vmwNsxMServiceComposerOutOfSync=vmwNsxMServiceComposerOutOfSync, vmwNsxMUpgradeSuccess=vmwNsxMUpgradeSuccess, vmwNsxMServiceComposerOutOfSyncDraftRollback=vmwNsxMServiceComposerOutOfSyncDraftRollback, vmwNsxMTranslation=vmwNsxMTranslation, vmwNsxMEdgeLicenseChanged=vmwNsxMEdgeLicenseChanged, vmwNsxMVxlanControllerRemoved=vmwNsxMVxlanControllerRemoved, vmwNsxMDhcpServiceDisabled=vmwNsxMDhcpServiceDisabled, vmwNsxMFabricConnEamFailed=vmwNsxMFabricConnEamFailed, vmwNsxMVxlanOverlayClassMissingOnDvs=vmwNsxMVxlanOverlayClassMissingOnDvs, PYSNMP_MODULE_ID=vmwNsxManagerMIB, vmwNsxMEdgeSplitBrainRecoveryAttempt=vmwNsxMEdgeSplitBrainRecoveryAttempt, vmwNsxMEventComponent=vmwNsxMEventComponent, vmwNsxMVxlanPrefix=vmwNsxMVxlanPrefix, vmwNsxMEdgeForcedSync=vmwNsxMEdgeForcedSync, vmwNsxMIpsecTunnelUnknown=vmwNsxMIpsecTunnelUnknown, vmwNsxMUpgradeOfDplymntFailed=vmwNsxMUpgradeOfDplymntFailed, vmwNsxMUserRoleUnassigned=vmwNsxMUserRoleUnassigned, vmwNsxMSamDataCollectionDisabled=vmwNsxMSamDataCollectionDisabled, vmwNsxMEventTimestamp=vmwNsxMEventTimestamp, vmwNsxMGuestIntrspctnHstMxMssngRep=vmwNsxMGuestIntrspctnHstMxMssngRep, vmwNsxMVxlanPortInitFailed=vmwNsxMVxlanPortInitFailed, vmwNsxMSsoDisconnected=vmwNsxMSsoDisconnected, vmwNsxMSamSystem=vmwNsxMSamSystem, vmwNsxMUsvm=vmwNsxMUsvm, vmwNsxMSnmpManagerConfigUpdated=vmwNsxMSnmpManagerConfigUpdated, 
vmwNsxMExtensionUpdated=vmwNsxMExtensionUpdated, vmwNsxMBackingPortgroupReappears=vmwNsxMBackingPortgroupReappears, vmwNsxMMessagingInfraUp=vmwNsxMMessagingInfraUp, vmwNsxMEdgeVmProcessFailure=vmwNsxMEdgeVmProcessFailure, vmwNsxMEndpointThinAgentEnabled=vmwNsxMEndpointThinAgentEnabled, vmwNsxMEdgeVmMemUsageIncreased=vmwNsxMEdgeVmMemUsageIncreased, vmwNsxMDlp=vmwNsxMDlp, vmwNsxMAlertData=vmwNsxMAlertData, vmwNsxMDepPluginPrefix=vmwNsxMDepPluginPrefix, vmwNsxMDepPluginIpPoolExhausted=vmwNsxMDepPluginIpPoolExhausted, vmwNsxMFirewallInstalled=vmwNsxMFirewallInstalled, vmwNsxMFabricPreUninstallCleanUpFailed=vmwNsxMFabricPreUninstallCleanUpFailed, vmwNsxMFabricDplymntStatusChanged=vmwNsxMFabricDplymntStatusChanged, vmwNsxMVxlanLogicalSwitchWrkngImproperly=vmwNsxMVxlanLogicalSwitchWrkngImproperly, vmwNsxMFirewallClusterUninstalled=vmwNsxMFirewallClusterUninstalled, vmwNsxMEdgeGatewayDeleted=vmwNsxMEdgeGatewayDeleted, vmwNsxMFabricDplymntInstallationFailed=vmwNsxMFabricDplymntInstallationFailed, vmwNsxMEdgeVmCommFailed=vmwNsxMEdgeVmCommFailed, vmwNsxMInconsistentSvmAlarm=vmwNsxMInconsistentSvmAlarm, vmwNsxMFltrCnfgAppliedToVnic=vmwNsxMFltrCnfgAppliedToVnic, vmwNsxMControllerVmDeleted=vmwNsxMControllerVmDeleted, vmwNsxMUsvmPrefix=vmwNsxMUsvmPrefix, vmwNsxMDepPlugin=vmwNsxMDepPlugin, vmwNsxMServiceComposerPolicyOutOfSync=vmwNsxMServiceComposerPolicyOutOfSync, vmwNsxMNotification=vmwNsxMNotification, vmwNsxMSsoConfigFailure=vmwNsxMSsoConfigFailure, vmwNsxMMessagingReconfigFailed=vmwNsxMMessagingReconfigFailed, vmwNsxMEdgeVmCpuUsageIncreased=vmwNsxMEdgeVmCpuUsageIncreased, vmwNsxMUniversalSyncPrefix=vmwNsxMUniversalSyncPrefix, vmwNsxMIpsecTunnelDown=vmwNsxMIpsecTunnelDown, vmwNsxMFirewallInstallFailed=vmwNsxMFirewallInstallFailed, vmwNsxMDepPluginGenericAlarm=vmwNsxMDepPluginGenericAlarm, vmwNsxMEdgeVmPowerOff=vmwNsxMEdgeVmPowerOff, vmwNsxMSamDataStoppedFlowing=vmwNsxMSamDataStoppedFlowing, vmwNsxMEdgeHaEnabled=vmwNsxMEdgeHaEnabled, 
vmwNsxMIpsecChannelDown=vmwNsxMIpsecChannelDown, vmwNsxMFabricPrefix=vmwNsxMFabricPrefix, vmwNsxMFullUniversalSyncFailed=vmwNsxMFullUniversalSyncFailed, vmwNsxMFabricDplymntUnitCreated=vmwNsxMFabricDplymntUnitCreated, vmwNsxMGroupRoleUnassigned=vmwNsxMGroupRoleUnassigned, vmwNsxMUuid=vmwNsxMUuid, vmwNsxMServiceComposerOutOfSyncPrecedenceChangeFailure=vmwNsxMServiceComposerOutOfSyncPrecedenceChangeFailure, vmwNsxMEdgeGatewayRecovered=vmwNsxMEdgeGatewayRecovered, vmwNsxMSamSystemPrefix=vmwNsxMSamSystemPrefix, vmwNsxMEdgeVmHlthChkDisabled=vmwNsxMEdgeVmHlthChkDisabled, vmwNsxMFabricAgentCreated=vmwNsxMFabricAgentCreated, vmwNsxMServiceComposerPrefix=vmwNsxMServiceComposerPrefix, vmwNsxMEdgeVMHealthCheckMiss=vmwNsxMEdgeVMHealthCheckMiss, vmwNsxMSpoofGuardCnfgUpdateFailed=vmwNsxMSpoofGuardCnfgUpdateFailed, vmwNsxMSpoofGuardCnfgUpdateTmOut=vmwNsxMSpoofGuardCnfgUpdateTmOut, vmwNsxMUsvmHeartbeatStopped=vmwNsxMUsvmHeartbeatStopped, vmwNsxMHighLatencyOnDiskResolved=vmwNsxMHighLatencyOnDiskResolved, vmwNsxMEdgeVmSysTimeSync=vmwNsxMEdgeVmSysTimeSync, vmwNsxMLbVirtualServerPoolWrong=vmwNsxMLbVirtualServerPoolWrong, vmwNsxMConfigGroup=vmwNsxMConfigGroup, vmwNsxMGuestIntrspctnEsxConnFailed=vmwNsxMGuestIntrspctnEsxConnFailed, vmwNsxMCntnrUpdatePublishFailed=vmwNsxMCntnrUpdatePublishFailed, vmwNsxMSnmpDisabled=vmwNsxMSnmpDisabled, vmwNsxMFirewallClusterInstalled=vmwNsxMFirewallClusterInstalled, vmwNsxMFabricDependenciesNotInstalled=vmwNsxMFabricDependenciesNotInstalled, vmwNsxMMessagingConfigFailedNotifSkip=vmwNsxMMessagingConfigFailedNotifSkip, vmwNsxMAsyncRest=vmwNsxMAsyncRest, vmwNsxMFirewallRuleModified=vmwNsxMFirewallRuleModified, vmwNsxMAsyncRestPrefix=vmwNsxMAsyncRestPrefix, vmwNsxMExtensionRegistration=vmwNsxMExtensionRegistration, vmwNsxMEdgePrefix=vmwNsxMEdgePrefix, vmwNsxMServiceComposerGuestPolicyOutOfSync=vmwNsxMServiceComposerGuestPolicyOutOfSync, vmwNsxMVxlanLogicalSwitchImproperlyCnfg=vmwNsxMVxlanLogicalSwitchImproperlyCnfg, 
vmwNsxMEdgeCommAgentNotConnected=vmwNsxMEdgeCommAgentNotConnected, vmwNsxMFirewallForceSyncClusterFailed=vmwNsxMFirewallForceSyncClusterFailed, vmwNsxMEdgeVmBooted=vmwNsxMEdgeVmBooted, vmwNsxMDepPluginVmReboot=vmwNsxMDepPluginVmReboot, vmwNsxMDataSecScanStarted=vmwNsxMDataSecScanStarted, vmwNsxMVxlanVmknicMissingOrDeleted=vmwNsxMVxlanVmknicMissingOrDeleted, vmwNsxMVmRemovedFromSg=vmwNsxMVmRemovedFromSg, vmwNsxMEdge=vmwNsxMEdge, vmwNsxMVmAddedToSg=vmwNsxMVmAddedToSg, vmwNsxMUsvmHeartbeatResumed=vmwNsxMUsvmHeartbeatResumed, vmwNsxManagerNotificationInfoGroup1=vmwNsxManagerNotificationInfoGroup1, vmwNsxMVxlanInstanceDoesNotExist=vmwNsxMVxlanInstanceDoesNotExist, vmwNsxMSpoofGuardDisableFail=vmwNsxMSpoofGuardDisableFail, vmwNsxMConnPerSecThrshldCrossed=vmwNsxMConnPerSecThrshldCrossed, vmwNsxMSamDataCollectionEnabled=vmwNsxMSamDataCollectionEnabled, vmwNsxMGlobalLbMemberDown=vmwNsxMGlobalLbMemberDown, vmwNsxMVxlanTransportZoneIncorrectlyWrkng=vmwNsxMVxlanTransportZoneIncorrectlyWrkng, vmwNsxMFirewallConfigUpdateFailed=vmwNsxMFirewallConfigUpdateFailed, vmwNsxMGuestIntrspctnIncompatibleEsx=vmwNsxMGuestIntrspctnIncompatibleEsx, vmwNsxMGlobalLbMemberWarning=vmwNsxMGlobalLbMemberWarning, vmwNsxMEdgeVmInBadState=vmwNsxMEdgeVmInBadState, vmwNsxMFltrCreatedForVnic=vmwNsxMFltrCreatedForVnic, vmwNsxMMessagingPrefix=vmwNsxMMessagingPrefix, vmwNsxMFirewallCnfgUpdateOnDltCntnr=vmwNsxMFirewallCnfgUpdateOnDltCntnr, vmwNsxMFirewall=vmwNsxMFirewall, vmwNsxMEdgeSplitBrainDetected=vmwNsxMEdgeSplitBrainDetected, vmwNsxMSamDataResumedFlowing=vmwNsxMSamDataResumedFlowing, vmwNsxMGlobalLbMemberUnknown=vmwNsxMGlobalLbMemberUnknown, vmwNsxMServerUp=vmwNsxMServerUp, vmwNsxMFltrCnfgUpdateFailed=vmwNsxMFltrCnfgUpdateFailed, vmwNsxMServiceComposer=vmwNsxMServiceComposer, vmwNsxMIpAddedBlackList=vmwNsxMIpAddedBlackList, vmwNsxMEventSeverity=vmwNsxMEventSeverity, vmwNsxMFirewallPrefix=vmwNsxMFirewallPrefix, vmwNsxMEdgeGatewayCreated=vmwNsxMEdgeGatewayCreated, 
vmwNsxMEndpointPrefix=vmwNsxMEndpointPrefix, vmwNsxMFirewallPublishFailed=vmwNsxMFirewallPublishFailed, vmwNsxMVxlanControllerInactive=vmwNsxMVxlanControllerInactive, vmwNsxMSpoofGuardDisabled=vmwNsxMSpoofGuardDisabled, vmwNsxMLbVirtualServerPoolUp=vmwNsxMLbVirtualServerPoolUp, vmwNsxMVcDisconnected=vmwNsxMVcDisconnected, vmwNsxMSvmOperationsPrefix=vmwNsxMSvmOperationsPrefix, vmwNsxMSsoUnconfigured=vmwNsxMSsoUnconfigured)
mibBuilder.exportSymbols("VMWARE-NSX-MANAGER-MIB", vmwNsxMEsxModuleUninstalled=vmwNsxMEsxModuleUninstalled, vmwNsxMDataSecScanEnded=vmwNsxMDataSecScanEnded, vmwNsxMVxlanLogicalSwitchProperlyCnfg=vmwNsxMVxlanLogicalSwitchProperlyCnfg, vmwNsxMDataStoreNotCnfgrdOnHost=vmwNsxMDataStoreNotCnfgrdOnHost, vmwNsxMVxlanControllerActive=vmwNsxMVxlanControllerActive, vmwNsxMFabricConnEamRestored=vmwNsxMFabricConnEamRestored, vmwNsxMExtensionRegistered=vmwNsxMExtensionRegistered, vmwNsxMEdgeNoVmServing=vmwNsxMEdgeNoVmServing, vmwNsxMServiceComposerOutOfSyncDraftSettingFailure=vmwNsxMServiceComposerOutOfSyncDraftSettingFailure, vmwNsxMServiceComposerOutOfSyncSectionDeletionFailure=vmwNsxMServiceComposerOutOfSyncSectionDeletionFailure, vmwNsxMFltrDeletedForVnic=vmwNsxMFltrDeletedForVnic, vmwNsxMEdgeUpgrade51x=vmwNsxMEdgeUpgrade51x, vmwNsxMFirewallRuleAppliedVnic=vmwNsxMFirewallRuleAppliedVnic, vmwNsxMEdgeHealthCheckMiss=vmwNsxMEdgeHealthCheckMiss, vmwNsxMEdgeHaSwitchOverActive=vmwNsxMEdgeHaSwitchOverActive, vmwNsxMFabricDplymntUnitDestroyed=vmwNsxMFabricDplymntUnitDestroyed, vmwNsxMFabricBackingEamNotFound=vmwNsxMFabricBackingEamNotFound, vmwNsxMFirewallVsfwdProcessStarted=vmwNsxMFirewallVsfwdProcessStarted, vmwNsxMCount=vmwNsxMCount, vmwNsxMEdgeResourceReservationFailure=vmwNsxMEdgeResourceReservationFailure, vmwNsxMUniversalSyncFailedForEntity=vmwNsxMUniversalSyncFailedForEntity, vmwNsxMVsmCore=vmwNsxMVsmCore, vmwNsxMIpsecTunnelUp=vmwNsxMIpsecTunnelUp, vmwNsxMSnmpPrefix=vmwNsxMSnmpPrefix)
| true | true |
f71e13e08442f6f8defa5a1eed69962294a1fe9c | 397 | py | Python | run_sample.py | nkmrtty/TwitterStreamingAPI | 2f2d6c1021a6f4c5ccbb85d65a9c5088af1deced | [
"MIT"
] | null | null | null | run_sample.py | nkmrtty/TwitterStreamingAPI | 2f2d6c1021a6f4c5ccbb85d65a9c5088af1deced | [
"MIT"
] | null | null | null | run_sample.py | nkmrtty/TwitterStreamingAPI | 2f2d6c1021a6f4c5ccbb85d65a9c5088af1deced | [
"MIT"
] | null | null | null | # coding: utf-8
"""Stream a random sample of public tweets and echo them to the terminal.

Requires valid Twitter API credentials; replace the placeholder values in
``APIKEYS`` with your own application keys before running.
"""
from streaming_api import sample

# Replace keys with your own keys
APIKEYS = dict(
    CONSUMER='consumer key',
    CONSUMER_SECRET='consumer secret key',
    ACCESS_TOKEN='access token key',
    ACCESS_TOKEN_SECRET='access token secret key')

# NOTICE: this script shows delimited output in your terminal
res = sample(APIKEYS)
for tweet_json_str in res:
    # print(x) with a single argument parses identically under Python 2
    # (parenthesized expression) and Python 3 (function call), so this
    # replaces the Python-2-only ``print tweet_json_str`` statement
    # without changing behavior.
    print(tweet_json_str)
| 26.466667 | 61 | 0.753149 |
from streaming_api import sample

# Twitter application credentials -- substitute real keys before use.
APIKEYS = {
    'CONSUMER': 'consumer key',
    'CONSUMER_SECRET': 'consumer secret key',
    'ACCESS_TOKEN': 'access token key',
    'ACCESS_TOKEN_SECRET': 'access token secret key',
}

# Echo every raw tweet JSON string yielded by the sample stream.
stream = sample(APIKEYS)
for line in stream:
    print(line)
| false | true |
f71e13eb6df77b4cc29006b4eca37e31f7423877 | 648 | py | Python | exatomic/core/tests/test_atom.py | tjduigna/exatomic | 3e27233084588bc6a58b63fc81aaf5a6b67a968d | [
"Apache-2.0"
] | null | null | null | exatomic/core/tests/test_atom.py | tjduigna/exatomic | 3e27233084588bc6a58b63fc81aaf5a6b67a968d | [
"Apache-2.0"
] | 1 | 2017-05-25T21:05:40.000Z | 2017-05-25T23:54:15.000Z | exatomic/core/tests/test_atom.py | tjduigna/exatomic | 3e27233084588bc6a58b63fc81aaf5a6b67a968d | [
"Apache-2.0"
] | 1 | 2017-05-25T20:48:33.000Z | 2017-05-25T20:48:33.000Z | # -*- coding: utf-8 -*-
# Copyright (c) 2015-2018, Exa Analytics Development Team
## Distributed under the terms of the Apache License 2.0
#"""
#Tests for the Atom DataFrame
##############################
#The tests here use some contrived examples.
#"""
#import numpy as np
#from unittest import TestCase
#from exa import units
#from exa.core.dataframe import ColumnError
#from exatomic.atom import Atom
#
#
#class TestAtom(TestCase):
# """Tests for :class:`~exatomic.atom.Atom`."""
# def test_init(self):
# """Test that the atom dataframe raises errors correctly."""
# with self.assertRaises(ColumnError):
# Atom()
| 29.454545 | 68 | 0.658951 |
#######
#The tests here use some contrived examples.
#"""
| true | true |
f71e141a890b58c304a9199c3f22429af1489824 | 8,642 | py | Python | contrib/manhole/piped_manhole/providers.py | alexbrasetvik/Piped | 0312c14d6c4c293df378c915cc9787bcc7faed36 | [
"MIT"
] | null | null | null | contrib/manhole/piped_manhole/providers.py | alexbrasetvik/Piped | 0312c14d6c4c293df378c915cc9787bcc7faed36 | [
"MIT"
] | null | null | null | contrib/manhole/piped_manhole/providers.py | alexbrasetvik/Piped | 0312c14d6c4c293df378c915cc9787bcc7faed36 | [
"MIT"
] | null | null | null | # Copyright (c) 2011, Found IT A/S and Piped Project Contributors.
# See LICENSE for details.
from twisted.application import internet, service, strports
from twisted.conch import manhole, manhole_ssh, error as conch_error
from twisted.conch.insults import insults
from twisted.conch.ssh import keys
from twisted.cred import error, portal
from twisted.internet import defer
from twisted.python import reflect
from zope import interface
from piped import resource
class ManholeProvider(object, service.MultiService):
    """ Provides SSH manholes as Piped services.

    Configuration example::

        manholes:
            my_manhole:
                enabled: true # defaults to true
                port: 10022 # defaults to 10022
                # or instead of port, you may use a strport
                # listen: 10222
                keys:
                    public_key_file: path # or public_key: str
                    private_key_file: path # or private_key: str
                checkers: # multiple checkers are allowed
                    inmemory:
                        checker: twisted.cred.checkers.InMemoryUsernamePasswordDatabaseDontUse
                        arguments:
                            username: password
    """
    interface.classProvides(resource.IResourceProvider)

    def __init__(self):
        service.MultiService.__init__(self)

    def configure(self, runtime_environment):
        """ Create one configured ManholeService child per enabled entry. """
        self.setName('manhole')
        self.setServiceParent(runtime_environment.application)
        self.manholes = runtime_environment.get_configuration_value('manholes', dict())
        for name, config in self.manholes.items():
            if not config.get('enabled', True):
                continue  # this manhole is disabled, so don't create it
            child = ManholeService(config)
            child.setName(name)
            child.setServiceParent(self)
            child.configure(runtime_environment)
class PipedManhole(manhole.ColoredManhole):
    """ A colored manhole that understands a few extra control-key combinations. """

    # Characters that terminate a backwards word-delete (CTRL-W).
    WORD_DELIMITERS = """ .;:({['\""""

    def connectionMade(self):
        result = manhole.ColoredManhole.connectionMade(self)
        # Bindings beyond what ColoredManhole installs by default.
        extra_bindings = {
            '\x08': self.handle_BACKSPACE,        # backspace as sent by some terminals (e.g. macOS)
            '\x01': self.handle_HOME,             # CTRL-A
            '\x05': self.handle_END,              # CTRL-E
            '\x15': self.handle_BACKSPACE_LINE,   # CTRL-U
            '\x17': self.handle_BACKSPACE_WORD,   # CTRL-W
        }
        self.keyHandlers.update(extra_bindings)
        return result

    def handle_BACKSPACE_LINE(self):
        """ Erase everything before the cursor (CTRL-U). """
        while self.lineBufferIndex > 0:
            self.handle_BACKSPACE()

    def handle_BACKSPACE_WORD(self):
        """ Erase back to (and including) the previous word delimiter (CTRL-W). """
        self.handle_BACKSPACE()
        while self.lineBufferIndex > 0 and self.lineBuffer[self.lineBufferIndex - 1] not in self.WORD_DELIMITERS:
            self.handle_BACKSPACE()
class PipedConchFactory(manhole_ssh.ConchFactory):
    """ A conch factory that may be seeded with an explicit RSA key pair.

    `private_key`/`public_key` are key strings parsed by
    twisted.conch.ssh.keys.Key.fromString; falsy values leave the
    ConchFactory defaults untouched.
    """

    def __init__(self, portal, private_key=None, public_key=None, **kw):
        manhole_ssh.ConchFactory.__init__(self, portal)
        for attribute, key_string in (('privateKeys', private_key), ('publicKeys', public_key)):
            if key_string:
                setattr(self, attribute, {'ssh-rsa': keys.Key.fromString(key_string)})
class ManholeService(service.MultiService):
    """ A configurable manhole service.
    See ManholeProvider for a configuration example.
    """
    # Classes used to build the SSH manhole; subclasses may override these to
    # customise the terminal protocol or the SSH factory.
    protocolFactory = PipedManhole
    conchFactory = PipedConchFactory
    def __init__(self, manhole_configuration):
        """ :param manhole_configuration: one manhole's configuration dict
            (see ManholeProvider for the expected keys). """
        service.MultiService.__init__(self)
        self.manhole_configuration = manhole_configuration
    def configure(self, runtime_environment):
        """ Build the SSH factory and start listening as a child TCP service. """
        self.runtime_environment = runtime_environment
        self.key_config = self._normalize_key_config(self.manhole_configuration.get('keys', dict()))
        factory = self._make_factory()
        # 'listen' is a strports description; a plain 'port' (default 10022)
        # is accepted as a shorthand.
        listen = str(self.manhole_configuration.get('listen', self.manhole_configuration.get('port', 10022)))
        tcpservice = strports.service(listen, factory)
        tcpservice.setName(self.name)
        tcpservice.setServiceParent(self)
        self._configure_dependencies(self.manhole_configuration.get('dependencies', dict()))
    def _configure_dependencies(self, dependency_map):
        """ Resolve the configured dependencies into self.dependencies. """
        for dependency_key, dependency_configuration in dependency_map.items():
            # A bare string is shorthand for dict(provider=<string>).
            if isinstance(dependency_configuration, basestring):
                dependency_configuration = dependency_map[dependency_key] = dict(provider=dependency_configuration)
        self.dependencies = self.runtime_environment.create_dependency_map(self, **dependency_map)
    def _normalize_key_config(self, key_config):
        """ Replace *_file entries by the corresponding file contents. """
        private_key_file = key_config.pop('private_key_file', None)
        public_key_file = key_config.pop('public_key_file', None)
        if private_key_file:
            # Accept path-like objects exposing a .path attribute as well as strings.
            private_key_file = getattr(private_key_file, 'path', private_key_file)
            key_config['private_key'] = open(private_key_file).read()
        if public_key_file:
            public_key_file = getattr(public_key_file, 'path', public_key_file)
            key_config['public_key'] = open(public_key_file).read()
        return key_config
    def _make_factory(self):
        """ Create the conch factory wired to a realm, portal and checkers. """
        checkers = self._make_checkers()
        realm = PipedTerminalRealm()
        portal_ = MultipleCheckersPortal(realm, checkers)
        def chainProtocolFactory():
            return insults.ServerProtocol(self.protocolFactory, namespace=self._get_namespace())
        realm.chainedProtocolFactory = chainProtocolFactory
        factory = self.conchFactory(portal_, **self.key_config)
        return factory
    def _make_checkers(self):
        """ Instantiate every configured credentials checker. """
        cs = list()
        for checker_config in self.manhole_configuration['checkers'].values():
            # 'checker' is a dotted name resolved at startup; remaining
            # 'arguments' are passed to its constructor as keyword arguments.
            checker_name = checker_config.pop('checker')
            checker_factory = reflect.namedAny(checker_name)
            checker = checker_factory(**checker_config.get('arguments', dict()))
            cs.append(checker)
        return cs
    def _get_namespace(self):
        """ Build the namespace exposed inside the manhole interpreter. """
        namespace = dict(runtime_environment=self.runtime_environment, dependencies=self.dependencies)
        for key, value in self.manhole_configuration.get('namespace', dict()).items():
            # Configured namespace values are dotted names resolved at startup.
            namespace[key] = reflect.namedAny(value)
        return namespace
class PipedTerminalSessionTransport(manhole_ssh.TerminalSessionTransport):
    """ TerminalSessionTransport that exposes the avatar on the chained protocol. """
    def __init__(self, proto, chainedProtocol, avatar, width, height):
        # Attach the avatar before base-class initialisation so the chained
        # protocol can see it from the moment the session starts.
        chainedProtocol.avatar = avatar
        manhole_ssh.TerminalSessionTransport.__init__(self, proto, chainedProtocol, avatar, width, height)
class PipedTerminalRealm(manhole_ssh.TerminalRealm):
    """ TerminalRealm that records the avatar id on each avatar and uses
    PipedTerminalSessionTransport for its sessions. """

    transportFactory = PipedTerminalSessionTransport

    def _getAvatar(self, avatarId):
        user = manhole_ssh.TerminalRealm._getAvatar(self, avatarId)
        # Remember which id produced this avatar so later code can identify it.
        user.avatarId = avatarId
        return user
class MultipleCheckersPortal(portal.Portal):
    """ A Portal subclass that authenticates against multiple checkers. """
    def registerChecker(self, checker, *credentialInterfaces):
        """ Register *checker*, keeping a list of checkers per credential
        interface (instead of a single one) so several may be consulted. """
        if not credentialInterfaces:
            credentialInterfaces = checker.credentialInterfaces
        for credentialInterface in credentialInterfaces:
            self.checkers.setdefault(credentialInterface, list()).append(checker)
    @defer.inlineCallbacks
    def login(self, credentials, mind, *interfaces):
        """ Try every checker registered for the first credential interface
        that matches; return the avatar from the first checker that accepts.
        Raises UnauthorizedLogin when all matching checkers refuse, and
        UnhandledCredentials when no registered interface matches. """
        for i in self.checkers:
            if i.providedBy(credentials):
                for checker in self.checkers[i]:
                    try:
                        avatar_id = yield checker.requestAvatarId(credentials)
                        avatar = yield self.realm.requestAvatar(avatar_id, mind, *interfaces)
                        defer.returnValue(avatar)
                    except conch_error.ValidPublicKey:
                        # This is how SSHPublicKeyDatabase says "Your public key is okay, now prove you have
                        # the private key to continue".
                        raise
                    except error.UnauthorizedLogin:
                        continue
                # All checkers for this interface refused the credentials.
                raise error.UnauthorizedLogin()
        ifac = interface.providedBy(credentials)
        raise error.UnhandledCredentials("No checker for %s" % ', '.join(map(reflect.qual, ifac)))
| 40.383178 | 115 | 0.672298 |
from twisted.application import internet, service, strports
from twisted.conch import manhole, manhole_ssh, error as conch_error
from twisted.conch.insults import insults
from twisted.conch.ssh import keys
from twisted.cred import error, portal
from twisted.internet import defer
from twisted.python import reflect
from zope import interface
from piped import resource
class ManholeProvider(object, service.MultiService):
interface.classProvides(resource.IResourceProvider)
def __init__(self):
service.MultiService.__init__(self)
def configure(self, runtime_environment):
self.setName('manhole')
self.setServiceParent(runtime_environment.application)
self.manholes = runtime_environment.get_configuration_value('manholes', dict())
for manhole_name, manhole_configuration in self.manholes.items():
if not manhole_configuration.get('enabled', True):
continue
manholeservice = ManholeService(manhole_configuration)
manholeservice.setName(manhole_name)
manholeservice.setServiceParent(self)
manholeservice.configure(runtime_environment)
class PipedManhole(manhole.ColoredManhole):
def connectionMade(self):
r = manhole.ColoredManhole.connectionMade(self)
# add a keyhandler for what my macbook sends when Im hitting backspace
self.keyHandlers['\x08'] = self.handle_BACKSPACE
self.keyHandlers['\x01'] = self.handle_HOME # CTRL-A
self.keyHandlers['\x05'] = self.handle_END # CTRL-E
self.keyHandlers['\x15'] = self.handle_BACKSPACE_LINE # CTRL-U
self.keyHandlers['\x17'] = self.handle_BACKSPACE_WORD # CTRL-W
return r
def handle_BACKSPACE_LINE(self):
while self.lineBufferIndex > 0:
self.handle_BACKSPACE()
WORD_DELIMITERS = """ .;:({['\""""
def handle_BACKSPACE_WORD(self):
self.handle_BACKSPACE()
while self.lineBufferIndex > 0 and self.lineBuffer[self.lineBufferIndex-1] not in self.WORD_DELIMITERS:
self.handle_BACKSPACE()
class PipedConchFactory(manhole_ssh.ConchFactory):
def __init__(self, portal, private_key=None, public_key=None, **kw):
manhole_ssh.ConchFactory.__init__(self, portal)
if private_key:
self.privateKeys = {
'ssh-rsa' : keys.Key.fromString(private_key)
}
if public_key:
self.publicKeys = {
'ssh-rsa' : keys.Key.fromString(public_key)
}
class ManholeService(service.MultiService):
protocolFactory = PipedManhole
conchFactory = PipedConchFactory
def __init__(self, manhole_configuration):
service.MultiService.__init__(self)
self.manhole_configuration = manhole_configuration
def configure(self, runtime_environment):
self.runtime_environment = runtime_environment
self.key_config = self._normalize_key_config(self.manhole_configuration.get('keys', dict()))
factory = self._make_factory()
listen = str(self.manhole_configuration.get('listen', self.manhole_configuration.get('port', 10022)))
tcpservice = strports.service(listen, factory)
tcpservice.setName(self.name)
tcpservice.setServiceParent(self)
self._configure_dependencies(self.manhole_configuration.get('dependencies', dict()))
def _configure_dependencies(self, dependency_map):
for dependency_key, dependency_configuration in dependency_map.items():
if isinstance(dependency_configuration, basestring):
dependency_configuration = dependency_map[dependency_key] = dict(provider=dependency_configuration)
self.dependencies = self.runtime_environment.create_dependency_map(self, **dependency_map)
def _normalize_key_config(self, key_config):
private_key_file = key_config.pop('private_key_file', None)
public_key_file = key_config.pop('public_key_file', None)
if private_key_file:
private_key_file = getattr(private_key_file, 'path', private_key_file)
key_config['private_key'] = open(private_key_file).read()
if public_key_file:
public_key_file = getattr(public_key_file, 'path', public_key_file)
key_config['public_key'] = open(public_key_file).read()
return key_config
def _make_factory(self):
checkers = self._make_checkers()
realm = PipedTerminalRealm()
portal_ = MultipleCheckersPortal(realm, checkers)
def chainProtocolFactory():
return insults.ServerProtocol(self.protocolFactory, namespace=self._get_namespace())
realm.chainedProtocolFactory = chainProtocolFactory
factory = self.conchFactory(portal_, **self.key_config)
return factory
def _make_checkers(self):
cs = list()
for checker_config in self.manhole_configuration['checkers'].values():
checker_name = checker_config.pop('checker')
checker_factory = reflect.namedAny(checker_name)
checker = checker_factory(**checker_config.get('arguments', dict()))
cs.append(checker)
return cs
def _get_namespace(self):
namespace = dict(runtime_environment=self.runtime_environment, dependencies=self.dependencies)
for key, value in self.manhole_configuration.get('namespace', dict()).items():
namespace[key] = reflect.namedAny(value)
return namespace
class PipedTerminalSessionTransport(manhole_ssh.TerminalSessionTransport):
def __init__(self, proto, chainedProtocol, avatar, width, height):
chainedProtocol.avatar = avatar
manhole_ssh.TerminalSessionTransport.__init__(self, proto, chainedProtocol, avatar, width, height)
class PipedTerminalRealm(manhole_ssh.TerminalRealm):
transportFactory = PipedTerminalSessionTransport
def _getAvatar(self, avatarId):
avatar = manhole_ssh.TerminalRealm._getAvatar(self, avatarId)
avatar.avatarId = avatarId
return avatar
class MultipleCheckersPortal(portal.Portal):
def registerChecker(self, checker, *credentialInterfaces):
if not credentialInterfaces:
credentialInterfaces = checker.credentialInterfaces
for credentialInterface in credentialInterfaces:
self.checkers.setdefault(credentialInterface, list()).append(checker)
@defer.inlineCallbacks
def login(self, credentials, mind, *interfaces):
for i in self.checkers:
if i.providedBy(credentials):
for checker in self.checkers[i]:
try:
avatar_id = yield checker.requestAvatarId(credentials)
avatar = yield self.realm.requestAvatar(avatar_id, mind, *interfaces)
defer.returnValue(avatar)
except conch_error.ValidPublicKey:
# This is how SSHPublicKeyDatabase says "Your public key is okay, now prove you have
raise
except error.UnauthorizedLogin:
continue
raise error.UnauthorizedLogin()
ifac = interface.providedBy(credentials)
raise error.UnhandledCredentials("No checker for %s" % ', '.join(map(reflect.qual, ifac)))
| true | true |
f71e166d754b8f4a25fb6851144de58b5e70a5e4 | 196 | py | Python | app/__init__.py | devchild/flask_rest_knockout_example | e44074b0a3080180a24bb23d6f27ef6dcbbc3829 | [
"MIT"
] | null | null | null | app/__init__.py | devchild/flask_rest_knockout_example | e44074b0a3080180a24bb23d6f27ef6dcbbc3829 | [
"MIT"
] | null | null | null | app/__init__.py | devchild/flask_rest_knockout_example | e44074b0a3080180a24bb23d6f27ef6dcbbc3829 | [
"MIT"
] | null | null | null | from flask import Flask
from flask.ext.httpauth import HTTPBasicAuth
# Shared HTTP Basic Auth handler, importable as `from app import auth`.
auth = HTTPBasicAuth()
# Application instance; settings are loaded from the top-level `config` module.
app = Flask(__name__)
app.config.from_object('config')
# NOTE(review): imported at the bottom -- presumably views/v1_0 import `app`,
# so placing these after app creation avoids a circular import; confirm.
from app import views
from app.api import v1_0
| 19.6 | 44 | 0.795918 | from flask import Flask
from flask.ext.httpauth import HTTPBasicAuth
auth = HTTPBasicAuth()
app = Flask(__name__)
app.config.from_object('config')
from app import views
from app.api import v1_0
| true | true |
f71e16b81129ca73af9944cd7c4cc7f6f2d538f2 | 3,823 | py | Python | akshare/index/index_cflp.py | J-Z-Z/akshare | 0a9ca71b381a272e2f56211e455ff2493dfed17a | [
"MIT"
] | 721 | 2021-09-21T12:10:33.000Z | 2022-03-31T09:47:01.000Z | akshare/index/index_cflp.py | J-Z-Z/akshare | 0a9ca71b381a272e2f56211e455ff2493dfed17a | [
"MIT"
] | 135 | 2021-09-21T12:07:54.000Z | 2022-03-31T14:15:36.000Z | akshare/index/index_cflp.py | J-Z-Z/akshare | 0a9ca71b381a272e2f56211e455ff2493dfed17a | [
"MIT"
] | 234 | 2021-09-21T12:16:27.000Z | 2022-03-31T09:47:04.000Z | #!/usr/bin/env python
# -*- coding:utf-8 -*-
"""
Date: 2021/12/27 15:47
Desc: 中国公路物流运价、运量指数
http://index.0256.cn/expx.htm
"""
import pandas as pd
import requests
def index_cflp_price(symbol: str = "周指数") -> pd.DataFrame:
    """
    China road logistics freight-price index (中国公路物流运价指数)
    http://index.0256.cn/expx.htm
    :param symbol: choice of {"周指数", "月指数", "季度指数", "年度指数"}
    :type symbol: str
    :return: table with 日期/定基指数/环比指数/同比指数 columns
    :rtype: pandas.DataFrame
    """
    # Server-side id for each reporting period.
    period_ids = {
        "周指数": "2",
        "月指数": "3",
        "季度指数": "4",
        "年度指数": "5",
    }
    url = "http://index.0256.cn/expcenter_trend.action"
    payload = {
        "marketId": "1",
        "attribute1": "5",
        "exponentTypeId": period_ids[symbol],
        "cateId": "2",
        "attribute2": "华北",
        "city": "",
        "startLine": "",
        "endLine": "",
    }
    headers = {
        "Origin": "http://index.0256.cn",
        "Referer": "http://index.0256.cn/expx.htm",
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/90.0.4430.212 Safari/537.36",
    }
    response = requests.post(url, data=payload, headers=headers)
    data_json = response.json()
    # chart1 carries the x axis (dates) plus the fixed-base series; chart2 and
    # chart3 carry the month-over-month and year-over-year series.
    series = [
        data_json["chart1"]["xLebal"],
        data_json["chart1"]["yLebal"],
        data_json["chart2"]["yLebal"],
        data_json["chart3"]["yLebal"],
    ]
    temp_df = pd.DataFrame(series).T
    temp_df.columns = ["日期", "定基指数", "环比指数", "同比指数"]
    temp_df["日期"] = pd.to_datetime(temp_df["日期"]).dt.date
    for column in ["定基指数", "环比指数", "同比指数"]:
        temp_df[column] = pd.to_numeric(temp_df[column])
    return temp_df
def index_cflp_volume(symbol: str = "月指数") -> pd.DataFrame:
    """
    China road logistics freight-volume index (中国公路物流运量指数)
    http://index.0256.cn/expx.htm
    :param symbol: choice of {"月指数", "季度指数", "年度指数"}
    :type symbol: str
    :return: table with 日期/定基指数/环比指数/同比指数 columns
    :rtype: pandas.DataFrame
    """
    # Server-side id for each reporting period (no weekly series here).
    period_ids = {
        "月指数": "3",
        "季度指数": "4",
        "年度指数": "5",
    }
    url = "http://index.0256.cn/volume_query.action"
    payload = {
        "type": "1",
        "marketId": "1",
        "expTypeId": period_ids[symbol],
        "startDate1": "",
        "endDate1": "",
        "city": "",
        "startDate3": "",
        "endDate3": "",
    }
    headers = {
        "Origin": "http://index.0256.cn",
        "Referer": "http://index.0256.cn/expx.htm",
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/90.0.4430.212 Safari/537.36",
    }
    response = requests.post(url, data=payload, headers=headers)
    data_json = response.json()
    # chart1 carries the x axis (dates) plus the fixed-base series; chart2 and
    # chart3 carry the month-over-month and year-over-year series.
    series = [
        data_json["chart1"]["xLebal"],
        data_json["chart1"]["yLebal"],
        data_json["chart2"]["yLebal"],
        data_json["chart3"]["yLebal"],
    ]
    temp_df = pd.DataFrame(series).T
    temp_df.columns = ["日期", "定基指数", "环比指数", "同比指数"]
    temp_df["日期"] = pd.to_datetime(temp_df["日期"]).dt.date
    for column in ["定基指数", "环比指数", "同比指数"]:
        temp_df[column] = pd.to_numeric(temp_df[column])
    return temp_df
if __name__ == "__main__":
    # Smoke-test every supported period for both indices; same call order as
    # before: price for all four periods, then volume for its three.
    for period in ("周指数", "月指数", "季度指数", "年度指数"):
        print(index_cflp_price(symbol=period))
    for period in ("月指数", "季度指数", "年度指数"):
        print(index_cflp_volume(symbol=period))
| 29.407692 | 140 | 0.578342 |
import pandas as pd
import requests
def index_cflp_price(symbol: str = "周指数") -> pd.DataFrame:
symbol_map = {
"周指数": "2",
"月指数": "3",
"季度指数": "4",
"年度指数": "5",
}
url = "http://index.0256.cn/expcenter_trend.action"
params = {
"marketId": "1",
"attribute1": "5",
"exponentTypeId": symbol_map[symbol],
"cateId": "2",
"attribute2": "华北",
"city": "",
"startLine": "",
"endLine": "",
}
headers = {
"Origin": "http://index.0256.cn",
"Referer": "http://index.0256.cn/expx.htm",
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/90.0.4430.212 Safari/537.36",
}
r = requests.post(url, data=params, headers=headers)
data_json = r.json()
temp_df = pd.DataFrame(
[
data_json["chart1"]["xLebal"],
data_json["chart1"]["yLebal"],
data_json["chart2"]["yLebal"],
data_json["chart3"]["yLebal"],
]
).T
temp_df.columns = ["日期", "定基指数", "环比指数", "同比指数"]
temp_df["日期"] = pd.to_datetime(temp_df["日期"]).dt.date
temp_df["定基指数"] = pd.to_numeric(temp_df["定基指数"])
temp_df["环比指数"] = pd.to_numeric(temp_df["环比指数"])
temp_df["同比指数"] = pd.to_numeric(temp_df["同比指数"])
return temp_df
def index_cflp_volume(symbol: str = "月指数") -> pd.DataFrame:
symbol_map = {
"月指数": "3",
"季度指数": "4",
"年度指数": "5",
}
url = "http://index.0256.cn/volume_query.action"
params = {
"type": "1",
"marketId": "1",
"expTypeId": symbol_map[symbol],
"startDate1": "",
"endDate1": "",
"city": "",
"startDate3": "",
"endDate3": "",
}
headers = {
"Origin": "http://index.0256.cn",
"Referer": "http://index.0256.cn/expx.htm",
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/90.0.4430.212 Safari/537.36",
}
r = requests.post(url, data=params, headers=headers)
data_json = r.json()
temp_df = pd.DataFrame(
[
data_json["chart1"]["xLebal"],
data_json["chart1"]["yLebal"],
data_json["chart2"]["yLebal"],
data_json["chart3"]["yLebal"],
]
).T
temp_df.columns = ["日期", "定基指数", "环比指数", "同比指数"]
temp_df["日期"] = pd.to_datetime(temp_df["日期"]).dt.date
temp_df["定基指数"] = pd.to_numeric(temp_df["定基指数"])
temp_df["环比指数"] = pd.to_numeric(temp_df["环比指数"])
temp_df["同比指数"] = pd.to_numeric(temp_df["同比指数"])
return temp_df
if __name__ == "__main__":
index_cflp_price_df = index_cflp_price(symbol="周指数")
print(index_cflp_price_df)
index_cflp_price_df = index_cflp_price(symbol="月指数")
print(index_cflp_price_df)
index_cflp_price_df = index_cflp_price(symbol="季度指数")
print(index_cflp_price_df)
index_cflp_price_df = index_cflp_price(symbol="年度指数")
print(index_cflp_price_df)
index_cflp_volume_df = index_cflp_volume(symbol="月指数")
print(index_cflp_volume_df)
index_cflp_volume_df = index_cflp_volume(symbol="季度指数")
print(index_cflp_volume_df)
index_cflp_volume_df = index_cflp_volume(symbol="年度指数")
print(index_cflp_volume_df)
| true | true |
f71e17377f920505ee763cba0e21f3a428218bb3 | 1,814 | py | Python | refex/python/test_python_pattern.py | ssbr/refex | 221d343b8584362900fb93a07ff96a4b9d9d4ece | [
"Apache-2.0"
] | 11 | 2020-08-28T03:54:49.000Z | 2022-01-06T05:55:21.000Z | refex/python/test_python_pattern.py | ssbr/refex | 221d343b8584362900fb93a07ff96a4b9d9d4ece | [
"Apache-2.0"
] | 5 | 2020-08-25T16:19:05.000Z | 2021-06-07T21:31:03.000Z | refex/python/test_python_pattern.py | ssbr/refex | 221d343b8584362900fb93a07ff96a4b9d9d4ece | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for refex.python.python_pattern."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tokenize
from absl.testing import absltest
from absl.testing import parameterized
from refex.python import python_pattern
class PythonPatternTest(parameterized.TestCase):
  """Tests for python_pattern.token_pattern."""

  @parameterized.parameters('', 'x', 'x y')
  def test_simple_nonpattern(self, pattern):
    """A pattern without metavariables round-trips unchanged."""
    tokens, _ = python_pattern.token_pattern(pattern)
    self.assertEqual(tokenize.untokenize(tokens), pattern)

  @parameterized.parameters('$x', 'foo + $x', 'import $x', '$x "$y"', '$x = 0')
  def test_simple_pattern(self, pattern):
    """A lone $x metavariable is located and round-trips as plain x."""
    tokens, metavar_indices = python_pattern.token_pattern(pattern)
    [index] = metavar_indices
    # The metavariable token's text is 'x' -- the only variable in the pattern.
    self.assertEqual(tokens[index][1], 'x')
    # Untokenizing reproduces the input with '$x' collapsed to 'x'.
    self.assertEqual(tokenize.untokenize(tokens), pattern.replace('$x', 'x'))

  @parameterized.parameters('$1', '$', '$\n', '$[', '$""', '$ x', '$\nx')
  def test_syntax_error(self, pattern):
    """Malformed metavariable syntax raises SyntaxError."""
    with self.assertRaises(SyntaxError):
      python_pattern.token_pattern(pattern)
if __name__ == '__main__':
  absltest.main()  # delegate to absl's test runner when executed directly
| 35.568627 | 80 | 0.732084 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tokenize
from absl.testing import absltest
from absl.testing import parameterized
from refex.python import python_pattern
class PythonPatternTest(parameterized.TestCase):
@parameterized.parameters('', 'x', 'x y')
def test_simple_nonpattern(self, pattern):
tokenized, _ = python_pattern.token_pattern(pattern)
self.assertEqual(tokenize.untokenize(tokenized), pattern)
@parameterized.parameters('$x', 'foo + $x', 'import $x', '$x "$y"', '$x = 0')
def test_simple_pattern(self, pattern):
tokenized, [metavar_i] = python_pattern.token_pattern(pattern)
self.assertEqual(tokenized[metavar_i][1], 'x')
# it round trips to the same string except $x -> x
self.assertEqual(tokenize.untokenize(tokenized), pattern.replace('$x', 'x'))
@parameterized.parameters('$1', '$', '$\n', '$[', '$""', '$ x', '$\nx')
def test_syntax_error(self, pattern):
with self.assertRaises(SyntaxError):
python_pattern.token_pattern(pattern)
if __name__ == '__main__':
absltest.main()
| true | true |
f71e17f2e51f05e1bc7f152554674fa888f9cca1 | 463 | py | Python | python_arrays.py | egriswol/astr-119-hw-1 | e290355de8f48b9def3fdacf4779ac4a3c51a003 | [
"MIT"
] | null | null | null | python_arrays.py | egriswol/astr-119-hw-1 | e290355de8f48b9def3fdacf4779ac4a3c51a003 | [
"MIT"
] | 1 | 2018-10-18T17:49:41.000Z | 2018-10-18T17:49:41.000Z | python_arrays.py | egriswol/astr-119-hw-1 | e290355de8f48b9def3fdacf4779ac4a3c51a003 | [
"MIT"
] | 1 | 2018-10-18T01:31:32.000Z | 2018-10-18T01:31:32.000Z | x = [0.0, 3.0, 5.0, 2.5, 3.7] #define array
# Walk through the core list operations; the list `x` is defined just above.
print(type(x))

# .pop(i) removes by position: index 2 holds the third element (the 5.0).
x.pop(2)
print(x)

# .remove(v) removes by value: the first 2.5 found.
x.remove(2.5)
print(x)

# .append(v) grows the list at the end.
x.append(1.2)
print(x)

# .copy() gives an independent shallow copy.
y = x.copy()
print(y)

# Occurrences of 0.0, then the position of the value 3.7.
print(y.count(0.0))
print(y.index(3.7))

# Sort ascending in place, then flip to descending.
y.sort()
print(y)
y.reverse()
print(y)

# Empty the copy; x is untouched.
y.clear()
print(y)
| 12.513514 | 43 | 0.667387 | x = [0.0, 3.0, 5.0, 2.5, 3.7]
print(type(x))
x.pop(2)
print(x)
x.remove(2.5)
print(x)
x.append(1.2)
print(x)
y = x.copy()
print(y)
print (y.count(0.0))
print(y.index(3.7))
y.sort()
print(y)
y.reverse()
print(y)
y.clear()
print(y)
| true | true |
f71e180e422103be66c72b48961b969337bef914 | 1,453 | py | Python | jocular/stretch.py | MartinCooke/jocular | 635816d4ef6aa6ea75187137e25386dad2d551e9 | [
"MIT"
] | 6 | 2021-03-21T16:46:44.000Z | 2021-11-27T14:07:06.000Z | jocular/stretch.py | MartinCooke/jocular | 635816d4ef6aa6ea75187137e25386dad2d551e9 | [
"MIT"
] | null | null | null | jocular/stretch.py | MartinCooke/jocular | 635816d4ef6aa6ea75187137e25386dad2d551e9 | [
"MIT"
] | null | null | null | ''' Various stretch functions. Easy to add more. Room for refinement,
methinks.
'''
import numpy as np
def stretch(x, method='linear', param=None, NR=0, background=None):
    '''Stretch x with the chosen method, optionally damping the low end.

    When NR is positive and a background estimate is supplied, the stretched
    data is multiplied by a gentle hyperbolic curve that suppresses values
    near zero (NR / 100 is used as the suppression fraction).
    '''
    # NOTE(review): `background` is only tested for None and never used
    # otherwise -- confirm whether it is meant to feed the suppression.
    y = stretch_main(x, method=method, param=param)
    if NR <= 0 or background is None:
        # no noise reduction requested: plain stretch
        return y
    return y * stretch_main(x, method='hyper', param=1 - .1 * (NR / 100))
def stretch_main(x, method='linear', param=None):
    '''Apply a single stretch function to x and return the result.

    Known methods: linear, hyper, log, asinh, gamma; any other name returns
    x unchanged. `param` controls the stretch strength.
    '''
    if method == 'linear':
        return x

    if method == 'hyper':
        softness = .02
        knee = softness * (1 + softness - param)
        return (1 + knee) * (x / (x + knee))

    if method == 'log':
        scale = param * 200
        return np.log(scale * x + 1) / np.log(scale + 1)

    if method == 'asinh':
        scale = param * 2000
        # tiny offset keeps the denominator nonzero when param is 0
        return np.arcsinh(scale * x) / np.arcsinh(scale + .0000001)

    if method == 'gamma':
        # Piecewise curve: linear with slope s below a0, a shifted power law
        # above, with s and d chosen so the two pieces join at x == a0.
        g = 1 - param
        a0 = .01
        s = g / (a0 * (g - 1) + a0 ** (1 - g))
        d = (1 / (a0 ** g * (g - 1) + 1)) - 1
        y = x.copy()
        y[x < a0] = x[x < a0] * s
        y[x >= a0] = (1 + d) * (x[x >= a0] ** g) - d
        return y

    # unknown method: identity
    return x
| 26.907407 | 69 | 0.494838 |
import numpy as np
def stretch(x, method='linear', param=None, NR=0, background=None):
if (NR <= 0) or (background is None):
return stretch_main(x, method=method, param=param)
else:
y = stretch_main(x, method=method, param=param)
hyper_param = 1 - .1 * (NR / 100)
return y * stretch_main(x, method='hyper', param=hyper_param)
def stretch_main(x, method='linear', param=None):
if method == 'linear':
return x
if method == 'hyper':
d = .02
c = d * (1 + d - param)
return (1 + c) * (x / (x + c))
if method == 'log':
c = param * 200
return np.log(c*x + 1) / np.log(c + 1)
if method == 'asinh':
c = param * 2000
return np.arcsinh(c*x) / np.arcsinh(c + .0000001)
if method == 'gamma':
y = x.copy()
g = 1 - param
a0 = .01
s = g / (a0 * (g - 1) + a0 ** (1 - g))
d = (1 / (a0 ** g * (g - 1) + 1)) - 1
y[x < a0] = x[x < a0] * s
y[x >= a0] = (1 + d) * (x[x >= a0] ** g) - d
return y
else:
return x
| true | true |
f71e19a3303bab6b818caaa27d08b643c1e25ed5 | 60 | py | Python | modules/documentarchive/__init__.py | Medisur/journalmanagement | bc356e8d3354529a14a5e04bec3d80c03ed1c0ec | [
"MIT"
] | 1 | 2019-04-16T08:53:16.000Z | 2019-04-16T08:53:16.000Z | modules/documentarchive/__init__.py | Medisur/journalmanagement | bc356e8d3354529a14a5e04bec3d80c03ed1c0ec | [
"MIT"
] | null | null | null | modules/documentarchive/__init__.py | Medisur/journalmanagement | bc356e8d3354529a14a5e04bec3d80c03ed1c0ec | [
"MIT"
] | null | null | null |
from .archive import Archive
from .resource import Resource | 20 | 30 | 0.833333 |
from .archive import Archive
from .resource import Resource | true | true |
f71e1a4d75fec1c60082e0cb4c6a760f8637f5c3 | 9,493 | py | Python | doc/conf.py | janhaviagrawal/ApplicationInsights-Python | 49a58cd688a866c71c952019b6c9ee71772443c3 | [
"MIT"
] | null | null | null | doc/conf.py | janhaviagrawal/ApplicationInsights-Python | 49a58cd688a866c71c952019b6c9ee71772443c3 | [
"MIT"
] | null | null | null | doc/conf.py | janhaviagrawal/ApplicationInsights-Python | 49a58cd688a866c71c952019b6c9ee71772443c3 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Application Insights SDK for Python documentation build configuration file, created by
# sphinx-quickstart on Mon Dec 22 23:32:45 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinx.ext.ifconfig',
'sphinx.ext.viewcode',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = [ 'templates' ]
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'Application Insights SDK for Python'
copyright = '2018, Microsoft'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.11.6'
# The full version, including alpha/beta/rc tags.
release = '0.11.6'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = [ 'build' ]
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = [ 'static' ]
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
html_domain_indices = False
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'ApplicationInsightsSDKforPythondoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'ApplicationInsightsSDKforPython.tex', 'Application Insights SDK for Python Documentation', 'Microsoft', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'applicationinsightssdkforpython', 'Application Insights SDK for Python Documentation', ['Microsoft'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'ApplicationInsightsSDKforPython', 'Application Insights SDK for Python Documentation',
'Microsoft', 'ApplicationInsightsSDKforPython', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'http://docs.python.org/': None}
| 32.961806 | 127 | 0.721163 |
import sys
import os
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinx.ext.ifconfig',
'sphinx.ext.viewcode',
]
templates_path = [ 'templates' ]
source_suffix = '.rst'
master_doc = 'index'
project = 'Application Insights SDK for Python'
copyright = '2018, Microsoft'
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.11.6'
# The full version, including alpha/beta/rc tags.
release = '0.11.6'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = [ 'build' ]
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = [ 'static' ]
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
html_domain_indices = False
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'ApplicationInsightsSDKforPythondoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'ApplicationInsightsSDKforPython.tex', 'Application Insights SDK for Python Documentation', 'Microsoft', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'applicationinsightssdkforpython', 'Application Insights SDK for Python Documentation', ['Microsoft'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'ApplicationInsightsSDKforPython', 'Application Insights SDK for Python Documentation',
'Microsoft', 'ApplicationInsightsSDKforPython', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
intersphinx_mapping = {'http://docs.python.org/': None}
| true | true |
f71e1a784d2b6bd92da4b28d2d1773a3db8e5216 | 1,048 | py | Python | dist-packages/samba/tests/auth.py | Jianwei-Wang/python2.7_lib | 911b8e81512e5ac5f13e669ab46f7693ed897378 | [
"PSF-2.0"
] | 480 | 2015-02-03T11:59:43.000Z | 2022-03-21T13:23:29.000Z | lib/python2.7/site-packages/samba/tests/auth.py | brianwrf/pth-toolkit | 3641cdc76c0f52275315c9b18bf08b22521bd4d7 | [
"BSD-2-Clause"
] | 6 | 2015-02-03T14:06:12.000Z | 2021-05-11T12:07:02.000Z | lib/python2.7/site-packages/samba/tests/auth.py | brianwrf/pth-toolkit | 3641cdc76c0f52275315c9b18bf08b22521bd4d7 | [
"BSD-2-Clause"
] | 137 | 2015-02-05T13:31:57.000Z | 2022-02-23T09:44:18.000Z | # Unix SMB/CIFS implementation.
# Copyright (C) Jelmer Vernooij <jelmer@samba.org> 2007
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""Tests for the Auth Python bindings.
Note that this just tests the bindings work. It does not intend to test
the functionality, that's already done in other tests.
"""
from samba import auth
import samba.tests
class AuthTests(samba.tests.TestCase):
    """Smoke tests for the samba.auth Python bindings."""

    def test_system_session(self):
        # Only checks that the binding call succeeds without raising;
        # functional coverage lives in other test suites.
        auth.system_session()
| 32.75 | 71 | 0.757634 |
from samba import auth
import samba.tests
class AuthTests(samba.tests.TestCase):
def test_system_session(self):
auth.system_session()
| true | true |
f71e1c6aa999072506fb58ee77bb770ae4345ad1 | 1,262 | py | Python | assignments/03_solfege/solfege.py | ABennett97/be434-fall-2021 | 68762dc3588751a33519203dcf8b638f79501c15 | [
"MIT"
] | null | null | null | assignments/03_solfege/solfege.py | ABennett97/be434-fall-2021 | 68762dc3588751a33519203dcf8b638f79501c15 | [
"MIT"
] | null | null | null | assignments/03_solfege/solfege.py | ABennett97/be434-fall-2021 | 68762dc3588751a33519203dcf8b638f79501c15 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
"""
Author : abennett1 <abennett1@localhost>
Date : 2021-09-20
Purpose: Rock the Casbah
"""
import argparse
# --------------------------------------------------
def get_args():
    """Parse and return the command-line arguments."""

    parser = argparse.ArgumentParser(
        description='Do Re Mi solfege',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)

    # One or more solfege syllables, e.g. "Do Re Mi".
    parser.add_argument(
        'text', metavar='str', nargs='+', help='Solfege')

    return parser.parse_args()
# --------------------------------------------------
def main():
    """Print the Sound-of-Music line for each requested syllable."""

    args = get_args()

    # Lookup table mapping each solfege syllable to its lyric line.
    lines = {
        'Do': 'Do, A deer, a female deer',
        'Re': 'Re, A drop of golden sun',
        'Mi': 'Mi, A name I call myself',
        'Fa': 'Fa, A long long way to run',
        'Sol': 'Sol, A needle pulling thread',
        'La': 'La, A note to follow sol',
        'Ti': 'Ti, A drink with jam and bread',
    }

    for word in args.text:
        if word in lines:
            print(lines[word])
        else:
            print('I don\'t know ' + '"' + word + '"')
# --------------------------------------------------
if __name__ == '__main__':
main()
| 27.434783 | 88 | 0.473059 |
import argparse
def get_args():
parser = argparse.ArgumentParser(
description='Do Re Mi solfege',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('text',
metavar='str',
nargs='+',
help='Solfege')
return parser.parse_args()
def main():
args = get_args()
syllable = {'Do': 'Do, A deer, a female deer', 'Re': 'Re, A drop of golden sun',
'Mi': 'Mi, A name I call myself', 'Fa': 'Fa, A long long way to run',
'Sol': 'Sol, A needle pulling thread', 'La': 'La, A note to follow sol',
'Ti': 'Ti, A drink with jam and bread'}
for word in args.text:
if word in syllable:
print(syllable.get(word))
else:
print("I don't know " + '"' + word + '"')
# --------------------------------------------------
if __name__ == '__main__':
main()
| true | true |
f71e1ddbb26c4a4945e9fa2cf486485926f6161d | 588 | py | Python | examples/compass-display.py | pimoroni/micropython-scrollbit | 4b9b51830430984e27006aa9ae785ea68c7f397c | [
"MIT"
] | 6 | 2018-12-24T15:17:24.000Z | 2021-05-02T22:33:36.000Z | examples/compass-display.py | pimoroni/micropython-scrollbit | 4b9b51830430984e27006aa9ae785ea68c7f397c | [
"MIT"
] | 5 | 2018-05-18T08:51:51.000Z | 2020-07-31T10:37:34.000Z | examples/compass-display.py | pimoroni/micropython-scrollbit | 4b9b51830430984e27006aa9ae785ea68c7f397c | [
"MIT"
] | 7 | 2018-05-17T16:43:38.000Z | 2020-07-29T23:58:08.000Z |
# Shows the compass heading on your scroll:bit
# You will need to calibrate the compass by tilting the micro:bit
# when your program starts.
# Flash a blank file to your micro:bit,
# then save this as main.py and transfer it to your micro:bit using the Files menu
# you will also need to copy over scrollbit.py, see README.md for details.
import scrollbit
import microbit
import time
# Calibrate once at start-up, then refresh the displayed heading
# once per second forever.
microbit.compass.calibrate()

while True:
    heading = str(microbit.compass.heading())
    scrollbit.clear()
    scrollbit.write(heading, offset_col=1, offset_row=1, brightness=100)
    scrollbit.show()
    time.sleep(1)
| 28 | 96 | 0.753401 |
import scrollbit
import microbit
import time
microbit.compass.calibrate()
while True:
scrollbit.clear()
scrollbit.write(str(microbit.compass.heading()), offset_col=1, offset_row=1, brightness=100)
scrollbit.show()
time.sleep(1)
| true | true |
f71e1faa52ac268ed1a984e551256e4760ebee06 | 9,327 | py | Python | cardano/storage.py | safeada/python-cardano | dc1c4203602b562ae5c42bf2f0e9dfa87dc12450 | [
"MIT"
] | 32 | 2018-08-02T02:02:38.000Z | 2021-12-13T17:18:27.000Z | cardano/storage.py | yihuang/cardano-utils | dc1c4203602b562ae5c42bf2f0e9dfa87dc12450 | [
"MIT"
] | 5 | 2018-11-16T10:31:47.000Z | 2021-05-24T03:30:34.000Z | cardano/storage.py | yihuang/cardano-utils | dc1c4203602b562ae5c42bf2f0e9dfa87dc12450 | [
"MIT"
] | 15 | 2018-08-01T09:48:01.000Z | 2021-08-11T13:05:36.000Z | '''
* Use rocksdb as cardano-sl did.
* Store each epoch in a separate db.
'b/' + hash -> block data
'u/' + hash -> undo data
g -> hash of genesis block of epoch.
* Main database:
* 'c/tip' -> hash
* 'b/' + hash -> BlockHeader
* 'e/fl/' + hash -> hash of next block.
* 'ut/t/' + txIn -> TxOut
* 's/' + stake holder id
* 's/ftssum'
* 'a/' + addr -> 1 # address discovery.
Sync
----
* get headers from storage current tip to network tip.
* download blocks and save to db.
'''
import os
import cbor
import rocksdb
from .block import DecodedBlock, DecodedBlockHeader
from . import config
def iter_prefix(db, prefix):
    """Yield every (key, value) pair in *db* whose key starts with *prefix*.

    Relies on rocksdb's sorted key order: seek to the prefix, then stop
    at the first key that no longer matches it.
    """
    cursor = db.iteritems()
    cursor.seek(prefix)
    for key, value in cursor:
        if not key.startswith(prefix):
            break
        yield key, value
def remove_prefix(db, prefix):
    """Delete every key in *db* that starts with *prefix*, in one batch."""
    wb = rocksdb.WriteBatch()
    for key, _ in iter_prefix(db, prefix):
        wb.delete(key)
    db.write(wb)
class Storage(object):
    """RocksDB-backed block-chain store, mirroring cardano-sl's layout.

    One main database holds headers, forward links ('e/fl/'), the UTxO
    set ('ut/t/'), the address index ('a/') and the tip ('c/tip');
    block bodies and undo data live in a separate database per epoch
    (see the module docstring for the full key scheme).
    """

    def __init__(self, root_path, readonly=False):
        """Open (creating on demand) the main database under *root_path*."""
        print('create storage at', root_path)
        if not os.path.exists(root_path):
            os.makedirs(root_path)
        self._root_path = root_path
        opt = rocksdb.Options(create_if_missing=True)
        self.db = rocksdb.DB(os.path.join(self._root_path, 'db'), opt, readonly)
        self._tip = None  # cache current tip header in memory.

        # cache recent used epoch db (one handle at a time).
        self._current_epoch_db = None
        self._current_epoch = None

    def epoch_db_path(self, epoch):
        """Return the filesystem path of the per-epoch database."""
        return os.path.join(self._root_path, 'epoch%d' % epoch)

    def open_epoch_db(self, epoch, readonly=False):
        """Open the database for *epoch*, reusing the cached handle.

        NOTE(review): a cached handle is reused regardless of its
        original *readonly* flag — confirm this is intended.
        """
        if epoch != self._current_epoch:
            self._current_epoch = epoch
            self._current_epoch_db = rocksdb.DB(
                self.epoch_db_path(epoch),
                rocksdb.Options(create_if_missing=True),
                readonly
            )
        return self._current_epoch_db

    def load_tip(self):
        """Read the tip header from disk; None when no tip is stored."""
        h = self.db.get(b'c/tip')
        if h:
            return self.blockheader(h)

    def tip(self):
        """Return the current tip header (cached after the first load)."""
        if not self._tip:
            self._tip = self.load_tip()
        return self._tip

    def set_tip(self, hdr, batch=None):
        """Set *hdr* as the new tip, writing through *batch* when given."""
        self._tip = hdr
        (batch or self.db).put(b'c/tip', hdr.hash())

    def blockheader(self, h):
        """Return the decoded header for hash *h*, or None if unknown."""
        buf = self.db.get(b'b/' + h)
        if buf:
            return DecodedBlockHeader.from_raw(buf, h)

    def raw_block(self, hdr):
        """Return the raw block bytes for header *hdr*, or None."""
        db = self.open_epoch_db(hdr.slot()[0], readonly=True)
        buf = db.get(b'b/' + hdr.hash())
        if buf:
            return buf

    def block(self, hdr):
        """Return the decoded block for header *hdr*, or None."""
        raw = self.raw_block(hdr)
        if raw:
            return DecodedBlock.from_raw(raw)

    def undos(self, hdr):
        """Return the decoded undo data for *hdr*, or None.

        As written by append_block: one list of spent TxOut per
        transaction (genesis blocks have no undo record).
        """
        db = self.open_epoch_db(hdr.slot()[0], readonly=True)
        buf = db.get(b'u/' + hdr.hash())
        if buf:
            return cbor.loads(buf)

    def genesis_block(self, epoch):
        """Return the genesis block of *epoch*; asserts the epoch exists."""
        db = self.open_epoch_db(epoch, readonly=True)
        h = db.get(b'g')
        assert h, 'epoch not exist: %d' % epoch
        return DecodedBlock.from_raw(db.get(h))

    def blocks_rev(self, start_hash=None):
        'Iterate blocks backwardly, from *start_hash* (default: tip).'
        current_hash = start_hash or self.tip().hash()
        current_epoch = self.blockheader(current_hash).slot()[0]
        current_epoch_db = self.open_epoch_db(current_epoch, readonly=True)
        while True:
            raw = current_epoch_db.get(b'b/' + current_hash)
            if not raw:
                # Block not found in this epoch db: try decrease epoch id.
                current_epoch -= 1
                if current_epoch < 0:
                    break
                current_epoch_db = self.open_epoch_db(current_epoch, readonly=True)
                continue
            blk = DecodedBlock(cbor.loads(raw), raw)
            yield blk
            current_hash = blk.header().prev_header()

    def blocks(self, start_hash=None):
        '''Iterate blocks forwardly, following the 'e/fl/' forward links.

        Starts at *start_hash* (default: the hard-coded genesis hash of
        epoch 0).  Assumes the start block exists in the main db.
        '''
        if start_hash:
            current_epoch, _ = DecodedBlockHeader(
                cbor.loads(self.db.get(b'b/' + start_hash))
            ).slot()
        else:
            start_hash = config.GENESIS_BLOCK_HASH
            current_epoch = 0
        current_epoch_db = self.open_epoch_db(current_epoch, readonly=True)
        current_hash = start_hash
        raw = current_epoch_db.get(b'b/' + current_hash)
        yield DecodedBlock(cbor.loads(raw), raw)
        while True:
            current_hash = self.db.get(b'e/fl/' + current_hash)
            if not current_hash:
                return
            raw = current_epoch_db.get(b'b/' + current_hash)
            if raw:
                yield DecodedBlock(cbor.loads(raw), raw)
                continue
            # Not in the current epoch db: try increase epoch number.
            current_epoch += 1
            current_epoch_db = self.open_epoch_db(current_epoch, readonly=True)
            if not current_epoch_db:
                # NOTE(review): open_epoch_db always returns a handle
                # (create_if_missing=True), so this guard never fires —
                # confirm intended behaviour.
                return
            raw = current_epoch_db.get(b'b/' + current_hash)
            if not raw:
                return
            yield DecodedBlock(cbor.loads(raw), raw)

    def blockheaders_rev(self, start=None):
        'Iterate block headers backwardly from *start* hash (default: tip).'
        current_hash = start or self.tip().hash()
        while True:
            raw = self.db.get(b'b/' + current_hash)
            if not raw:
                break
            hdr = DecodedBlockHeader(cbor.loads(raw), raw)
            yield hdr
            current_hash = hdr.prev_header()

    def blockheaders(self, start=None):
        """Iterate block headers forwardly from *start* hash (default: genesis)."""
        current_hash = start or config.GENESIS_BLOCK_HASH
        while True:
            raw = self.db.get(b'b/' + current_hash)
            yield DecodedBlockHeader.from_raw(raw, current_hash)
            current_hash = self.db.get(b'e/fl/' + current_hash)
            if not current_hash:
                break

    def iter_header_hash(self, start=None):
        """Yield header hashes forwardly from *start* (default: genesis)."""
        current_hash = start or config.GENESIS_BLOCK_HASH
        while True:
            yield current_hash
            current_hash = self.db.get(b'e/fl/' + current_hash)
            if not current_hash:
                break

    def blockheaders_noorder(self):
        'Iterate block headers in rocksdb key order (fastest, unordered).'
        return map(
            lambda t: DecodedBlockHeader.from_raw(t[1], t[0][2:]),
            iter_prefix(self.db, b'b/')
        )

    def append_block(self, block):
        """Append *block* on top of the current tip; return its undo data.

        First updates the main db (header, forward link, UTxO changes,
        address index, tip) in one write batch, then writes the body
        (plus undo data for main blocks) into the epoch db in a second
        batch.  NOTE(review): the two batches are not atomic with
        respect to each other.
        """
        hdr = block.header()
        batch = rocksdb.WriteBatch()
        # check prev_hash links onto the current tip.
        tip = self.tip()
        if tip:
            assert hdr.prev_header() == tip.hash(), 'invalid block.'
        h = hdr.hash()
        batch.put(b'b/' + h, hdr.raw())
        batch.put(b'e/fl/' + hdr.prev_header(), h)
        undos = None
        if not block.is_genesis():
            # Record the outputs being spent (for rollback) before
            # mutating the UTxO set, and index every output address.
            undos = self._get_block_undos(block)
            self.utxo_apply_block(block, batch)
            for tx in block.transactions():
                for out in tx.outputs():
                    batch.put(b'a/' + out.addr, b'')
        self.set_tip(hdr, batch)
        self.db.write(batch)

        # write body into the per-epoch database.
        epoch, _ = hdr.slot()
        db = self.open_epoch_db(epoch, readonly=False)
        batch = rocksdb.WriteBatch()
        if hdr.is_genesis():
            # First (genesis) block of the epoch: remember its hash.
            assert not db.get(b'g')
            batch.put(b'g', h)
        else:
            batch.put(b'u/' + h, cbor.dumps(undos))
        batch.put(b'b/' + h, block.raw())
        db.write(batch)
        return undos

    def _get_block_undos(self, block):
        """Per transaction, collect the TxOut spent by each of its inputs."""
        return [[self.get_output(txin) for txin in tx.inputs()]
                for tx in block.transactions()]

    def utxo_apply_block(self, block, batch):
        """Queue *block*'s UTxO changes on *batch*: spend inputs, add outputs."""
        txins, utxo = block.utxos()
        for txin in txins:
            batch.delete(b'ut/t/' + cbor.dumps(txin))
        for txin, txout in utxo.items():
            batch.put(b'ut/t/' + cbor.dumps(txin), cbor.dumps(txout))

    def iter_utxo(self):
        """Yield every (TxIn, TxOut) pair of the current UTxO set."""
        from .wallet import TxIn, TxOut
        prefix = b'ut/t/'
        for k, v in iter_prefix(self.db, prefix):
            yield TxIn(*cbor.loads(k[len(prefix):])), TxOut(*cbor.loads(v))

    def iter_addresses(self):
        """Yield every address recorded in the 'a/' index (raw bytes)."""
        it = self.db.iterkeys()
        it.seek(b'a/')
        for k in it:
            if not k.startswith(b'a/'):
                break
            yield k[2:]

    def get_output(self, txin):
        """Return the unspent TxOut referenced by *txin*, or None."""
        from .wallet import TxOut
        data = self.db.get(b'ut/t/' + cbor.dumps(txin))
        if data:
            return TxOut(*cbor.loads(data))
def hash_range(store, hstart, hstop, depth_limit):
    """Yield the header hashes from *hstart* to *hstop*, both inclusive.

    *hstop* must be a descendant of *hstart* and the distance between
    them (by header difficulty) must be below *depth_limit*.

    Raises AssertionError when either hash is unknown, the order is
    wrong, or the range exceeds *depth_limit*.
    """
    if hstart == hstop:
        assert depth_limit > 0
        yield hstart
        return
    start = store.blockheader(hstart)
    stop = store.blockheader(hstop)
    assert start and stop
    assert stop.diffculty() > start.diffculty()
    assert stop.diffculty() - start.diffculty() < depth_limit
    # Walk the forward links from the *hash* (iter_header_hash expects a
    # hash, not a header object) and stop once hstop itself was yielded.
    # The original code iterated from the header object and compared the
    # yielded hash against the header, so it never terminated at hstop.
    for h in store.iter_header_hash(hstart):
        yield h
        if h == hstop:
            break
def fetch_raw_blocks(store, hstart, hstop):
    '''Yield the raw (serialized) blocks for the hash range [hstart, hstop].

    The range depth is bounded by the protocol's recoveryHeadersMessage
    limit from the chain configuration.
    '''
    depth_limit = config.CHAIN['block']['recoveryHeadersMessage']
    for block_hash in hash_range(store, hstart, hstop, depth_limit):
        yield store.raw_block(store.blockheader(block_hash))
def stream_raw_blocks(store, hstart):
    """Yield raw blocks forward from *hstart* until the forward links end."""
    for block_hash in store.iter_header_hash(hstart):
        yield store.raw_block(store.blockheader(block_hash))
| 30.986711 | 83 | 0.566956 | import os
import cbor
import rocksdb
from .block import DecodedBlock, DecodedBlockHeader
from . import config
def iter_prefix(db, prefix):
it = db.iteritems()
it.seek(prefix)
for k, v in it:
if not k.startswith(prefix):
break
yield k, v
def remove_prefix(db, prefix):
batch = rocksdb.WriteBatch()
for k, _ in iter_prefix(db, prefix):
batch.delete(k)
db.write(batch)
class Storage(object):
def __init__(self, root_path, readonly=False):
print('create storage at', root_path)
if not os.path.exists(root_path):
os.makedirs(root_path)
self._root_path = root_path
opt = rocksdb.Options(create_if_missing=True)
self.db = rocksdb.DB(os.path.join(self._root_path, 'db'), opt, readonly)
self._tip = None
self._current_epoch_db = None
self._current_epoch = None
def epoch_db_path(self, epoch):
return os.path.join(self._root_path, 'epoch%d' % epoch)
def open_epoch_db(self, epoch, readonly=False):
if epoch != self._current_epoch:
self._current_epoch = epoch
self._current_epoch_db = rocksdb.DB(
self.epoch_db_path(epoch),
rocksdb.Options(create_if_missing=True),
readonly
)
return self._current_epoch_db
def load_tip(self):
h = self.db.get(b'c/tip')
if h:
return self.blockheader(h)
def tip(self):
if not self._tip:
self._tip = self.load_tip()
return self._tip
def set_tip(self, hdr, batch=None):
self._tip = hdr
(batch or self.db).put(b'c/tip', hdr.hash())
def blockheader(self, h):
buf = self.db.get(b'b/' + h)
if buf:
return DecodedBlockHeader.from_raw(buf, h)
def raw_block(self, hdr):
db = self.open_epoch_db(hdr.slot()[0], readonly=True)
buf = db.get(b'b/' + hdr.hash())
if buf:
return buf
def block(self, hdr):
raw = self.raw_block(hdr)
if raw:
return DecodedBlock.from_raw(raw)
def undos(self, hdr):
db = self.open_epoch_db(hdr.slot()[0], readonly=True)
buf = db.get(b'u/' + hdr.hash())
if buf:
return cbor.loads(buf)
def genesis_block(self, epoch):
db = self.open_epoch_db(epoch, readonly=True)
h = db.get(b'g')
assert h, 'epoch not exist: %d' % epoch
return DecodedBlock.from_raw(db.get(h))
def blocks_rev(self, start_hash=None):
current_hash = start_hash or self.tip().hash()
current_epoch = self.blockheader(current_hash).slot()[0]
current_epoch_db = self.open_epoch_db(current_epoch, readonly=True)
while True:
raw = current_epoch_db.get(b'b/' + current_hash)
if not raw:
current_epoch -= 1
if current_epoch < 0:
break
current_epoch_db = self.open_epoch_db(current_epoch, readonly=True)
continue
blk = DecodedBlock(cbor.loads(raw), raw)
yield blk
current_hash = blk.header().prev_header()
def blocks(self, start_hash=None):
if start_hash:
current_epoch, _ = DecodedBlockHeader(
cbor.loads(self.db.get(b'b/' + start_hash))
).slot()
else:
start_hash = config.GENESIS_BLOCK_HASH
current_epoch = 0
current_epoch_db = self.open_epoch_db(current_epoch, readonly=True)
current_hash = start_hash
raw = current_epoch_db.get(b'b/' + current_hash)
yield DecodedBlock(cbor.loads(raw), raw)
while True:
current_hash = self.db.get(b'e/fl/' + current_hash)
if not current_hash:
return
raw = current_epoch_db.get(b'b/' + current_hash)
if raw:
yield DecodedBlock(cbor.loads(raw), raw)
continue
current_epoch += 1
current_epoch_db = self.open_epoch_db(current_epoch, readonly=True)
if not current_epoch_db:
return
raw = current_epoch_db.get(b'b/' + current_hash)
if not raw:
return
yield DecodedBlock(cbor.loads(raw), raw)
def blockheaders_rev(self, start=None):
current_hash = start or self.tip().hash()
while True:
raw = self.db.get(b'b/' + current_hash)
if not raw:
break
hdr = DecodedBlockHeader(cbor.loads(raw), raw)
yield hdr
current_hash = hdr.prev_header()
def blockheaders(self, start=None):
current_hash = start or config.GENESIS_BLOCK_HASH
while True:
raw = self.db.get(b'b/' + current_hash)
yield DecodedBlockHeader.from_raw(raw, current_hash)
current_hash = self.db.get(b'e/fl/' + current_hash)
if not current_hash:
break
def iter_header_hash(self, start=None):
    """Yield header hashes forward from `start` (default: genesis)."""
    current = start or config.GENESIS_BLOCK_HASH
    while current:
        yield current
        current = self.db.get(b'e/fl/' + current)
def blockheaders_noorder(self):
    """Decoded headers in raw db-iteration order (no chain order)."""
    def decode(item):
        key, raw = item
        # key layout is b'b/' + hash -- strip the two-byte prefix
        return DecodedBlockHeader.from_raw(raw, key[2:])
    return map(decode, iter_prefix(self.db, b'b/'))
def append_block(self, block):
    """Append `block` to the chain.

    Writes two batches: first the global db (header, forward link, utxo
    delta, address index, tip), then the per-epoch db (genesis marker or
    undo record, plus the raw block body).  Returns the undo data
    (list of spent outputs per tx) or None for a genesis block.
    """
    hdr = block.header()
    batch = rocksdb.WriteBatch()
    tip = self.tip()
    if tip:
        # new block must extend the current tip
        assert hdr.prev_header() == tip.hash(), 'invalid block.'
    h = hdr.hash()
    batch.put(b'b/' + h, hdr.raw())
    # forward link: prev -> this, used by the forward iterators
    batch.put(b'e/fl/' + hdr.prev_header(), h)
    undos = None
    if not block.is_genesis():
        # capture the outputs being spent BEFORE applying the block,
        # so the block can later be rolled back
        undos = self._get_block_undos(block)
    self.utxo_apply_block(block, batch)
    for tx in block.transactions():
        for out in tx.outputs():
            # address index: record every address that ever received funds
            batch.put(b'a/' + out.addr, b'')
    self.set_tip(hdr, batch)
    self.db.write(batch)

    # second batch goes to the per-epoch database
    epoch, _ = hdr.slot()
    db = self.open_epoch_db(epoch, readonly=False)
    batch = rocksdb.WriteBatch()
    if hdr.is_genesis():
        # each epoch db holds exactly one genesis marker
        assert not db.get(b'g')
        batch.put(b'g', h)
    else:
        batch.put(b'u/' + h, cbor.dumps(undos))
    batch.put(b'b/' + h, block.raw())
    db.write(batch)
    return undos
def _get_block_undos(self, block):
    """Collect, per transaction, the outputs its inputs spend (undo data)."""
    undos = []
    for tx in block.transactions():
        undos.append([self.get_output(txin) for txin in tx.inputs()])
    return undos
def utxo_apply_block(self, block, batch):
    """Stage the UTxO delta of `block` into write `batch`:
    delete spent txins, insert newly created outputs."""
    spent, created = block.utxos()
    for txin in spent:
        batch.delete(b'ut/t/' + cbor.dumps(txin))
    for txin, txout in created.items():
        batch.put(b'ut/t/' + cbor.dumps(txin), cbor.dumps(txout))
def iter_utxo(self):
    """Yield (TxIn, TxOut) pairs for every unspent output in the db."""
    from .wallet import TxIn, TxOut
    prefix = b'ut/t/'
    skip = len(prefix)
    for key, value in iter_prefix(self.db, prefix):
        yield TxIn(*cbor.loads(key[skip:])), TxOut(*cbor.loads(value))
def iter_addresses(self):
    """Yield every address recorded in the a/ index."""
    prefix = b'a/'
    it = self.db.iterkeys()
    it.seek(prefix)
    for key in it:
        if not key.startswith(prefix):
            break
        yield key[len(prefix):]
def get_output(self, txin):
    """Look up the unspent output referenced by `txin`, or None."""
    from .wallet import TxOut
    raw = self.db.get(b'ut/t/' + cbor.dumps(txin))
    return TxOut(*cbor.loads(raw)) if raw else None
def hash_range(store, hstart, hstop, depth_limit):
    """Yield header hashes from `hstart` up to and including `hstop`.

    Both bounds must resolve to known headers, `hstop` must be strictly
    ahead of `hstart`, and their difficulty gap must stay below
    `depth_limit`.
    """
    if hstart == hstop:
        assert depth_limit > 0
        yield hstart
        return
    start = store.blockheader(hstart)
    stop = store.blockheader(hstop)
    assert start and stop
    # NOTE(review): `diffculty` spelling follows the header API used here
    assert stop.diffculty() > start.diffculty()
    assert stop.diffculty() - start.diffculty() < depth_limit
    # bugfix: the original iterated from the decoded header object and
    # compared each hash against the header object `stop`, so the loop
    # passed the wrong type to iter_header_hash and never matched hstop;
    # iterate hashes and compare hashes.
    for h in store.iter_header_hash(hstart):
        yield h
        if h == hstop:
            break
def fetch_raw_blocks(store, hstart, hstop):
    """Yield raw block bodies for the hash range [hstart, hstop]."""
    depth_limit = config.CHAIN['block']['recoveryHeadersMessage']
    for block_hash in hash_range(store, hstart, hstop, depth_limit):
        yield store.raw_block(store.blockheader(block_hash))
def stream_raw_blocks(store, hstart):
    """Yield raw block bodies forward from `hstart` until the chain ends."""
    for block_hash in store.iter_header_hash(hstart):
        yield store.raw_block(store.blockheader(block_hash))
| true | true |
import numpy as np
class Mol():
    r"""
    Molecule.

    A molecular graph with lazily computed, cached matrices and spectra.
    Caches start as empty lists (or None) and are filled on first use.
    """
    __g6_string = ''
    # Adjacency matrix
    __A = []
    # Incidence matrix
    __B = []
    # Laplacian matrix
    __L = []
    # Normalized laplacian matrix
    __NL = []
    # Signless laplacian matrix
    __Q = []
    # Distance matrix
    __D = []
    # Resistance Distance matrix
    __RD = []

    # number of vertices
    __Order = 0
    # list of (u, v) pairs
    __Edges = []

    # lazily built external graph objects
    __Sage_graph = None
    __NX_graph = None

    # cached vertex degrees
    __Degrees = []

    # cached spectra of the matrices above
    __Spectrum = []
    __Laplacian_spectrum = []
    __Distance_spectrum = []
    __Norm_laplacian_spectrum = []
    __Signless_laplacian_spectrum = []
    __RD_spectrum = []

    # None = unknown, computed on demand by is_connected()
    __Is_connected = None
    # Switch it to False when we know that the graph is connected. Useful for big calculations
    __Check_connectedness = True
def _reset_(self):
""" Reset all attributes """
self.__g6_string = ''
# Adjacency matrix
self.__A = []
# Incidence matrix
self.__B = []
# Laplacian matrix
self.__L = []
# Normalized laplacian matrix
self.__NL = []
# Signless laplacian matrix
self.__Q = []
# Distance matrix
self.__D = []
# Resistance Distance matrix
self.__RD = []
self.__Order = 0
self.__Edges = []
self.__Sage_graph = None
self.__NX_graph = None
self.__Degrees = []
self.__Spectrum = []
self.__Laplacian_spectrum = []
self.__Distance_spectrum = []
self.__Norm_laplacian_spectrum = []
self.__Signless_laplacian_spectrum = []
self.__RD_spectrum = []
self.__Is_connected = None
    # allow to set structure from somewhere
    # used in utilites
    def _set_A(self, A):
        # Inject an adjacency matrix directly (no cache invalidation).
        self.__A = A

    def _set_Edges(self, edges):
        # Inject the edge list directly (no cache invalidation).
        self.__Edges = edges

    def _set_Order(self, order):
        # Set the vertex count directly (no cache invalidation).
        self.__Order = order
# native method to initialize Mol class is to provide g6 string
def __init__(self, string=None, check_connectedness=True):
""" Molecular graph class """
self.__Check_connectedness = check_connectedness
if string != None:
if string[0] == '>':
if string.startswith('>>graph6<<'):
string = string[10:]
elif string.startswith('>>sparse6<<'):
string = string[11:]
if string[0] == ':':
self.read_s6(string)
else:
self.read_g6(string)
def __repr__(self):
if self.__A != None:
return 'Molecular graph on ' + str(
self.__Order) + ' vertices and ' + str(self.size()) + ' edges'
return 'Empty Molecular graph'
def __len__(self):
if self.__A != None: return len(self.__A)
else: return 0
def set_check_connectedness(self, c):
""" Switch on/off of checking connectedness for the graph. Might be useful in batch calculations to economy time.
args: c (True/False)
"""
self.check_connectedness = c
    def g6_string(self):
        """ Return a graph6 string representation of the graph

            Alias: graph6_string """
        return self.__g6_string

    # alias like in Sage:
    graph6_string = g6_string

    def order(self):
        """ Return number of vertices """
        return self.__Order

    # alias for order
    n = order

    def edges(self):
        """ Return list of edges """
        return self.__Edges

    def size(self):
        """ Return number of edges"""
        return len(self.__Edges)

    # alias for size
    m = size

    def vertices(self):
        """ Return list of vertices """
        # NOTE(review): under Python 3 this is a range object, not a list;
        # callers in this module only iterate it, so both behave the same
        return range(self.__Order)
    def sage_graph(self):
        """ Return Sage Graph object (built lazily on first call) """
        if self.__Sage_graph is None: self._init_sage_graph_()
        return self.__Sage_graph

    def NX_graph(self):
        """ Return NetworkX graph object (built lazily from the edge list) """
        if self.__NX_graph is None:
            # imported lazily so networkx is only required when used
            import networkx as nx
            self.__NX_graph = nx.Graph(self.__Edges)
        return self.__NX_graph

    nx_graph = NX_graph

    def _init_sage_graph_(self):
        """ Initialize SAGE graph from the edge list """
        # imported lazily so Sage is only required when used
        from sage.graphs.graph import Graph
        self.__Sage_graph = Graph(self.__Edges)
def read_g6(self, s):
""" Initialize graph from graph6 string """
def graph_bit(pos, off):
return ((ord(s[off + 1 + pos / 6]) - 63) & (2**(5 - pos % 6))) != 0
if s.startswith('>>graph6<<'):
s = s[10:]
# reset all the attributes before changing the structure
self._reset_()
n = ord(s[0]) - 63
off = 0
if n == 63:
if ord(s[1]) - 63 != 63:
n = ((ord(s[1]) - 63) << 12) + (
(ord(s[2]) - 63) << 6) + ord(s[3]) - 63
off = 3
else:
n = ((ord(s[2]) - 63) << 30) + ((ord(s[3]) - 63) << 24) + (
(ord(s[4]) - 63) << 18) + ((ord(s[5]) - 63) << 12) + (
(ord(s[6]) - 63) << 6) + ord(s[7]) - 63
off = 7
self.__Order = n
self.__A = [[0 for col in range(n)] for row in range(n)]
i = 0
j = 1
self.__Edges = []
for x in range(n * (n - 1) / 2):
if graph_bit(x, off):
self.__A[i][j] = 1
self.__A[j][i] = 1
self.__Edges.append((i, j))
if j - i == 1:
i = 0
j += 1
else:
i += 1
self.__g6_string = s
read_graph6 = read_g6
    def read_s6(self, s):
        """ Initialize graph from sparse6 string """

        def graph_bit(pos, off):
            # NOTE(review): dead code -- never called in this method
            # (copied from read_g6); `pos / 6` would also need to be
            # `pos // 6` under Python 3
            return ((ord(s[off + 1 + pos / 6]) - 63) & (2**(5 - pos % 6))) != 0

        if s.startswith('>>sparse6<<'):
            s = s[11:]
        if not s[0] == ':':
            print('This is not a sparse6 format!')
            return False

        # reset all the attributes before changing the structure
        self._reset_()

        s = s[1:]
        # order is encoded in 1, 4 or 8 leading bytes depending on magnitude
        n = ord(s[0]) - 63
        off = 0
        if n == 63:
            if ord(s[1]) - 63 != 63:
                n = ((ord(s[1]) - 63) << 12) + (
                    (ord(s[2]) - 63) << 6) + ord(s[3]) - 63
                off = 3
            else:
                n = ((ord(s[2]) - 63) << 30) + ((ord(s[3]) - 63) << 24) + (
                    (ord(s[4]) - 63) << 18) + ((ord(s[5]) - 63) << 12) + (
                        (ord(s[6]) - 63) << 6) + ord(s[7]) - 63
                off = 7
        self.__Order = n

        # k = number of bits needed to encode a vertex index
        k = 1
        while 1 << k < n:
            k += 1

        data = s[off + 1:]

        #print n,k
        #print data

        def parseData():
            """Return stream of pairs b[i], x[i] for sparse6 format."""
            chunks = iter(data)
            d = None  # partial data word
            dLen = 0  # how many unparsed bits are left in d
            while 1:
                if dLen < 1:
                    d = ord(next(chunks)) - 63
                    dLen = 6
                dLen -= 1
                b = (d >> dLen) & 1  # grab top remaining bit
                x = d & ((1 << dLen) - 1)  # partially built up value of x
                xLen = dLen  # how many bits included so far in x
                while xLen < k:  # now grab full chunks until we have enough
                    d = ord(next(chunks)) - 63
                    dLen = 6
                    x = (x << 6) + d
                    xLen += 6
                x = (x >> (xLen - k))  # shift back the extra bits
                dLen = xLen - k
                yield b, x

        self.__A = [[0 for col in range(n)] for row in range(n)]
        self.__Edges = []

        # decode the (b, x) stream: b=1 advances the current vertex v;
        # x either becomes the new current vertex (x > v) or closes the
        # edge (x, v)
        v = 0
        for b, x in parseData():
            if b: v += 1
            if x >= n:
                break  # padding with ones can cause overlarge number here
            elif x > v:
                v = x
            else:
                self.__A[x][v] = 1
                self.__A[v][x] = 1
                self.__Edges.append((x, v))
        self.__g6_string = ''

    read_sparse6 = read_s6
def read_matrix(self, matrix):
"""Initialize graph from adjacency matrix including numpy.matrix"""
if type(matrix) == np.matrix:
matrix = matrix.astype(int).tolist()
self._reset_()
self.__Order = len(matrix)
self.__A = matrix
for i in range(self.__Order):
for j in range(i):
if matrix[i][j] == 1:
self.__Edges.append((i, j))
def read_edgelist(self, edges):
"""Initialize graph from list of edges.
Example:
m = mathchem.Mol()
m.read_edgelist( [(4,3),(3,1),(1,4))] )"""
# first relabel nodes
nodes = []
for e in edges:
if not e[0] in nodes: nodes.append(e[0])
if not e[1] in nodes: nodes.append(e[1])
self._reset_()
self.__Order = len(nodes)
d = dict(zip(nodes, range(len(nodes))))
self.__Edges = [(d[e[0]], d[e[1]]) for e in edges]
self.__A = [[0 for col in range(self.__Order)]
for row in range(self.__Order)]
for i, j in self.__Edges:
self.__A[i][j] = 1
self.__A[j][i] = 1
def write_dot_file(self, filename):
f_out = open(filename, 'w')
f_out.writelines('graph Mol {\n')
for (i, j) in self.edges():
f_out.writelines(' ' + str(i) + ' -- ' + str(j) + ';\n')
f_out.writelines('}')
f_out.close()
    #
    #
    # matrices
    #
    #

    def adjacency_matrix(self):
        """ Return Adjacency matrix (list of lists as filled by the readers)

            Alias : A
        """
        return self.__A

    A = adjacency_matrix
def incidence_matrix(self):
""" Return Incidence matrix
Alias: B
"""
if self.__B == []:
def func(u, v):
col = [0] * self.__Order
col[u] = 1
col[v] = 1
return col
# apply func to each edge
b = map(lambda e: func(e), self.edges())
# transpose the result
self.__B = map(list, zip(*b))
return self.__B
B = incidence_matrix
def laplacian_matrix(self):
""" Return Laplacian matrix
L = D-A
where D - matrix whose diagonal elements are the degrees of the corresponding vertices
A - adjacency matrix
Alias : L
"""
if self.__L == []:
self.__L = np.diag(self.degrees()) - np.matrix(self.__A)
return self.__L
L = laplacian_matrix
def signless_laplacian_matrix(self):
""" Return Signless Laplacian matrix
Q = D+A
Alias : Q
"""
if self.__Q == []:
self.__Q = np.diag(self.degrees()) + np.matrix(self.__A)
return self.__Q
Q = signless_laplacian_matrix
def normalized_laplacian_matrix(self):
""" Return Normalized Laplacian matrix
NL = deg^(-1/2) * L * deg(1/2)
Alias : NL
"""
## TODO: check if we have zeros in degrees()
if self.__NL == []:
d1 = np.diag(np.power(self.degrees(), -.5))
d2 = np.diag(np.power(self.degrees(), .5))
self.__NL = d1 * self.laplacian_matrix() * d2
return self.__NL
NL = normalized_laplacian_matrix
def distance_matrix(self):
""" Return Distance matrix
Alias : D
"""
if self.__Order == 0: return []
if self.__D == []:
# use here float only for using np.inf - infinity
A = np.matrix(self.__A, dtype=float)
n, m = A.shape
I = np.identity(n)
A[A == 0] = np.inf # set zero entries to inf
A[I == 1] = 0 # except diagonal which should be zero
for i in range(n):
r = A[i, :]
A = np.minimum(A, r + r.T)
self.__D = np.matrix(A, dtype=int)
return self.__D
D = distance_matrix
def reciprocal_distance_matrix(self):
""" Return Reciprocal Distance matrix """
rd = np.matrix(self.distance_matrix(), dtype=float)
# probably there exists more python way to apply a function to each element of matrix
for i in range(self.__Order):
for j in range(self.__Order):
if not rd[i, j] == 0: rd[i, j] = 1 / rd[i, j]
return rd
    def resistance_distance_matrix(self):
        """ Return Resistance Distance matrix

        Uses the standard formula RD[i,j] = s[i,i] + s[j,j] - 2*s[i,j]
        where s = n * inv(n*L + J), L the Laplacian and J the all-ones
        matrix (added via the scalar +1).  Returns False for empty or
        disconnected graphs (n*L + J would be singular).
        """
        if not self.is_connected() or self.__Order == 0:
            return False

        if self.__RD == []:
            #from numpy import linalg as la
            n = self.__Order
            # n*L + J: +1 adds 1 to every entry
            s = n * self.laplacian_matrix() + 1
            sn = n * np.linalg.inv(s)
            RD = np.ndarray((n, n))
            for i in range(n):
                for j in range(n):
                    # accumulate in long double to limit rounding error
                    RD[i, j] = np.float64(
                        np.longdouble(sn[i, i]) + np.longdouble(sn[j, j]) -
                        2 * np.longdouble(sn[i, j]))
            self.__RD = RD

        return self.__RD
def seidel_matrix(self):
""" Return Seidel matrix
S = J - I - 2A
Alias: S
"""
n = self.__Order
return np.ones((n, n)) - np.identity(n) - 2 * np.matrix(self.__A)
S = seidel_matrix
#
#
# Graph invariants
#
#
def diameter(self):
""" Return diameter of the graph
Diameter is the maximum value of distance matrix
"""
if self.__Order == 0: return 0
return self.distance_matrix().max()
def degrees(self):
""" Return degree of the vertex
Alias : deg
"""
if self.__Degrees == []:
self.__Degrees = map(lambda r: sum(r), self.__A)
## calcuate degrees for all vertices
return self.__Degrees
deg = degrees
def eccentricity(self):
""" Eccentricity of the graph for all its vertices"""
if self.__Order == 0: return None
return self.distance_matrix().max(axis=0).tolist()[0]
def distances_from_vertex(self, v):
""" Return list of all distances from a given vertex to all others"""
# used to test graph where it is connected or not
seen = {}
level = 0
nextlevel = [v]
while nextlevel:
thislevel = nextlevel
nextlevel = []
for v in thislevel:
if v not in seen:
seen[v] = level
nb = [
i
for (i, j) in zip(range(len(self.__A[v])), self.__A[v])
if j != 0
]
nextlevel.extend(nb)
#if (cutoff is not None and cutoff <= level): break
level = level + 1
return seen
def is_connected(self):
""" Return True/False depends on the graph is connected or not """
if self.__Order == 0: return False
if not self.__Check_connectedness: return True
if self.__Is_connected is None:
# we take vertex 0 and check whether we can reach all other vertices
self.__Is_connected = len(
self.distances_from_vertex(0)) == self.order()
return self.__Is_connected
    #
    #
    # Graph spectra
    #
    #

    def spectrum(self, matrix="adjacency"):
        r""" Spectrum of the graph

        args:
            matrix (str or matrix)

            'adjacency' or 'A' : default
            'laplacian' or 'L'
            'distance' or 'D'
            'signless_laplacian' or 'Q'
            'normalized_laplacian' or 'NL'
            'resistance_distance' or 'RD'
            'reciprocal_distance'

            arbitrary matrix

        Eigenvalues are returned sorted in non-increasing order; results
        for named matrices (except 'reciprocal_distance') are cached.
        """
        from numpy import linalg as la

        if type(matrix) is str:

            if self.__Order == 0: return []

            if matrix == "adjacency" or matrix == "A":
                if self.__Spectrum == []:
                    s = la.eigvalsh(self.__A).tolist()
                    s.sort(reverse=True)
                    self.__Spectrum = s
                return self.__Spectrum

            elif matrix == "laplacian" or matrix == "L":
                if self.__Laplacian_spectrum == []:
                    s = la.eigvalsh(self.laplacian_matrix()).tolist()
                    s.sort(reverse=True)
                    # clamp tiny negative rounding errors to zero
                    # NOTE(review): map() is lazy under Python 3, so this
                    # cached value would be a one-shot iterator there
                    self.__Laplacian_spectrum = map(
                        lambda x: x if x > 0 else 0, s)
                return self.__Laplacian_spectrum

            elif matrix == "distance" or matrix == "D":
                if self.__Distance_spectrum == []:
                    s = la.eigvalsh(self.distance_matrix()).tolist()
                    s.sort(reverse=True)
                    self.__Distance_spectrum = s
                return self.__Distance_spectrum

            elif matrix == "signless_laplacian" or matrix == "Q":
                if self.__Signless_laplacian_spectrum == []:
                    ## TODO: check if we have zeros in degrees()
                    s = la.eigvalsh(self.signless_laplacian_matrix()).tolist()
                    s.sort(reverse=True)
                    # clamp tiny negative rounding errors to zero
                    self.__Signless_laplacian_spectrum = map(
                        lambda x: x if x > 0 else 0, s)
                return self.__Signless_laplacian_spectrum

            elif matrix == "normalized_laplacian" or matrix == "NL":
                if self.__Norm_laplacian_spectrum == []:
                    ## TODO: check if we have zeros in degrees()
                    s = la.eigvalsh(
                        self.normalized_laplacian_matrix()).tolist()
                    s.sort(reverse=True)
                    self.__Norm_laplacian_spectrum = s
                return self.__Norm_laplacian_spectrum

            elif matrix == "resistance_distance" or matrix == "RD":
                if self.__RD_spectrum == []:
                    s = la.eigvalsh(self.resistance_distance_matrix()).tolist()
                    s.sort(reverse=True)
                    self.__RD_spectrum = s
                return self.__RD_spectrum
            # NO CACHE
            elif matrix == "reciprocal_distance":
                s = la.eigvalsh(self.reciprocal_distance_matrix()).tolist()
                s.sort(reverse=True)
                return s
            else:
                return False

        # if the parameter is an arbitrary matrix
        # DEPRECATED:
        # use mathchem.spectrum(matrix) for arbitrary matrices
        #
        else:
            s = la.eigvalsh(matrix).tolist()
            s.sort(reverse=True)
            return s
# for arbitrary matrices use:
# mathchem.spectral_moment(matrix)
def spectral_moment(self, k, matrix="adjacency"):
""" Return k-th spectral moment
parameters: matrix - see spectrum help
"""
return np.sum(np.power(self.spectrum(matrix), k))
# for arbitrary matrices use:
# mathchem.spectral_radius(matrix)
def spectral_radius(self, matrix="adjacency"):
s = self.spectrum(matrix)
return max(abs(s[0]), abs(s[len(s) - 1]))
# for arbitrary matrices use:
# mathchem.energy(matrix)
def energy(self, matrix="adjacency"):
""" Return energy of the graph
parameters: matrix - see spectrum help
"""
if self.__Order == 0: return False
s = self.spectrum(matrix)
a = np.sum(s, dtype=np.longdouble) / len(s)
return np.float64(
np.sum(map(lambda x: abs(x - a), s), dtype=np.longdouble))
def incidence_energy(self):
""" Return incidence energy (IE)
Incidence energy is the sum of singular values of incidence matrix
"""
if self.__Order == 0: return False
from numpy.linalg import svd
return np.float64(
np.sum(svd(self.incidence_matrix(), compute_uv=False),
dtype=np.longdouble))
#
#
# Chemical indices
#
#
def zagreb_m1_index(self):
""" Zagreb M1 Index """
return sum(map(lambda d: d**2, self.degrees()))
def zagreb_m2_index(self):
""" Zagreb M2 Index
The molecular graph must contain at least one edge, otherwise the function Return False
Zagreb M2 Index is a special case of Connectivity Index with power = 1"""
return sum(
map(lambda e1, e2: self.degrees()[e1] * self.degrees()[e2],
self.edges()))
def zagreb_m1_coindex(self):
""" Zagreb M1 Coindex """
return 2 * self.size() * (self.__Order - 1) - self.zagreb_m1_index()
def zagreb_m2_coindex(self):
""" Zagreb M2 Coindex """
return 2 * (self.size()**
2) - self.zagreb_m2_index() - self.zagreb_m1_index() * .5
def connectivity_index(self, power):
""" Connectivity index (R)"""
E = self.edges() # E - all edges
if len(E) == 0: return 0
return np.float64(
np.sum(map(
lambda e1, e2:
(self.degrees()[e1] * self.degrees()[e2])**power, E),
dtype=np.longdouble))
def augmented_zagreb_index(self):
""" Augmented Zagreb Index"""
E = self.edges() # E - all edges
d = self.degrees()
if len(E) < 2: return 0
return np.float64(
np.sum(map(
lambda e1, e2: (np.longdouble(d[e1] * d[e2]) /
(d[e1] + d[e2] - 2))**3, E),
dtype=np.longdouble))
def sum_connectivity_index(self):
""" Sum-Connectivity index"""
E = self.edges() # E - all edges
if len(E) == 0: return 0
return np.float64(
np.sum(map(
lambda e1, e2:
(self.degrees()[e1] + self.degrees()[e2])**(-0.5), E),
dtype=np.longdouble))
def geometric_arithmetic_index(self):
""" Geometric-Arithmetic index"""
E = self.edges() # E - all edges
if len(E) == 0: return 0
return np.float64(
np.sum(map(
lambda e1, e2: 2.0 * np.sqrt(self.degrees()[e1] * self.degrees(
)[e2]) / (self.degrees()[e1] + self.degrees()[e2]), E),
dtype=np.longdouble))
def eccentric_connectivity_index(self):
""" Eccentric Connectivity Index
The molecuar graph must be connected, otherwise the function Return False"""
if not self.is_connected():
return False
return sum(map(lambda a, b: a * b, self.degrees(),
self.eccentricity()))
def randic_index(self):
""" Randic Index
The molecular graph must contain at least one edge, otherwise the function Return False
Randic Index is a special case of Connectivity Index with power = -1/2"""
return self.connectivity_index(-0.5)
def atom_bond_connectivity_index(self):
""" Atom-Bond Connectivity Index (ABC) """
s = np.longdouble(0) # summator
for u, v in self.edges():
d1 = np.float64(self.degrees()[u])
d2 = np.float64(self.degrees()[v])
s += np.longdouble(((d1 + d2 - 2) / (d1 * d2))**.5)
return np.float64(s)
def estrada_index(self, matrix="adjacency"):
""" Estrada Index (EE)
args:
matrix -- see spectrum for help, default value is 'adjacency'
There is an alias 'distance_estrada_index' for distance matrix
"""
return np.float64(
np.sum(map(lambda x: np.exp(x.real), self.spectrum(matrix)),
dtype=np.longdouble))
def distance_estrada_index(self):
""" Distance Estrada Index (DEE)
Special case of Estrada index with distance matrix
"""
return self.estrada_index('distance')
def degree_distance(self):
""" Degree Distance (DD)
The molecuar graph must be connected, otherwise the function Return False"""
if not self.is_connected():
return False
dd = np.matrix(self.degrees()) * self.distance_matrix().sum(axis=1)
return dd[0, 0]
def reverse_degree_distance(self):
""" Reverse Distance Degree (rDD)
The molecuar graph must be connected, otherwise the function Return False"""
if not self.is_connected():
return False
return 2 * (self.order() - 1) * len(
self.edges()) * self.diameter() - self.degree_distance()
def molecular_topological_index(self):
""" (Schultz) Molecular Topological Index (MTI)
The molecuar graph must be connected, otherwise the function Return False"""
if not self.is_connected():
return False
# (A+D)*d
A = np.matrix(self.__A)
d = np.matrix(self.degrees())
return np.float64(
((A + self.distance_matrix()) * d.T).sum(dtype=np.longdouble))
def eccentric_distance_sum(self):
""" Distance Sum
The molecuar graph must be connected, otherwise the function Return False"""
if not self.is_connected():
return False
return (self.eccentricity() * self.distance_matrix().sum(axis=1))[0, 0]
# strange - it is slow ((
def balaban_j_index(self):
""" Balaban J index
The molecuar graph must be connected, otherwise the function Return False"""
if not self.is_connected():
return False
ds = self.distance_matrix().sum(axis=1)
m = len(self.edges())
k = (m / (m - self.__Order + 2.0))
return np.float64(
k *
np.sum(map(lambda u, v: 1 / np.sqrt(
(ds[u][0, 0] * ds[v][0, 0])), self.edges()),
dtype=np.longdouble))
def sum_balaban_index(self):
""" Sum Balaban index
The molecuar graph must be connected, otherwise the function Return False"""
if not self.is_connected():
return False
ds = self.distance_matrix().sum(axis=1)
m = len(self.edges())
k = (m / (m - self.__Order + 2.0))
return np.float64(
k *
np.sum(map(lambda u, v: 1 / np.sqrt(
(ds[u][0, 0] + ds[v][0, 0])), self.edges()),
dtype=np.longdouble))
def kirchhoff_index(self):
""" Kirchhoff Index (Kf)
Kf = 1/2 * sum_i sum_j RD[i,j]
Based on resistance distance matrix RD
Alias: resistance
The molecuar graph must be connected, otherwise the function Return False
"""
if not self.is_connected():
return False
return np.float64(
self.resistance_distance_matrix().sum(dtype=np.longdouble) / 2)
resistance = kirchhoff_index
def wiener_index(self):
""" Wiener Index (W)
W = 1/2 * sum_i sum_j D[i,j]
where D is distance matrix
The molecuar graph must be connected, otherwise the function Return False
"""
if not self.is_connected():
return False
return self.distance_matrix().sum(dtype=np.float64) / 2
def terminal_wiener_index(self):
""" Calculate Terminal Wiener Index (TW)
TW = Sum of all distances between pendent vertices (with degree = 1)
"""
if not self.is_connected(): return False
s = 0
for u in range(self.order()):
if self.degrees()[u] != 1: continue
for v in range(u + 1, self.order()):
if self.degrees()[v] == 1:
s = s + self.distance_matrix()[u, v]
return s
def reverse_wiener_index(self):
""" Reverse Wiener Index (RW)
RW = 1/2 * sum_i!=j ( d - D[i,j] )
where D is distance matrix and d is diameter
The molecuar graph must be connected, otherwise the function Return False
"""
if not self.is_connected():
return False
# here we use formula: RW = 1/2 * n * (n-1) * d - W
return self.diameter() * (
self.__Order * (self.__Order - 1)) / 2 - self.wiener_index()
def hyper_wiener_index(self):
""" Hyper-Wiener Index (WW)
WW = 1/2 * ( sum_ij d(i,j)^2 + sum_i_j d(i,j) )
where D is distance matrix
The molecuar graph must be connected, otherwise the function Return False
"""
if not self.is_connected():
return False
return (
np.power(self.distance_matrix(), 2).sum() +
self.distance_matrix().sum()) / 4 # since we have symmetric matrix
def harary_index(self):
""" Harary Index (H)
H = 1/2 sum_i sum_j Rd[i,j]
where Rd is reciprocal distance matrix
Rd[i,j] = 1 / D[i,j] for D[i,j] != 0
Rd[i,j] = 0 otherwise
The molecuar graph must be connected, otherwise the function Return False
"""
if not self.is_connected():
return False
return np.float64(
self.reciprocal_distance_matrix().sum(dtype=np.longdouble)) / 2
def LEL(self):
""" Return Laplacian-like energy (LEL) """
return np.float64(
np.sum(map(lambda x: np.sqrt(x), self.spectrum('laplacian')),
dtype=np.longdouble))
def multiplicative_sum_zagreb_index(self):
""" Log( Multiplicative Sum Zagreb index )"""
d = self.degrees()
return np.float64(
np.sum(map(lambda u, v: np.log(np.float64(d[u] + d[v])),
self.edges()),
dtype=np.longdouble))
def multiplicative_p2_zagreb_index(self):
"""Calculates Log( Multiplicative P2 Zagreb index )"""
d = self.degrees()
return np.float64(
np.sum(map(lambda u, v: np.log(np.float64(d[u] * d[v])),
self.edges()),
dtype=np.longdouble))
def multiplicative_p1_zagreb_index(self):
"""Calculates Log( Multiplicative P1 Zagreb index )"""
d = self.degrees()
return np.float64(
np.sum(map(lambda v: np.log(np.float64(d[v]**2)), self.vertices()),
dtype=np.longdouble))
def szeged_index(self):
"""Calculates Szeged index"""
if not self.is_connected():
return False
s = 0
D = self.distance_matrix()
for u, v in self.edges():
diff = D[u, :] - D[v, :]
s += (diff > 0).sum() * (diff < 0).sum()
return float(s)
def revised_szeged_index(self):
"""Calculates Revised Szeged index"""
if not self.is_connected():
return False
s = 0.0
D = self.distance_matrix()
for u, v in self.edges():
diff = D[u, :] - D[v, :]
o = (diff == 0).sum()
s += ((diff > 0).sum() + .5 * o) * ((diff < 0).sum() + .5 * o)
return s
def homo_lumo_index(self):
"""Calculates HOMO-LUMO index"""
if not self.is_connected():
return False
n = self.order()
if n % 2 == 0:
h = int(n / 2 -
1) # because array indices start from 0 instead of 1
l = int(h + 1)
return max([abs(self.spectrum()[h]), abs(self.spectrum()[l])])
# else:
h = int((n - 1) / 2)
return abs(self.spectrum()[h])
HL_index = homo_lumo_index
# Adriatic indices
# DEPRECATED
# use mathchem.all_adriatic()
def all_adriatic(self):
""" Generate all possible parameters sets for adriatic indices"""
r = []
for p in [0, 1]:
for i in [1, 2, 3]:
for j in range(1, 9):
if i == 3:
for a in [0.5, 2]:
r.append((p, i, j, a))
elif i == 2 and j in range(1, 6):
for a in [-1, -0.5, 0.5, 1, 2]:
r.append((p, i, j, a))
elif i == 2 or i == 1:
for a in [0.5, 1, 2]:
r.append((p, i, j, a))
return r
def adriatic_name(self, p, i, j, a):
""" Return the name for given parameters of Adriatic indices"""
#(j)
name1 = {1:'Randic type ',\
2:'sum ',\
3:'inverse sum ', \
4:'misbalance ', \
5:'inverse misbalance ', \
6:'min-max ', \
7:'max-min ', \
8:'symmetric division '}
# (i,a)
name2 = {(1, 0.5):'lor',\
(1,1):'lo', \
(1,2):'los', \
(2,-1):'in', \
(2, -0.5):'ir', \
(2, 0.5):'ro', \
(2,1):'', \
(2,2):'s', \
(3, 0.5):'ha', \
(3,2):'two'}
#(p)
name3 = {0: 'deg', 1: 'di'}
return (name1[j] + name2[(i, a)] + name3[p])
def _adriatic_entry_(self, du, dv, i, j, a):
""" Return an individual edge contribution for Adriatic indices and matrices"""
# phi(x,a)
phi = {
1: lambda x, a: np.log(x)**a,
2: lambda x, a: x**a,
3: lambda x, a: a**x
}
# gamma (x,y)
gamma = {\
1: lambda x,y: x*y,\
2: lambda x,y: x+y,\
3: lambda x,y: 0 if x+y==0 else 1.0/(x+y),\
4: lambda x,y: abs(x-y),\
5: lambda x,y: 0 if x==y else 1.0/abs(x-y),\
6: lambda x,y: 0 if max(x,y)==0 else min(x,y)/max(x,y),\
7: lambda x,y: 0 if min(x,y)==0 else max(x,y)/min(x,y),\
8: lambda x,y: 0 if x==0 or y==0 else x/y+y/x}
return gamma[j](phi[i](du, a), phi[i](dv, a))
def adriatic_matrix(self, p, i, j, a):
""" Return the Adriatic matrix with given parameters"""
if p == 0: d = self.degrees()
else: d = self.distance_matrix().sum(axis=0).tolist()[0]
AM = [[0] * self.order() for k in range(self.order())]
for u, v in self.edges():
AM[u][v] = AM[v][u] = self._adriatic_entry_(
np.float64(d[u]), np.float64(d[v]), i, j, a)
return AM
def adriatic_index(self, p, i, j, a):
""" Return the Adriatic index with given parameters"""
if p == 0: d = self.degrees()
else: d = self.distance_matrix().sum(axis=0).tolist()[0]
func = lambda u, v: self._adriatic_entry_(np.float64(d[u]),
np.float64(d[v]), i, j, a)
return np.float64(np.sum(map(func, self.edges()), dtype=np.longdouble))
    # Adriatic indices by names
    # Each wrapper below fixes the (p, i, j, a) parameters of
    # adriatic_index(); see adriatic_name() for the naming scheme
    # (p=0: degree-based, i=1: log-power phi).

    def randic_type_lordeg_index(self):
        """ Adriatic index: Randic type lordeg index"""
        return self.adriatic_index(0, 1, 1, 0.5)

    def randic_type_lodeg_index(self):
        """ Adriatic index: Randic type lodeg index"""
        return self.adriatic_index(0, 1, 1, 1)

    def randic_type_losdeg_index(self):
        """ Adriatic index: Randic type losdeg index"""
        return self.adriatic_index(0, 1, 1, 2)

    def sum_lordeg_index(self):
        """ Adriatic index: sum lordeg index"""
        return self.adriatic_index(0, 1, 2, 0.5)

    def sum_lodeg_index(self):
        """ Adriatic index: sum lodeg index"""
        return self.adriatic_index(0, 1, 2, 1)

    def sum_losdeg_index(self):
        """ Adriatic index: sum losdeg index"""
        return self.adriatic_index(0, 1, 2, 2)

    def inverse_sum_lordeg_index(self):
        """ Adriatic index: inverse sum lordeg index"""
        return self.adriatic_index(0, 1, 3, 0.5)

    def inverse_sum_lodeg_index(self):
        """ Adriatic index: inverse sum lodeg index"""
        return self.adriatic_index(0, 1, 3, 1)

    def inverse_sum_losdeg_index(self):
        """ Adriatic index: inverse sum losdeg index"""
        return self.adriatic_index(0, 1, 3, 2)

    def misbalance_lordeg_index(self):
        """ Adriatic index: misbalance lordeg index"""
        return self.adriatic_index(0, 1, 4, 0.5)

    def misbalance_lodeg_index(self):
        """ Adriatic index: misbalance lodeg index"""
        return self.adriatic_index(0, 1, 4, 1)

    def misbalance_losdeg_index(self):
        """ Adriatic index: misbalance losdeg index"""
        return self.adriatic_index(0, 1, 4, 2)

    def inverse_misbalance_lordeg_index(self):
        """ Adriatic index: inverse misbalance lordeg index"""
        return self.adriatic_index(0, 1, 5, 0.5)

    def inverse_misbalance_lodeg_index(self):
        """ Adriatic index: inverse misbalance lodeg index"""
        return self.adriatic_index(0, 1, 5, 1)

    def inverse_misbalance_losdeg_index(self):
        """ Adriatic index: inverse misbalance losdeg index"""
        return self.adriatic_index(0, 1, 5, 2)

    def min_max_lordeg_index(self):
        """ Adriatic index: min-max lordeg index"""
        return self.adriatic_index(0, 1, 6, 0.5)

    def min_max_lodeg_index(self):
        """ Adriatic index: min-max lodeg index"""
        return self.adriatic_index(0, 1, 6, 1)

    def min_max_losdeg_index(self):
        """ Adriatic index: min-max losdeg index"""
        return self.adriatic_index(0, 1, 6, 2)

    def max_min_lordeg_index(self):
        """ Adriatic index: max-min lordeg index"""
        return self.adriatic_index(0, 1, 7, 0.5)

    def max_min_lodeg_index(self):
        """ Adriatic index: max-min lodeg index"""
        return self.adriatic_index(0, 1, 7, 1)

    def max_min_losdeg_index(self):
        """ Adriatic index: max-min losdeg index"""
        return self.adriatic_index(0, 1, 7, 2)

    def symmetric_division_lordeg_index(self):
        """ Adriatic index: symmetric division lordeg index"""
        return self.adriatic_index(0, 1, 8, 0.5)

    def symmetric_division_lodeg_index(self):
        """ Adriatic index: symmetric division lodeg index"""
        return self.adriatic_index(0, 1, 8, 1)

    def symmetric_division_losdeg_index(self):
        """ Adriatic index: symmetric division losdeg index"""
        return self.adriatic_index(0, 1, 8, 2)
    # Degree-based Adriatic wrappers with power phi (i=2); the suffix
    # encodes the exponent a: in=-1, ir=-0.5, ro=0.5, deg=1, s=2.

    def randic_type_indeg_index(self):
        """ Adriatic index: Randic type indeg index"""
        return self.adriatic_index(0, 2, 1, -1)

    def randic_type_irdeg_index(self):
        """ Adriatic index: Randic type irdeg index"""
        return self.adriatic_index(0, 2, 1, -0.5)

    def randic_type_rodeg_index(self):
        """ Adriatic index: Randic type rodeg index"""
        return self.adriatic_index(0, 2, 1, 0.5)

    def randic_type_deg_index(self):
        """ Adriatic index: Randic type deg index"""
        return self.adriatic_index(0, 2, 1, 1)

    def randic_type_sdeg_index(self):
        """ Adriatic index: Randic type sdeg index"""
        return self.adriatic_index(0, 2, 1, 2)

    def sum_indeg_index(self):
        """ Adriatic index: sum indeg index"""
        return self.adriatic_index(0, 2, 2, -1)

    def sum_irdeg_index(self):
        """ Adriatic index: sum irdeg index"""
        return self.adriatic_index(0, 2, 2, -0.5)

    def sum_rodeg_index(self):
        """ Adriatic index: sum rodeg index"""
        return self.adriatic_index(0, 2, 2, 0.5)

    def sum_deg_index(self):
        """ Adriatic index: sum deg index"""
        return self.adriatic_index(0, 2, 2, 1)

    def sum_sdeg_index(self):
        """ Adriatic index: sum sdeg index"""
        return self.adriatic_index(0, 2, 2, 2)

    def inverse_sum_indeg_index(self):
        """ Adriatic index: inverse sum indeg index"""
        return self.adriatic_index(0, 2, 3, -1)

    def inverse_sum_irdeg_index(self):
        """ Adriatic index: inverse sum irdeg index"""
        return self.adriatic_index(0, 2, 3, -0.5)

    def inverse_sum_rodeg_index(self):
        """ Adriatic index: inverse sum rodeg index"""
        return self.adriatic_index(0, 2, 3, 0.5)

    def inverse_sum_deg_index(self):
        """ Adriatic index: inverse sum deg index"""
        return self.adriatic_index(0, 2, 3, 1)

    def inverse_sum_sdeg_index(self):
        """ Adriatic index: inverse sum sdeg index"""
        return self.adriatic_index(0, 2, 3, 2)

    def misbalance_indeg_index(self):
        """ Adriatic index: misbalance indeg index"""
        return self.adriatic_index(0, 2, 4, -1)

    def misbalance_irdeg_index(self):
        """ Adriatic index: misbalance irdeg index"""
        return self.adriatic_index(0, 2, 4, -0.5)

    def misbalance_rodeg_index(self):
        """ Adriatic index: misbalance rodeg index"""
        return self.adriatic_index(0, 2, 4, 0.5)

    def misbalance_deg_index(self):
        """ Adriatic index: misbalance deg index"""
        return self.adriatic_index(0, 2, 4, 1)

    def misbalance_sdeg_index(self):
        """ Adriatic index: misbalance sdeg index"""
        return self.adriatic_index(0, 2, 4, 2)
def inverse_misbalance_indeg_index(self):
""" Adriatic index: inverse misbalance indeg index"""
return self.adriatic_index(0, 2, 5, -1)
def inverse_misbalance_irdeg_index(self):
""" Adriatic index: inverse misbalance irdeg index"""
return self.adriatic_index(0, 2, 5, -0.5)
def inverse_misbalance_rodeg_index(self):
""" Adriatic index: inverse misbalance rodeg index"""
return self.adriatic_index(0, 2, 5, 0.5)
def inverse_misbalance_deg_index(self):
""" Adriatic index: inverse misbalance deg index"""
return self.adriatic_index(0, 2, 5, 1)
def inverse_misbalance_sdeg_index(self):
""" Adriatic index: inverse misbalance sdeg index"""
return self.adriatic_index(0, 2, 5, 2)
def min_max_rodeg_index(self):
""" Adriatic index: min-max rodeg index"""
return self.adriatic_index(0, 2, 6, 0.5)
def min_max_deg_index(self):
""" Adriatic index: min-max deg index"""
return self.adriatic_index(0, 2, 6, 1)
def min_max_sdeg_index(self):
""" Adriatic index: min-max sdeg index"""
return self.adriatic_index(0, 2, 6, 2)
def max_min_rodeg_index(self):
""" Adriatic index: max-min rodeg index"""
return self.adriatic_index(0, 2, 7, 0.5)
def max_min_deg_index(self):
""" Adriatic index: max-min deg index"""
return self.adriatic_index(0, 2, 7, 1)
def max_min_sdeg_index(self):
""" Adriatic index: max-min sdeg index"""
return self.adriatic_index(0, 2, 7, 2)
def symmetric_division_rodeg_index(self):
""" Adriatic index: symmetric division rodeg index"""
return self.adriatic_index(0, 2, 8, 0.5)
def symmetric_division_deg_index(self):
""" Adriatic index: symmetric division deg index"""
return self.adriatic_index(0, 2, 8, 1)
def symmetric_division_sdeg_index(self):
""" Adriatic index: symmetric division sdeg index"""
return self.adriatic_index(0, 2, 8, 2)
def randic_type_hadeg_index(self):
""" Adriatic index: Randic type hadeg index"""
return self.adriatic_index(0, 3, 1, 0.5)
def randic_type_twodeg_index(self):
""" Adriatic index: Randic type twodeg index"""
return self.adriatic_index(0, 3, 1, 2)
def sum_hadeg_index(self):
""" Adriatic index: sum hadeg index"""
return self.adriatic_index(0, 3, 2, 0.5)
def sum_twodeg_index(self):
""" Adriatic index: sum twodeg index"""
return self.adriatic_index(0, 3, 2, 2)
def inverse_sum_hadeg_index(self):
""" Adriatic index: inverse sum hadeg index"""
return self.adriatic_index(0, 3, 3, 0.5)
def inverse_sum_twodeg_index(self):
""" Adriatic index: inverse sum twodeg index"""
return self.adriatic_index(0, 3, 3, 2)
def misbalance_hadeg_index(self):
""" Adriatic index: misbalance hadeg index"""
return self.adriatic_index(0, 3, 4, 0.5)
def misbalance_twodeg_index(self):
""" Adriatic index: misbalance twodeg index"""
return self.adriatic_index(0, 3, 4, 2)
def inverse_misbalance_hadeg_index(self):
""" Adriatic index: inverse misbalance hadeg index"""
return self.adriatic_index(0, 3, 5, 0.5)
def inverse_misbalance_twodeg_index(self):
""" Adriatic index: inverse misbalance twodeg index"""
return self.adriatic_index(0, 3, 5, 2)
def min_max_hadeg_index(self):
""" Adriatic index: min-max hadeg index"""
return self.adriatic_index(0, 3, 6, 0.5)
def min_max_twodeg_index(self):
""" Adriatic index: min-max twodeg index"""
return self.adriatic_index(0, 3, 6, 2)
def max_min_hadeg_index(self):
""" Adriatic index: max-min hadeg index"""
return self.adriatic_index(0, 3, 7, 0.5)
def max_min_twodeg_index(self):
""" Adriatic index: max-min twodeg index"""
return self.adriatic_index(0, 3, 7, 2)
def symmetric_division_hadeg_index(self):
""" Adriatic index: symmetric division hadeg index"""
return self.adriatic_index(0, 3, 8, 0.5)
def symmetric_division_twodeg_index(self):
""" Adriatic index: symmetric division twodeg index"""
return self.adriatic_index(0, 3, 8, 2)
def randic_type_lordi_index(self):
""" Adriatic index: Randic type lordi index"""
return self.adriatic_index(1, 1, 1, 0.5)
def randic_type_lodi_index(self):
""" Adriatic index: Randic type lodi index"""
return self.adriatic_index(1, 1, 1, 1)
def randic_type_losdi_index(self):
""" Adriatic index: Randic type losdi index"""
return self.adriatic_index(1, 1, 1, 2)
def sum_lordi_index(self):
""" Adriatic index: sum lordi index"""
return self.adriatic_index(1, 1, 2, 0.5)
def sum_lodi_index(self):
""" Adriatic index: sum lodi index"""
return self.adriatic_index(1, 1, 2, 1)
def sum_losdi_index(self):
""" Adriatic index: sum losdi index"""
return self.adriatic_index(1, 1, 2, 2)
def inverse_sum_lordi_index(self):
""" Adriatic index: inverse sum lordi index"""
return self.adriatic_index(1, 1, 3, 0.5)
def inverse_sum_lodi_index(self):
""" Adriatic index: inverse sum lodi index"""
return self.adriatic_index(1, 1, 3, 1)
def inverse_sum_losdi_index(self):
""" Adriatic index: inverse sum losdi index"""
return self.adriatic_index(1, 1, 3, 2)
def misbalance_lordi_index(self):
""" Adriatic index: misbalance lordi index"""
return self.adriatic_index(1, 1, 4, 0.5)
def misbalance_lodi_index(self):
""" Adriatic index: misbalance lodi index"""
return self.adriatic_index(1, 1, 4, 1)
def misbalance_losdi_index(self):
""" Adriatic index: misbalance losdi index"""
return self.adriatic_index(1, 1, 4, 2)
def inverse_misbalance_lordi_index(self):
""" Adriatic index: inverse misbalance lordi index"""
return self.adriatic_index(1, 1, 5, 0.5)
def inverse_misbalance_lodi_index(self):
""" Adriatic index: inverse misbalance lodi index"""
return self.adriatic_index(1, 1, 5, 1)
def inverse_misbalance_losdi_index(self):
""" Adriatic index: inverse misbalance losdi index"""
return self.adriatic_index(1, 1, 5, 2)
def min_max_lordi_index(self):
""" Adriatic index: min-max lordi index"""
return self.adriatic_index(1, 1, 6, 0.5)
def min_max_lodi_index(self):
""" Adriatic index: min-max lodi index"""
return self.adriatic_index(1, 1, 6, 1)
def min_max_losdi_index(self):
""" Adriatic index: min-max losdi index"""
return self.adriatic_index(1, 1, 6, 2)
def max_min_lordi_index(self):
""" Adriatic index: max-min lordi index"""
return self.adriatic_index(1, 1, 7, 0.5)
def max_min_lodi_index(self):
""" Adriatic index: max-min lodi index"""
return self.adriatic_index(1, 1, 7, 1)
def max_min_losdi_index(self):
""" Adriatic index: max-min losdi index"""
return self.adriatic_index(1, 1, 7, 2)
def symmetric_division_lordi_index(self):
""" Adriatic index: symmetric division lordi index"""
return self.adriatic_index(1, 1, 8, 0.5)
def symmetric_division_lodi_index(self):
""" Adriatic index: symmetric division lodi index"""
return self.adriatic_index(1, 1, 8, 1)
def symmetric_division_losdi_index(self):
""" Adriatic index: symmetric division losdi index"""
return self.adriatic_index(1, 1, 8, 2)
def randic_type_indi_index(self):
""" Adriatic index: Randic type indi index"""
return self.adriatic_index(1, 2, 1, -1)
def randic_type_irdi_index(self):
""" Adriatic index: Randic type irdi index"""
return self.adriatic_index(1, 2, 1, -0.5)
def randic_type_rodi_index(self):
""" Adriatic index: Randic type rodi index"""
return self.adriatic_index(1, 2, 1, 0.5)
def randic_type_di_index(self):
""" Adriatic index: Randic type di index"""
return self.adriatic_index(1, 2, 1, 1)
def randic_type_sdi_index(self):
""" Adriatic index: Randic type sdi index"""
return self.adriatic_index(1, 2, 1, 2)
def sum_indi_index(self):
""" Adriatic index: sum indi index"""
return self.adriatic_index(1, 2, 2, -1)
def sum_irdi_index(self):
""" Adriatic index: sum irdi index"""
return self.adriatic_index(1, 2, 2, -0.5)
def sum_rodi_index(self):
""" Adriatic index: sum rodi index"""
return self.adriatic_index(1, 2, 2, 0.5)
def sum_di_index(self):
""" Adriatic index: sum di index"""
return self.adriatic_index(1, 2, 2, 1)
def sum_sdi_index(self):
""" Adriatic index: sum sdi index"""
return self.adriatic_index(1, 2, 2, 2)
def inverse_sum_indi_index(self):
""" Adriatic index: inverse sum indi index"""
return self.adriatic_index(1, 2, 3, -1)
def inverse_sum_irdi_index(self):
""" Adriatic index: inverse sum irdi index"""
return self.adriatic_index(1, 2, 3, -0.5)
def inverse_sum_rodi_index(self):
""" Adriatic index: inverse sum rodi index"""
return self.adriatic_index(1, 2, 3, 0.5)
def inverse_sum_di_index(self):
""" Adriatic index: inverse sum di index"""
return self.adriatic_index(1, 2, 3, 1)
def inverse_sum_sdi_index(self):
""" Adriatic index: inverse sum sdi index"""
return self.adriatic_index(1, 2, 3, 2)
def misbalance_indi_index(self):
""" Adriatic index: misbalance indi index"""
return self.adriatic_index(1, 2, 4, -1)
def misbalance_irdi_index(self):
""" Adriatic index: misbalance irdi index"""
return self.adriatic_index(1, 2, 4, -0.5)
def misbalance_rodi_index(self):
""" Adriatic index: misbalance rodi index"""
return self.adriatic_index(1, 2, 4, 0.5)
def misbalance_di_index(self):
""" Adriatic index: misbalance di index"""
return self.adriatic_index(1, 2, 4, 1)
def misbalance_sdi_index(self):
""" Adriatic index: misbalance sdi index"""
return self.adriatic_index(1, 2, 4, 2)
def inverse_misbalance_indi_index(self):
""" Adriatic index: inverse misbalance indi index"""
return self.adriatic_index(1, 2, 5, -1)
def inverse_misbalance_irdi_index(self):
""" Adriatic index: inverse misbalance irdi index"""
return self.adriatic_index(1, 2, 5, -0.5)
def inverse_misbalance_rodi_index(self):
""" Adriatic index: inverse misbalance rodi index"""
return self.adriatic_index(1, 2, 5, 0.5)
def inverse_misbalance_di_index(self):
""" Adriatic index: inverse misbalance di index"""
return self.adriatic_index(1, 2, 5, 1)
def inverse_misbalance_sdi_index(self):
""" Adriatic index: inverse misbalance sdi index"""
return self.adriatic_index(1, 2, 5, 2)
def min_max_rodi_index(self):
""" Adriatic index: min-max rodi index"""
return self.adriatic_index(1, 2, 6, 0.5)
def min_max_di_index(self):
""" Adriatic index: min-max di index"""
return self.adriatic_index(1, 2, 6, 1)
def min_max_sdi_index(self):
""" Adriatic index: min-max sdi index"""
return self.adriatic_index(1, 2, 6, 2)
def max_min_rodi_index(self):
""" Adriatic index: max-min rodi index"""
return self.adriatic_index(1, 2, 7, 0.5)
def max_min_di_index(self):
""" Adriatic index: max-min di index"""
return self.adriatic_index(1, 2, 7, 1)
def max_min_sdi_index(self):
""" Adriatic index: max-min sdi index"""
return self.adriatic_index(1, 2, 7, 2)
def symmetric_division_rodi_index(self):
""" Adriatic index: symmetric division rodi index"""
return self.adriatic_index(1, 2, 8, 0.5)
def symmetric_division_di_index(self):
""" Adriatic index: symmetric division di index"""
return self.adriatic_index(1, 2, 8, 1)
def symmetric_division_sdi_index(self):
""" Adriatic index: symmetric division sdi index"""
return self.adriatic_index(1, 2, 8, 2)
def randic_type_hadi_index(self):
""" Adriatic index: Randic type hadi index"""
return self.adriatic_index(1, 3, 1, 0.5)
def randic_type_twodi_index(self):
""" Adriatic index: Randic type twodi index"""
return self.adriatic_index(1, 3, 1, 2)
def sum_hadi_index(self):
""" Adriatic index: sum hadi index"""
return self.adriatic_index(1, 3, 2, 0.5)
def sum_twodi_index(self):
""" Adriatic index: sum twodi index"""
return self.adriatic_index(1, 3, 2, 2)
def inverse_sum_hadi_index(self):
""" Adriatic index: inverse sum hadi index"""
return self.adriatic_index(1, 3, 3, 0.5)
def inverse_sum_twodi_index(self):
""" Adriatic index: inverse sum twodi index"""
return self.adriatic_index(1, 3, 3, 2)
def misbalance_hadi_index(self):
""" Adriatic index: misbalance hadi index"""
return self.adriatic_index(1, 3, 4, 0.5)
def misbalance_twodi_index(self):
""" Adriatic index: misbalance twodi index"""
return self.adriatic_index(1, 3, 4, 2)
def inverse_misbalance_hadi_index(self):
""" Adriatic index: inverse misbalance hadi index"""
return self.adriatic_index(1, 3, 5, 0.5)
def inverse_misbalance_twodi_index(self):
""" Adriatic index: inverse misbalance twodi index"""
return self.adriatic_index(1, 3, 5, 2)
def min_max_hadi_index(self):
""" Adriatic index: min-max hadi index"""
return self.adriatic_index(1, 3, 6, 0.5)
def min_max_twodi_index(self):
""" Adriatic index: min-max twodi index"""
return self.adriatic_index(1, 3, 6, 2)
def max_min_hadi_index(self):
""" Adriatic index: max-min hadi index"""
return self.adriatic_index(1, 3, 7, 0.5)
def max_min_twodi_index(self):
""" Adriatic index: max-min twodi index"""
return self.adriatic_index(1, 3, 7, 2)
def symmetric_division_hadi_index(self):
""" Adriatic index: symmetric division hadi index"""
return self.adriatic_index(1, 3, 8, 0.5)
def symmetric_division_twodi_index(self):
""" Adriatic index: symmetric division twodi index"""
return self.adriatic_index(1, 3, 8, 2)
| 32.742424 | 121 | 0.549923 | import numpy as np
class Mol():
__g6_string = ''
__A = []
__B = []
__L = []
__NL = []
__Q = []
__D = []
__RD = []
__Order = 0
__Edges = []
__Sage_graph = None
__NX_graph = None
__Degrees = []
__Spectrum = []
__Laplacian_spectrum = []
__Distance_spectrum = []
__Norm_laplacian_spectrum = []
__Signless_laplacian_spectrum = []
__RD_spectrum = []
__Is_connected = None
__Check_connectedness = True
def _reset_(self):
self.__g6_string = ''
self.__A = []
self.__B = []
self.__L = []
self.__NL = []
self.__Q = []
self.__D = []
self.__RD = []
self.__Order = 0
self.__Edges = []
self.__Sage_graph = None
self.__NX_graph = None
self.__Degrees = []
self.__Spectrum = []
self.__Laplacian_spectrum = []
self.__Distance_spectrum = []
self.__Norm_laplacian_spectrum = []
self.__Signless_laplacian_spectrum = []
self.__RD_spectrum = []
self.__Is_connected = None
def _set_A(self, A):
self.__A = A
def _set_Edges(self, edges):
self.__Edges = edges
def _set_Order(self, order):
self.__Order = order
def __init__(self, string=None, check_connectedness=True):
self.__Check_connectedness = check_connectedness
if string != None:
if string[0] == '>':
if string.startswith('>>graph6<<'):
string = string[10:]
elif string.startswith('>>sparse6<<'):
string = string[11:]
if string[0] == ':':
self.read_s6(string)
else:
self.read_g6(string)
def __repr__(self):
if self.__A != None:
return 'Molecular graph on ' + str(
self.__Order) + ' vertices and ' + str(self.size()) + ' edges'
return 'Empty Molecular graph'
def __len__(self):
if self.__A != None: return len(self.__A)
else: return 0
def set_check_connectedness(self, c):
self.check_connectedness = c
def g6_string(self):
return self.__g6_string
graph6_string = g6_string
def order(self):
return self.__Order
n = order
def edges(self):
return self.__Edges
def size(self):
return len(self.__Edges)
m = size
def vertices(self):
return range(self.__Order)
def sage_graph(self):
if self.__Sage_graph is None: self._init_sage_graph_()
return self.__Sage_graph
def NX_graph(self):
if self.__NX_graph is None:
import networkx as nx
self.__NX_graph = nx.Graph(self.__Edges)
return self.__NX_graph
nx_graph = NX_graph
def _init_sage_graph_(self):
from sage.graphs.graph import Graph
self.__Sage_graph = Graph(self.__Edges)
def read_g6(self, s):
def graph_bit(pos, off):
return ((ord(s[off + 1 + pos / 6]) - 63) & (2**(5 - pos % 6))) != 0
if s.startswith('>>graph6<<'):
s = s[10:]
self._reset_()
n = ord(s[0]) - 63
off = 0
if n == 63:
if ord(s[1]) - 63 != 63:
n = ((ord(s[1]) - 63) << 12) + (
(ord(s[2]) - 63) << 6) + ord(s[3]) - 63
off = 3
else:
n = ((ord(s[2]) - 63) << 30) + ((ord(s[3]) - 63) << 24) + (
(ord(s[4]) - 63) << 18) + ((ord(s[5]) - 63) << 12) + (
(ord(s[6]) - 63) << 6) + ord(s[7]) - 63
off = 7
self.__Order = n
self.__A = [[0 for col in range(n)] for row in range(n)]
i = 0
j = 1
self.__Edges = []
for x in range(n * (n - 1) / 2):
if graph_bit(x, off):
self.__A[i][j] = 1
self.__A[j][i] = 1
self.__Edges.append((i, j))
if j - i == 1:
i = 0
j += 1
else:
i += 1
self.__g6_string = s
read_graph6 = read_g6
def read_s6(self, s):
def graph_bit(pos, off):
return ((ord(s[off + 1 + pos / 6]) - 63) & (2**(5 - pos % 6))) != 0
if s.startswith('>>sparse6<<'):
s = s[11:]
if not s[0] == ':':
print('This is not a sparse6 format!')
return False
self._reset_()
s = s[1:]
n = ord(s[0]) - 63
off = 0
if n == 63:
if ord(s[1]) - 63 != 63:
n = ((ord(s[1]) - 63) << 12) + (
(ord(s[2]) - 63) << 6) + ord(s[3]) - 63
off = 3
else:
n = ((ord(s[2]) - 63) << 30) + ((ord(s[3]) - 63) << 24) + (
(ord(s[4]) - 63) << 18) + ((ord(s[5]) - 63) << 12) + (
(ord(s[6]) - 63) << 6) + ord(s[7]) - 63
off = 7
self.__Order = n
k = 1
while 1 << k < n:
k += 1
data = s[off + 1:]
def parseData():
chunks = iter(data)
d = None
dLen = 0
while 1:
if dLen < 1:
d = ord(next(chunks)) - 63
dLen = 6
dLen -= 1
b = (d >> dLen) & 1
x = d & ((1 << dLen) - 1)
xLen = dLen
while xLen < k:
d = ord(next(chunks)) - 63
dLen = 6
x = (x << 6) + d
xLen += 6
x = (x >> (xLen - k))
dLen = xLen - k
yield b, x
self.__A = [[0 for col in range(n)] for row in range(n)]
self.__Edges = []
v = 0
for b, x in parseData():
if b: v += 1
if x >= n:
break
elif x > v:
v = x
else:
self.__A[x][v] = 1
self.__A[v][x] = 1
self.__Edges.append((x, v))
self.__g6_string = ''
read_sparse6 = read_s6
def read_matrix(self, matrix):
if type(matrix) == np.matrix:
matrix = matrix.astype(int).tolist()
self._reset_()
self.__Order = len(matrix)
self.__A = matrix
for i in range(self.__Order):
for j in range(i):
if matrix[i][j] == 1:
self.__Edges.append((i, j))
def read_edgelist(self, edges):
nodes = []
for e in edges:
if not e[0] in nodes: nodes.append(e[0])
if not e[1] in nodes: nodes.append(e[1])
self._reset_()
self.__Order = len(nodes)
d = dict(zip(nodes, range(len(nodes))))
self.__Edges = [(d[e[0]], d[e[1]]) for e in edges]
self.__A = [[0 for col in range(self.__Order)]
for row in range(self.__Order)]
for i, j in self.__Edges:
self.__A[i][j] = 1
self.__A[j][i] = 1
def write_dot_file(self, filename):
f_out = open(filename, 'w')
f_out.writelines('graph Mol {\n')
for (i, j) in self.edges():
f_out.writelines(' ' + str(i) + ' -- ' + str(j) + ';\n')
f_out.writelines('}')
f_out.close()
def adjacency_matrix(self):
return self.__A
A = adjacency_matrix
def incidence_matrix(self):
if self.__B == []:
def func(u, v):
col = [0] * self.__Order
col[u] = 1
col[v] = 1
return col
b = map(lambda e: func(e), self.edges())
self.__B = map(list, zip(*b))
return self.__B
B = incidence_matrix
def laplacian_matrix(self):
if self.__L == []:
self.__L = np.diag(self.degrees()) - np.matrix(self.__A)
return self.__L
L = laplacian_matrix
def signless_laplacian_matrix(self):
if self.__Q == []:
self.__Q = np.diag(self.degrees()) + np.matrix(self.__A)
return self.__Q
Q = signless_laplacian_matrix
def normalized_laplacian_matrix(self):
= np.diag(np.power(self.degrees(), -.5))
d2 = np.diag(np.power(self.degrees(), .5))
self.__NL = d1 * self.laplacian_matrix() * d2
return self.__NL
NL = normalized_laplacian_matrix
def distance_matrix(self):
if self.__Order == 0: return []
if self.__D == []:
A = np.matrix(self.__A, dtype=float)
n, m = A.shape
I = np.identity(n)
A[A == 0] = np.inf
A[I == 1] = 0
for i in range(n):
r = A[i, :]
A = np.minimum(A, r + r.T)
self.__D = np.matrix(A, dtype=int)
return self.__D
D = distance_matrix
def reciprocal_distance_matrix(self):
rd = np.matrix(self.distance_matrix(), dtype=float)
for i in range(self.__Order):
for j in range(self.__Order):
if not rd[i, j] == 0: rd[i, j] = 1 / rd[i, j]
return rd
def resistance_distance_matrix(self):
if not self.is_connected() or self.__Order == 0:
return False
if self.__RD == []:
n = self.__Order
s = n * self.laplacian_matrix() + 1
sn = n * np.linalg.inv(s)
RD = np.ndarray((n, n))
for i in range(n):
for j in range(n):
RD[i, j] = np.float64(
np.longdouble(sn[i, i]) + np.longdouble(sn[j, j]) -
2 * np.longdouble(sn[i, j]))
self.__RD = RD
return self.__RD
def seidel_matrix(self):
n = self.__Order
return np.ones((n, n)) - np.identity(n) - 2 * np.matrix(self.__A)
S = seidel_matrix
def diameter(self):
if self.__Order == 0: return 0
return self.distance_matrix().max()
def degrees(self):
if self.__Degrees == []:
self.__Degrees = map(lambda r: sum(r), self.__A)
deg = degrees
def eccentricity(self):
if self.__Order == 0: return None
return self.distance_matrix().max(axis=0).tolist()[0]
def distances_from_vertex(self, v):
seen = {}
level = 0
nextlevel = [v]
while nextlevel:
thislevel = nextlevel
nextlevel = []
for v in thislevel:
if v not in seen:
seen[v] = level
nb = [
i
for (i, j) in zip(range(len(self.__A[v])), self.__A[v])
if j != 0
]
nextlevel.extend(nb)
level = level + 1
return seen
def is_connected(self):
if self.__Order == 0: return False
if not self.__Check_connectedness: return True
if self.__Is_connected is None:
self.__Is_connected = len(
self.distances_from_vertex(0)) == self.order()
return self.__Is_connected
def spectrum(self, matrix="adjacency"):
from numpy import linalg as la
if type(matrix) is str:
if self.__Order == 0: return []
if matrix == "adjacency" or matrix == "A":
if self.__Spectrum == []:
s = la.eigvalsh(self.__A).tolist()
s.sort(reverse=True)
self.__Spectrum = s
return self.__Spectrum
elif matrix == "laplacian" or matrix == "L":
if self.__Laplacian_spectrum == []:
s = la.eigvalsh(self.laplacian_matrix()).tolist()
s.sort(reverse=True)
self.__Laplacian_spectrum = map(
lambda x: x if x > 0 else 0, s)
return self.__Laplacian_spectrum
elif matrix == "distance" or matrix == "D":
if self.__Distance_spectrum == []:
s = la.eigvalsh(self.distance_matrix()).tolist()
s.sort(reverse=True)
self.__Distance_spectrum = s
return self.__Distance_spectrum
elif matrix == "signless_laplacian" or matrix == "Q":
if self.__Signless_laplacian_spectrum == []:
ignless_laplacian_matrix()).tolist()
s.sort(reverse=True)
self.__Signless_laplacian_spectrum = map(
lambda x: x if x > 0 else 0, s)
return self.__Signless_laplacian_spectrum
elif matrix == "normalized_laplacian" or matrix == "NL":
if self.__Norm_laplacian_spectrum == []:
self.normalized_laplacian_matrix()).tolist()
s.sort(reverse=True)
self.__Norm_laplacian_spectrum = s
return self.__Norm_laplacian_spectrum
elif matrix == "resistance_distance" or matrix == "RD":
if self.__RD_spectrum == []:
s = la.eigvalsh(self.resistance_distance_matrix()).tolist()
s.sort(reverse=True)
self.__RD_spectrum = s
return self.__RD_spectrum
elif matrix == "reciprocal_distance":
s = la.eigvalsh(self.reciprocal_distance_matrix()).tolist()
s.sort(reverse=True)
return s
else:
return False
else:
s = la.eigvalsh(matrix).tolist()
s.sort(reverse=True)
return s
def spectral_moment(self, k, matrix="adjacency"):
return np.sum(np.power(self.spectrum(matrix), k))
def spectral_radius(self, matrix="adjacency"):
s = self.spectrum(matrix)
return max(abs(s[0]), abs(s[len(s) - 1]))
def energy(self, matrix="adjacency"):
if self.__Order == 0: return False
s = self.spectrum(matrix)
a = np.sum(s, dtype=np.longdouble) / len(s)
return np.float64(
np.sum(map(lambda x: abs(x - a), s), dtype=np.longdouble))
def incidence_energy(self):
if self.__Order == 0: return False
from numpy.linalg import svd
return np.float64(
np.sum(svd(self.incidence_matrix(), compute_uv=False),
dtype=np.longdouble))
def zagreb_m1_index(self):
return sum(map(lambda d: d**2, self.degrees()))
def zagreb_m2_index(self):
return sum(
map(lambda e1, e2: self.degrees()[e1] * self.degrees()[e2],
self.edges()))
def zagreb_m1_coindex(self):
return 2 * self.size() * (self.__Order - 1) - self.zagreb_m1_index()
def zagreb_m2_coindex(self):
return 2 * (self.size()**
2) - self.zagreb_m2_index() - self.zagreb_m1_index() * .5
def connectivity_index(self, power):
E = self.edges()
if len(E) == 0: return 0
return np.float64(
np.sum(map(
lambda e1, e2:
(self.degrees()[e1] * self.degrees()[e2])**power, E),
dtype=np.longdouble))
def augmented_zagreb_index(self):
E = self.edges()
d = self.degrees()
if len(E) < 2: return 0
return np.float64(
np.sum(map(
lambda e1, e2: (np.longdouble(d[e1] * d[e2]) /
(d[e1] + d[e2] - 2))**3, E),
dtype=np.longdouble))
def sum_connectivity_index(self):
E = self.edges()
if len(E) == 0: return 0
return np.float64(
np.sum(map(
lambda e1, e2:
(self.degrees()[e1] + self.degrees()[e2])**(-0.5), E),
dtype=np.longdouble))
def geometric_arithmetic_index(self):
E = self.edges()
if len(E) == 0: return 0
return np.float64(
np.sum(map(
lambda e1, e2: 2.0 * np.sqrt(self.degrees()[e1] * self.degrees(
)[e2]) / (self.degrees()[e1] + self.degrees()[e2]), E),
dtype=np.longdouble))
def eccentric_connectivity_index(self):
if not self.is_connected():
return False
return sum(map(lambda a, b: a * b, self.degrees(),
self.eccentricity()))
def randic_index(self):
return self.connectivity_index(-0.5)
def atom_bond_connectivity_index(self):
s = np.longdouble(0)
for u, v in self.edges():
d1 = np.float64(self.degrees()[u])
d2 = np.float64(self.degrees()[v])
s += np.longdouble(((d1 + d2 - 2) / (d1 * d2))**.5)
return np.float64(s)
def estrada_index(self, matrix="adjacency"):
return np.float64(
np.sum(map(lambda x: np.exp(x.real), self.spectrum(matrix)),
dtype=np.longdouble))
def distance_estrada_index(self):
return self.estrada_index('distance')
def degree_distance(self):
if not self.is_connected():
return False
dd = np.matrix(self.degrees()) * self.distance_matrix().sum(axis=1)
return dd[0, 0]
def reverse_degree_distance(self):
if not self.is_connected():
return False
return 2 * (self.order() - 1) * len(
self.edges()) * self.diameter() - self.degree_distance()
def molecular_topological_index(self):
if not self.is_connected():
return False
A = np.matrix(self.__A)
d = np.matrix(self.degrees())
return np.float64(
((A + self.distance_matrix()) * d.T).sum(dtype=np.longdouble))
def eccentric_distance_sum(self):
if not self.is_connected():
return False
return (self.eccentricity() * self.distance_matrix().sum(axis=1))[0, 0]
def balaban_j_index(self):
if not self.is_connected():
return False
ds = self.distance_matrix().sum(axis=1)
m = len(self.edges())
k = (m / (m - self.__Order + 2.0))
return np.float64(
k *
np.sum(map(lambda u, v: 1 / np.sqrt(
(ds[u][0, 0] * ds[v][0, 0])), self.edges()),
dtype=np.longdouble))
def sum_balaban_index(self):
if not self.is_connected():
return False
ds = self.distance_matrix().sum(axis=1)
m = len(self.edges())
k = (m / (m - self.__Order + 2.0))
return np.float64(
k *
np.sum(map(lambda u, v: 1 / np.sqrt(
(ds[u][0, 0] + ds[v][0, 0])), self.edges()),
dtype=np.longdouble))
def kirchhoff_index(self):
if not self.is_connected():
return False
return np.float64(
self.resistance_distance_matrix().sum(dtype=np.longdouble) / 2)
resistance = kirchhoff_index
def wiener_index(self):
if not self.is_connected():
return False
return self.distance_matrix().sum(dtype=np.float64) / 2
def terminal_wiener_index(self):
if not self.is_connected(): return False
s = 0
for u in range(self.order()):
if self.degrees()[u] != 1: continue
for v in range(u + 1, self.order()):
if self.degrees()[v] == 1:
s = s + self.distance_matrix()[u, v]
return s
def reverse_wiener_index(self):
if not self.is_connected():
return False
return self.diameter() * (
self.__Order * (self.__Order - 1)) / 2 - self.wiener_index()
def hyper_wiener_index(self):
if not self.is_connected():
return False
return (
np.power(self.distance_matrix(), 2).sum() +
self.distance_matrix().sum()) / 4
def harary_index(self):
if not self.is_connected():
return False
return np.float64(
self.reciprocal_distance_matrix().sum(dtype=np.longdouble)) / 2
def LEL(self):
return np.float64(
np.sum(map(lambda x: np.sqrt(x), self.spectrum('laplacian')),
dtype=np.longdouble))
def multiplicative_sum_zagreb_index(self):
d = self.degrees()
return np.float64(
np.sum(map(lambda u, v: np.log(np.float64(d[u] + d[v])),
self.edges()),
dtype=np.longdouble))
def multiplicative_p2_zagreb_index(self):
d = self.degrees()
return np.float64(
np.sum(map(lambda u, v: np.log(np.float64(d[u] * d[v])),
self.edges()),
dtype=np.longdouble))
def multiplicative_p1_zagreb_index(self):
d = self.degrees()
return np.float64(
np.sum(map(lambda v: np.log(np.float64(d[v]**2)), self.vertices()),
dtype=np.longdouble))
def szeged_index(self):
if not self.is_connected():
return False
s = 0
D = self.distance_matrix()
for u, v in self.edges():
diff = D[u, :] - D[v, :]
s += (diff > 0).sum() * (diff < 0).sum()
return float(s)
def revised_szeged_index(self):
if not self.is_connected():
return False
s = 0.0
D = self.distance_matrix()
for u, v in self.edges():
diff = D[u, :] - D[v, :]
o = (diff == 0).sum()
s += ((diff > 0).sum() + .5 * o) * ((diff < 0).sum() + .5 * o)
return s
def homo_lumo_index(self):
if not self.is_connected():
return False
n = self.order()
if n % 2 == 0:
h = int(n / 2 -
1)
l = int(h + 1)
return max([abs(self.spectrum()[h]), abs(self.spectrum()[l])])
h = int((n - 1) / 2)
return abs(self.spectrum()[h])
HL_index = homo_lumo_index
def all_adriatic(self):
r = []
for p in [0, 1]:
for i in [1, 2, 3]:
for j in range(1, 9):
if i == 3:
for a in [0.5, 2]:
r.append((p, i, j, a))
elif i == 2 and j in range(1, 6):
for a in [-1, -0.5, 0.5, 1, 2]:
r.append((p, i, j, a))
elif i == 2 or i == 1:
for a in [0.5, 1, 2]:
r.append((p, i, j, a))
return r
def adriatic_name(self, p, i, j, a):
name1 = {1:'Randic type ',\
2:'sum ',\
3:'inverse sum ', \
4:'misbalance ', \
5:'inverse misbalance ', \
6:'min-max ', \
7:'max-min ', \
8:'symmetric division '}
name2 = {(1, 0.5):'lor',\
(1,1):'lo', \
(1,2):'los', \
(2,-1):'in', \
(2, -0.5):'ir', \
(2, 0.5):'ro', \
(2,1):'', \
(2,2):'s', \
(3, 0.5):'ha', \
(3,2):'two'}
name3 = {0: 'deg', 1: 'di'}
return (name1[j] + name2[(i, a)] + name3[p])
def _adriatic_entry_(self, du, dv, i, j, a):
phi = {
1: lambda x, a: np.log(x)**a,
2: lambda x, a: x**a,
3: lambda x, a: a**x
}
gamma = {\
1: lambda x,y: x*y,\
2: lambda x,y: x+y,\
3: lambda x,y: 0 if x+y==0 else 1.0/(x+y),\
4: lambda x,y: abs(x-y),\
5: lambda x,y: 0 if x==y else 1.0/abs(x-y),\
6: lambda x,y: 0 if max(x,y)==0 else min(x,y)/max(x,y),\
7: lambda x,y: 0 if min(x,y)==0 else max(x,y)/min(x,y),\
8: lambda x,y: 0 if x==0 or y==0 else x/y+y/x}
return gamma[j](phi[i](du, a), phi[i](dv, a))
def adriatic_matrix(self, p, i, j, a):
if p == 0: d = self.degrees()
else: d = self.distance_matrix().sum(axis=0).tolist()[0]
AM = [[0] * self.order() for k in range(self.order())]
for u, v in self.edges():
AM[u][v] = AM[v][u] = self._adriatic_entry_(
np.float64(d[u]), np.float64(d[v]), i, j, a)
return AM
def adriatic_index(self, p, i, j, a):
if p == 0: d = self.degrees()
else: d = self.distance_matrix().sum(axis=0).tolist()[0]
func = lambda u, v: self._adriatic_entry_(np.float64(d[u]),
np.float64(d[v]), i, j, a)
return np.float64(np.sum(map(func, self.edges()), dtype=np.longdouble))
    # ------------------------------------------------------------------
    # Named degree-based (p = 0) Adriatic indices.  Each one-line wrapper
    # fixes (p, i, j, a) for adriatic_index(); the method names encode the
    # parameters (see adriatic_name()):
    #   gamma prefix: randic_type (j=1), sum (j=2), inverse_sum (j=3),
    #                 misbalance (j=4), inverse_misbalance (j=5),
    #                 min_max (j=6), max_min (j=7), symmetric_division (j=8)
    #   phi infix:    lor/lo/los  -> log(x)**a, a = 0.5/1/2   (i = 1)
    #                 in/ir/ro//s -> x**a, a = -1/-0.5/0.5/1/2 (i = 2)
    #                 ha/two      -> a**x, a = 0.5/2           (i = 3)
    #   suffix:       deg -> vertex degrees (p = 0)
    # ------------------------------------------------------------------
    def randic_type_lordeg_index(self):
        return self.adriatic_index(0, 1, 1, 0.5)
    def randic_type_lodeg_index(self):
        return self.adriatic_index(0, 1, 1, 1)
    def randic_type_losdeg_index(self):
        return self.adriatic_index(0, 1, 1, 2)
    def sum_lordeg_index(self):
        return self.adriatic_index(0, 1, 2, 0.5)
    def sum_lodeg_index(self):
        return self.adriatic_index(0, 1, 2, 1)
    def sum_losdeg_index(self):
        return self.adriatic_index(0, 1, 2, 2)
    def inverse_sum_lordeg_index(self):
        return self.adriatic_index(0, 1, 3, 0.5)
    def inverse_sum_lodeg_index(self):
        return self.adriatic_index(0, 1, 3, 1)
    def inverse_sum_losdeg_index(self):
        return self.adriatic_index(0, 1, 3, 2)
    def misbalance_lordeg_index(self):
        return self.adriatic_index(0, 1, 4, 0.5)
    def misbalance_lodeg_index(self):
        return self.adriatic_index(0, 1, 4, 1)
    def misbalance_losdeg_index(self):
        return self.adriatic_index(0, 1, 4, 2)
    def inverse_misbalance_lordeg_index(self):
        return self.adriatic_index(0, 1, 5, 0.5)
    def inverse_misbalance_lodeg_index(self):
        return self.adriatic_index(0, 1, 5, 1)
    def inverse_misbalance_losdeg_index(self):
        return self.adriatic_index(0, 1, 5, 2)
    def min_max_lordeg_index(self):
        return self.adriatic_index(0, 1, 6, 0.5)
    def min_max_lodeg_index(self):
        return self.adriatic_index(0, 1, 6, 1)
    def min_max_losdeg_index(self):
        return self.adriatic_index(0, 1, 6, 2)
    def max_min_lordeg_index(self):
        return self.adriatic_index(0, 1, 7, 0.5)
    def max_min_lodeg_index(self):
        return self.adriatic_index(0, 1, 7, 1)
    def max_min_losdeg_index(self):
        return self.adriatic_index(0, 1, 7, 2)
    def symmetric_division_lordeg_index(self):
        return self.adriatic_index(0, 1, 8, 0.5)
    def symmetric_division_lodeg_index(self):
        return self.adriatic_index(0, 1, 8, 1)
    def symmetric_division_losdeg_index(self):
        return self.adriatic_index(0, 1, 8, 2)
    # --- i = 2 (power transform) with j = 1..5: five exponents each ---
    def randic_type_indeg_index(self):
        return self.adriatic_index(0, 2, 1, -1)
    def randic_type_irdeg_index(self):
        return self.adriatic_index(0, 2, 1, -0.5)
    def randic_type_rodeg_index(self):
        return self.adriatic_index(0, 2, 1, 0.5)
    def randic_type_deg_index(self):
        return self.adriatic_index(0, 2, 1, 1)
    def randic_type_sdeg_index(self):
        return self.adriatic_index(0, 2, 1, 2)
    def sum_indeg_index(self):
        return self.adriatic_index(0, 2, 2, -1)
    def sum_irdeg_index(self):
        return self.adriatic_index(0, 2, 2, -0.5)
    def sum_rodeg_index(self):
        return self.adriatic_index(0, 2, 2, 0.5)
    def sum_deg_index(self):
        return self.adriatic_index(0, 2, 2, 1)
    def sum_sdeg_index(self):
        return self.adriatic_index(0, 2, 2, 2)
    def inverse_sum_indeg_index(self):
        return self.adriatic_index(0, 2, 3, -1)
    def inverse_sum_irdeg_index(self):
        return self.adriatic_index(0, 2, 3, -0.5)
    def inverse_sum_rodeg_index(self):
        return self.adriatic_index(0, 2, 3, 0.5)
    def inverse_sum_deg_index(self):
        return self.adriatic_index(0, 2, 3, 1)
    def inverse_sum_sdeg_index(self):
        return self.adriatic_index(0, 2, 3, 2)
    def misbalance_indeg_index(self):
        return self.adriatic_index(0, 2, 4, -1)
    def misbalance_irdeg_index(self):
        return self.adriatic_index(0, 2, 4, -0.5)
    def misbalance_rodeg_index(self):
        return self.adriatic_index(0, 2, 4, 0.5)
    def misbalance_deg_index(self):
        return self.adriatic_index(0, 2, 4, 1)
    def misbalance_sdeg_index(self):
        return self.adriatic_index(0, 2, 4, 2)
    def inverse_misbalance_indeg_index(self):
        return self.adriatic_index(0, 2, 5, -1)
    def inverse_misbalance_irdeg_index(self):
        return self.adriatic_index(0, 2, 5, -0.5)
    def inverse_misbalance_rodeg_index(self):
        return self.adriatic_index(0, 2, 5, 0.5)
    def inverse_misbalance_deg_index(self):
        return self.adriatic_index(0, 2, 5, 1)
    def inverse_misbalance_sdeg_index(self):
        return self.adriatic_index(0, 2, 5, 2)
    # --- i = 2 with j = 6..8: only the non-negative exponents ---
    def min_max_rodeg_index(self):
        return self.adriatic_index(0, 2, 6, 0.5)
    def min_max_deg_index(self):
        return self.adriatic_index(0, 2, 6, 1)
    def min_max_sdeg_index(self):
        return self.adriatic_index(0, 2, 6, 2)
    def max_min_rodeg_index(self):
        return self.adriatic_index(0, 2, 7, 0.5)
    def max_min_deg_index(self):
        return self.adriatic_index(0, 2, 7, 1)
    def max_min_sdeg_index(self):
        return self.adriatic_index(0, 2, 7, 2)
    def symmetric_division_rodeg_index(self):
        return self.adriatic_index(0, 2, 8, 0.5)
    def symmetric_division_deg_index(self):
        return self.adriatic_index(0, 2, 8, 1)
    def symmetric_division_sdeg_index(self):
        return self.adriatic_index(0, 2, 8, 2)
    # --- i = 3 (exponential transform): bases 0.5 ("ha") and 2 ("two") ---
    def randic_type_hadeg_index(self):
        return self.adriatic_index(0, 3, 1, 0.5)
    def randic_type_twodeg_index(self):
        return self.adriatic_index(0, 3, 1, 2)
    def sum_hadeg_index(self):
        return self.adriatic_index(0, 3, 2, 0.5)
    def sum_twodeg_index(self):
        return self.adriatic_index(0, 3, 2, 2)
    def inverse_sum_hadeg_index(self):
        return self.adriatic_index(0, 3, 3, 0.5)
    def inverse_sum_twodeg_index(self):
        return self.adriatic_index(0, 3, 3, 2)
    def misbalance_hadeg_index(self):
        return self.adriatic_index(0, 3, 4, 0.5)
    def misbalance_twodeg_index(self):
        return self.adriatic_index(0, 3, 4, 2)
    def inverse_misbalance_hadeg_index(self):
        return self.adriatic_index(0, 3, 5, 0.5)
    def inverse_misbalance_twodeg_index(self):
        return self.adriatic_index(0, 3, 5, 2)
    def min_max_hadeg_index(self):
        return self.adriatic_index(0, 3, 6, 0.5)
    def min_max_twodeg_index(self):
        return self.adriatic_index(0, 3, 6, 2)
    def max_min_hadeg_index(self):
        return self.adriatic_index(0, 3, 7, 0.5)
    def max_min_twodeg_index(self):
        return self.adriatic_index(0, 3, 7, 2)
    def symmetric_division_hadeg_index(self):
        return self.adriatic_index(0, 3, 8, 0.5)
    def symmetric_division_twodeg_index(self):
        return self.adriatic_index(0, 3, 8, 2)
    # ------------------------------------------------------------------
    # Named distance-based (p = 1) Adriatic indices: identical scheme to
    # the *deg wrappers above, but vertices are weighted by column sums
    # of the distance matrix instead of by degree (suffix "di").
    # ------------------------------------------------------------------
    def randic_type_lordi_index(self):
        return self.adriatic_index(1, 1, 1, 0.5)
    def randic_type_lodi_index(self):
        return self.adriatic_index(1, 1, 1, 1)
    def randic_type_losdi_index(self):
        return self.adriatic_index(1, 1, 1, 2)
    def sum_lordi_index(self):
        return self.adriatic_index(1, 1, 2, 0.5)
    def sum_lodi_index(self):
        return self.adriatic_index(1, 1, 2, 1)
    def sum_losdi_index(self):
        return self.adriatic_index(1, 1, 2, 2)
    def inverse_sum_lordi_index(self):
        return self.adriatic_index(1, 1, 3, 0.5)
    def inverse_sum_lodi_index(self):
        return self.adriatic_index(1, 1, 3, 1)
    def inverse_sum_losdi_index(self):
        return self.adriatic_index(1, 1, 3, 2)
    def misbalance_lordi_index(self):
        return self.adriatic_index(1, 1, 4, 0.5)
    def misbalance_lodi_index(self):
        return self.adriatic_index(1, 1, 4, 1)
    def misbalance_losdi_index(self):
        return self.adriatic_index(1, 1, 4, 2)
    def inverse_misbalance_lordi_index(self):
        return self.adriatic_index(1, 1, 5, 0.5)
    def inverse_misbalance_lodi_index(self):
        return self.adriatic_index(1, 1, 5, 1)
    def inverse_misbalance_losdi_index(self):
        return self.adriatic_index(1, 1, 5, 2)
    def min_max_lordi_index(self):
        return self.adriatic_index(1, 1, 6, 0.5)
    def min_max_lodi_index(self):
        return self.adriatic_index(1, 1, 6, 1)
    def min_max_losdi_index(self):
        return self.adriatic_index(1, 1, 6, 2)
    def max_min_lordi_index(self):
        return self.adriatic_index(1, 1, 7, 0.5)
    def max_min_lodi_index(self):
        return self.adriatic_index(1, 1, 7, 1)
    def max_min_losdi_index(self):
        return self.adriatic_index(1, 1, 7, 2)
    def symmetric_division_lordi_index(self):
        return self.adriatic_index(1, 1, 8, 0.5)
    def symmetric_division_lodi_index(self):
        return self.adriatic_index(1, 1, 8, 1)
    def symmetric_division_losdi_index(self):
        return self.adriatic_index(1, 1, 8, 2)
    # --- i = 2 (power transform) with j = 1..5: five exponents each ---
    def randic_type_indi_index(self):
        return self.adriatic_index(1, 2, 1, -1)
    def randic_type_irdi_index(self):
        return self.adriatic_index(1, 2, 1, -0.5)
    def randic_type_rodi_index(self):
        return self.adriatic_index(1, 2, 1, 0.5)
    def randic_type_di_index(self):
        return self.adriatic_index(1, 2, 1, 1)
    def randic_type_sdi_index(self):
        return self.adriatic_index(1, 2, 1, 2)
    def sum_indi_index(self):
        return self.adriatic_index(1, 2, 2, -1)
    def sum_irdi_index(self):
        return self.adriatic_index(1, 2, 2, -0.5)
    def sum_rodi_index(self):
        return self.adriatic_index(1, 2, 2, 0.5)
    def sum_di_index(self):
        return self.adriatic_index(1, 2, 2, 1)
    def sum_sdi_index(self):
        return self.adriatic_index(1, 2, 2, 2)
    def inverse_sum_indi_index(self):
        return self.adriatic_index(1, 2, 3, -1)
    def inverse_sum_irdi_index(self):
        return self.adriatic_index(1, 2, 3, -0.5)
    def inverse_sum_rodi_index(self):
        return self.adriatic_index(1, 2, 3, 0.5)
    def inverse_sum_di_index(self):
        return self.adriatic_index(1, 2, 3, 1)
    def inverse_sum_sdi_index(self):
        return self.adriatic_index(1, 2, 3, 2)
    def misbalance_indi_index(self):
        return self.adriatic_index(1, 2, 4, -1)
    def misbalance_irdi_index(self):
        return self.adriatic_index(1, 2, 4, -0.5)
    def misbalance_rodi_index(self):
        return self.adriatic_index(1, 2, 4, 0.5)
    def misbalance_di_index(self):
        return self.adriatic_index(1, 2, 4, 1)
    def misbalance_sdi_index(self):
        return self.adriatic_index(1, 2, 4, 2)
    def inverse_misbalance_indi_index(self):
        return self.adriatic_index(1, 2, 5, -1)
    def inverse_misbalance_irdi_index(self):
        return self.adriatic_index(1, 2, 5, -0.5)
    def inverse_misbalance_rodi_index(self):
        return self.adriatic_index(1, 2, 5, 0.5)
    def inverse_misbalance_di_index(self):
        return self.adriatic_index(1, 2, 5, 1)
    def inverse_misbalance_sdi_index(self):
        return self.adriatic_index(1, 2, 5, 2)
    # --- i = 2 with j = 6..8: only the non-negative exponents ---
    def min_max_rodi_index(self):
        return self.adriatic_index(1, 2, 6, 0.5)
    def min_max_di_index(self):
        return self.adriatic_index(1, 2, 6, 1)
    def min_max_sdi_index(self):
        return self.adriatic_index(1, 2, 6, 2)
    def max_min_rodi_index(self):
        return self.adriatic_index(1, 2, 7, 0.5)
    def max_min_di_index(self):
        return self.adriatic_index(1, 2, 7, 1)
    def max_min_sdi_index(self):
        return self.adriatic_index(1, 2, 7, 2)
    def symmetric_division_rodi_index(self):
        return self.adriatic_index(1, 2, 8, 0.5)
    def symmetric_division_di_index(self):
        return self.adriatic_index(1, 2, 8, 1)
    def symmetric_division_sdi_index(self):
        return self.adriatic_index(1, 2, 8, 2)
    # --- i = 3 (exponential transform): bases 0.5 ("ha") and 2 ("two") ---
    def randic_type_hadi_index(self):
        return self.adriatic_index(1, 3, 1, 0.5)
    def randic_type_twodi_index(self):
        return self.adriatic_index(1, 3, 1, 2)
    def sum_hadi_index(self):
        return self.adriatic_index(1, 3, 2, 0.5)
    def sum_twodi_index(self):
        return self.adriatic_index(1, 3, 2, 2)
    def inverse_sum_hadi_index(self):
        return self.adriatic_index(1, 3, 3, 0.5)
    def inverse_sum_twodi_index(self):
        return self.adriatic_index(1, 3, 3, 2)
    def misbalance_hadi_index(self):
        return self.adriatic_index(1, 3, 4, 0.5)
    def misbalance_twodi_index(self):
        return self.adriatic_index(1, 3, 4, 2)
    def inverse_misbalance_hadi_index(self):
        return self.adriatic_index(1, 3, 5, 0.5)
    def inverse_misbalance_twodi_index(self):
        return self.adriatic_index(1, 3, 5, 2)
    def min_max_hadi_index(self):
        return self.adriatic_index(1, 3, 6, 0.5)
    def min_max_twodi_index(self):
        return self.adriatic_index(1, 3, 6, 2)
    def max_min_hadi_index(self):
        return self.adriatic_index(1, 3, 7, 0.5)
    def max_min_twodi_index(self):
        return self.adriatic_index(1, 3, 7, 2)
    def symmetric_division_hadi_index(self):
        return self.adriatic_index(1, 3, 8, 0.5)
    def symmetric_division_twodi_index(self):
        return self.adriatic_index(1, 3, 8, 2)
| true | true |
f71e212b8f5aad5d329910a235290ceae995a466 | 677 | py | Python | lightbike/tests/test_map.py | ethancharles02/cse210-project | 280b67ae69e84a334b807232c208a4ca4d27c37b | [
"MIT"
] | null | null | null | lightbike/tests/test_map.py | ethancharles02/cse210-project | 280b67ae69e84a334b807232c208a4ca4d27c37b | [
"MIT"
] | null | null | null | lightbike/tests/test_map.py | ethancharles02/cse210-project | 280b67ae69e84a334b807232c208a4ca4d27c37b | [
"MIT"
] | null | null | null | import pytest
from data.map import Map
from data import constants
def test_set_get_map():
    """set_map() should store the grid exactly as given; get_map() returns it."""
    # Renamed the local from `map` to avoid shadowing the builtin.
    grid_map = Map()
    grid_map.set_map(
        [
            [(0, 0), constants.DEFAULT_WALL, 0],
            [(0, 1), constants.DEFAULT_WALL, 90],
            [(0, 2), constants.DEFAULT_WALL, 180]
        ]
    )
    # Compare against a fresh literal so the assertion cannot trivially
    # succeed by identity if Map stores the very list it was handed.
    assert grid_map.get_map() == [
        [(0, 0), constants.DEFAULT_WALL, 0],
        [(0, 1), constants.DEFAULT_WALL, 90],
        [(0, 2), constants.DEFAULT_WALL, 180]
    ]
def test_set_get_mapxy():
    """Map width/height setters should round-trip through their getters."""
    game_map = Map()
    width, height = 20, 15
    game_map.set_mapx(width)
    game_map.set_mapy(height)
    assert game_map.get_mapx() == width
    assert game_map.get_mapy() == height
# pytest.main(["-v", "--tb=no", "test_map.py"]) | 21.83871 | 49 | 0.549483 | import pytest
from data.map import Map
from data import constants
def test_set_get_map():
map = Map()
map.set_map(
[
[(0, 0), constants.DEFAULT_WALL, 0],
[(0, 1), constants.DEFAULT_WALL, 90],
[(0, 2), constants.DEFAULT_WALL, 180]
]
)
assert map.get_map() == [
[(0, 0), constants.DEFAULT_WALL, 0],
[(0, 1), constants.DEFAULT_WALL, 90],
[(0, 2), constants.DEFAULT_WALL, 180]
]
def test_set_get_mapxy():
map = Map()
map.set_mapx(20)
map.set_mapy(15)
assert map.get_mapx() == 20
assert map.get_mapy() == 15
| true | true |
f71e21ff4de761faf4767fffe958f0936712b461 | 5,271 | py | Python | synapse/python_dependencies.py | Benjamin-L/synapse | 35442efb758960d72927d8bd698be657eb0e3037 | [
"Apache-2.0"
] | 1 | 2019-03-20T12:41:08.000Z | 2019-03-20T12:41:08.000Z | synapse/python_dependencies.py | Benjamin-L/synapse | 35442efb758960d72927d8bd698be657eb0e3037 | [
"Apache-2.0"
] | null | null | null | synapse/python_dependencies.py | Benjamin-L/synapse | 35442efb758960d72927d8bd698be657eb0e3037 | [
"Apache-2.0"
] | null | null | null | # Copyright 2015, 2016 OpenMarket Ltd
# Copyright 2017 Vector Creations Ltd
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from pkg_resources import DistributionNotFound, VersionConflict, get_distribution
logger = logging.getLogger(__name__)
# REQUIREMENTS is a simple list of requirement specifiers[1], and must be
# installed. It is passed to setup() as install_requires in setup.py.
#
# CONDITIONAL_REQUIREMENTS is the optional dependencies, represented as a dict
# of lists. The dict key is the optional dependency name and can be passed to
# pip when installing. The list is a series of requirement specifiers[1] to be
# installed when that optional dependency requirement is specified. It is passed
# to setup() as extras_require in setup.py
#
# [1] https://pip.pypa.io/en/stable/reference/pip_install/#requirement-specifiers.
# Mandatory runtime dependencies (requirement specifiers for install_requires).
REQUIREMENTS = [
    "jsonschema>=2.5.1",
    "frozendict>=1",
    "unpaddedbase64>=1.1.0",
    "canonicaljson>=1.1.3",
    "signedjson>=1.0.0",
    "pynacl>=1.2.1",
    "service_identity>=16.0.0",
    # our logcontext handling relies on the ability to cancel inlineCallbacks
    # (https://twistedmatrix.com/trac/ticket/4632) which landed in Twisted 18.7.
    "Twisted>=18.7.0",
    "treq>=15.1",
    # Twisted has required pyopenssl 16.0 since about Twisted 16.6.
    "pyopenssl>=16.0.0",
    "pyyaml>=3.11",
    "pyasn1>=0.1.9",
    "pyasn1-modules>=0.0.7",
    "daemonize>=2.3.1",
    "bcrypt>=3.1.0",
    "pillow>=3.1.2",
    "sortedcontainers>=1.4.4",
    "psutil>=2.0.0",
    "pymacaroons>=0.13.0",
    "msgpack>=0.5.0",
    "phonenumbers>=8.2.0",
    "six>=1.10",
    # prometheus_client 0.4.0 changed the format of counter metrics
    # (cf https://github.com/matrix-org/synapse/issues/4001)
    "prometheus_client>=0.0.18,<0.4.0",
    # we use attr.s(slots), which arrived in 16.0.0
    # Twisted 18.7.0 requires attrs>=17.4.0
    "attrs>=17.4.0",
    "netaddr>=0.7.18",
]
# Optional dependencies, keyed by extra name (pip "extras" / setup() extras_require).
CONDITIONAL_REQUIREMENTS = {
    "email.enable_notifs": ["Jinja2>=2.9", "bleach>=1.4.2"],
    "matrix-synapse-ldap3": ["matrix-synapse-ldap3>=0.1"],
    "postgres": ["psycopg2>=2.6"],
    # ConsentResource uses select_autoescape, which arrived in jinja 2.9
    "resources.consent": ["Jinja2>=2.9"],
    # ACME support is required to provision TLS certificates from authorities
    # that use the protocol, such as Let's Encrypt.
    "acme": ["txacme>=0.9.2"],
    "saml2": ["pysaml2>=4.5.0"],
    "url_preview": ["lxml>=3.5.0"],
    "test": ["mock>=2.0", "parameterized"],
    "sentry": ["sentry-sdk>=0.7.2"],
}
def list_requirements():
    """Return every requirement specifier, mandatory and optional alike."""
    merged = set(REQUIREMENTS)
    for optional_deps in CONDITIONAL_REQUIREMENTS.values():
        merged = set(optional_deps) | merged
    return list(merged)
class DependencyException(Exception):
    """Raised when required python packages are missing or conflicting.

    Constructed with a single argument: the list of unmet dependency
    specifiers.
    """

    @property
    def dependencies(self):
        """Yield each unmet dependency specifier, double-quoted."""
        for name in self.args[0]:
            yield '"%s"' % (name,)

    @property
    def message(self):
        """A multi-line, human-readable report including the pip fix command."""
        quoted = " ".join(self.dependencies)
        return "\n".join([
            "Missing Requirements: %s" % (", ".join(self.dependencies),),
            "To install run:",
            "    pip install --upgrade --force %s" % (quoted,),
            "",
        ])
def check_requirements(for_feature=None, _get_distribution=get_distribution):
    """Verify that required packages are installed at acceptable versions.

    Args:
        for_feature (str|None): if given, check only that feature's optional
            dependencies; otherwise check all mandatory requirements and
            additionally report outdated (but not absent) optional ones.
        _get_distribution (callable): pkg_resources lookup, injectable so
            tests can simulate missing/conflicting distributions.

    Raises:
        DependencyException: if any checked dependency is missing or at a
            conflicting version.
    """
    deps_needed = []
    errors = []
    if for_feature:
        reqs = CONDITIONAL_REQUIREMENTS[for_feature]
    else:
        reqs = REQUIREMENTS
    for dependency in reqs:
        try:
            _get_distribution(dependency)
        except VersionConflict as e:
            deps_needed.append(dependency)
            errors.append(
                "Needed %s, got %s==%s"
                % (dependency, e.dist.project_name, e.dist.version)
            )
        except DistributionNotFound:
            deps_needed.append(dependency)
            errors.append("Needed %s but it was not installed" % (dependency,))
    if not for_feature:
        # Check the optional dependencies are up to date. We allow them to not be
        # installed.
        OPTS = sum(CONDITIONAL_REQUIREMENTS.values(), [])
        for dependency in OPTS:
            try:
                _get_distribution(dependency)
            except VersionConflict as e:
                deps_needed.append(dependency)
                errors.append(
                    "Needed optional %s, got %s==%s"
                    % (dependency, e.dist.project_name, e.dist.version)
                )
            except DistributionNotFound:
                # Optional dependencies are allowed to be absent entirely.
                pass
    if deps_needed:
        for e in errors:
            # Use the module logger (defined at the top of this module)
            # rather than the root logger, so messages carry this module's
            # name and honour its logging configuration.
            logger.error(e)
        raise DependencyException(deps_needed)
# Invoked as a script: print one requirement specifier per line.
if __name__ == "__main__":
    import sys
    sys.stdout.writelines(req + "\n" for req in list_requirements())
| 31.375 | 84 | 0.633276 |
import logging
from pkg_resources import DistributionNotFound, VersionConflict, get_distribution
logger = logging.getLogger(__name__)
jsonschema>=2.5.1",
"frozendict>=1",
"unpaddedbase64>=1.1.0",
"canonicaljson>=1.1.3",
"signedjson>=1.0.0",
"pynacl>=1.2.1",
"service_identity>=16.0.0",
"Twisted>=18.7.0",
"treq>=15.1",
"pyopenssl>=16.0.0",
"pyyaml>=3.11",
"pyasn1>=0.1.9",
"pyasn1-modules>=0.0.7",
"daemonize>=2.3.1",
"bcrypt>=3.1.0",
"pillow>=3.1.2",
"sortedcontainers>=1.4.4",
"psutil>=2.0.0",
"pymacaroons>=0.13.0",
"msgpack>=0.5.0",
"phonenumbers>=8.2.0",
"six>=1.10",
"prometheus_client>=0.0.18,<0.4.0",
"attrs>=17.4.0",
"netaddr>=0.7.18",
]
CONDITIONAL_REQUIREMENTS = {
"email.enable_notifs": ["Jinja2>=2.9", "bleach>=1.4.2"],
"matrix-synapse-ldap3": ["matrix-synapse-ldap3>=0.1"],
"postgres": ["psycopg2>=2.6"],
"resources.consent": ["Jinja2>=2.9"],
"acme": ["txacme>=0.9.2"],
"saml2": ["pysaml2>=4.5.0"],
"url_preview": ["lxml>=3.5.0"],
"test": ["mock>=2.0", "parameterized"],
"sentry": ["sentry-sdk>=0.7.2"],
}
def list_requirements():
deps = set(REQUIREMENTS)
for opt in CONDITIONAL_REQUIREMENTS.values():
deps = set(opt) | deps
return list(deps)
class DependencyException(Exception):
@property
def message(self):
return "\n".join([
"Missing Requirements: %s" % (", ".join(self.dependencies),),
"To install run:",
" pip install --upgrade --force %s" % (" ".join(self.dependencies),),
"",
])
@property
def dependencies(self):
for i in self.args[0]:
yield '"' + i + '"'
def check_requirements(for_feature=None, _get_distribution=get_distribution):
deps_needed = []
errors = []
if for_feature:
reqs = CONDITIONAL_REQUIREMENTS[for_feature]
else:
reqs = REQUIREMENTS
for dependency in reqs:
try:
_get_distribution(dependency)
except VersionConflict as e:
deps_needed.append(dependency)
errors.append(
"Needed %s, got %s==%s"
% (dependency, e.dist.project_name, e.dist.version)
)
except DistributionNotFound:
deps_needed.append(dependency)
errors.append("Needed %s but it was not installed" % (dependency,))
if not for_feature:
# Check the optional dependencies are up to date. We allow them to not be
# installed.
OPTS = sum(CONDITIONAL_REQUIREMENTS.values(), [])
for dependency in OPTS:
try:
_get_distribution(dependency)
except VersionConflict as e:
deps_needed.append(dependency)
errors.append(
"Needed optional %s, got %s==%s"
% (dependency, e.dist.project_name, e.dist.version)
)
except DistributionNotFound:
# If it's not found, we don't care
pass
if deps_needed:
for e in errors:
logging.error(e)
raise DependencyException(deps_needed)
if __name__ == "__main__":
import sys
sys.stdout.writelines(req + "\n" for req in list_requirements())
| true | true |
f71e22c1d8dcf03031ef0b71b819aee1ccccc455 | 1,325 | py | Python | covid_world_scraper/nga.py | biglocalnews/covid-world-scraper | 385f792b32d58dbf67a524c36e60d21f76e463ef | [
"0BSD"
] | null | null | null | covid_world_scraper/nga.py | biglocalnews/covid-world-scraper | 385f792b32d58dbf67a524c36e60d21f76e463ef | [
"0BSD"
] | 11 | 2020-07-14T02:16:32.000Z | 2022-01-31T18:06:49.000Z | covid_world_scraper/nga.py | biglocalnews/covid-world-scraper | 385f792b32d58dbf67a524c36e60d21f76e463ef | [
"0BSD"
] | null | null | null | """
Official page for Nigeria COVID figures:
https://covid19.ncdc.gov.ng/
"""
import logging
import os
import re
from bs4 import BeautifulSoup
import requests
from .country_scraper import CountryScraper
logger = logging.getLogger(__name__)
class Nga(CountryScraper):
    """Scraper for Nigeria's official NCDC COVID-19 dashboard."""

    def fetch(self):
        """Download the dashboard page and cache the raw HTML; return the path."""
        response = requests.get('https://covid19.ncdc.gov.ng/')
        return self.save_to_raw_cache(response.text, 'html')

    def extract(self, source_file):
        """Parse the cached HTML table into a processed CSV; return its path."""
        scrape_date = self.runtimestamp
        with open(source_file) as fh:
            soup = BeautifulSoup(fh.read(), 'html.parser')
        # Header row comes from the table head, plus the two extra columns
        # appended to every data row below.
        header_row = [th.text for th in soup.table.thead.find_all('th')]
        header_row.extend(['date', 'scrape_date'])
        rows = []
        for table_row in soup.table.tbody.find_all('tr'):
            # Strip whitespace and thousands separators from each cell.
            values = [
                td.text.strip().replace(',', '')
                for td in table_row.find_all('td')
            ]
            values.extend(['', scrape_date])
            rows.append(values)
        outfile = self.processed_filepath_from_raw(source_file, 'csv')
        self.write_csv([header_row] + rows, outfile)
        return outfile
| 25.980392 | 71 | 0.593962 | import logging
import os
import re
from bs4 import BeautifulSoup
import requests
from .country_scraper import CountryScraper
logger = logging.getLogger(__name__)
class Nga(CountryScraper):
def fetch(self):
url = 'https://covid19.ncdc.gov.ng/'
response = requests.get(url)
saved_file = self.save_to_raw_cache(response.text, 'html')
return saved_file
def extract(self, source_file):
scrape_date = self.runtimestamp
with open(source_file) as fh:
soup = BeautifulSoup(fh.read(), 'html.parser')
headers = [h.text for h in soup.table.thead.find_all('th')]
headers.extend(['date', 'scrape_date'])
data = []
tbody_rows = soup.table.tbody.find_all('tr')
for tr in tbody_rows:
cells = [
cell.text.strip().replace(',','')
for cell in tr.find_all('td')
]
cells.extend(['', scrape_date])
data.append(cells)
outfile = self.processed_filepath_from_raw(source_file, 'csv')
merged_data = [headers]
merged_data.extend(data)
self.write_csv(merged_data, outfile)
return outfile
| true | true |
f71e22c3be82a7358ce15874fced843315675c8b | 164 | py | Python | manti_by/apps/shortener/apps.py | manti-by/manti.by | 233882fc5e5758ff92f0b7940316f15e4d30af07 | [
"BSD-3-Clause"
] | 1 | 2021-12-11T11:34:04.000Z | 2021-12-11T11:34:04.000Z | manti_by/apps/shortener/apps.py | manti-by/manti.by | 233882fc5e5758ff92f0b7940316f15e4d30af07 | [
"BSD-3-Clause"
] | 11 | 2021-03-23T13:59:39.000Z | 2022-02-02T10:16:58.000Z | manti_by/apps/shortener/apps.py | manti-by/manti.by | 233882fc5e5758ff92f0b7940316f15e4d30af07 | [
"BSD-3-Clause"
] | null | null | null | from django.apps import AppConfig
class ShortenerConfig(AppConfig):
    """Django application configuration for the URL-shortener app."""
    # Use 64-bit integer primary keys for this app's models by default.
    default_auto_field = "django.db.models.BigAutoField"
    name = "manti_by.apps.shortener"
class ShortenerConfig(AppConfig):
default_auto_field = "django.db.models.BigAutoField"
name = "manti_by.apps.shortener"
| true | true |
f71e2402789d811aef5d0e4eab5187008fe4414f | 103 | py | Python | realtimenet/feature_extractors/__init__.py | floriandotpy/20bn-realtimenet | 6cf6359606ccb3cb205fb65dd102402bc84255e2 | [
"MIT"
] | 2 | 2021-03-03T09:36:49.000Z | 2022-03-18T06:36:54.000Z | realtimenet/feature_extractors/__init__.py | mc261670164/20bn-realtimenet | 6d1e21c3ccd3ff7d15af2927a31f1012ae9853e9 | [
"MIT"
] | 1 | 2021-03-10T08:38:03.000Z | 2021-03-10T10:48:13.000Z | realtimenet/feature_extractors/__init__.py | floriandotpy/20bn-realtimenet | 6cf6359606ccb3cb205fb65dd102402bc84255e2 | [
"MIT"
] | 1 | 2022-01-26T02:45:18.000Z | 2022-01-26T02:45:18.000Z | from .mobilenet import StridedInflatedMobileNetV2
from .efficientnet import StridedInflatedEfficientNet | 51.5 | 53 | 0.912621 | from .mobilenet import StridedInflatedMobileNetV2
from .efficientnet import StridedInflatedEfficientNet | true | true |
f71e24272a78f0373f2f2c943c051e634aa62ac6 | 1,276 | py | Python | library_backend/settings/development.py | darth-dodo/library-backend | 93bd419269b2fedecedd68dd76c45f5060a46493 | [
"MIT"
] | null | null | null | library_backend/settings/development.py | darth-dodo/library-backend | 93bd419269b2fedecedd68dd76c45f5060a46493 | [
"MIT"
] | 3 | 2020-02-12T01:07:19.000Z | 2021-06-10T21:50:49.000Z | library_backend/settings/development.py | darth-dodo/library-backend | 93bd419269b2fedecedd68dd76c45f5060a46493 | [
"MIT"
] | null | null | null | from .base import *
DEBUG = get_env_variable('DEBUG_MODE')
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': get_env_variable('DATABASE_NAME'),
'USER': get_env_variable('DATABASE_USER'),
'PASSWORD': get_env_variable('DATABASE_PASSWORD'),
'HOST': get_env_variable('DATABASE_HOST'),
'PORT': get_env_variable('DATABASE_PORT'), # Set to empty string for default.
}
}
# toggle sentry
# if config is None, sentry will never be triggered
# if DEBUG:
# RAVEN_CONFIG = dict()
#
RAVEN_CONFIG = dict()
# enable/disable qcount
if True:
MIDDLEWARE += [
'querycount.middleware.QueryCountMiddleware',
]
QUERYCOUNT = {
'THRESHOLDS': {
'MEDIUM': 50,
'HIGH': 200,
'MIN_TIME_TO_LOG': 0,
'MIN_QUERY_COUNT_TO_LOG': 0
},
'IGNORE_REQUEST_PATTERNS': [],
'IGNORE_SQL_PATTERNS': [],
'DISPLAY_DUPLICATES': 20,
}
# enable/disable django debug toolbar
if True:
INSTALLED_APPS += [
'debug_toolbar',
]
MIDDLEWARE += [
'debug_toolbar.middleware.DebugToolbarMiddleware',
]
# django debug toolbar allowed internal ips
INTERNAL_IPS = ['127.0.0.1']
| 23.2 | 86 | 0.60815 | from .base import *
DEBUG = get_env_variable('DEBUG_MODE')
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': get_env_variable('DATABASE_NAME'),
'USER': get_env_variable('DATABASE_USER'),
'PASSWORD': get_env_variable('DATABASE_PASSWORD'),
'HOST': get_env_variable('DATABASE_HOST'),
'PORT': get_env_variable('DATABASE_PORT'),
}
}
RAVEN_CONFIG = dict()
if True:
MIDDLEWARE += [
'querycount.middleware.QueryCountMiddleware',
]
QUERYCOUNT = {
'THRESHOLDS': {
'MEDIUM': 50,
'HIGH': 200,
'MIN_TIME_TO_LOG': 0,
'MIN_QUERY_COUNT_TO_LOG': 0
},
'IGNORE_REQUEST_PATTERNS': [],
'IGNORE_SQL_PATTERNS': [],
'DISPLAY_DUPLICATES': 20,
}
if True:
INSTALLED_APPS += [
'debug_toolbar',
]
MIDDLEWARE += [
'debug_toolbar.middleware.DebugToolbarMiddleware',
]
INTERNAL_IPS = ['127.0.0.1']
| true | true |
f71e242ca8e030bb1802a445ec67be8186aabf3a | 709 | py | Python | loader/__init__.py | YaoChengTang/DecNet | b623ac8d0505ec68eb930ad7a21fe9d84dd07543 | [
"MIT"
] | 18 | 2021-04-16T02:24:06.000Z | 2021-12-13T10:55:27.000Z | loader/__init__.py | YaoChengTang/DecNet | b623ac8d0505ec68eb930ad7a21fe9d84dd07543 | [
"MIT"
] | 3 | 2021-04-29T09:05:53.000Z | 2021-09-06T08:35:55.000Z | loader/__init__.py | YaoChengTang/DecNet | b623ac8d0505ec68eb930ad7a21fe9d84dd07543 | [
"MIT"
] | 2 | 2021-08-07T08:00:34.000Z | 2021-09-13T06:14:52.000Z | import json
from loader.KITTI15Mask import KITTI15Mask
from loader.SceneflowMask import SceneflowMask
from loader.DrivingStereoMask import DrivingStereoMask
from loader.MiddleburyMask import MiddleburyMask
def get_loader(name):
    """Return the dataset loader class registered under ``name``.

    :param name: case-insensitive loader name, e.g. ``'KITTI15Mask'``.
    :raises KeyError: if no loader is registered under ``name``.
    """
    # NOTE: a leftover debug print of the loader name was removed here.
    loaders = {
        'kitti15mask': KITTI15Mask,
        'sceneflowmask': SceneflowMask,
        'drivingstereomask': DrivingStereoMask,
        'middleburymask': MiddleburyMask,
    }
    return loaders[name.lower()]
def get_data_path(name, config_file='config.json'):
    """Look up the dataset root path for ``name`` in a JSON config file.

    :param name: dataset name, matched case-insensitively.
    :param config_file: path to the JSON configuration file.
    :raises KeyError: if ``name`` (lowercased) is not in the config.
    """
    # Use a context manager so the config file handle is always closed
    # (the previous json.load(open(...)) leaked it).
    with open(config_file) as fh:
        data = json.load(fh)
    return data[name.lower()]['data_path']
| 23.633333 | 54 | 0.695346 | import json
from loader.KITTI15Mask import KITTI15Mask
from loader.SceneflowMask import SceneflowMask
from loader.DrivingStereoMask import DrivingStereoMask
from loader.MiddleburyMask import MiddleburyMask
def get_loader(name):
print(name.lower())
return {
'kitti15mask': KITTI15Mask,
'sceneflowmask': SceneflowMask,
'drivingstereomask': DrivingStereoMask,
'middleburymask': MiddleburyMask,
}[name.lower()]
def get_data_path(name, config_file='config.json'):
data = json.load(open(config_file))
return data[name.lower()]['data_path']
| true | true |
f71e24c9faaf4bedde9723ddbf104ab813acfc67 | 1,706 | py | Python | profiles_api/migrations/0001_initial.py | pantegra/profiles-rest-api | b9dc03273b50b0e04f11d74781f2659d82b4d271 | [
"MIT"
] | null | null | null | profiles_api/migrations/0001_initial.py | pantegra/profiles-rest-api | b9dc03273b50b0e04f11d74781f2659d82b4d271 | [
"MIT"
] | null | null | null | profiles_api/migrations/0001_initial.py | pantegra/profiles-rest-api | b9dc03273b50b0e04f11d74781f2659d82b4d271 | [
"MIT"
] | null | null | null | # Generated by Django 2.2 on 2021-03-23 12:07
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
('auth', '0011_update_proxy_permissions'),
]
operations = [
migrations.CreateModel(
name='UserProfile',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('password', models.CharField(max_length=128, verbose_name='password')),
('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
('email', models.EmailField(max_length=255, unique=True)),
('name', models.CharField(max_length=255)),
('is_active', models.BooleanField(default=True)),
('is_staff', models.BooleanField(default=False)),
('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')),
('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')),
],
options={
'abstract': False,
},
),
]
| 50.176471 | 266 | 0.638921 |
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial migration creating the custom ``UserProfile`` user model."""
    initial = True
    # Depends on django.contrib.auth for the Group/Permission M2M targets.
    dependencies = [
        ('auth', '0011_update_proxy_permissions'),
    ]
    operations = [
        migrations.CreateModel(
            name='UserProfile',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('password', models.CharField(max_length=128, verbose_name='password')),
                ('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
                ('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
                ('email', models.EmailField(max_length=255, unique=True)),
                ('name', models.CharField(max_length=255)),
                ('is_active', models.BooleanField(default=True)),
                ('is_staff', models.BooleanField(default=False)),
                ('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')),
                ('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')),
            ],
            options={
                'abstract': False,
            },
        ),
    ]
| true | true |
f71e2582319771768abac221ac3650e185cfa10e | 3,198 | py | Python | patterns.py | teddy-dubal/WhatsApp-Analyzer | 1cae2f3be03843741e2b8f5e321ed7a307f3b1f1 | [
"MIT"
] | null | null | null | patterns.py | teddy-dubal/WhatsApp-Analyzer | 1cae2f3be03843741e2b8f5e321ed7a307f3b1f1 | [
"MIT"
] | null | null | null | patterns.py | teddy-dubal/WhatsApp-Analyzer | 1cae2f3be03843741e2b8f5e321ed7a307f3b1f1 | [
"MIT"
] | null | null | null | BAD_CHARS = [
u"\u202a",
u"\u200e",
u"\u202c",
u"\xa0",
]
IS_STARTING_LINE = r"""
(\[?) #Zero or one open square bracket '['
(((\d{1,2}) #1 to 2 digit date
(/|-) #'/' or '-' separator
(\d{1,2}) #1 to 2 digit month
(/|-) #'/' or '-' separator
(\d{2,4})) #2 to 4 digit of year
# ([à ]+)
(,?\s|[à ]+) #Zero or one comma ',' and ingle space
((\d{1,2}) #1 to 2 digit of hour
(:|\.) #Colon ':' or dot '.' separator
(\d{2}) #2 digit of minute
(\.|:)? #Zero or one of dot '.' or colon ':'
(\d{2})? #Zero or one of 2 digits of second
(\s?[apAP]\.?[mM]\.?)?)) #Zero or one of ('space', 'A' or 'P', and 'M'
(\]?\s-?\s?\s?)#Zero or one close square bracket ']', Zero or one (space and '-'), zero or one space
(.+) #One or more character of chat member phone number or contact name
"""
IS_CHAT = r"""
([^:]+)#Chat member
(:) #Colon separator
(.+) #One or more charachter of message content
"""
IS_DELETED_CHAT = [
r".*This message was deleted$",
r".*Pesan ini telah dihapus$"
]
IS_ATTACHMENT = [
r".*<Media omitted>$", # English version of android attachment
r".*<Media tidak disertakan>$", # Indonesia version of android attachment
r".*Archivo omitido*", # Spanish version of android attachment
r".*Pesan tidak didukung$", # Some device not recognize sticker attachment
# Indonesian version of android contact card,
r".+\.vcf \(file\sterlampir\)$",
# Indonesian version of android contact card,
r".+\.vcf \(file\sattached\)$",
r".*image omitted$",
r".*video omitted$",
r".*document omitted$",
r".*Contact card omitted$",
r".*audio omitted$",
r".*GIF omitted$",
r".*sticker omitted$",
r".*imagen omitida*",
r".*audio omitido*",
r".*GIF omitido*",
r".*sticker omitido*",
r".*video omitido*",
]
IS_URL = r"(?i)\b((?:https?://|www\d{0,3}[.]|[a-z0-9.\-]+[.][a-z]{2,6}/)(?:[^\s()<>]+|\(([^\s()<>]+|(\([^\s()<>]+\)))*\))+(?:\(([^\s()<>]+|(\([^\s()<>]+\)))*\)|[^\s`!()\[\]{};:'\".,<>?«»“”‘’]))"
# Patterns for WhatsApp system/event lines (encryption notice, joins, leaves,
# adds/removes, security-code and phone-number changes), in EN / ID / FR.
IS_EVENT = [
    # End-to-end encryption notice.
    # FIX: was 'plus.\.$' (an extra wildcard character before the final dot),
    # which could never match the actual message ending "…savoir plus.".
    r"Les messages et les appels sont chiffrés de bout en bout. Aucun tiers, pas même WhatsApp, ne peut les lire ou les écouter. Appuyez pour en savoir plus\.$", # FR
    # Welcoming message
    r"Messages to this group are now secured with end-to-end encryption\.$", # EN
    # User created group
    r".+\screated this group$", # EN
    # User left group
    r".+\sleft$", # EN
    r".+\skeluar$", # ID
    # User join group via invitation link
    r".+\sjoined using this group's invite link$", # EN
    r".+\stelah bergabung menggunakan tautan undangan grup ini$", # ID
    # Admin adds member
    r".+\sadded\s.+", # EN
    r".+\smenambahkan\s.+", # ID
    # Admin removes member
    r".+\sremoved\s.+", # EN
    # Member's security code changed
    r".+'s security code changed\.$", # EN
    # Member changes phone number
    # FIX: the EN entry previously had no trailing comma, so Python's implicit
    # string concatenation silently fused it with the ID entry below into one
    # broken regex; they are now two separate patterns.
    r".*changed their phone number to a new number. Tap to message or add the new number\.$", # EN
    r".*telah mengganti nomor teleponnya ke nomor baru. Ketuk untuk mengirim pesan atau menambahkan nomor baru\.$", # ID
]
| 36.758621 | 194 | 0.551907 | BAD_CHARS = [
u"\u202a",
u"\u200e",
u"\u202c",
u"\xa0",
]
IS_STARTING_LINE = r"""
(\[?) #Zero or one open square bracket '['
(((\d{1,2}) #1 to 2 digit date
(/|-) #'/' or '-' separator
(\d{1,2}) #1 to 2 digit month
(/|-) #'/' or '-' separator
(\d{2,4})) #2 to 4 digit of year
# ([à ]+)
(,?\s|[à ]+) #Zero or one comma ',' and ingle space
((\d{1,2}) #1 to 2 digit of hour
(:|\.) #Colon ':' or dot '.' separator
(\d{2}) #2 digit of minute
(\.|:)? #Zero or one of dot '.' or colon ':'
(\d{2})? #Zero or one of 2 digits of second
(\s?[apAP]\.?[mM]\.?)?)) #Zero or one of ('space', 'A' or 'P', and 'M'
(\]?\s-?\s?\s?)#Zero or one close square bracket ']', Zero or one (space and '-'), zero or one space
(.+) #One or more character of chat member phone number or contact name
"""
IS_CHAT = r"""
([^:]+)#Chat member
(:) #Colon separator
(.+) #One or more charachter of message content
"""
IS_DELETED_CHAT = [
r".*This message was deleted$",
r".*Pesan ini telah dihapus$"
]
IS_ATTACHMENT = [
r".*<Media omitted>$",
r".*<Media tidak disertakan>$",
r".*Archivo omitido*",
r".*Pesan tidak didukung$",
r".+\.vcf \(file\sterlampir\)$",
r".+\.vcf \(file\sattached\)$",
r".*image omitted$",
r".*video omitted$",
r".*document omitted$",
r".*Contact card omitted$",
r".*audio omitted$",
r".*GIF omitted$",
r".*sticker omitted$",
r".*imagen omitida*",
r".*audio omitido*",
r".*GIF omitido*",
r".*sticker omitido*",
r".*video omitido*",
]
IS_URL = r"(?i)\b((?:https?://|www\d{0,3}[.]|[a-z0-9.\-]+[.][a-z]{2,6}/)(?:[^\s()<>]+|\(([^\s()<>]+|(\([^\s()<>]+\)))*\))+(?:\(([^\s()<>]+|(\([^\s()<>]+\)))*\)|[^\s`!()\[\]{};:'\".,<>?«»“”‘’]))"
# Patterns for WhatsApp system/event lines, in EN / ID / FR.
IS_EVENT = [
    # End-to-end encryption notice.
    # FIX: was 'plus.\.$' (stray wildcard before the final dot), which could
    # never match the actual message ending "…savoir plus.".
    r"Les messages et les appels sont chiffrés de bout en bout. Aucun tiers, pas même WhatsApp, ne peut les lire ou les écouter. Appuyez pour en savoir plus\.$", # FR
    # Welcoming message
    r"Messages to this group are now secured with end-to-end encryption\.$", # EN
    # User created group
    r".+\screated this group$", # EN
    # User left group
    r".+\sleft$", # EN
    r".+\skeluar$", # ID
    # User join group via invitation link
    r".+\sjoined using this group's invite link$", # EN
    r".+\stelah bergabung menggunakan tautan undangan grup ini$", # ID
    # Admin adds member
    r".+\sadded\s.+", # EN
    r".+\smenambahkan\s.+", # ID
    # Admin removes member
    r".+\sremoved\s.+", # EN
    # Member's security code changed
    r".+'s security code changed\.$", # EN
    # Member changes phone number
    # FIX: the EN entry had no trailing comma, so implicit string
    # concatenation fused it with the ID entry into one broken regex.
    r".*changed their phone number to a new number. Tap to message or add the new number\.$", # EN
    r".*telah mengganti nomor teleponnya ke nomor baru. Ketuk untuk mengirim pesan atau menambahkan nomor baru\.$", # ID
]
| true | true |
f71e272f3182e9115d5ba90240accd4ee5011de3 | 27,886 | py | Python | sdk/search/azure-search-documents/azure/search/documents/indexes/_internal/_generated/aio/operations/_indexers_operations.py | vbarbaresi/azure-sdk-for-python | 397ba46c51d001ff89c66b170f5576cf8f49c05f | [
"MIT"
] | 8 | 2021-01-13T23:44:08.000Z | 2021-03-17T10:13:36.000Z | sdk/search/azure-search-documents/azure/search/documents/indexes/_internal/_generated/aio/operations/_indexers_operations.py | vbarbaresi/azure-sdk-for-python | 397ba46c51d001ff89c66b170f5576cf8f49c05f | [
"MIT"
] | 2 | 2021-11-03T06:10:36.000Z | 2021-12-01T06:29:39.000Z | sdk/search/azure-search-documents/azure/search/documents/indexes/_internal/_generated/aio/operations/_indexers_operations.py | vbarbaresi/azure-sdk-for-python | 397ba46c51d001ff89c66b170f5576cf8f49c05f | [
"MIT"
] | 1 | 2021-05-19T02:55:10.000Z | 2021-05-19T02:55:10.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from ... import models
T = TypeVar('T')
# Shape of the optional ``cls`` response hook each operation accepts: it is
# called with the raw pipeline response, the deserialized body, and a dict
# (always passed as ``{}`` in this module).
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class IndexersOperations:
    """IndexersOperations async operations.
    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.
    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.search.documents.indexes.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """
    # NOTE: AutoRest-generated (see file header); regenerate rather than
    # hand-editing.  All operations pin api-version 2020-06-30.
    models = models
    def __init__(self, client, config, serializer, deserializer) -> None:
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
    async def reset(
        self,
        indexer_name: str,
        request_options: Optional["models.RequestOptions"] = None,
        **kwargs
    ) -> None:
        """Resets the change tracking state associated with an indexer.
        :param indexer_name: The name of the indexer to reset.
        :type indexer_name: str
        :param request_options: Parameter group.
        :type request_options: ~azure.search.documents.indexes.models.RequestOptions
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: None, or the result of cls(response)
        :rtype: None
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        _x_ms_client_request_id = None
        if request_options is not None:
            _x_ms_client_request_id = request_options.x_ms_client_request_id
        api_version = "2020-06-30"
        accept = "application/json"
        # Construct URL
        url = self.reset.metadata['url']  # type: ignore
        path_format_arguments = {
            'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
            'indexerName': self._serialize.url("indexer_name", indexer_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        if _x_ms_client_request_id is not None:
            header_parameters['x-ms-client-request-id'] = self._serialize.header("x_ms_client_request_id", _x_ms_client_request_id, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.post(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        # 204 No Content is the only success status; no body is returned.
        if response.status_code not in [204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.SearchError, response)
            raise HttpResponseError(response=response, model=error)
        if cls:
            return cls(pipeline_response, None, {})
    reset.metadata = {'url': '/indexers(\'{indexerName}\')/search.reset'}  # type: ignore
    async def run(
        self,
        indexer_name: str,
        request_options: Optional["models.RequestOptions"] = None,
        **kwargs
    ) -> None:
        """Runs an indexer on-demand.
        :param indexer_name: The name of the indexer to run.
        :type indexer_name: str
        :param request_options: Parameter group.
        :type request_options: ~azure.search.documents.indexes.models.RequestOptions
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: None, or the result of cls(response)
        :rtype: None
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        _x_ms_client_request_id = None
        if request_options is not None:
            _x_ms_client_request_id = request_options.x_ms_client_request_id
        api_version = "2020-06-30"
        accept = "application/json"
        # Construct URL
        url = self.run.metadata['url']  # type: ignore
        path_format_arguments = {
            'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
            'indexerName': self._serialize.url("indexer_name", indexer_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        if _x_ms_client_request_id is not None:
            header_parameters['x-ms-client-request-id'] = self._serialize.header("x_ms_client_request_id", _x_ms_client_request_id, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.post(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        # 202 Accepted is the only success status; the response body is unused.
        if response.status_code not in [202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.SearchError, response)
            raise HttpResponseError(response=response, model=error)
        if cls:
            return cls(pipeline_response, None, {})
    run.metadata = {'url': '/indexers(\'{indexerName}\')/search.run'}  # type: ignore
    async def create_or_update(
        self,
        indexer_name: str,
        indexer: "models.SearchIndexer",
        if_match: Optional[str] = None,
        if_none_match: Optional[str] = None,
        request_options: Optional["models.RequestOptions"] = None,
        **kwargs
    ) -> "models.SearchIndexer":
        """Creates a new indexer or updates an indexer if it already exists.
        :param indexer_name: The name of the indexer to create or update.
        :type indexer_name: str
        :param indexer: The definition of the indexer to create or update.
        :type indexer: ~azure.search.documents.indexes.models.SearchIndexer
        :param if_match: Defines the If-Match condition. The operation will be performed only if the
         ETag on the server matches this value.
        :type if_match: str
        :param if_none_match: Defines the If-None-Match condition. The operation will be performed only
         if the ETag on the server does not match this value.
        :type if_none_match: str
        :param request_options: Parameter group.
        :type request_options: ~azure.search.documents.indexes.models.RequestOptions
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: SearchIndexer, or the result of cls(response)
        :rtype: ~azure.search.documents.indexes.models.SearchIndexer
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.SearchIndexer"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        _x_ms_client_request_id = None
        if request_options is not None:
            _x_ms_client_request_id = request_options.x_ms_client_request_id
        # Ask the service to echo the stored resource back in the response
        # body (deserialized below).
        prefer = "return=representation"
        api_version = "2020-06-30"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"
        # Construct URL
        url = self.create_or_update.metadata['url']  # type: ignore
        path_format_arguments = {
            'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
            'indexerName': self._serialize.url("indexer_name", indexer_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        if _x_ms_client_request_id is not None:
            header_parameters['x-ms-client-request-id'] = self._serialize.header("x_ms_client_request_id", _x_ms_client_request_id, 'str')
        if if_match is not None:
            header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
        if if_none_match is not None:
            header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
        header_parameters['Prefer'] = self._serialize.header("prefer", prefer, 'str')
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(indexer, 'SearchIndexer')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        # 200 = updated existing indexer, 201 = created new one; both carry
        # the resulting SearchIndexer definition.
        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.SearchError, response)
            raise HttpResponseError(response=response, model=error)
        if response.status_code == 200:
            deserialized = self._deserialize('SearchIndexer', pipeline_response)
        if response.status_code == 201:
            deserialized = self._deserialize('SearchIndexer', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    create_or_update.metadata = {'url': '/indexers(\'{indexerName}\')'}  # type: ignore
    async def delete(
        self,
        indexer_name: str,
        if_match: Optional[str] = None,
        if_none_match: Optional[str] = None,
        request_options: Optional["models.RequestOptions"] = None,
        **kwargs
    ) -> None:
        """Deletes an indexer.
        :param indexer_name: The name of the indexer to delete.
        :type indexer_name: str
        :param if_match: Defines the If-Match condition. The operation will be performed only if the
         ETag on the server matches this value.
        :type if_match: str
        :param if_none_match: Defines the If-None-Match condition. The operation will be performed only
         if the ETag on the server does not match this value.
        :type if_none_match: str
        :param request_options: Parameter group.
        :type request_options: ~azure.search.documents.indexes.models.RequestOptions
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: None, or the result of cls(response)
        :rtype: None
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        _x_ms_client_request_id = None
        if request_options is not None:
            _x_ms_client_request_id = request_options.x_ms_client_request_id
        api_version = "2020-06-30"
        accept = "application/json"
        # Construct URL
        url = self.delete.metadata['url']  # type: ignore
        path_format_arguments = {
            'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
            'indexerName': self._serialize.url("indexer_name", indexer_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        if _x_ms_client_request_id is not None:
            header_parameters['x-ms-client-request-id'] = self._serialize.header("x_ms_client_request_id", _x_ms_client_request_id, 'str')
        if if_match is not None:
            header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
        if if_none_match is not None:
            header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        # 404 is accepted alongside 204: deleting an absent indexer does not
        # raise, making the operation effectively idempotent.
        if response.status_code not in [204, 404]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.SearchError, response)
            raise HttpResponseError(response=response, model=error)
        if cls:
            return cls(pipeline_response, None, {})
    delete.metadata = {'url': '/indexers(\'{indexerName}\')'}  # type: ignore
    async def get(
        self,
        indexer_name: str,
        request_options: Optional["models.RequestOptions"] = None,
        **kwargs
    ) -> "models.SearchIndexer":
        """Retrieves an indexer definition.
        :param indexer_name: The name of the indexer to retrieve.
        :type indexer_name: str
        :param request_options: Parameter group.
        :type request_options: ~azure.search.documents.indexes.models.RequestOptions
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: SearchIndexer, or the result of cls(response)
        :rtype: ~azure.search.documents.indexes.models.SearchIndexer
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.SearchIndexer"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        _x_ms_client_request_id = None
        if request_options is not None:
            _x_ms_client_request_id = request_options.x_ms_client_request_id
        api_version = "2020-06-30"
        accept = "application/json"
        # Construct URL
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
            'indexerName': self._serialize.url("indexer_name", indexer_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        if _x_ms_client_request_id is not None:
            header_parameters['x-ms-client-request-id'] = self._serialize.header("x_ms_client_request_id", _x_ms_client_request_id, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.SearchError, response)
            raise HttpResponseError(response=response, model=error)
        deserialized = self._deserialize('SearchIndexer', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    get.metadata = {'url': '/indexers(\'{indexerName}\')'}  # type: ignore
    async def list(
        self,
        select: Optional[str] = None,
        request_options: Optional["models.RequestOptions"] = None,
        **kwargs
    ) -> "models.ListIndexersResult":
        """Lists all indexers available for a search service.
        :param select: Selects which top-level properties of the indexers to retrieve. Specified as a
         comma-separated list of JSON property names, or '*' for all properties. The default is all
         properties.
        :type select: str
        :param request_options: Parameter group.
        :type request_options: ~azure.search.documents.indexes.models.RequestOptions
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: ListIndexersResult, or the result of cls(response)
        :rtype: ~azure.search.documents.indexes.models.ListIndexersResult
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.ListIndexersResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        _x_ms_client_request_id = None
        if request_options is not None:
            _x_ms_client_request_id = request_options.x_ms_client_request_id
        api_version = "2020-06-30"
        accept = "application/json"
        # Construct URL
        url = self.list.metadata['url']  # type: ignore
        path_format_arguments = {
            'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        if select is not None:
            query_parameters['$select'] = self._serialize.query("select", select, 'str')
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        if _x_ms_client_request_id is not None:
            header_parameters['x-ms-client-request-id'] = self._serialize.header("x_ms_client_request_id", _x_ms_client_request_id, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.SearchError, response)
            raise HttpResponseError(response=response, model=error)
        deserialized = self._deserialize('ListIndexersResult', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    list.metadata = {'url': '/indexers'}  # type: ignore
    async def create(
        self,
        indexer: "models.SearchIndexer",
        request_options: Optional["models.RequestOptions"] = None,
        **kwargs
    ) -> "models.SearchIndexer":
        """Creates a new indexer.
        :param indexer: The definition of the indexer to create.
        :type indexer: ~azure.search.documents.indexes.models.SearchIndexer
        :param request_options: Parameter group.
        :type request_options: ~azure.search.documents.indexes.models.RequestOptions
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: SearchIndexer, or the result of cls(response)
        :rtype: ~azure.search.documents.indexes.models.SearchIndexer
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.SearchIndexer"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        _x_ms_client_request_id = None
        if request_options is not None:
            _x_ms_client_request_id = request_options.x_ms_client_request_id
        api_version = "2020-06-30"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"
        # Construct URL
        url = self.create.metadata['url']  # type: ignore
        path_format_arguments = {
            'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        if _x_ms_client_request_id is not None:
            header_parameters['x-ms-client-request-id'] = self._serialize.header("x_ms_client_request_id", _x_ms_client_request_id, 'str')
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(indexer, 'SearchIndexer')
        body_content_kwargs['content'] = body_content
        request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        # 201 Created is the only success status for plain create.
        if response.status_code not in [201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.SearchError, response)
            raise HttpResponseError(response=response, model=error)
        deserialized = self._deserialize('SearchIndexer', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    create.metadata = {'url': '/indexers'}  # type: ignore
    async def get_status(
        self,
        indexer_name: str,
        request_options: Optional["models.RequestOptions"] = None,
        **kwargs
    ) -> "models.SearchIndexerStatus":
        """Returns the current status and execution history of an indexer.
        :param indexer_name: The name of the indexer for which to retrieve status.
        :type indexer_name: str
        :param request_options: Parameter group.
        :type request_options: ~azure.search.documents.indexes.models.RequestOptions
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: SearchIndexerStatus, or the result of cls(response)
        :rtype: ~azure.search.documents.indexes.models.SearchIndexerStatus
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.SearchIndexerStatus"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        _x_ms_client_request_id = None
        if request_options is not None:
            _x_ms_client_request_id = request_options.x_ms_client_request_id
        api_version = "2020-06-30"
        accept = "application/json"
        # Construct URL
        url = self.get_status.metadata['url']  # type: ignore
        path_format_arguments = {
            'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
            'indexerName': self._serialize.url("indexer_name", indexer_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        if _x_ms_client_request_id is not None:
            header_parameters['x-ms-client-request-id'] = self._serialize.header("x_ms_client_request_id", _x_ms_client_request_id, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.SearchError, response)
            raise HttpResponseError(response=response, model=error)
        deserialized = self._deserialize('SearchIndexerStatus', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    get_status.metadata = {'url': '/indexers(\'{indexerName}\')/search.status'}  # type: ignore
| 47.344652 | 138 | 0.664957 |
from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from ... import models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class IndexersOperations:
    """Async REST operations for the search service's ``/indexers`` resource.

    Autogenerated-SDK-style plumbing: every method builds the operation URL
    from its ``*.metadata['url']`` template, serializes query/header values,
    runs the client's HTTP pipeline, and maps non-success status codes to
    :class:`HttpResponseError` (401/404/409 first go through ``error_map``,
    which callers may extend via the ``error_map`` keyword argument).
    A custom ``cls`` keyword callback, when given, post-processes the
    (pipeline_response, deserialized, response_headers) triple.
    """
    # Keep a reference so generated code can resolve model classes off the class.
    models = models
    def __init__(self, client, config, serializer, deserializer) -> None:
        # Pipeline client used to build and send requests.
        self._client = client
        # Serializer/deserializer pair for request and response bodies.
        self._serialize = serializer
        self._deserialize = deserializer
        # Holds the service endpoint, credentials, etc.
        self._config = config
    async def reset(
        self,
        indexer_name: str,
        request_options: Optional["models.RequestOptions"] = None,
        **kwargs
    ) -> None:
        """POST ``/indexers('{indexerName}')/search.reset``; only 204 is success."""
        cls = kwargs.pop('cls', None)
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        # Optional client-supplied correlation id forwarded as a header.
        _x_ms_client_request_id = None
        if request_options is not None:
            _x_ms_client_request_id = request_options.x_ms_client_request_id
        api_version = "2020-06-30"
        accept = "application/json"
        # Build the URL from the metadata template attached below the method.
        url = self.reset.metadata['url']
        path_format_arguments = {
            'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
            'indexerName': self._serialize.url("indexer_name", indexer_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        header_parameters = {}
        if _x_ms_client_request_id is not None:
            header_parameters['x-ms-client-request-id'] = self._serialize.header("x_ms_client_request_id", _x_ms_client_request_id, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.post(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.SearchError, response)
            raise HttpResponseError(response=response, model=error)
        if cls:
            return cls(pipeline_response, None, {})
    reset.metadata = {'url': '/indexers(\'{indexerName}\')/search.reset'}
    async def run(
        self,
        indexer_name: str,
        request_options: Optional["models.RequestOptions"] = None,
        **kwargs
    ) -> None:
        """POST ``/indexers('{indexerName}')/search.run``; only 202 is success."""
        cls = kwargs.pop('cls', None)
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        _x_ms_client_request_id = None
        if request_options is not None:
            _x_ms_client_request_id = request_options.x_ms_client_request_id
        api_version = "2020-06-30"
        accept = "application/json"
        url = self.run.metadata['url']
        path_format_arguments = {
            'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
            'indexerName': self._serialize.url("indexer_name", indexer_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        header_parameters = {}
        if _x_ms_client_request_id is not None:
            header_parameters['x-ms-client-request-id'] = self._serialize.header("x_ms_client_request_id", _x_ms_client_request_id, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.post(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.SearchError, response)
            raise HttpResponseError(response=response, model=error)
        if cls:
            return cls(pipeline_response, None, {})
    run.metadata = {'url': '/indexers(\'{indexerName}\')/search.run'}
    async def create_or_update(
        self,
        indexer_name: str,
        indexer: "models.SearchIndexer",
        if_match: Optional[str] = None,
        if_none_match: Optional[str] = None,
        request_options: Optional["models.RequestOptions"] = None,
        **kwargs
    ) -> "models.SearchIndexer":
        """PUT ``/indexers('{indexerName}')`` with a serialized SearchIndexer body.

        Honors optional ``If-Match``/``If-None-Match`` ETags; both 200 (updated)
        and 201 (created) deserialize the returned SearchIndexer.
        """
        cls = kwargs.pop('cls', None)
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        _x_ms_client_request_id = None
        if request_options is not None:
            _x_ms_client_request_id = request_options.x_ms_client_request_id
        # Ask the service to echo the stored representation back in the response.
        prefer = "return=representation"
        api_version = "2020-06-30"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"
        url = self.create_or_update.metadata['url']
        path_format_arguments = {
            'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
            'indexerName': self._serialize.url("indexer_name", indexer_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        header_parameters = {}
        if _x_ms_client_request_id is not None:
            header_parameters['x-ms-client-request-id'] = self._serialize.header("x_ms_client_request_id", _x_ms_client_request_id, 'str')
        if if_match is not None:
            header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
        if if_none_match is not None:
            header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
        header_parameters['Prefer'] = self._serialize.header("prefer", prefer, 'str')
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        body_content_kwargs = {}
        body_content = self._serialize.body(indexer, 'SearchIndexer')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.SearchError, response)
            raise HttpResponseError(response=response, model=error)
        if response.status_code == 200:
            deserialized = self._deserialize('SearchIndexer', pipeline_response)
        if response.status_code == 201:
            deserialized = self._deserialize('SearchIndexer', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    create_or_update.metadata = {'url': '/indexers(\'{indexerName}\')'}
    async def delete(
        self,
        indexer_name: str,
        if_match: Optional[str] = None,
        if_none_match: Optional[str] = None,
        request_options: Optional["models.RequestOptions"] = None,
        **kwargs
    ) -> None:
        """DELETE ``/indexers('{indexerName}')``.

        Both 204 and 404 are accepted without raising, so deleting an
        already-absent indexer is not an error.
        """
        cls = kwargs.pop('cls', None)
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        _x_ms_client_request_id = None
        if request_options is not None:
            _x_ms_client_request_id = request_options.x_ms_client_request_id
        api_version = "2020-06-30"
        accept = "application/json"
        url = self.delete.metadata['url']
        path_format_arguments = {
            'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
            'indexerName': self._serialize.url("indexer_name", indexer_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        header_parameters = {}
        if _x_ms_client_request_id is not None:
            header_parameters['x-ms-client-request-id'] = self._serialize.header("x_ms_client_request_id", _x_ms_client_request_id, 'str')
        if if_match is not None:
            header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
        if if_none_match is not None:
            header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [204, 404]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.SearchError, response)
            raise HttpResponseError(response=response, model=error)
        if cls:
            return cls(pipeline_response, None, {})
    delete.metadata = {'url': '/indexers(\'{indexerName}\')'}
    async def get(
        self,
        indexer_name: str,
        request_options: Optional["models.RequestOptions"] = None,
        **kwargs
    ) -> "models.SearchIndexer":
        """GET ``/indexers('{indexerName}')``; a 200 deserializes to SearchIndexer."""
        cls = kwargs.pop('cls', None)
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        _x_ms_client_request_id = None
        if request_options is not None:
            _x_ms_client_request_id = request_options.x_ms_client_request_id
        api_version = "2020-06-30"
        accept = "application/json"
        url = self.get.metadata['url']
        path_format_arguments = {
            'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
            'indexerName': self._serialize.url("indexer_name", indexer_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        header_parameters = {}
        if _x_ms_client_request_id is not None:
            header_parameters['x-ms-client-request-id'] = self._serialize.header("x_ms_client_request_id", _x_ms_client_request_id, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.SearchError, response)
            raise HttpResponseError(response=response, model=error)
        deserialized = self._deserialize('SearchIndexer', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    get.metadata = {'url': '/indexers(\'{indexerName}\')'}
    async def list(
        self,
        select: Optional[str] = None,
        request_options: Optional["models.RequestOptions"] = None,
        **kwargs
    ) -> "models.ListIndexersResult":
        """GET ``/indexers``; optional ``select`` becomes the ``$select`` query param."""
        cls = kwargs.pop('cls', None)
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        _x_ms_client_request_id = None
        if request_options is not None:
            _x_ms_client_request_id = request_options.x_ms_client_request_id
        api_version = "2020-06-30"
        accept = "application/json"
        url = self.list.metadata['url']
        path_format_arguments = {
            'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
        }
        url = self._client.format_url(url, **path_format_arguments)
        query_parameters = {}
        if select is not None:
            query_parameters['$select'] = self._serialize.query("select", select, 'str')
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        header_parameters = {}
        if _x_ms_client_request_id is not None:
            header_parameters['x-ms-client-request-id'] = self._serialize.header("x_ms_client_request_id", _x_ms_client_request_id, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.SearchError, response)
            raise HttpResponseError(response=response, model=error)
        deserialized = self._deserialize('ListIndexersResult', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    list.metadata = {'url': '/indexers'}
    async def create(
        self,
        indexer: "models.SearchIndexer",
        request_options: Optional["models.RequestOptions"] = None,
        **kwargs
    ) -> "models.SearchIndexer":
        """POST ``/indexers`` with a serialized SearchIndexer; only 201 is success."""
        cls = kwargs.pop('cls', None)
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        _x_ms_client_request_id = None
        if request_options is not None:
            _x_ms_client_request_id = request_options.x_ms_client_request_id
        api_version = "2020-06-30"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"
        url = self.create.metadata['url']
        path_format_arguments = {
            'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
        }
        url = self._client.format_url(url, **path_format_arguments)
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        header_parameters = {}
        if _x_ms_client_request_id is not None:
            header_parameters['x-ms-client-request-id'] = self._serialize.header("x_ms_client_request_id", _x_ms_client_request_id, 'str')
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        body_content_kwargs = {}
        body_content = self._serialize.body(indexer, 'SearchIndexer')
        body_content_kwargs['content'] = body_content
        request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.SearchError, response)
            raise HttpResponseError(response=response, model=error)
        deserialized = self._deserialize('SearchIndexer', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    create.metadata = {'url': '/indexers'}
    async def get_status(
        self,
        indexer_name: str,
        request_options: Optional["models.RequestOptions"] = None,
        **kwargs
    ) -> "models.SearchIndexerStatus":
        """GET ``/indexers('{indexerName}')/search.status``; 200 → SearchIndexerStatus."""
        cls = kwargs.pop('cls', None)
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        _x_ms_client_request_id = None
        if request_options is not None:
            _x_ms_client_request_id = request_options.x_ms_client_request_id
        api_version = "2020-06-30"
        accept = "application/json"
        url = self.get_status.metadata['url']
        path_format_arguments = {
            'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
            'indexerName': self._serialize.url("indexer_name", indexer_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        header_parameters = {}
        if _x_ms_client_request_id is not None:
            header_parameters['x-ms-client-request-id'] = self._serialize.header("x_ms_client_request_id", _x_ms_client_request_id, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.SearchError, response)
            raise HttpResponseError(response=response, model=error)
        deserialized = self._deserialize('SearchIndexerStatus', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    get_status.metadata = {'url': '/indexers(\'{indexerName}\')/search.status'}
| true | true |
f71e27f8193a9613f9f3d5d8bad3b7ec30aa51dc | 3,787 | py | Python | sentry_sdk/consts.py | bartolootrit/sentry-python | 4e346acbabb1fd5592663bf9acd580835236fcf0 | [
"BSD-2-Clause"
] | 1 | 2021-04-04T09:46:28.000Z | 2021-04-04T09:46:28.000Z | sentry_sdk/consts.py | maltalk/sentry-python | 4e346acbabb1fd5592663bf9acd580835236fcf0 | [
"BSD-2-Clause"
] | 40 | 2021-07-13T06:16:51.000Z | 2022-03-28T15:10:33.000Z | sentry_sdk/consts.py | maltalk/sentry-python | 4e346acbabb1fd5592663bf9acd580835236fcf0 | [
"BSD-2-Clause"
] | null | null | null | from sentry_sdk._types import MYPY
if MYPY:
import sentry_sdk
from typing import Optional
from typing import Callable
from typing import Union
from typing import List
from typing import Type
from typing import Dict
from typing import Any
from typing import Sequence
from typing_extensions import TypedDict
from sentry_sdk.integrations import Integration
from sentry_sdk._types import (
BreadcrumbProcessor,
Event,
EventProcessor,
TracesSampler,
)
# Experiments are feature flags to enable and disable certain unstable SDK
# functionality. Changing them from the defaults (`None`) in production
# code is highly discouraged. They are not subject to any stability
# guarantees such as the ones from semantic versioning.
Experiments = TypedDict(
"Experiments",
{
"max_spans": Optional[int],
"record_sql_params": Optional[bool],
"smart_transaction_trimming": Optional[bool],
"propagate_tracestate": Optional[bool],
},
total=False,
)
DEFAULT_QUEUE_SIZE = 100
DEFAULT_MAX_BREADCRUMBS = 100
# This type exists to trick mypy and PyCharm into thinking `init` and `Client`
# take these arguments (even though they take opaque **kwargs)
class ClientConstructor(object):
    """Typing shim, never instantiated for real.

    Exists purely so that type checkers and IDEs see the full keyword-argument
    surface of ``init``/``Client`` (which actually take opaque ``**kwargs``).
    ``_get_default_options`` below introspects this ``__init__``'s defaults to
    build ``DEFAULT_OPTIONS``, so parameter names and default values here ARE
    load-bearing — keep them in sync with the real client behaviour.
    """
    def __init__(
        self,
        dsn=None,  # type: Optional[str]
        with_locals=True,  # type: bool
        max_breadcrumbs=DEFAULT_MAX_BREADCRUMBS,  # type: int
        release=None,  # type: Optional[str]
        environment=None,  # type: Optional[str]
        server_name=None,  # type: Optional[str]
        shutdown_timeout=2,  # type: float
        integrations=[],  # type: Sequence[Integration]  # noqa: B006
        in_app_include=[],  # type: List[str]  # noqa: B006
        in_app_exclude=[],  # type: List[str]  # noqa: B006
        default_integrations=True,  # type: bool
        dist=None,  # type: Optional[str]
        transport=None,  # type: Optional[Union[sentry_sdk.transport.Transport, Type[sentry_sdk.transport.Transport], Callable[[Event], None]]]
        transport_queue_size=DEFAULT_QUEUE_SIZE,  # type: int
        sample_rate=1.0,  # type: float
        send_default_pii=False,  # type: bool
        http_proxy=None,  # type: Optional[str]
        https_proxy=None,  # type: Optional[str]
        ignore_errors=[],  # type: List[Union[type, str]]  # noqa: B006
        request_bodies="medium",  # type: str
        before_send=None,  # type: Optional[EventProcessor]
        before_breadcrumb=None,  # type: Optional[BreadcrumbProcessor]
        debug=False,  # type: bool
        attach_stacktrace=False,  # type: bool
        ca_certs=None,  # type: Optional[str]
        propagate_traces=True,  # type: bool
        traces_sample_rate=None,  # type: Optional[float]
        traces_sampler=None,  # type: Optional[TracesSampler]
        auto_enabling_integrations=True,  # type: bool
        auto_session_tracking=True,  # type: bool
        send_client_reports=True,  # type: bool
        _experiments={},  # type: Experiments  # noqa: B006
    ):
        # type: (...) -> None
        pass
def _get_default_options():
    # type: () -> Dict[str, Any]
    """Derive the default-options dict by introspecting ClientConstructor.

    Maps every keyword parameter of ``ClientConstructor.__init__`` to its
    declared default value.
    """
    import inspect

    # Python 3 ships getfullargspec; fall back to getargspec elsewhere.
    if hasattr(inspect, "getfullargspec"):
        read_spec = inspect.getfullargspec
    else:
        read_spec = inspect.getargspec  # type: ignore

    spec = read_spec(ClientConstructor.__init__)
    default_values = spec.defaults or ()
    # The trailing N positional names pair up with the N defaults.
    keyword_names = spec.args[-len(default_values) :]
    return dict(zip(keyword_names, default_values))
# Snapshot the defaults once at import time, then drop the helper so it
# cannot be called again after the module namespace is finalized.
DEFAULT_OPTIONS = _get_default_options()
del _get_default_options
# SDK release version; also reported to Sentry in every event envelope.
VERSION = "1.5.2"
# Metadata block attached to outgoing events identifying this SDK build.
SDK_INFO = {
    "name": "sentry.python",
    "version": VERSION,
    "packages": [{"name": "pypi:sentry-sdk", "version": VERSION}],
}
| 34.427273 | 143 | 0.651439 | from sentry_sdk._types import MYPY
if MYPY:
import sentry_sdk
from typing import Optional
from typing import Callable
from typing import Union
from typing import List
from typing import Type
from typing import Dict
from typing import Any
from typing import Sequence
from typing_extensions import TypedDict
from sentry_sdk.integrations import Integration
from sentry_sdk._types import (
BreadcrumbProcessor,
Event,
EventProcessor,
TracesSampler,
)
Experiments = TypedDict(
"Experiments",
{
"max_spans": Optional[int],
"record_sql_params": Optional[bool],
"smart_transaction_trimming": Optional[bool],
"propagate_tracestate": Optional[bool],
},
total=False,
)
DEFAULT_QUEUE_SIZE = 100
DEFAULT_MAX_BREADCRUMBS = 100
class ClientConstructor(object):
def __init__(
self,
dsn=None,
with_locals=True,
max_breadcrumbs=DEFAULT_MAX_BREADCRUMBS,
release=None,
environment=None,
server_name=None,
shutdown_timeout=2,
integrations=[], app_include=[], app_exclude=[], ault_integrations=True,
dist=None,
transport=None,
transport_queue_size=DEFAULT_QUEUE_SIZE,
sample_rate=1.0,
send_default_pii=False,
http_proxy=None,
https_proxy=None,
ignore_errors=[], uest_bodies="medium",
before_send=None,
before_breadcrumb=None,
debug=False,
attach_stacktrace=False,
ca_certs=None,
propagate_traces=True,
traces_sample_rate=None,
traces_sampler=None,
auto_enabling_integrations=True,
auto_session_tracking=True,
send_client_reports=True,
_experiments={},
pass
def _get_default_options():
import inspect
if hasattr(inspect, "getfullargspec"):
getargspec = inspect.getfullargspec
else:
getargspec = inspect.getargspec
a = getargspec(ClientConstructor.__init__)
defaults = a.defaults or ()
return dict(zip(a.args[-len(defaults) :], defaults))
DEFAULT_OPTIONS = _get_default_options()
del _get_default_options
VERSION = "1.5.2"
SDK_INFO = {
"name": "sentry.python",
"version": VERSION,
"packages": [{"name": "pypi:sentry-sdk", "version": VERSION}],
}
| true | true |
f71e28052636c0f424f1a7e1fcc7329cd9859dbb | 2,007 | py | Python | accounts/api/serializers.py | astaqc/django-job-portal | 59e83fb0a99c947f426378d78c16b829604a747b | [
"MIT"
] | 348 | 2019-04-11T06:25:21.000Z | 2022-03-29T21:27:54.000Z | accounts/api/serializers.py | gibmax/django-job-portal | c85a6dd3dd958474980b2cac187305e08745b241 | [
"MIT"
] | 33 | 2019-04-18T18:51:18.000Z | 2022-03-12T00:53:02.000Z | accounts/api/serializers.py | gibmax/django-job-portal | c85a6dd3dd958474980b2cac187305e08745b241 | [
"MIT"
] | 145 | 2019-04-11T05:15:18.000Z | 2022-03-13T15:10:25.000Z | from rest_framework import serializers
from ..models import *
class UserSerializer(serializers.ModelSerializer):
    """General-purpose User serializer that hides credential and
    permission bookkeeping fields.

    Every instance runs in partial mode, so PATCH-style payloads
    validate without supplying the full field set.
    """

    def __init__(self, *args, **kwargs):
        # Force partial validation no matter what the caller requested.
        kwargs.update(partial=True)
        super().__init__(*args, **kwargs)

    class Meta:
        model = User
        # Everything except auth internals is exposed.
        exclude = (
            "password",
            "user_permissions",
            "groups",
            "is_staff",
            "is_active",
            "is_superuser",
            "last_login",
        )
class UserCreateSerializer(serializers.ModelSerializer):
    """Registration serializer: enforces a unique email and matching
    password/confirmation pair, then creates the user with a hashed password.
    """

    password = serializers.CharField(write_only=True, required=True, style={"input_type": "password"})
    password2 = serializers.CharField(style={"input_type": "password"}, write_only=True, label="Confirm password")

    class Meta:
        model = User
        fields = [
            "email",
            "password",
            "password2",
            "gender",
            "role",
        ]
        extra_kwargs = {"password": {"write_only": True}}

    def validate(self, attrs):
        """Object-level validation for email uniqueness and password match.

        BUG FIX: these checks previously lived in ``create()``, so they only
        fired during ``save()`` — after ``is_valid()`` had already succeeded —
        and surfaced as unhandled exceptions instead of standard HTTP 400
        validation errors. Running them here lets ``is_valid()`` report them.
        """
        email = attrs.get("email")
        if email and User.objects.filter(email=email).exists():
            raise serializers.ValidationError({"email": "Email addresses must be unique."})
        if attrs.get("password") != attrs.get("password2"):
            raise serializers.ValidationError({"password": "The two passwords differ."})
        return attrs

    def create(self, validated_data):
        """Create and persist the user; the password is hashed, never stored raw."""
        user = User(
            email=validated_data["email"],
            gender=validated_data["gender"],
            role=validated_data["role"],
        )
        user.set_password(validated_data["password"])
        user.save()
        return user
class SocialSerializer(serializers.Serializer):
    """
    Serializer which accepts an OAuth2 access token and provider.
    """
    # Name of the OAuth2 provider the token belongs to.
    provider = serializers.CharField(max_length=255, required=True)
    # Provider-issued access token; surrounding whitespace is trimmed.
    access_token = serializers.CharField(max_length=4096, required=True, trim_whitespace=True)
| 31.857143 | 114 | 0.617339 | from rest_framework import serializers
from ..models import *
class UserSerializer(serializers.ModelSerializer):
def __init__(self, *args, **kwargs):
kwargs["partial"] = True
super(UserSerializer, self).__init__(*args, **kwargs)
class Meta:
model = User
exclude = (
"password",
"user_permissions",
"groups",
"is_staff",
"is_active",
"is_superuser",
"last_login",
)
class UserCreateSerializer(serializers.ModelSerializer):
password = serializers.CharField(write_only=True, required=True, style={"input_type": "password"})
password2 = serializers.CharField(style={"input_type": "password"}, write_only=True, label="Confirm password")
class Meta:
model = User
fields = [
"email",
"password",
"password2",
"gender",
"role",
]
extra_kwargs = {"password": {"write_only": True}}
def create(self, validated_data):
email = validated_data["email"]
password = validated_data["password"]
password2 = validated_data["password2"]
gender = validated_data["gender"]
role = validated_data["role"]
if email and User.objects.filter(email=email).exists():
raise serializers.ValidationError({"email": "Email addresses must be unique."})
if password != password2:
raise serializers.ValidationError({"password": "The two passwords differ."})
user = User(email=email, gender=gender, role=role)
user.set_password(password)
user.save()
return user
class SocialSerializer(serializers.Serializer):
provider = serializers.CharField(max_length=255, required=True)
access_token = serializers.CharField(max_length=4096, required=True, trim_whitespace=True)
| true | true |
f71e28b46cb7cd347ea1092293af4977215d2ea3 | 2,323 | py | Python | flask_app/app/controller.py | presthatdynamic/Fast-Food-Fast-v1 | 2b8331273cc41cf35cdda1262c1d1e062196bbc4 | [
"MIT"
] | 1 | 2018-09-17T14:56:01.000Z | 2018-09-17T14:56:01.000Z | flask_app/app/controller.py | presthatdynamic/Fast-Food-Fast-v1 | 2b8331273cc41cf35cdda1262c1d1e062196bbc4 | [
"MIT"
] | 1 | 2018-09-17T20:45:12.000Z | 2018-09-18T06:53:48.000Z | flask_app/app/controller.py | presthatdynamic/Fast-Food-Fast-v1 | 2b8331273cc41cf35cdda1262c1d1e062196bbc4 | [
"MIT"
] | null | null | null | """
This module holds functionality that connects the models to the views
"""
from flask import session
from app.models import db
from app import utilities
def process_form_data(dict_form_data, *args):
    """
    Flatten form data (a dict whose values are single-element lists)
    into a plain dict, then verify that every field named in *args is
    present and, when it is a string, not blank.

    Raises AttributeError for non-dict input and ValueError for a
    missing or empty mandatory field.
    """
    try:
        flattened = {field: choices[0] for field, choices in dict_form_data.items()}
    except AttributeError:
        raise AttributeError('The input should be a dictionary')
    # enforce the mandatory fields requested by the caller
    for mandatory in args:
        try:
            value = flattened[mandatory]
            if isinstance(value, str):
                if len(value.strip()) == 0:
                    raise ValueError('%s should not be an empty string' % str(mandatory))
        except KeyError:
            raise ValueError('%s is an expected key' % str(mandatory))
    return flattened
def process_args_data(dict_args_data, *args):
    """
    Validate request-args data: every field named in *args must exist
    and, when it is a string, must not be blank.

    Returns the data unchanged when utilities.check_type confirms it is
    a dict; raises ValueError on a missing or empty mandatory field.
    """
    if not utilities.check_type(dict_args_data, dict):
        # mirror the original fall-through: nothing to validate, no result
        return None
    for mandatory in args:
        try:
            value = dict_args_data[mandatory]
            if isinstance(value, str):
                if len(value.strip()) == 0:
                    raise ValueError('%s should not be an empty string' % str(mandatory))
        except KeyError:
            raise ValueError('%s is an expected key' % str(mandatory))
    return dict_args_data
def get_logged_in_user_key():
    """
    Return the user_key stored in the session, or None when nobody
    is logged in.
    """
    # session is dict-like, so .get gives the membership-then-index
    # behaviour in a single lookup
    return session.get('user_key')
def remove_user_from_session():
    """
    Log the user out by deleting the user_key session variable.

    Raises KeyError when no user is currently in the session.
    """
    if 'user_key' not in session.keys():
        raise KeyError('User does not exist in the session')
    session.pop('user_key')
    # flag the session as dirty so the change is persisted
    session.modified = True
def add_user_to_session(user_key):
    """
    Log the user in by storing user_key in the session.

    Raises KeyError when user_key matches no stored user.
    """
    if db.get_user(user_key) is None:
        raise KeyError('User does not exist')
    session['user_key'] = user_key
    # flag the session as dirty so the change is persisted
    session.modified = True
| 29.0375 | 87 | 0.610418 | from flask import session
from app.models import db
from app import utilities
def process_form_data(dict_form_data, *args):
new_dict = {}
try:
for key in dict_form_data.keys():
new_dict[key] = dict_form_data[key][0]
except AttributeError:
raise AttributeError('The input should be a dictionary')
for arg in args:
try:
value = new_dict[arg]
if isinstance(value, str):
if len(value.strip()) == 0:
raise ValueError('%s should not be an empty string' % str(arg))
except KeyError:
raise ValueError('%s is an expected key' % str(arg))
return new_dict
def process_args_data(dict_args_data, *args):
if utilities.check_type(dict_args_data, dict):
for arg in args:
try:
value = dict_args_data[arg]
if isinstance(value, str):
if len(value.strip()) == 0:
raise ValueError('%s should not be an empty string' % str(arg))
except KeyError:
raise ValueError('%s is an expected key' % str(arg))
return dict_args_data
def get_logged_in_user_key():
if 'user_key' in session.keys():
return session['user_key']
else:
return None
def remove_user_from_session():
if 'user_key' in session.keys():
session.pop('user_key')
session.modified = True
else:
raise KeyError('User does not exist in the session')
def add_user_to_session(user_key):
user = db.get_user(user_key)
if user is None:
raise KeyError('User does not exist')
session['user_key'] = user_key
session.modified = True
| true | true |
f71e291050a727f1202cf508a3fdd2e01df1214a | 2,253 | py | Python | TeteRousse/Step1/Makegeo.py | ElmerCSC/ElmerIceCourses | 6ff1011f3a1311d84699a30da9f8fc56cb984a08 | [
"CC0-1.0"
] | null | null | null | TeteRousse/Step1/Makegeo.py | ElmerCSC/ElmerIceCourses | 6ff1011f3a1311d84699a30da9f8fc56cb984a08 | [
"CC0-1.0"
] | null | null | null | TeteRousse/Step1/Makegeo.py | ElmerCSC/ElmerIceCourses | 6ff1011f3a1311d84699a30da9f8fc56cb984a08 | [
"CC0-1.0"
] | 2 | 2022-01-30T16:26:25.000Z | 2022-03-26T14:04:56.000Z | # -*- coding: utf-8 -*-
# Create a geo (gmsh input file) file from a contour file
# the contour file contains the (x,y) coordinates of the ordered
# points defining the contour of the domain
#
import numpy as np
import matplotlib.pyplot as plt
# Test these options
# edge size of the elements
el_size = 18.0
# Spline or line
spline = True
Contour = np.loadtxt('./../Data/Contour_TR_glacier.dat')
x = Contour[:,0]
y = Contour[:,1]
# Drop the duplicated closing point, if any, so each contour point is
# written exactly once.
if x[0]==x[-1] and y[0]==y[-1]:
    print('Same first and last points in contour file')
    Npt = len(x)-1
else:
    Npt = len(x)
# Open the output file (context manager guarantees it is closed even if
# writing fails halfway through).
with open('teterousse0.geo', 'w') as geo:
    geo.write('// This a a geo file created using the python script Makegeo.py // \n')
    geo.write('Mesh.Algorithm=5; \n')
    geo.write('// To controle the element size, one can directly modify the lc value in the geo file // \n')
    geo.write('lc = {0} ; \n'.format(el_size))
    # write the points coordinates (x,y,0,lc)
    # BUG FIX: the point counter used to be named "np", which shadowed the
    # numpy module alias imported above.
    point_id = 0
    for j in range(0,Npt):
        point_id = point_id+1
        geo.write('Point({0}) = '.format(point_id)+r'{'+' {0}, {1}, 0.0, lc'.format(x[j],y[j])+r'}'+'; \n')
    # if spline
    if spline:
        geo.write('Spline(1) = {')
        for j in range(0,Npt):
            geo.write('{0},'.format(j+1))
        geo.write('1}; \n')
        geo.write('Line Loop(2) = {1}; \n')
        geo.write('Plane Surface(3) = {2}; \n')
        geo.write('Physical Line(4) = {1}; \n')
        geo.write('Physical Surface(5) = {3}; \n')
    # else it is lines, as a spline might not work in all case
    else:
        nl = 0
        for j in range(0,Npt-1):
            nl = nl+1
            geo.write('Line({0}) = '.format(nl)+r'{'+'{0},{1}'.format(j+1,j+2)+r'}'+'; \n')
        # closing segment back to the first point (reuses the last loop j)
        geo.write('Line({0}) = '.format(nl+1)+r'{'+'{0},{1}'.format(j+2,1)+r'}'+'; \n')
        geo.write('Compound Line({0}) = '.format(nl+2)+r'{')
        for j in range(0,Npt-1):
            geo.write('{0}, '.format(j+1))
        geo.write('{0}'.format(j+2)+'}; \n')
        geo.write('Line Loop({0}) = '.format(nl+3)+r'{'+'{0}'.format(nl+2)+r'};'+' \n')
        geo.write('Plane Surface({0}) = '.format(nl+4)+r'{'+'{0}'.format(nl+3)+r'};'+' \n')
        geo.write('Physical Line({0}) = '.format(nl+5)+r'{'+'{0}'.format(nl+2)+r'};'+' \n')
        geo.write('Physical Surface({0}) = '.format(nl+6)+r'{'+'{0}'.format(nl+4)+r'};'+' \n')
| 31.732394 | 104 | 0.560142 |
import numpy as np
import matplotlib.pyplot as plt
el_size = 18.0
spline = True
Contour = np.loadtxt('./../Data/Contour_TR_glacier.dat')
x = Contour[:,0]
y = Contour[:,1]
if x[0]==x[-1] and y[0]==y[-1]:
print('Same first and last points in contour file')
Npt = len(x)-1
else:
Npt = len(x)
geo = open('teterousse0.geo', 'w')
geo.write('// This a a geo file created using the python script Makegeo.py // \n')
geo.write('Mesh.Algorithm=5; \n')
geo.write('// To controle the element size, one can directly modify the lc value in the geo file // \n')
geo.write('lc = {0} ; \n'.format(el_size))
np=0
for j in range(0,Npt):
np=np+1
geo.write('Point({0}) = '.format(np)+r'{'+' {0}, {1}, 0.0, lc'.format(x[j],y[j])+r'}'+'; \n')
if spline:
geo.write('Spline(1) = {')
for j in range(0,Npt):
geo.write('{0},'.format(j+1))
geo.write('1}; \n')
geo.write('Line Loop(2) = {1}; \n')
geo.write('Plane Surface(3) = {2}; \n')
geo.write('Physical Line(4) = {1}; \n')
geo.write('Physical Surface(5) = {3}; \n')
else:
nl=0
for j in range(0,Npt-1):
nl=nl+1
geo.write('Line({0}) = '.format(nl)+r'{'+'{0},{1}'.format(j+1,j+2)+r'}'+'; \n')
geo.write('Line({0}) = '.format(nl+1)+r'{'+'{0},{1}'.format(j+2,1)+r'}'+'; \n')
geo.write('Compound Line({0}) = '.format(nl+2)+r'{')
for j in range(0,Npt-1):
geo.write('{0}, '.format(j+1))
geo.write('{0}'.format(j+2)+'}; \n')
geo.write('Line Loop({0}) = '.format(nl+3)+r'{'+'{0}'.format(nl+2)+r'};'+' \n')
geo.write('Plane Surface({0}) = '.format(nl+4)+r'{'+'{0}'.format(nl+3)+r'};'+' \n')
geo.write('Physical Line({0}) = '.format(nl+5)+r'{'+'{0}'.format(nl+2)+r'};'+' \n')
geo.write('Physical Surface({0}) = '.format(nl+6)+r'{'+'{0}'.format(nl+4)+r'};'+' \n')
geo.close()
| true | true |
f71e29bc5af8517918504a9bb99932a80d7a5328 | 6,053 | py | Python | benchmarks/f3_wrong_hints_permutations/scaling_nonlinear_software/4-19_17.py | EnricoMagnago/F3 | c863215c318d7d5f258eb9be38c6962cf6863b52 | [
"MIT"
] | 3 | 2021-04-23T23:29:26.000Z | 2022-03-23T10:00:30.000Z | benchmarks/f3_wrong_hints_permutations/scaling_nonlinear_software/4-19_17.py | EnricoMagnago/F3 | c863215c318d7d5f258eb9be38c6962cf6863b52 | [
"MIT"
] | null | null | null | benchmarks/f3_wrong_hints_permutations/scaling_nonlinear_software/4-19_17.py | EnricoMagnago/F3 | c863215c318d7d5f258eb9be38c6962cf6863b52 | [
"MIT"
] | 1 | 2021-11-17T22:02:56.000Z | 2021-11-17T22:02:56.000Z | from typing import FrozenSet, Tuple
import pysmt.typing as types
from pysmt.environment import Environment as PysmtEnv
from pysmt.fnode import FNode
from utils import symb_to_next
from hint import Hint, Location
def transition_system(env: PysmtEnv) -> Tuple[FrozenSet[FNode], FNode, FNode,
FNode]:
assert isinstance(env, PysmtEnv)
mgr = env.formula_manager
pc = mgr.Symbol("pc", types.INT)
x = mgr.Symbol("x", types.INT)
y = mgr.Symbol("y", types.INT)
z = mgr.Symbol("z", types.INT)
x_pc = symb_to_next(mgr, pc)
x_x = symb_to_next(mgr, x)
x_y = symb_to_next(mgr, y)
x_z = symb_to_next(mgr, z)
symbols = frozenset([pc, x, y, z])
n_locs = 5
int_bound = n_locs
pcs = []
x_pcs = []
ints = [mgr.Int(i) for i in range(int_bound)]
for l in range(n_locs):
n = ints[l]
pcs.append(mgr.Equals(pc, n))
x_pcs.append(mgr.Equals(x_pc, n))
m_1 = mgr.Int(-1)
pcend = mgr.Equals(pc, m_1)
x_pcend = mgr.Equals(x_pc, m_1)
# initial location.
init = pcs[0]
# control flow graph.
cfg = mgr.And(
# pc = -1 : -1,
mgr.Implies(pcend, x_pcend),
# pc = 0 & !(y >= 1) : -1,
mgr.Implies(mgr.And(pcs[0], mgr.Not(mgr.GE(y, ints[1]))), x_pcend),
# pc = 0 & y >= 1 : 1,
mgr.Implies(mgr.And(pcs[0], mgr.GE(y, ints[1])), x_pcs[1]),
# pc = 1 & !(z >= 1) : -1,
mgr.Implies(mgr.And(pcs[1], mgr.Not(mgr.GE(z, ints[1]))), x_pcend),
# pc = 1 & z >= 1 : 2,
mgr.Implies(mgr.And(pcs[1], mgr.GE(z, ints[1])), x_pcs[2]),
# pc = 2 & !(x >= 0) : -1,
mgr.Implies(mgr.And(pcs[2], mgr.Not(mgr.GE(x, ints[0]))), x_pcend),
# pc = 2 & x >= 0 : 3,
mgr.Implies(mgr.And(pcs[2], mgr.GE(x, ints[0])), x_pcs[3]),
# pc = 3 : 4,
mgr.Implies(pcs[3], x_pcs[4]),
# pc = 4 : 2,
mgr.Implies(pcs[4], x_pcs[2]))
# transition labels.
labels = mgr.And(
# (pc = -1 & pc' = -1) -> (x' = x & y' = y & z' = z),
mgr.Implies(
mgr.And(pcend, x_pcend),
mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, y),
mgr.Equals(x_z, z))),
# (pc = 0 & pc' = -1) -> (x' = x & y' = y & z' = z),
mgr.Implies(
mgr.And(pcs[0], x_pcend),
mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, y),
mgr.Equals(x_z, z))),
# (pc = 0 & pc' = 1) -> (x' = x & y' = y & z' = z),
mgr.Implies(
mgr.And(pcs[0], x_pcs[1]),
mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, y),
mgr.Equals(x_z, z))),
# (pc = 1 & pc' = -1) -> (x' = x & y' = y & z' = z),
mgr.Implies(
mgr.And(pcs[1], x_pcend),
mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, y),
mgr.Equals(x_z, z))),
# (pc = 1 & pc' = 2) -> (x' = x & y' = y & z' = z),
mgr.Implies(
mgr.And(pcs[1], x_pcs[2]),
mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, y),
mgr.Equals(x_z, z))),
# (pc = 2 & pc' = -1) -> (x' = x & y' = y & z' = z),
mgr.Implies(
mgr.And(pcs[2], x_pcend),
mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, y),
mgr.Equals(x_z, z))),
# (pc = 2 & pc' = 3) -> (x' = x & y' = y & z' = z),
mgr.Implies(
mgr.And(pcs[2], x_pcs[3]),
mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, y),
mgr.Equals(x_z, z))),
# (pc = 3 & pc' = 4) -> (x' = y*z - 1 & y' = y & z' = z),
mgr.Implies(
mgr.And(pcs[3], x_pcs[4]),
mgr.And(mgr.Equals(x_x, mgr.Minus(mgr.Times(y, z), ints[1])),
mgr.Equals(x_y, y), mgr.Equals(x_z, z))),
# (pc = 4 & pc' = 2) -> (x' = x & y' = y+1 & z' = z),
mgr.Implies(
mgr.And(pcs[4], x_pcs[2]),
mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, mgr.Plus(y, ints[1])),
mgr.Equals(x_z, z))))
# transition relation.
trans = mgr.And(cfg, labels)
# fairness.
fairness = mgr.Not(pcend)
return symbols, init, trans, fairness
def hints(env: PysmtEnv) -> FrozenSet[Hint]:
assert isinstance(env, PysmtEnv)
mgr = env.formula_manager
pc = mgr.Symbol("pc", types.INT)
x = mgr.Symbol("x", types.INT)
y = mgr.Symbol("y", types.INT)
z = mgr.Symbol("z", types.INT)
symbs = frozenset([pc, x, y, z])
x_pc = symb_to_next(mgr, pc)
x_x = symb_to_next(mgr, x)
x_y = symb_to_next(mgr, y)
x_z = symb_to_next(mgr, z)
res = []
i_0 = mgr.Int(0)
i_1 = mgr.Int(1)
i_2 = mgr.Int(2)
i_3 = mgr.Int(3)
loc = Location(env, mgr.LE(z, i_0))
loc.set_progress(0, mgr.Equals(x_z, z))
h_z = Hint("h_z0", env, frozenset([z]), symbs)
h_z.set_locs([loc])
res.append(h_z)
loc0 = Location(env, mgr.GE(z, i_3), mgr.GE(y, i_0))
loc0.set_progress(1, mgr.Equals(x_z, y))
loc1 = Location(env, mgr.GE(z, i_0), mgr.GE(x, i_3))
loc1.set_progress(0, mgr.GE(x_z, mgr.Plus(z, x)))
h_z = Hint("h_z3", env, frozenset([z]), symbs)
h_z.set_locs([loc0, loc1])
res.append(h_z)
loc0 = Location(env, mgr.Equals(pc, i_0))
loc0.set_progress(1, mgr.Equals(x_pc, i_1))
loc1 = Location(env, mgr.Equals(pc, i_1))
loc1.set_progress(2, mgr.Equals(x_pc, i_2))
loc2 = Location(env, mgr.Equals(pc, i_2))
loc2.set_progress(0, mgr.Equals(x_pc, i_3))
loc3 = Location(env, mgr.Equals(pc, i_3))
loc3.set_progress(0, mgr.Equals(x_pc, i_0))
h_pc = Hint("h_pc1", env, frozenset([pc]), symbs)
h_pc.set_locs([loc0, loc1, loc2, loc3])
res.append(h_pc)
loc0 = Location(env, mgr.GE(z, i_0))
loc0.set_progress(1, mgr.Equals(x_z, z))
loc1 = Location(env, mgr.GE(z, i_0))
loc1.set_progress(0, mgr.Equals(x_z, mgr.Plus(z, i_3)))
h_z = Hint("h_z4", env, frozenset([z]), symbs)
h_z.set_locs([loc0, loc1])
res.append(h_z)
return frozenset(res)
| 34.005618 | 78 | 0.508178 | from typing import FrozenSet, Tuple
import pysmt.typing as types
from pysmt.environment import Environment as PysmtEnv
from pysmt.fnode import FNode
from utils import symb_to_next
from hint import Hint, Location
def transition_system(env: PysmtEnv) -> Tuple[FrozenSet[FNode], FNode, FNode,
FNode]:
assert isinstance(env, PysmtEnv)
mgr = env.formula_manager
pc = mgr.Symbol("pc", types.INT)
x = mgr.Symbol("x", types.INT)
y = mgr.Symbol("y", types.INT)
z = mgr.Symbol("z", types.INT)
x_pc = symb_to_next(mgr, pc)
x_x = symb_to_next(mgr, x)
x_y = symb_to_next(mgr, y)
x_z = symb_to_next(mgr, z)
symbols = frozenset([pc, x, y, z])
n_locs = 5
int_bound = n_locs
pcs = []
x_pcs = []
ints = [mgr.Int(i) for i in range(int_bound)]
for l in range(n_locs):
n = ints[l]
pcs.append(mgr.Equals(pc, n))
x_pcs.append(mgr.Equals(x_pc, n))
m_1 = mgr.Int(-1)
pcend = mgr.Equals(pc, m_1)
x_pcend = mgr.Equals(x_pc, m_1)
init = pcs[0]
cfg = mgr.And(
mgr.Implies(pcend, x_pcend),
mgr.Implies(mgr.And(pcs[0], mgr.Not(mgr.GE(y, ints[1]))), x_pcend),
mgr.Implies(mgr.And(pcs[0], mgr.GE(y, ints[1])), x_pcs[1]),
mgr.Implies(mgr.And(pcs[1], mgr.Not(mgr.GE(z, ints[1]))), x_pcend),
mgr.Implies(mgr.And(pcs[1], mgr.GE(z, ints[1])), x_pcs[2]),
mgr.Implies(mgr.And(pcs[2], mgr.Not(mgr.GE(x, ints[0]))), x_pcend),
mgr.Implies(mgr.And(pcs[2], mgr.GE(x, ints[0])), x_pcs[3]),
mgr.Implies(pcs[3], x_pcs[4]),
mgr.Implies(pcs[4], x_pcs[2]))
labels = mgr.And(
mgr.Implies(
mgr.And(pcend, x_pcend),
mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, y),
mgr.Equals(x_z, z))),
mgr.Implies(
mgr.And(pcs[0], x_pcend),
mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, y),
mgr.Equals(x_z, z))),
mgr.Implies(
mgr.And(pcs[0], x_pcs[1]),
mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, y),
mgr.Equals(x_z, z))),
mgr.Implies(
mgr.And(pcs[1], x_pcend),
mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, y),
mgr.Equals(x_z, z))),
mgr.Implies(
mgr.And(pcs[1], x_pcs[2]),
mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, y),
mgr.Equals(x_z, z))),
mgr.Implies(
mgr.And(pcs[2], x_pcend),
mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, y),
mgr.Equals(x_z, z))),
mgr.Implies(
mgr.And(pcs[2], x_pcs[3]),
mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, y),
mgr.Equals(x_z, z))),
mgr.Implies(
mgr.And(pcs[3], x_pcs[4]),
mgr.And(mgr.Equals(x_x, mgr.Minus(mgr.Times(y, z), ints[1])),
mgr.Equals(x_y, y), mgr.Equals(x_z, z))),
mgr.Implies(
mgr.And(pcs[4], x_pcs[2]),
mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, mgr.Plus(y, ints[1])),
mgr.Equals(x_z, z))))
trans = mgr.And(cfg, labels)
fairness = mgr.Not(pcend)
return symbols, init, trans, fairness
def hints(env: PysmtEnv) -> FrozenSet[Hint]:
assert isinstance(env, PysmtEnv)
mgr = env.formula_manager
pc = mgr.Symbol("pc", types.INT)
x = mgr.Symbol("x", types.INT)
y = mgr.Symbol("y", types.INT)
z = mgr.Symbol("z", types.INT)
symbs = frozenset([pc, x, y, z])
x_pc = symb_to_next(mgr, pc)
x_x = symb_to_next(mgr, x)
x_y = symb_to_next(mgr, y)
x_z = symb_to_next(mgr, z)
res = []
i_0 = mgr.Int(0)
i_1 = mgr.Int(1)
i_2 = mgr.Int(2)
i_3 = mgr.Int(3)
loc = Location(env, mgr.LE(z, i_0))
loc.set_progress(0, mgr.Equals(x_z, z))
h_z = Hint("h_z0", env, frozenset([z]), symbs)
h_z.set_locs([loc])
res.append(h_z)
loc0 = Location(env, mgr.GE(z, i_3), mgr.GE(y, i_0))
loc0.set_progress(1, mgr.Equals(x_z, y))
loc1 = Location(env, mgr.GE(z, i_0), mgr.GE(x, i_3))
loc1.set_progress(0, mgr.GE(x_z, mgr.Plus(z, x)))
h_z = Hint("h_z3", env, frozenset([z]), symbs)
h_z.set_locs([loc0, loc1])
res.append(h_z)
loc0 = Location(env, mgr.Equals(pc, i_0))
loc0.set_progress(1, mgr.Equals(x_pc, i_1))
loc1 = Location(env, mgr.Equals(pc, i_1))
loc1.set_progress(2, mgr.Equals(x_pc, i_2))
loc2 = Location(env, mgr.Equals(pc, i_2))
loc2.set_progress(0, mgr.Equals(x_pc, i_3))
loc3 = Location(env, mgr.Equals(pc, i_3))
loc3.set_progress(0, mgr.Equals(x_pc, i_0))
h_pc = Hint("h_pc1", env, frozenset([pc]), symbs)
h_pc.set_locs([loc0, loc1, loc2, loc3])
res.append(h_pc)
loc0 = Location(env, mgr.GE(z, i_0))
loc0.set_progress(1, mgr.Equals(x_z, z))
loc1 = Location(env, mgr.GE(z, i_0))
loc1.set_progress(0, mgr.Equals(x_z, mgr.Plus(z, i_3)))
h_z = Hint("h_z4", env, frozenset([z]), symbs)
h_z.set_locs([loc0, loc1])
res.append(h_z)
return frozenset(res)
| true | true |
f71e2a0eadfa12605a87df1a68e086508ced255b | 1,813 | py | Python | people/models.py | morion4000/ophion | 016310ec0f702cb7f5d6b4e72e11817ef8bcdfae | [
"MIT"
] | null | null | null | people/models.py | morion4000/ophion | 016310ec0f702cb7f5d6b4e72e11817ef8bcdfae | [
"MIT"
] | 3 | 2020-02-12T03:26:52.000Z | 2021-06-10T22:29:27.000Z | people/models.py | morion4000/ophion | 016310ec0f702cb7f5d6b4e72e11817ef8bcdfae | [
"MIT"
] | null | null | null | from django.db import models
from django.contrib.auth.models import User
from django.dispatch import receiver
from django.db.models.signals import post_save
from lists.models import List
class Person(models.Model):
user = models.OneToOneField(User)
bio = models.TextField()
dob = models.DateField()
location = models.CharField(max_length=100)
web = models.URLField()
avatar = models.URLField()
def custom_lists(self):
lists = List.objects.filter(user=self.user, type=List.CUSTOM)
return lists
def favorite_list(self):
favorite_list = List.objects.get(
user=self.user,
type=List.FAVORITE
)
return favorite_list
def seen_list(self):
seen_list = List.objects.get(
user=self.user,
type=List.SEEN
)
return seen_list
def must_see_list(self):
must_see_list = List.objects.get(
user=self.user,
type=List.MUST_SEE
)
return must_see_list
@receiver(post_save, sender=User)
def create_standard_lists(sender, instance, created, **kwargs):
if created:
favorite_list = List.objects.create(
user=instance,
name='Favorite',
description='A list with movies of the favorite movies of the user.',
type=List.FAVORITE
)
seen_list = List.objects.create(
user=instance,
name='Seen',
description='A list with movies of the seen movies of the user.',
type=List.SEEN
)
must_see_list = List.objects.create(
user=instance,
name='Must See',
description='A list with movies of the must-see movies of the user.',
type=List.MUST_SEE
)
| 25.9 | 81 | 0.607281 | from django.db import models
from django.contrib.auth.models import User
from django.dispatch import receiver
from django.db.models.signals import post_save
from lists.models import List
class Person(models.Model):
user = models.OneToOneField(User)
bio = models.TextField()
dob = models.DateField()
location = models.CharField(max_length=100)
web = models.URLField()
avatar = models.URLField()
def custom_lists(self):
lists = List.objects.filter(user=self.user, type=List.CUSTOM)
return lists
def favorite_list(self):
favorite_list = List.objects.get(
user=self.user,
type=List.FAVORITE
)
return favorite_list
def seen_list(self):
seen_list = List.objects.get(
user=self.user,
type=List.SEEN
)
return seen_list
def must_see_list(self):
must_see_list = List.objects.get(
user=self.user,
type=List.MUST_SEE
)
return must_see_list
@receiver(post_save, sender=User)
def create_standard_lists(sender, instance, created, **kwargs):
if created:
favorite_list = List.objects.create(
user=instance,
name='Favorite',
description='A list with movies of the favorite movies of the user.',
type=List.FAVORITE
)
seen_list = List.objects.create(
user=instance,
name='Seen',
description='A list with movies of the seen movies of the user.',
type=List.SEEN
)
must_see_list = List.objects.create(
user=instance,
name='Must See',
description='A list with movies of the must-see movies of the user.',
type=List.MUST_SEE
)
| true | true |
f71e2b1b304f0ba0229a525aef1d124b4077c13c | 8,985 | py | Python | demo.py | SpikeKing/UGATIT-new | 4efbdc4e6d8046f5dd452616975d88c01fcbab72 | [
"MIT"
] | 1 | 2020-08-06T02:19:13.000Z | 2020-08-06T02:19:13.000Z | demo.py | SpikeKing/UGATIT-new | 4efbdc4e6d8046f5dd452616975d88c01fcbab72 | [
"MIT"
] | null | null | null | demo.py | SpikeKing/UGATIT-new | 4efbdc4e6d8046f5dd452616975d88c01fcbab72 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -- coding: utf-8 --
"""
Copyright (c) 2019. All rights reserved.
Created by C. L. Wang on 2020/1/2
"""
import math
from UGATIT import UGATIT
from main import parse_args
from root_dir import DATA_DIR
from utils.project_utils import traverse_dir_files, mkdir_if_not_exist
from utils.ugatit_utils import *
class ImgPredictor(object):
"""
图像预测类
"""
def __init__(self, gan_type='lsgan', adv_weight=1, cycle_weight=10, identity_weight=10, cam_weight=1000):
self.gan_type = gan_type
self.adv_weight = adv_weight
self.cycle_weight = cycle_weight
self.identity_weight = identity_weight
self.cam_weight = cam_weight
self.gan, self.sess = self.init_model()
def init_model(self):
args = parse_args()
if args is None:
exit()
args.phase = 'test'
# args.dataset = 's2a4zhengsheng'
args.dataset = 'selfie2anime'
# args.dataset = 's2azsV1s2a'
args.img_size = 256
# n_epoch = "1020000"
n_epoch = None
args.gan_type = self.gan_type
args.adv_weight = self.adv_weight
args.cycle_weight = self.cycle_weight
args.identity_weight = self.identity_weight
args.cam_weight = self.cam_weight
# open session
sess = tf.Session(config=tf.ConfigProto(allow_soft_placement=True))
gan = UGATIT(sess, args)
# build graph
gan.build_model()
# show network architecture
# show_all_variables()
gan.init_model(sess, n_epoch=n_epoch)
return gan, sess
def predict_img(self, img_path):
print('[Info] 预测图像路径: {}'.format(img_path))
img_np = self.gan.read_img(img_path)
print('[Info] 输入尺寸: {}'.format(img_np.shape))
img_fake = self.gan.predict_img(img_np, self.sess)
img_fake = np.squeeze(img_fake, axis=0)
print('[Info] 输出尺寸: {}'.format(img_fake.shape))
img_fake = inverse_transform(img_fake)
img_fake = img_fake.astype(np.uint8)
# show_img_rgb(img_fake)
return img_fake
def export_model(self):
self.gan.export_model()
def close_sess(self):
self.sess.close()
def img_predictor_test():
"""
图像预测测试
"""
# img_dir = os.path.join(DATA_DIR, 'heads-60')
img_dir = os.path.join(DATA_DIR, 'heads')
img_out_dir = os.path.join(DATA_DIR, 'outputs')
mkdir_if_not_exist(img_out_dir)
paths_list, names_list = traverse_dir_files(img_dir)
ip = ImgPredictor()
for path, name in zip(paths_list, names_list):
img_fake = ip.predict_img(path)
img_fake = cv2.cvtColor(img_fake, cv2.COLOR_RGB2BGR)
img_fake_path = os.path.join(img_out_dir, '{}.out.jpg'.format(name))
cv2.imwrite(img_fake_path, img_fake)
print('[Info] 写入图像路径: {}'.format(img_fake_path))
print('[Info] 测试完成!')
def img_predictor_test_v2():
"""
图像预测测试
"""
# img_dir = os.path.join(DATA_DIR, 'imgs')
img_dir = os.path.join(DATA_DIR, 'heads')
img_out_dir = os.path.join(DATA_DIR, 'outputs-v2')
mkdir_if_not_exist(img_out_dir)
paths_list, names_list = traverse_dir_files(img_dir)
ip = ImgPredictor(gan_type='dragan')
for path, name in zip(paths_list, names_list):
img_fake = ip.predict_img(path)
img_fake = cv2.cvtColor(img_fake, cv2.COLOR_RGB2BGR)
img_fake_path = os.path.join(img_out_dir, '{}.out.jpg'.format(name))
cv2.imwrite(img_fake_path, img_fake)
print('[Info] 写入图像路径: {}'.format(img_fake_path))
print('[Info] 测试完成!')
def img_predictor_test_v3():
"""
图像预测测试
"""
img_dir = os.path.join(DATA_DIR, 'heads')
img_out_dir = os.path.join(DATA_DIR, 'outputs-v3')
mkdir_if_not_exist(img_out_dir)
paths_list, names_list = traverse_dir_files(img_dir)
ip = ImgPredictor(gan_type='dragan', adv_weight=1, cycle_weight=20, identity_weight=10, cam_weight=2000)
for path, name in zip(paths_list, names_list):
img_fake = ip.predict_img(path)
img_fake = cv2.cvtColor(img_fake, cv2.COLOR_RGB2BGR)
img_fake_path = os.path.join(img_out_dir, '{}.out.jpg'.format(name))
cv2.imwrite(img_fake_path, img_fake)
print('[Info] 写入图像路径: {}'.format(img_fake_path))
print('[Info] 测试完成!')
def merge_imgs(imgs, cols=6, rows=6):
"""
合并图像
:param imgs: 图像序列
:param cols: 行数
:param rows: 列数
:param sk: 间隔,当sk=2时,即0, 2, 4, 6
:return: 大图
"""
if not imgs:
raise Exception('[Exception] 合并图像的输入为空!')
img_shape = imgs[0].shape
h, w, _ = img_shape
large_imgs = np.ones((rows * h, cols * w, 3)) * 255 # 大图
for j in range(rows):
for i in range(cols):
idx = j * cols + i
if idx > len(imgs) - 1: # 少于帧数,输出透明帧
break
# print('[Info] 帧的idx: {}, i: {}, j:{}'.format(idx, i, j))
large_imgs[(j * h):(j * h + h), (i * w): (i * w + w)] = imgs[idx]
# print(large_imgs.shape)
# show_png(large_imgs)
# show_png(large_imgs)
return large_imgs
def merge_one_img():
img_dir = os.path.join(DATA_DIR, 'imgs')
img_out_dir = os.path.join(DATA_DIR, 'outputs')
img_merge_dir = os.path.join(DATA_DIR, 'merge')
paths_list, names_list = traverse_dir_files(img_dir)
out_paths_list, out_names_list = traverse_dir_files(img_out_dir)
merge_paths_list, merge_names_list = traverse_dir_files(img_merge_dir)
img_size = 256
img_list = []
for path, out_path, merge_path in zip(paths_list, out_paths_list, merge_paths_list):
img = cv2.imread(path)
img = cv2.resize(img, (img_size, img_size))
img_list.append(img)
img_out = cv2.imread(out_path)
img_out = cv2.resize(img_out, (img_size, img_size))
img_list.append(img_out)
img_merge = cv2.imread(merge_path)
img_merge = cv2.resize(img_merge, (img_size, img_size))
img_list.append(img_merge)
large_img = merge_imgs(img_list, cols=3, rows=7)
large_img_path = os.path.join(DATA_DIR, 'large_img.jpg')
cv2.imwrite(large_img_path, large_img)
def merge_outputs():
img_dir = os.path.join(DATA_DIR, 'outputs-105')
paths_list, names_list = traverse_dir_files(img_dir)
print('[Info] 图像数: {}'.format(len(paths_list)))
img_size = 256
img_list = []
for path, name in zip(paths_list, names_list):
img = cv2.imread(path)
img = cv2.resize(img, (img_size, img_size))
img_list.append(img)
large_img = merge_imgs(img_list, cols=10, rows=6)
large_img_path = os.path.join(DATA_DIR, 'xxx-out-105.jpg')
cv2.imwrite(large_img_path, large_img)
def merge_sample():
# sample_dir = 'UGATIT_s2a4zhengsheng_dragan_4resblock_6dis_1_1_10_10_1000_sn_smoothing'
# sample_dir = 'UGATIT_s2a4zhengsheng_dragan_4resblock_6dis_1_1_15_10_1500_sn_smoothing'
# sample_dir = 'UGATIT_selfie2anime4zs_lsgan_4resblock_6dis_1_1_10_10_1000_sn_smoothing'
sample_dir = 'UGATIT_s2a4zhengsheng_lsgan_4resblock_6dis_1_1_10_10_1000_sn_smoothing'
img_dir = os.path.join(DATA_DIR, 'samples', sample_dir)
paths_list, names_list = traverse_dir_files(img_dir)
img_size = 256
img_fake_list = []
for path, name in zip(paths_list, names_list):
if not name.startswith('fake'):
continue
img_fake_list.append(path)
img_real_list = []
for path, name in zip(paths_list, names_list):
if not name.startswith('real'):
continue
img_real_list.append(path)
img_list = []
for p_fake, p_real in zip(img_fake_list, img_real_list):
img = cv2.imread(p_real)
img = cv2.resize(img, (img_size, img_size * 2))
img_list.append(img)
img = cv2.imread(p_fake)
img = cv2.resize(img, (img_size, img_size * 2))
img_list.append(img)
n_imgs = len(img_list)
print('图像数量: {}'.format(n_imgs))
cols = 64
rows = int(math.ceil(n_imgs / float(cols)))
large_img = merge_imgs(img_list, cols=cols, rows=rows)
large_img_path = os.path.join(DATA_DIR, '{}.jpg'.format(sample_dir))
cv2.imwrite(large_img_path, large_img)
def resize_folder():
img_dir = os.path.join(DATA_DIR, 'heads-60')
img_out_dir = os.path.join(DATA_DIR, 'heads-60w256')
mkdir_if_not_exist(img_out_dir)
paths_list, names_list = traverse_dir_files(img_dir)
print('[Info] 图像数: {}'.format(len(paths_list)))
img_size = 256
for path, name in zip(paths_list, names_list):
img = cv2.imread(path)
img = cv2.resize(img, (img_size, img_size))
cv2.imwrite(os.path.join(img_out_dir, name), img)
def model_export():
ip = ImgPredictor()
ip.export_model()
def main():
model_export()
# img_predictor_test()
# img_predictor_test_v2()
# img_predictor_test_v3()
# merge_outputs()
# merge_sample()
# resize_folder()
if __name__ == '__main__':
main()
| 30.35473 | 109 | 0.648191 |
import math
from UGATIT import UGATIT
from main import parse_args
from root_dir import DATA_DIR
from utils.project_utils import traverse_dir_files, mkdir_if_not_exist
from utils.ugatit_utils import *
class ImgPredictor(object):
def __init__(self, gan_type='lsgan', adv_weight=1, cycle_weight=10, identity_weight=10, cam_weight=1000):
self.gan_type = gan_type
self.adv_weight = adv_weight
self.cycle_weight = cycle_weight
self.identity_weight = identity_weight
self.cam_weight = cam_weight
self.gan, self.sess = self.init_model()
def init_model(self):
args = parse_args()
if args is None:
exit()
args.phase = 'test'
args.dataset = 'selfie2anime'
args.img_size = 256
n_epoch = None
args.gan_type = self.gan_type
args.adv_weight = self.adv_weight
args.cycle_weight = self.cycle_weight
args.identity_weight = self.identity_weight
args.cam_weight = self.cam_weight
sess = tf.Session(config=tf.ConfigProto(allow_soft_placement=True))
gan = UGATIT(sess, args)
gan.build_model()
gan.init_model(sess, n_epoch=n_epoch)
return gan, sess
def predict_img(self, img_path):
print('[Info] 预测图像路径: {}'.format(img_path))
img_np = self.gan.read_img(img_path)
print('[Info] 输入尺寸: {}'.format(img_np.shape))
img_fake = self.gan.predict_img(img_np, self.sess)
img_fake = np.squeeze(img_fake, axis=0)
print('[Info] 输出尺寸: {}'.format(img_fake.shape))
img_fake = inverse_transform(img_fake)
img_fake = img_fake.astype(np.uint8)
return img_fake
def export_model(self):
self.gan.export_model()
def close_sess(self):
self.sess.close()
def img_predictor_test():
img_dir = os.path.join(DATA_DIR, 'heads')
img_out_dir = os.path.join(DATA_DIR, 'outputs')
mkdir_if_not_exist(img_out_dir)
paths_list, names_list = traverse_dir_files(img_dir)
ip = ImgPredictor()
for path, name in zip(paths_list, names_list):
img_fake = ip.predict_img(path)
img_fake = cv2.cvtColor(img_fake, cv2.COLOR_RGB2BGR)
img_fake_path = os.path.join(img_out_dir, '{}.out.jpg'.format(name))
cv2.imwrite(img_fake_path, img_fake)
print('[Info] 写入图像路径: {}'.format(img_fake_path))
print('[Info] 测试完成!')
def img_predictor_test_v2():
img_dir = os.path.join(DATA_DIR, 'heads')
img_out_dir = os.path.join(DATA_DIR, 'outputs-v2')
mkdir_if_not_exist(img_out_dir)
paths_list, names_list = traverse_dir_files(img_dir)
ip = ImgPredictor(gan_type='dragan')
for path, name in zip(paths_list, names_list):
img_fake = ip.predict_img(path)
img_fake = cv2.cvtColor(img_fake, cv2.COLOR_RGB2BGR)
img_fake_path = os.path.join(img_out_dir, '{}.out.jpg'.format(name))
cv2.imwrite(img_fake_path, img_fake)
print('[Info] 写入图像路径: {}'.format(img_fake_path))
print('[Info] 测试完成!')
def img_predictor_test_v3():
img_dir = os.path.join(DATA_DIR, 'heads')
img_out_dir = os.path.join(DATA_DIR, 'outputs-v3')
mkdir_if_not_exist(img_out_dir)
paths_list, names_list = traverse_dir_files(img_dir)
ip = ImgPredictor(gan_type='dragan', adv_weight=1, cycle_weight=20, identity_weight=10, cam_weight=2000)
for path, name in zip(paths_list, names_list):
img_fake = ip.predict_img(path)
img_fake = cv2.cvtColor(img_fake, cv2.COLOR_RGB2BGR)
img_fake_path = os.path.join(img_out_dir, '{}.out.jpg'.format(name))
cv2.imwrite(img_fake_path, img_fake)
print('[Info] 写入图像路径: {}'.format(img_fake_path))
print('[Info] 测试完成!')
def merge_imgs(imgs, cols=6, rows=6):
if not imgs:
raise Exception('[Exception] 合并图像的输入为空!')
img_shape = imgs[0].shape
h, w, _ = img_shape
large_imgs = np.ones((rows * h, cols * w, 3)) * 255
for j in range(rows):
for i in range(cols):
idx = j * cols + i
if idx > len(imgs) - 1:
break
large_imgs[(j * h):(j * h + h), (i * w): (i * w + w)] = imgs[idx]
return large_imgs
def merge_one_img():
img_dir = os.path.join(DATA_DIR, 'imgs')
img_out_dir = os.path.join(DATA_DIR, 'outputs')
img_merge_dir = os.path.join(DATA_DIR, 'merge')
paths_list, names_list = traverse_dir_files(img_dir)
out_paths_list, out_names_list = traverse_dir_files(img_out_dir)
merge_paths_list, merge_names_list = traverse_dir_files(img_merge_dir)
img_size = 256
img_list = []
for path, out_path, merge_path in zip(paths_list, out_paths_list, merge_paths_list):
img = cv2.imread(path)
img = cv2.resize(img, (img_size, img_size))
img_list.append(img)
img_out = cv2.imread(out_path)
img_out = cv2.resize(img_out, (img_size, img_size))
img_list.append(img_out)
img_merge = cv2.imread(merge_path)
img_merge = cv2.resize(img_merge, (img_size, img_size))
img_list.append(img_merge)
large_img = merge_imgs(img_list, cols=3, rows=7)
large_img_path = os.path.join(DATA_DIR, 'large_img.jpg')
cv2.imwrite(large_img_path, large_img)
def merge_outputs():
img_dir = os.path.join(DATA_DIR, 'outputs-105')
paths_list, names_list = traverse_dir_files(img_dir)
print('[Info] 图像数: {}'.format(len(paths_list)))
img_size = 256
img_list = []
for path, name in zip(paths_list, names_list):
img = cv2.imread(path)
img = cv2.resize(img, (img_size, img_size))
img_list.append(img)
large_img = merge_imgs(img_list, cols=10, rows=6)
large_img_path = os.path.join(DATA_DIR, 'xxx-out-105.jpg')
cv2.imwrite(large_img_path, large_img)
def merge_sample():
sample_dir = 'UGATIT_s2a4zhengsheng_lsgan_4resblock_6dis_1_1_10_10_1000_sn_smoothing'
img_dir = os.path.join(DATA_DIR, 'samples', sample_dir)
paths_list, names_list = traverse_dir_files(img_dir)
img_size = 256
img_fake_list = []
for path, name in zip(paths_list, names_list):
if not name.startswith('fake'):
continue
img_fake_list.append(path)
img_real_list = []
for path, name in zip(paths_list, names_list):
if not name.startswith('real'):
continue
img_real_list.append(path)
img_list = []
for p_fake, p_real in zip(img_fake_list, img_real_list):
img = cv2.imread(p_real)
img = cv2.resize(img, (img_size, img_size * 2))
img_list.append(img)
img = cv2.imread(p_fake)
img = cv2.resize(img, (img_size, img_size * 2))
img_list.append(img)
n_imgs = len(img_list)
print('图像数量: {}'.format(n_imgs))
cols = 64
rows = int(math.ceil(n_imgs / float(cols)))
large_img = merge_imgs(img_list, cols=cols, rows=rows)
large_img_path = os.path.join(DATA_DIR, '{}.jpg'.format(sample_dir))
cv2.imwrite(large_img_path, large_img)
def resize_folder():
img_dir = os.path.join(DATA_DIR, 'heads-60')
img_out_dir = os.path.join(DATA_DIR, 'heads-60w256')
mkdir_if_not_exist(img_out_dir)
paths_list, names_list = traverse_dir_files(img_dir)
print('[Info] 图像数: {}'.format(len(paths_list)))
img_size = 256
for path, name in zip(paths_list, names_list):
img = cv2.imread(path)
img = cv2.resize(img, (img_size, img_size))
cv2.imwrite(os.path.join(img_out_dir, name), img)
def model_export():
ip = ImgPredictor()
ip.export_model()
def main():
model_export()
if __name__ == '__main__':
main()
| true | true |
f71e2b1de028bb07a72e5add71ffb3879b603bc4 | 6,718 | py | Python | store/serializers.py | nettaku2/django_learn | ce07c97c8b2dd4828cae1d1b176674c843e66b35 | [
"MIT"
] | null | null | null | store/serializers.py | nettaku2/django_learn | ce07c97c8b2dd4828cae1d1b176674c843e66b35 | [
"MIT"
] | null | null | null | store/serializers.py | nettaku2/django_learn | ce07c97c8b2dd4828cae1d1b176674c843e66b35 | [
"MIT"
] | null | null | null | from django.db.models import fields
from rest_framework import serializers
from decimal import Decimal
from .models import Cart, CartItem, Product, Collection, Customer, Order, OrderItem, Review
from uuid import uuid4
class ReviewSerializer(serializers.ModelSerializer):
    """Serialize product reviews; the owning product comes from the view context."""

    class Meta:
        model = Review
        fields = ['id', 'name', 'description', 'date']

    def create(self, validated_data):
        """Create a review attached to the product identified by the URL.

        The nested router puts ``product_id`` into the serializer context,
        so it is not part of the request payload itself.
        """
        return Review.objects.create(
            product_id=self.context['product_id'], **validated_data
        )
class OrderItemSerializer(serializers.ModelSerializer):
    """Serialize a single line item of an order."""

    class Meta:
        model = OrderItem
        fields = [
            'id',
            'quantity',
            'unit_price',
            'order',
            'product',
        ]
class OrderSerializer(serializers.ModelSerializer):
    """Serialize an order header (items are exposed via OrderItemSerializer)."""

    class Meta:
        model = Order
        fields = [
            'id',
            'placed_at',
            'payment_status',
            'customer',
        ]
class CustomerSerializer(serializers.ModelSerializer):
    """Serialize customer profiles; the linked Django user is read-only."""

    # The user is bound from the authenticated request, never from the payload.
    user_id = serializers.IntegerField(read_only=True)

    class Meta:
        model = Customer
        fields = ['id', 'user_id', 'phone', 'birth_date', 'membership']
class CollectionSerializer(serializers.ModelSerializer):
    """Serialize collections together with the number of products they hold."""

    products_count = serializers.SerializerMethodField(
        method_name='count_products')

    class Meta:
        model = Collection
        fields = ['id', 'title', 'products_count']

    def count_products(self, collection):
        """Return how many products belong to *collection*.

        NOTE(review): this issues one COUNT query per serialized collection;
        if lists grow large, consider annotating the queryset in the view —
        TODO confirm against the view code.
        """
        return collection.product_set.count()
# class CollectionSerializer(serializers.Serializer):
# id = serializers.IntegerField(required=False)
# title = serializers.CharField(max_length=255)
# products_count = serializers.SerializerMethodField('count_products')
# def count_products(self, collection):
# return collection.product_set.count()
# def create(self, validated_data):
# collection = Collection(**validated_data)
# collection.save()
# return collection
# def update(self, instance, validated_data):
# instance.title = validated_data["title"]
# instance.save()
# return instance
class ProductSerializer(serializers.ModelSerializer):
    """Serialize products, exposing the model's ``unit_price`` as ``price``
    plus a computed tax-inclusive price."""

    # Rename the model field ``unit_price`` to ``price`` in the API payload.
    price = serializers.DecimalField(
        max_digits=6, decimal_places=2, source='unit_price')
    price_with_tax = serializers.SerializerMethodField(
        method_name='calculate_tax')

    class Meta:
        model = Product
        fields = ['id', 'title', 'slug', 'description',
                  'inventory', 'price', 'price_with_tax', 'last_update',
                  'collection']

    def calculate_tax(self, product):
        """Return the unit price with 10% tax applied.

        The rate is built from a string so the Decimal is exact:
        ``Decimal(1.1)`` would inherit binary-float noise
        (1.100000000000000088...) and leak it into the serialized value.
        """
        return product.unit_price * Decimal('1.1')
# class ProductSerializer(serializers.Serializer):
# id = serializers.IntegerField()
# title = serializers.CharField(max_length=255)
# price = serializers.DecimalField(
# max_digits=6, decimal_places=2, source='unit_price')
# price_with_tax = serializers.SerializerMethodField(
# method_name='calculate_tax')
# collection_number = serializers.PrimaryKeyRelatedField(
# queryset=Collection.objects.all(), source='collection'
# )
# collection_title = serializers.StringRelatedField(source='collection')
# collection_object = CollectionSerializer(source='collection')
# collection_link = serializers.HyperlinkedRelatedField(
# queryset=Collection.objects.all(),
# view_name='collection-detail',
# source='collection'
# )
# def calculate_tax(self, product):
# return product.unit_price * Decimal(1.1)
class SimpleProductSerializer(serializers.ModelSerializer):
    """Minimal product representation used when nesting inside other payloads."""
    class Meta:
        model = Product
        fields = ['title', 'unit_price']
class CartItemSerializer(serializers.ModelSerializer):
    """Read serializer for one cart line: nested product plus a computed line total."""
    class Meta:
        model = CartItem
        fields = ['id', 'quantity', 'product', 'total_price']
    total_price = serializers.SerializerMethodField('get_total_price')
    def get_total_price(self, cart_item):
        """Return quantity * unit price as a Decimal."""
        quantity = Decimal(cart_item.quantity)
        return quantity * cart_item.product.unit_price
    product = SimpleProductSerializer()
class AddCartItemSerializer(serializers.ModelSerializer):
    """Write serializer for adding a product to a cart.
    The cart id comes from the view context (URL), not the payload. Adding a
    product already present in the cart bumps its quantity instead of creating
    a duplicate row.
    """
    class Meta:
        model = CartItem
        fields = ['id', 'product_id', 'quantity']
    def validate_product_id(self, value):
        """Reject ids that do not refer to an existing product."""
        if not Product.objects.filter(pk=value).exists():
            raise serializers.ValidationError(
                "No Product with the given id was found")
        return value
    def save(self, **kwargs):
        cart_id = self.context['cart_id']
        product_id = self.validated_data['product_id']
        quantity = self.validated_data['quantity']
        try:
            # Product already in the cart: increment the quantity in place.
            cart_item = CartItem.objects.get(
                cart_id=cart_id, product_id=product_id)
            cart_item.quantity += quantity
            cart_item.save()
            self.instance = cart_item
        except CartItem.DoesNotExist:
            self.instance = CartItem.objects.create(
                cart_id=cart_id, **self.validated_data)
        # NOTE: an unreachable `return super().save(**kwargs)` that followed
        # this return has been removed.
        return self.instance
    # Declared explicitly so writes can address the product by raw FK id.
    product_id = serializers.IntegerField()
class CartSerializer(serializers.ModelSerializer):
    """Serializes a Cart with its nested items and the aggregate price."""
    class Meta:
        model = Cart
        fields = ['id', 'cartitem_set', 'total_price']
    # Carts are keyed by a server-generated UUID; never client-writable.
    id = serializers.UUIDField(read_only=True)
    cartitem_set = CartItemSerializer(many=True, read_only=True)
    total_price = serializers.SerializerMethodField('get_total_price')
    def get_total_price(self, cart):
        # Generator form: no need to materialize an intermediate list.
        return sum(item.quantity * item.product.unit_price
                   for item in cart.cartitem_set.all())
| 33.929293 | 97 | 0.67401 | from django.db.models import fields
from rest_framework import serializers
from decimal import Decimal
from .models import Cart, CartItem, Product, Collection, Customer, Order, OrderItem, Review
from uuid import uuid4
class ReviewSerializer(serializers.ModelSerializer):
class Meta:
model = Review
fields = ['id', 'name', 'description', 'date']
def create(self, validated_data):
product_id = self.context['product_id']
return Review.objects.create(product_id=product_id, **validated_data)
class OrderItemSerializer(serializers.ModelSerializer):
class Meta:
model = OrderItem
fields = ['id', 'quantity', 'unit_price', 'order', 'product']
class OrderSerializer(serializers.ModelSerializer):
class Meta:
model = Order
fields = ['id', 'placed_at', 'payment_status', 'customer']
class CustomerSerializer(serializers.ModelSerializer):
class Meta:
model = Customer
fields = ['id', 'user_id', 'phone', 'birth_date', 'membership']
user_id = serializers.IntegerField(read_only=True)
class CollectionSerializer(serializers.ModelSerializer):
class Meta:
model = Collection
fields = ['id', 'title', 'products_count']
products_count = serializers.SerializerMethodField('count_products')
def count_products(self, collection):
return collection.product_set.count()
class ProductSerializer(serializers.ModelSerializer):
class Meta:
model = Product
fields = ['id', 'title', 'slug', 'description',
'inventory', 'price', 'price_with_tax', 'last_update', 'collection']
price = serializers.DecimalField(
max_digits=6, decimal_places=2, source='unit_price')
price_with_tax = serializers.SerializerMethodField(
method_name='calculate_tax')
def calculate_tax(self, product):
return product.unit_price * Decimal(1.1)
class SimpleProductSerializer(serializers.ModelSerializer):
class Meta:
model = Product
fields = ['title', 'unit_price']
class CartItemSerializer(serializers.ModelSerializer):
class Meta:
model = CartItem
fields = ['id', 'quantity', 'product', 'total_price']
total_price = serializers.SerializerMethodField('get_total_price')
def get_total_price(self, cartitem):
return Decimal(cartitem.quantity) * cartitem.product.unit_price
product = SimpleProductSerializer()
class AddCartItemSerializer(serializers.ModelSerializer):
class Meta:
model = CartItem
fields = ['id', 'product_id', 'quantity']
def validate_product_id(self, value):
if not Product.objects.filter(pk=value).exists():
raise serializers.ValidationError(
"No Product with the given id was found")
return value
def save(self, **kwargs):
cart_id = self.context['cart_id']
product_id = self.validated_data['product_id']
quantity = self.validated_data['quantity']
try:
cart_item = CartItem.objects.get(
cart_id=cart_id, product_id=product_id)
cart_item.quantity += quantity
cart_item.save()
self.instance = cart_item
except CartItem.DoesNotExist:
self.instance = CartItem.objects.create(
cart_id=cart_id, **self.validated_data)
return self.instance
return super().save(**kwargs)
product_id = serializers.IntegerField()
class CartSerializer(serializers.ModelSerializer):
class Meta:
model = Cart
fields = ['id', 'cartitem_set', 'total_price']
id = serializers.UUIDField(read_only=True)
cartitem_set = CartItemSerializer(many=True, read_only=True)
total_price = serializers.SerializerMethodField('get_total_price')
def get_total_price(self, cart):
return sum([item.quantity * item.product.unit_price for item in cart.cartitem_set.all()])
| true | true |
f71e2ba69c70290aca4d5103ca3a8346e15ea63b | 6,154 | py | Python | app/routes.py | vedala/microblog_cli | b48b21c1395e6ba0c1f851990d0e370cb6d09fee | [
"MIT"
] | null | null | null | app/routes.py | vedala/microblog_cli | b48b21c1395e6ba0c1f851990d0e370cb6d09fee | [
"MIT"
] | null | null | null | app/routes.py | vedala/microblog_cli | b48b21c1395e6ba0c1f851990d0e370cb6d09fee | [
"MIT"
] | null | null | null | from datetime import datetime
from flask import render_template, flash, redirect, url_for, request
from flask_login import login_user, logout_user, current_user, login_required
from werkzeug.urls import url_parse
from app import app, db
from app.forms import LoginForm, RegistrationForm, EditProfileForm, \
EmptyForm, PostForm
from app.models import User, Post
@app.before_request
def before_request():
    """Stamp the authenticated user's last_seen time before every request."""
    if current_user.is_authenticated:
        current_user.last_seen = datetime.utcnow()
        db.session.commit()
@app.route('/', methods=['GET', 'POST'])
@app.route('/index', methods=['GET', 'POST'])
@login_required
def index():
    """Home page: accept new post submissions and show the followed-users feed."""
    post_form = PostForm()
    if post_form.validate_on_submit():
        new_post = Post(body=post_form.post.data, author=current_user)
        db.session.add(new_post)
        db.session.commit()
        flash('Your post is now live!')
        # Post/Redirect/Get: a browser refresh must not resubmit the post.
        return redirect(url_for('index'))
    page_number = request.args.get('page', 1, type=int)
    posts = current_user.followed_posts().paginate(
        page_number, app.config['POSTS_PER_PAGE'], False)
    next_url = url_for('index', page=posts.next_num) if posts.has_next else None
    prev_url = url_for('index', page=posts.prev_num) if posts.has_prev else None
    return render_template('index.html', title='Home', form=post_form,
                           posts=posts.items, next_url=next_url,
                           prev_url=prev_url)
@app.route('/explore')
@login_required
def explore():
    """Global feed: all users' posts, newest first, paginated."""
    page_number = request.args.get('page', 1, type=int)
    posts = Post.query.order_by(Post.timestamp.desc()).paginate(
        page_number, app.config['POSTS_PER_PAGE'], False)
    next_url = url_for('explore', page=posts.next_num) if posts.has_next else None
    prev_url = url_for('explore', page=posts.prev_num) if posts.has_prev else None
    # Reuses the index template; no post form is passed here.
    return render_template('index.html', title='Explore', posts=posts.items,
                           next_url=next_url, prev_url=prev_url)
@app.route('/login', methods=['GET', 'POST'])
def login():
    """Sign a user in, then send them on to the page they originally wanted."""
    if current_user.is_authenticated:
        return redirect(url_for('index'))
    login_form = LoginForm()
    if login_form.validate_on_submit():
        user = User.query.filter_by(username=login_form.username.data).first()
        if user is None or not user.check_password(login_form.password.data):
            flash('Invalid username or password')
            return redirect(url_for('login'))
        login_user(user, remember=login_form.remember_me.data)
        target = request.args.get('next')
        # Only honor relative `next` targets: an absolute URL (non-empty
        # netloc) could send the user to an attacker-controlled site.
        if not target or url_parse(target).netloc != '':
            target = url_for('index')
        return redirect(target)
    return render_template('login.html', title='Sign In', form=login_form)
@app.route('/logout')
def logout():
    """Clear the user's session and return to the home page."""
    logout_user()
    return redirect(url_for('index'))
@app.route('/register', methods=['GET', 'POST'])
def register():
    """Create a new account, then send the visitor to the login page."""
    if current_user.is_authenticated:
        # Already signed in: the signup form is not for them.
        return redirect(url_for('index'))
    signup_form = RegistrationForm()
    if signup_form.validate_on_submit():
        new_user = User(username=signup_form.username.data,
                        email=signup_form.email.data)
        new_user.set_password(signup_form.password.data)
        db.session.add(new_user)
        db.session.commit()
        flash('Congratulations, you are now a registered user!')
        return redirect(url_for('login'))
    return render_template('register.html', title='Register', form=signup_form)
@app.route('/user/<username>')
@login_required
def user(username):
    """Profile page: a user's details plus their own posts, newest first."""
    profile_user = User.query.filter_by(username=username).first_or_404()
    page_number = request.args.get('page', 1, type=int)
    posts = profile_user.posts.order_by(Post.timestamp.desc()).paginate(
        page_number, app.config['POSTS_PER_PAGE'], False)
    next_url = (url_for('user', username=profile_user.username, page=posts.next_num)
                if posts.has_next else None)
    prev_url = (url_for('user', username=profile_user.username, page=posts.prev_num)
                if posts.has_prev else None)
    # Empty form backing the template's POST buttons (CSRF token only).
    action_form = EmptyForm()
    return render_template('user.html', user=profile_user, posts=posts.items,
                           next_url=next_url, prev_url=prev_url, form=action_form)
@app.route('/edit_profile', methods=['GET', 'POST'])
@login_required
def edit_profile():
    """Let the signed-in user change their username and bio."""
    # Current username is passed to the form (presumably so validation can
    # accept it unchanged — confirm in forms.py).
    profile_form = EditProfileForm(current_user.username)
    if profile_form.validate_on_submit():
        current_user.username = profile_form.username.data
        current_user.about_me = profile_form.about_me.data
        db.session.commit()
        flash('Your changes have been saved.')
        return redirect(url_for('edit_profile'))
    elif request.method == 'GET':
        # Initial GET: pre-fill the form with the stored profile values.
        profile_form.username.data = current_user.username
        profile_form.about_me.data = current_user.about_me
    return render_template('edit_profile.html', title='Edit Profile',
                           form=profile_form)
@app.route('/follow/<username>', methods=['POST'])
@login_required
def follow(username):
    """Make the current user follow `username` (POST-only)."""
    form = EmptyForm()
    if not form.validate_on_submit():
        # Form validation failed (e.g. bad/missing CSRF token): go home.
        return redirect(url_for('index'))
    target = User.query.filter_by(username=username).first()
    if target is None:
        flash('User {} not found.'.format(username))
        return redirect(url_for('index'))
    if target == current_user:
        flash('You cannot follow yourself!')
        return redirect(url_for('user', username=username))
    current_user.follow(target)
    db.session.commit()
    flash('You are following {}!'.format(username))
    return redirect(url_for('user', username=username))
@app.route('/unfollow/<username>', methods=['POST'])
@login_required
def unfollow(username):
    """Make the current user stop following `username` (POST-only)."""
    form = EmptyForm()
    if not form.validate_on_submit():
        # Form validation failed (e.g. bad/missing CSRF token): go home.
        return redirect(url_for('index'))
    target = User.query.filter_by(username=username).first()
    if target is None:
        flash('User {} not found.'.format(username))
        return redirect(url_for('index'))
    if target == current_user:
        flash('You cannot unfollow yourself!')
        return redirect(url_for('user', username=username))
    current_user.unfollow(target)
    db.session.commit()
    flash('You are not following {}.'.format(username))
    return redirect(url_for('user', username=username))
| 37.29697 | 77 | 0.656321 | from datetime import datetime
from flask import render_template, flash, redirect, url_for, request
from flask_login import login_user, logout_user, current_user, login_required
from werkzeug.urls import url_parse
from app import app, db
from app.forms import LoginForm, RegistrationForm, EditProfileForm, \
EmptyForm, PostForm
from app.models import User, Post
@app.before_request
def before_request():
if current_user.is_authenticated:
current_user.last_seen = datetime.utcnow()
db.session.commit()
@app.route('/', methods=['GET', 'POST'])
@app.route('/index', methods=['GET', 'POST'])
@login_required
def index():
form = PostForm()
if form.validate_on_submit():
post = Post(body=form.post.data, author=current_user)
db.session.add(post)
db.session.commit()
flash('Your post is now live!')
return redirect(url_for('index'))
page = request.args.get('page', 1, type=int)
posts = current_user.followed_posts().paginate(
page, app.config['POSTS_PER_PAGE'], False)
next_url = url_for('index', page=posts.next_num) \
if posts.has_next else None
prev_url = url_for('index', page=posts.prev_num) \
if posts.has_prev else None
return render_template('index.html', title='Home', form=form,
posts=posts.items, next_url=next_url,
prev_url=prev_url)
@app.route('/explore')
@login_required
def explore():
page = request.args.get('page', 1, type=int)
posts = Post.query.order_by(Post.timestamp.desc()).paginate(
page, app.config['POSTS_PER_PAGE'], False)
next_url = url_for('explore', page=posts.next_num) \
if posts.has_next else None
prev_url = url_for('explore', page=posts.prev_num) \
if posts.has_prev else None
return render_template('index.html', title='Explore', posts=posts.items,
next_url=next_url, prev_url=prev_url)
@app.route('/login', methods=['GET', 'POST'])
def login():
if current_user.is_authenticated:
return redirect(url_for('index'))
form = LoginForm()
if form.validate_on_submit():
user = User.query.filter_by(username=form.username.data).first()
if user is None or not user.check_password(form.password.data):
flash('Invalid username or password')
return redirect(url_for('login'))
login_user(user, remember=form.remember_me.data)
next_page = request.args.get('next')
if not next_page or url_parse(next_page).netloc != '':
next_page = url_for('index')
return redirect(next_page)
return render_template('login.html', title='Sign In', form=form)
@app.route('/logout')
def logout():
logout_user()
return redirect(url_for('index'))
@app.route('/register', methods=['GET', 'POST'])
def register():
if current_user.is_authenticated:
return redirect(url_for('index'))
form = RegistrationForm()
if form.validate_on_submit():
user = User(username=form.username.data, email=form.email.data)
user.set_password(form.password.data)
db.session.add(user)
db.session.commit()
flash('Congratulations, you are now a registered user!')
return redirect(url_for('login'))
return render_template('register.html', title='Register', form=form)
@app.route('/user/<username>')
@login_required
def user(username):
user = User.query.filter_by(username=username).first_or_404()
page = request.args.get('page', 1, type=int)
posts = user.posts.order_by(Post.timestamp.desc()).paginate(
page, app.config['POSTS_PER_PAGE'], False)
next_url = url_for('user', username=user.username, page=posts.next_num) \
if posts.has_next else None
prev_url = url_for('user', username=user.username, page=posts.prev_num) \
if posts.has_prev else None
form = EmptyForm()
return render_template('user.html', user=user, posts=posts.items,
next_url=next_url, prev_url=prev_url, form=form)
@app.route('/edit_profile', methods=['GET', 'POST'])
@login_required
def edit_profile():
form = EditProfileForm(current_user.username)
if form.validate_on_submit():
current_user.username = form.username.data
current_user.about_me = form.about_me.data
db.session.commit()
flash('Your changes have been saved.')
return redirect(url_for('edit_profile'))
elif request.method == 'GET':
form.username.data = current_user.username
form.about_me.data = current_user.about_me
return render_template('edit_profile.html', title='Edit Profile',
form=form)
@app.route('/follow/<username>', methods=['POST'])
@login_required
def follow(username):
form = EmptyForm()
if form.validate_on_submit():
user = User.query.filter_by(username=username).first()
if user is None:
flash('User {} not found.'.format(username))
return redirect(url_for('index'))
if user == current_user:
flash('You cannot follow yourself!')
return redirect(url_for('user', username=username))
current_user.follow(user)
db.session.commit()
flash('You are following {}!'.format(username))
return redirect(url_for('user', username=username))
else:
return redirect(url_for('index'))
@app.route('/unfollow/<username>', methods=['POST'])
@login_required
def unfollow(username):
form = EmptyForm()
if form.validate_on_submit():
user = User.query.filter_by(username=username).first()
if user is None:
flash('User {} not found.'.format(username))
return redirect(url_for('index'))
if user == current_user:
flash('You cannot unfollow yourself!')
return redirect(url_for('user', username=username))
current_user.unfollow(user)
db.session.commit()
flash('You are not following {}.'.format(username))
return redirect(url_for('user', username=username))
else:
return redirect(url_for('index'))
| true | true |
f71e2c2c054051139eb8224bb194121ba49209d1 | 27,272 | py | Python | src/transformers/configuration_utils.py | gaodong2/transformers | 83984a61c657023a69e78951b338e378a0f866c2 | [
"Apache-2.0"
] | 77 | 2020-11-12T18:40:25.000Z | 2022-03-27T06:41:30.000Z | src/transformers/configuration_utils.py | gaodong2/transformers | 83984a61c657023a69e78951b338e378a0f866c2 | [
"Apache-2.0"
] | 7 | 2021-03-11T14:00:58.000Z | 2022-01-18T05:51:22.000Z | src/transformers/configuration_utils.py | gaodong2/transformers | 83984a61c657023a69e78951b338e378a0f866c2 | [
"Apache-2.0"
] | 23 | 2020-12-08T12:42:24.000Z | 2022-02-11T13:55:24.000Z | # coding=utf-8
# Copyright 2018 The Google AI Language Team Authors and The HuggingFace Inc. team.
# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Configuration base class and utilities."""
import copy
import json
import logging
import os
from typing import Any, Dict, Tuple
from .file_utils import CONFIG_NAME, cached_path, hf_bucket_url, is_remote_url
logger = logging.getLogger(__name__)
class PretrainedConfig(object):
r""" Base class for all configuration classes.
Handles a few parameters common to all models' configurations as well as methods for loading/downloading/saving
configurations.
Note:
A configuration file can be loaded and saved to disk. Loading the configuration file and using this file to
initialize a model does **not** load the model weights.
It only affects the model's configuration.
Class attributes (overridden by derived classes)
- **model_type** (:obj:`str`): An identifier for the model type, serialized into the JSON file, and used to
recreate the correct object in :class:`~transformers.AutoConfig`.
Args:
output_hidden_states (:obj:`bool`, `optional`, defaults to :obj:`False`):
Whether or not the model should return all hidden-states.
output_attentions (:obj:`bool`, `optional`, defaults to :obj:`False`):
            Whether or not the model should return all attentions.
use_cache (:obj:`bool`, `optional`, defaults to :obj:`True`):
Whether or not the model should return the last key/values attentions (not used by all models).
return_dict (:obj:`bool`, `optional`, defaults to :obj:`False`):
Whether or not the model should return a :class:`~transformers.file_utils.ModelOutput` instead of a
plain tuple.
is_encoder_decoder (:obj:`bool`, `optional`, defaults to :obj:`False`):
Whether the model is used as an encoder/decoder or not.
is_decoder (:obj:`bool`, `optional`, defaults to :obj:`False`):
Whether the model is used as decoder or not (in which case it's used as an encoder).
add_cross_attention (:obj:`bool`, `optional`, defaults to :obj:`False`):
Whether cross-attention layers should be added to the model. Note, this option is only relevant for models that can be used as decoder models within the `:class:~transformers.EncoderDecoderModel` class, which consists of all models in ``AUTO_MODELS_FOR_CAUSAL_LM``.
prune_heads (:obj:`Dict[int, List[int]]`, `optional`, defaults to :obj:`{}`):
Pruned heads of the model. The keys are the selected layer indices and the associated values, the list
of heads to prune in said layer.
For instance ``{1: [0, 2], 2: [2, 3]}`` will prune heads 0 and 2 on layer 1 and heads 2 and 3 on layer
2.
xla_device (:obj:`bool`, `optional`):
A flag to indicate if TPU are available or not.
chunk_size_feed_forward (:obj:`int`, `optional`, defaults to :obj:`0`):
The chunk size of all feed forward layers in the residual attention blocks.
A chunk size of :obj:`0` means that the feed forward layer is not chunked.
A chunk size of n means that the feed forward layer processes :obj:`n` < sequence_length embeddings at a time.
For more information on feed forward chunking, see `How does Feed Forward Chunking work? <../glossary.html#feed-forward-chunking>`__ .
Parameters for sequence generation
- **max_length** (:obj:`int`, `optional`, defaults to 20) -- Maximum length that will be used by
default in the :obj:`generate` method of the model.
- **min_length** (:obj:`int`, `optional`, defaults to 10) -- Minimum length that will be used by
default in the :obj:`generate` method of the model.
- **do_sample** (:obj:`bool`, `optional`, defaults to :obj:`False`) -- Flag that will be used by default in
the :obj:`generate` method of the model. Whether or not to use sampling ; use greedy decoding otherwise.
- **early_stopping** (:obj:`bool`, `optional`, defaults to :obj:`False`) -- Flag that will be used by
default in the :obj:`generate` method of the model. Whether to stop the beam search when at least
``num_beams`` sentences are finished per batch or not.
- **num_beams** (:obj:`int`, `optional`, defaults to 1) -- Number of beams for beam search that will be
used by default in the :obj:`generate` method of the model. 1 means no beam search.
- **temperature** (:obj:`float`, `optional`, defaults to 1) -- The value used to module the next token
probabilities that will be used by default in the :obj:`generate` method of the model. Must be strictly
positive.
- **top_k** (:obj:`int`, `optional`, defaults to 50) -- Number of highest probability vocabulary tokens to
keep for top-k-filtering that will be used by default in the :obj:`generate` method of the model.
- **top_p** (:obj:`float`, `optional`, defaults to 1) -- Value that will be used by default in the
:obj:`generate` method of the model for ``top_p``. If set to float < 1, only the most probable tokens
with probabilities that add up to ``top_p`` or highest are kept for generation.
- **repetition_penalty** (:obj:`float`, `optional`, defaults to 1) -- Parameter for repetition penalty
that will be used by default in the :obj:`generate` method of the model. 1.0 means no penalty.
- **length_penalty** (:obj:`float`, `optional`, defaults to 1) -- Exponential penalty to the length that
will be used by default in the :obj:`generate` method of the model.
- **no_repeat_ngram_size** (:obj:`int`, `optional`, defaults to 0) -- Value that will be used by default
in the :obj:`generate` method of the model for ``no_repeat_ngram_size``. If set to int > 0, all ngrams of
that size can only occur once.
- **bad_words_ids** (:obj:`List[int]`, `optional`) -- List of token ids that are not allowed to be
generated that will be used by default in the :obj:`generate` method of the model. In order to get the
tokens of the words that should not appear in the generated text, use
:obj:`tokenizer.encode(bad_word, add_prefix_space=True)`.
- **num_return_sequences** (:obj:`int`, `optional`, defaults to 1) -- Number of independently computed
returned sequences for each element in the batch that will be used by default in the :obj:`generate`
method of the model.
Parameters for fine-tuning tasks
- **architectures** (:obj:`List[str]`, `optional`) -- Model architectures that can be used with the
model pretrained weights.
- **finetuning_task** (:obj:`str`, `optional`) -- Name of the task used to fine-tune the model. This can be
used when converting from an original (TensorFlow or PyTorch) checkpoint.
- **id2label** (:obj:`List[str]`, `optional`) -- A map from index (for instance prediction index, or target
index) to label.
- **label2id** (:obj:`Dict[str, int]`, `optional`) -- A map from label to index for the model.
- **num_labels** (:obj:`int`, `optional`) -- Number of labels to use in the last layer added to the model,
typically for a classification task.
- **task_specific_params** (:obj:`Dict[str, Any]`, `optional`) -- Additional keyword arguments to store for
the current task.
Parameters linked to the tokenizer
- **prefix** (:obj:`str`, `optional`) -- A specific prompt that should be added at the beginning of each
text before calling the model.
- **bos_token_id** (:obj:`int`, `optional`)) -- The id of the `beginning-of-stream` token.
- **pad_token_id** (:obj:`int`, `optional`)) -- The id of the `padding` token.
- **eos_token_id** (:obj:`int`, `optional`)) -- The id of the `end-of-stream` token.
- **decoder_start_token_id** (:obj:`int`, `optional`)) -- If an encoder-decoder model starts decoding with
a different token than `bos`, the id of that token.
PyTorch specific parameters
- **torchscript** (:obj:`bool`, `optional`, defaults to :obj:`False`) -- Whether or not the model should be
used with Torchscript.
TensorFlow specific parameters
- **use_bfloat16** (:obj:`bool`, `optional`, defaults to :obj:`False`) -- Whether or not the model should
use BFloat16 scalars (only used by some TensorFlow models).
"""
model_type: str = ""
    def __init__(self, **kwargs):
        """Populate the configuration from keyword arguments.

        Known attributes are popped from ``kwargs`` with defaults; whatever
        remains is set verbatim on the instance (model-specific options), so
        unknown keys do not raise here unless ``setattr`` itself fails.
        """
        # Attributes with defaults
        self.return_dict = kwargs.pop("return_dict", False)
        self.output_hidden_states = kwargs.pop("output_hidden_states", False)
        self.output_attentions = kwargs.pop("output_attentions", False)
        self.use_cache = kwargs.pop("use_cache", True)  # Not used by all models
        self.torchscript = kwargs.pop("torchscript", False)  # Only used by PyTorch models
        self.use_bfloat16 = kwargs.pop("use_bfloat16", False)
        self.pruned_heads = kwargs.pop("pruned_heads", {})
        # Is decoder is used in encoder-decoder models to differentiate encoder from decoder
        self.is_encoder_decoder = kwargs.pop("is_encoder_decoder", False)
        self.is_decoder = kwargs.pop("is_decoder", False)
        self.add_cross_attention = kwargs.pop("add_cross_attention", False)
        # Parameters for sequence generation
        self.max_length = kwargs.pop("max_length", 20)
        self.min_length = kwargs.pop("min_length", 0)
        self.do_sample = kwargs.pop("do_sample", False)
        self.early_stopping = kwargs.pop("early_stopping", False)
        self.num_beams = kwargs.pop("num_beams", 1)
        self.temperature = kwargs.pop("temperature", 1.0)
        self.top_k = kwargs.pop("top_k", 50)
        self.top_p = kwargs.pop("top_p", 1.0)
        self.repetition_penalty = kwargs.pop("repetition_penalty", 1.0)
        self.length_penalty = kwargs.pop("length_penalty", 1.0)
        self.no_repeat_ngram_size = kwargs.pop("no_repeat_ngram_size", 0)
        self.bad_words_ids = kwargs.pop("bad_words_ids", None)
        self.num_return_sequences = kwargs.pop("num_return_sequences", 1)
        self.chunk_size_feed_forward = kwargs.pop("chunk_size_feed_forward", 0)
        # Fine-tuning task arguments
        self.architectures = kwargs.pop("architectures", None)
        self.finetuning_task = kwargs.pop("finetuning_task", None)
        self.id2label = kwargs.pop("id2label", None)
        self.label2id = kwargs.pop("label2id", None)
        if self.id2label is not None:
            # An explicit id2label wins over any num_labels kwarg: drop it.
            kwargs.pop("num_labels", None)
            self.id2label = dict((int(key), value) for key, value in self.id2label.items())
            # Keys are always strings in JSON so convert ids to int here.
        else:
            # Goes through the num_labels property setter, which builds
            # default LABEL_i maps for id2label/label2id.
            self.num_labels = kwargs.pop("num_labels", 2)
        # Tokenizer arguments TODO: eventually tokenizer and models should share the same config
        self.prefix = kwargs.pop("prefix", None)
        self.bos_token_id = kwargs.pop("bos_token_id", None)
        self.pad_token_id = kwargs.pop("pad_token_id", None)
        self.eos_token_id = kwargs.pop("eos_token_id", None)
        self.decoder_start_token_id = kwargs.pop("decoder_start_token_id", None)
        # task specific arguments
        self.task_specific_params = kwargs.pop("task_specific_params", None)
        # TPU arguments
        self.xla_device = kwargs.pop("xla_device", None)
        # Additional attributes without default values
        for key, value in kwargs.items():
            try:
                setattr(self, key, value)
            except AttributeError as err:
                logger.error("Can't set {} with value {} for {}".format(key, value, self))
                raise err
    @property
    def use_return_dict(self) -> bool:
        """
        :obj:`bool`: Whether or not to return :class:`~transformers.file_utils.ModelOutput` instead of tuples.
        """
        # If torchscript is set, force `return_dict=False` to avoid jit errors
        return self.return_dict and not self.torchscript
    @property
    def num_labels(self) -> int:
        """
        :obj:`int`: The number of labels for classification models.

        Derived from :obj:`id2label`, so it always agrees with the label map;
        assigning to it rebuilds both label maps (see the setter below).
        """
        return len(self.id2label)
    @num_labels.setter
    def num_labels(self, num_labels: int):
        # Regenerate placeholder label maps ("LABEL_0", "LABEL_1", ...);
        # any previously customized id2label/label2id mapping is discarded.
        self.id2label = {i: "LABEL_{}".format(i) for i in range(num_labels)}
        self.label2id = dict(zip(self.id2label.values(), self.id2label.keys()))
    def save_pretrained(self, save_directory: str):
        """
        Save a configuration object to the directory ``save_directory``, so that it can be re-loaded using the
        :func:`~transformers.PretrainedConfig.from_pretrained` class method.

        Args:
            save_directory (:obj:`str`):
                Directory where the configuration JSON file will be saved (will be created if it does not exist).

        Raises:
            AssertionError: If ``save_directory`` points to an existing file.
        """
        if os.path.isfile(save_directory):
            raise AssertionError("Provided path ({}) should be a directory, not a file".format(save_directory))
        os.makedirs(save_directory, exist_ok=True)
        # If we save using the predefined names, we can load using `from_pretrained`
        output_config_file = os.path.join(save_directory, CONFIG_NAME)
        # use_diff presumably serializes only values differing from the
        # defaults — confirm in to_json_file.
        self.to_json_file(output_config_file, use_diff=True)
        logger.info("Configuration saved in {}".format(output_config_file))
@classmethod
def from_pretrained(cls, pretrained_model_name_or_path: str, **kwargs) -> "PretrainedConfig":
r"""
Instantiate a :class:`~transformers.PretrainedConfig` (or a derived class) from a pretrained model
configuration.
Args:
pretrained_model_name_or_path (:obj:`str`):
This can be either:
- the `shortcut name` of a pretrained model configuration to load from cache or download, e.g.,
``bert-base-uncased``.
- the `identifier name` of a pretrained model configuration that was uploaded to our S3 by any user,
e.g., ``dbmdz/bert-base-german-cased``.
- a path to a `directory` containing a configuration file saved using the
:func:`~transformers.PretrainedConfig.save_pretrained` method, e.g., ``./my_model_directory/``.
- a path or url to a saved configuration JSON `file`, e.g.,
``./my_model_directory/configuration.json``.
cache_dir (:obj:`str`, `optional`):
Path to a directory in which a downloaded pretrained model configuration should be cached if the
standard cache should not be used.
force_download (:obj:`bool`, `optional`, defaults to :obj:`False`):
Wheter or not to force to (re-)download the configuration files and override the cached versions if they
exist.
resume_download (:obj:`bool`, `optional`, defaults to :obj:`False`):
Whether or not to delete incompletely received file. Attempts to resume the download if such a file
exists.
proxies (:obj:`Dict[str, str]`, `optional`):
A dictionary of proxy servers to use by protocol or endpoint, e.g.,
:obj:`{'http': 'foo.bar:3128', 'http://hostname': 'foo.bar:4012'}.`
The proxies are used on each request.
return_unused_kwargs (:obj:`bool`, `optional`, defaults to :obj:`False`):
If :obj:`False`, then this function returns just the final configuration object.
If :obj:`True`, then this functions returns a :obj:`Tuple(config, unused_kwargs)` where `unused_kwargs`
is a dictionary consisting of the key/value pairs whose keys are not configuration attributes: i.e.,
the part of ``kwargs`` which has not been used to update ``config`` and is otherwise ignored.
kwargs (:obj:`Dict[str, Any]`, `optional`):
The values in kwargs of any keys which are configuration attributes will be used to override the loaded
values. Behavior concerning key/value pairs whose keys are *not* configuration attributes is
controlled by the ``return_unused_kwargs`` keyword parameter.
Returns:
:class:`PretrainedConfig`: The configuration object instantiated from this pretrained model.
Examples::
# We can't instantiate directly the base class `PretrainedConfig` so let's show the examples on a
# derived class: BertConfig
config = BertConfig.from_pretrained('bert-base-uncased') # Download configuration from S3 and cache.
config = BertConfig.from_pretrained('./test/saved_model/') # E.g. config (or model) was saved using `save_pretrained('./test/saved_model/')`
config = BertConfig.from_pretrained('./test/saved_model/my_configuration.json')
config = BertConfig.from_pretrained('bert-base-uncased', output_attention=True, foo=False)
assert config.output_attention == True
config, unused_kwargs = BertConfig.from_pretrained('bert-base-uncased', output_attention=True,
foo=False, return_unused_kwargs=True)
assert config.output_attention == True
assert unused_kwargs == {'foo': False}
"""
config_dict, kwargs = cls.get_config_dict(pretrained_model_name_or_path, **kwargs)
return cls.from_dict(config_dict, **kwargs)
    @classmethod
    def get_config_dict(cls, pretrained_model_name_or_path: str, **kwargs) -> Tuple[Dict[str, Any], Dict[str, Any]]:
        """
        From a ``pretrained_model_name_or_path``, resolve to a dictionary of parameters, to be used
        for instantiating a :class:`~transformers.PretrainedConfig` using ``from_dict``.

        The identifier may be a local directory, a local file, a remote url, or a model
        identifier (resolved to a bucket url).

        Parameters:
            pretrained_model_name_or_path (:obj:`str`):
                The identifier of the pre-trained checkpoint from which we want the dictionary of parameters.

        Returns:
            :obj:`Tuple[Dict, Dict]`: The dictionary(ies) that will be used to instantiate the
            configuration object, together with the kwargs that were not consumed here.
        """
        # Download/caching options are consumed here; all remaining kwargs are returned to the caller.
        cache_dir = kwargs.pop("cache_dir", None)
        force_download = kwargs.pop("force_download", False)
        resume_download = kwargs.pop("resume_download", False)
        proxies = kwargs.pop("proxies", None)
        local_files_only = kwargs.pop("local_files_only", False)
        # Map the identifier to a concrete configuration file location.
        if os.path.isdir(pretrained_model_name_or_path):
            config_file = os.path.join(pretrained_model_name_or_path, CONFIG_NAME)
        elif os.path.isfile(pretrained_model_name_or_path) or is_remote_url(pretrained_model_name_or_path):
            config_file = pretrained_model_name_or_path
        else:
            # Otherwise treat it as a model identifier hosted remotely.
            config_file = hf_bucket_url(pretrained_model_name_or_path, filename=CONFIG_NAME, use_cdn=False)
        try:
            # Load from URL or cache if already cached
            resolved_config_file = cached_path(
                config_file,
                cache_dir=cache_dir,
                force_download=force_download,
                proxies=proxies,
                resume_download=resume_download,
                local_files_only=local_files_only,
            )
            # Load config dict
            if resolved_config_file is None:
                # Deliberately bare: re-uses the EnvironmentError handler below
                # so "not found" produces the same message as a failed download.
                raise EnvironmentError
            config_dict = cls._dict_from_json_file(resolved_config_file)
        except EnvironmentError:
            msg = (
                f"Can't load config for '{pretrained_model_name_or_path}'. Make sure that:\n\n"
                f"- '{pretrained_model_name_or_path}' is a correct model identifier listed on 'https://huggingface.co/models'\n\n"
                f"- or '{pretrained_model_name_or_path}' is the correct path to a directory containing a {CONFIG_NAME} file\n\n"
            )
            raise EnvironmentError(msg)
        except json.JSONDecodeError:
            # The file was fetched but is not valid JSON (e.g. an HTML error page).
            msg = (
                "Couldn't reach server at '{}' to download configuration file or "
                "configuration file is not a valid JSON file. "
                "Please check network or file content here: {}.".format(config_file, resolved_config_file)
            )
            raise EnvironmentError(msg)
        if resolved_config_file == config_file:
            logger.info("loading configuration file {}".format(config_file))
        else:
            logger.info("loading configuration file {} from cache at {}".format(config_file, resolved_config_file))
        return config_dict, kwargs
@classmethod
def from_dict(cls, config_dict: Dict[str, Any], **kwargs) -> "PretrainedConfig":
"""
Instantiates a :class:`~transformers.PretrainedConfig` from a Python dictionary of parameters.
Args:
config_dict (:obj:`Dict[str, Any]`):
Dictionary that will be used to instantiate the configuration object. Such a dictionary can be
retrieved from a pretrained checkpoint by leveraging the
:func:`~transformers.PretrainedConfig.get_config_dict` method.
kwargs (:obj:`Dict[str, Any]`):
Additional parameters from which to initialize the configuration object.
Returns:
:class:`PretrainedConfig`: The configuration object instantiated from those parameters.
"""
return_unused_kwargs = kwargs.pop("return_unused_kwargs", False)
config = cls(**config_dict)
if hasattr(config, "pruned_heads"):
config.pruned_heads = dict((int(key), value) for key, value in config.pruned_heads.items())
# Update config with kwargs if needed
to_remove = []
for key, value in kwargs.items():
if hasattr(config, key):
setattr(config, key, value)
to_remove.append(key)
for key in to_remove:
kwargs.pop(key, None)
logger.info("Model config %s", str(config))
if return_unused_kwargs:
return config, kwargs
else:
return config
@classmethod
def from_json_file(cls, json_file: str) -> "PretrainedConfig":
"""
Instantiates a :class:`~transformers.PretrainedConfig` from the path to a JSON file of parameters.
Args:
json_file (:obj:`str`):
Path to the JSON file containing the parameters.
Returns:
:class:`PretrainedConfig`: The configuration object instantiated from that JSON file.
"""
config_dict = cls._dict_from_json_file(json_file)
return cls(**config_dict)
@classmethod
def _dict_from_json_file(cls, json_file: str):
with open(json_file, "r", encoding="utf-8") as reader:
text = reader.read()
return json.loads(text)
def __eq__(self, other):
return self.__dict__ == other.__dict__
def __repr__(self):
return "{} {}".format(self.__class__.__name__, self.to_json_string())
def to_diff_dict(self) -> Dict[str, Any]:
"""
Removes all attributes from config which correspond to the default
config attributes for better readability and serializes to a Python
dictionary.
Returns:
:obj:`Dict[str, Any]`: Dictionary of all the attributes that make up this configuration instance,
"""
config_dict = self.to_dict()
# get the default config dict
default_config_dict = PretrainedConfig().to_dict()
serializable_config_dict = {}
# only serialize values that differ from the default config
for key, value in config_dict.items():
if key not in default_config_dict or value != default_config_dict[key]:
serializable_config_dict[key] = value
return serializable_config_dict
def to_dict(self) -> Dict[str, Any]:
"""
Serializes this instance to a Python dictionary.
Returns:
:obj:`Dict[str, Any]`: Dictionary of all the attributes that make up this configuration instance.
"""
output = copy.deepcopy(self.__dict__)
if hasattr(self.__class__, "model_type"):
output["model_type"] = self.__class__.model_type
return output
def to_json_string(self, use_diff: bool = True) -> str:
"""
Serializes this instance to a JSON string.
Args:
use_diff (:obj:`bool`, `optional`, defaults to :obj:`True`):
If set to ``True``, only the difference between the config instance and the default
``PretrainedConfig()`` is serialized to JSON string.
Returns:
:obj:`str`: String containing all the attributes that make up this configuration instance in JSON format.
"""
if use_diff is True:
config_dict = self.to_diff_dict()
else:
config_dict = self.to_dict()
return json.dumps(config_dict, indent=2, sort_keys=True) + "\n"
def to_json_file(self, json_file_path: str, use_diff: bool = True):
"""
Save this instance to a JSON file.
Args:
json_file_path (:obj:`str`):
Path to the JSON file in which this configuration instance's parameters will be saved.
use_diff (:obj:`bool`, `optional`, defaults to :obj:`True`):
If set to ``True``, only the difference between the config instance and the default
``PretrainedConfig()`` is serialized to JSON file.
"""
with open(json_file_path, "w", encoding="utf-8") as writer:
writer.write(self.to_json_string(use_diff=use_diff))
def update(self, config_dict: Dict[str, Any]):
"""
Updates attributes of this class with attributes from ``config_dict``.
Args:
config_dict (:obj:`Dict[str, Any]`): Dictionary of attributes that shall be updated for this class.
"""
for key, value in config_dict.items():
setattr(self, key, value)
| 53.058366 | 281 | 0.635707 |
import copy
import json
import logging
import os
from typing import Any, Dict, Tuple
from .file_utils import CONFIG_NAME, cached_path, hf_bucket_url, is_remote_url
logger = logging.getLogger(__name__)
class PretrainedConfig(object):
model_type: str = ""
def __init__(self, **kwargs):
self.return_dict = kwargs.pop("return_dict", False)
self.output_hidden_states = kwargs.pop("output_hidden_states", False)
self.output_attentions = kwargs.pop("output_attentions", False)
self.use_cache = kwargs.pop("use_cache", True)
self.torchscript = kwargs.pop("torchscript", False)
self.use_bfloat16 = kwargs.pop("use_bfloat16", False)
self.pruned_heads = kwargs.pop("pruned_heads", {})
self.is_encoder_decoder = kwargs.pop("is_encoder_decoder", False)
self.is_decoder = kwargs.pop("is_decoder", False)
self.add_cross_attention = kwargs.pop("add_cross_attention", False)
self.max_length = kwargs.pop("max_length", 20)
self.min_length = kwargs.pop("min_length", 0)
self.do_sample = kwargs.pop("do_sample", False)
self.early_stopping = kwargs.pop("early_stopping", False)
self.num_beams = kwargs.pop("num_beams", 1)
self.temperature = kwargs.pop("temperature", 1.0)
self.top_k = kwargs.pop("top_k", 50)
self.top_p = kwargs.pop("top_p", 1.0)
self.repetition_penalty = kwargs.pop("repetition_penalty", 1.0)
self.length_penalty = kwargs.pop("length_penalty", 1.0)
self.no_repeat_ngram_size = kwargs.pop("no_repeat_ngram_size", 0)
self.bad_words_ids = kwargs.pop("bad_words_ids", None)
self.num_return_sequences = kwargs.pop("num_return_sequences", 1)
self.chunk_size_feed_forward = kwargs.pop("chunk_size_feed_forward", 0)
self.architectures = kwargs.pop("architectures", None)
self.finetuning_task = kwargs.pop("finetuning_task", None)
self.id2label = kwargs.pop("id2label", None)
self.label2id = kwargs.pop("label2id", None)
if self.id2label is not None:
kwargs.pop("num_labels", None)
self.id2label = dict((int(key), value) for key, value in self.id2label.items())
else:
self.num_labels = kwargs.pop("num_labels", 2)
self.prefix = kwargs.pop("prefix", None)
self.bos_token_id = kwargs.pop("bos_token_id", None)
self.pad_token_id = kwargs.pop("pad_token_id", None)
self.eos_token_id = kwargs.pop("eos_token_id", None)
self.decoder_start_token_id = kwargs.pop("decoder_start_token_id", None)
self.task_specific_params = kwargs.pop("task_specific_params", None)
self.xla_device = kwargs.pop("xla_device", None)
for key, value in kwargs.items():
try:
setattr(self, key, value)
except AttributeError as err:
logger.error("Can't set {} with value {} for {}".format(key, value, self))
raise err
@property
def use_return_dict(self) -> bool:
# If torchscript is set, force `return_dict=False` to avoid jit errors
return self.return_dict and not self.torchscript
@property
def num_labels(self) -> int:
return len(self.id2label)
@num_labels.setter
def num_labels(self, num_labels: int):
self.id2label = {i: "LABEL_{}".format(i) for i in range(num_labels)}
self.label2id = dict(zip(self.id2label.values(), self.id2label.keys()))
def save_pretrained(self, save_directory: str):
if os.path.isfile(save_directory):
raise AssertionError("Provided path ({}) should be a directory, not a file".format(save_directory))
os.makedirs(save_directory, exist_ok=True)
# If we save using the predefined names, we can load using `from_pretrained`
output_config_file = os.path.join(save_directory, CONFIG_NAME)
self.to_json_file(output_config_file, use_diff=True)
logger.info("Configuration saved in {}".format(output_config_file))
@classmethod
def from_pretrained(cls, pretrained_model_name_or_path: str, **kwargs) -> "PretrainedConfig":
config_dict, kwargs = cls.get_config_dict(pretrained_model_name_or_path, **kwargs)
return cls.from_dict(config_dict, **kwargs)
@classmethod
def get_config_dict(cls, pretrained_model_name_or_path: str, **kwargs) -> Tuple[Dict[str, Any], Dict[str, Any]]:
cache_dir = kwargs.pop("cache_dir", None)
force_download = kwargs.pop("force_download", False)
resume_download = kwargs.pop("resume_download", False)
proxies = kwargs.pop("proxies", None)
local_files_only = kwargs.pop("local_files_only", False)
if os.path.isdir(pretrained_model_name_or_path):
config_file = os.path.join(pretrained_model_name_or_path, CONFIG_NAME)
elif os.path.isfile(pretrained_model_name_or_path) or is_remote_url(pretrained_model_name_or_path):
config_file = pretrained_model_name_or_path
else:
config_file = hf_bucket_url(pretrained_model_name_or_path, filename=CONFIG_NAME, use_cdn=False)
try:
# Load from URL or cache if already cached
resolved_config_file = cached_path(
config_file,
cache_dir=cache_dir,
force_download=force_download,
proxies=proxies,
resume_download=resume_download,
local_files_only=local_files_only,
)
# Load config dict
if resolved_config_file is None:
raise EnvironmentError
config_dict = cls._dict_from_json_file(resolved_config_file)
except EnvironmentError:
msg = (
f"Can't load config for '{pretrained_model_name_or_path}'. Make sure that:\n\n"
f"- '{pretrained_model_name_or_path}' is a correct model identifier listed on 'https://huggingface.co/models'\n\n"
f"- or '{pretrained_model_name_or_path}' is the correct path to a directory containing a {CONFIG_NAME} file\n\n"
)
raise EnvironmentError(msg)
except json.JSONDecodeError:
msg = (
"Couldn't reach server at '{}' to download configuration file or "
"configuration file is not a valid JSON file. "
"Please check network or file content here: {}.".format(config_file, resolved_config_file)
)
raise EnvironmentError(msg)
if resolved_config_file == config_file:
logger.info("loading configuration file {}".format(config_file))
else:
logger.info("loading configuration file {} from cache at {}".format(config_file, resolved_config_file))
return config_dict, kwargs
@classmethod
def from_dict(cls, config_dict: Dict[str, Any], **kwargs) -> "PretrainedConfig":
return_unused_kwargs = kwargs.pop("return_unused_kwargs", False)
config = cls(**config_dict)
if hasattr(config, "pruned_heads"):
config.pruned_heads = dict((int(key), value) for key, value in config.pruned_heads.items())
# Update config with kwargs if needed
to_remove = []
for key, value in kwargs.items():
if hasattr(config, key):
setattr(config, key, value)
to_remove.append(key)
for key in to_remove:
kwargs.pop(key, None)
logger.info("Model config %s", str(config))
if return_unused_kwargs:
return config, kwargs
else:
return config
@classmethod
def from_json_file(cls, json_file: str) -> "PretrainedConfig":
config_dict = cls._dict_from_json_file(json_file)
return cls(**config_dict)
@classmethod
def _dict_from_json_file(cls, json_file: str):
with open(json_file, "r", encoding="utf-8") as reader:
text = reader.read()
return json.loads(text)
def __eq__(self, other):
return self.__dict__ == other.__dict__
def __repr__(self):
return "{} {}".format(self.__class__.__name__, self.to_json_string())
def to_diff_dict(self) -> Dict[str, Any]:
config_dict = self.to_dict()
# get the default config dict
default_config_dict = PretrainedConfig().to_dict()
serializable_config_dict = {}
# only serialize values that differ from the default config
for key, value in config_dict.items():
if key not in default_config_dict or value != default_config_dict[key]:
serializable_config_dict[key] = value
return serializable_config_dict
def to_dict(self) -> Dict[str, Any]:
output = copy.deepcopy(self.__dict__)
if hasattr(self.__class__, "model_type"):
output["model_type"] = self.__class__.model_type
return output
def to_json_string(self, use_diff: bool = True) -> str:
if use_diff is True:
config_dict = self.to_diff_dict()
else:
config_dict = self.to_dict()
return json.dumps(config_dict, indent=2, sort_keys=True) + "\n"
def to_json_file(self, json_file_path: str, use_diff: bool = True):
with open(json_file_path, "w", encoding="utf-8") as writer:
writer.write(self.to_json_string(use_diff=use_diff))
def update(self, config_dict: Dict[str, Any]):
for key, value in config_dict.items():
setattr(self, key, value)
| true | true |
f71e2c81e27ecbc20c0d52809f200f13303188d9 | 42,451 | py | Python | tests/python/unittest/test_optimizer.py | qingyuanxingsi/incubator-mxnet | bbffdaf29459e15a703055e449a477a9862de8c6 | [
"Apache-2.0"
] | 1 | 2019-11-14T06:43:46.000Z | 2019-11-14T06:43:46.000Z | tests/python/unittest/test_optimizer.py | greenpea0104/incubator-mxnet | fc9e70bf2d349ad4c6cb65ff3f0958e23a7410bf | [
"Apache-2.0"
] | null | null | null | tests/python/unittest/test_optimizer.py | greenpea0104/incubator-mxnet | fc9e70bf2d349ad4c6cb65ff3f0958e23a7410bf | [
"Apache-2.0"
] | null | null | null | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import itertools
import math
import unittest

import numpy as np
import mxnet as mx
import mxnet.lr_scheduler as lr_scheduler
from mxnet import gluon
from nose.tools import raises
from mxnet.test_utils import *

from common import setup_module, with_seed
@with_seed()
def test_learning_rate():
    """Both set_learning_rate and the scheduler's base_lr drive Optimizer.learning_rate."""
    opt = mx.optimizer.Optimizer(learning_rate=0.01)
    opt.set_learning_rate(0.2)
    assert opt.learning_rate == 0.2
    # With a scheduler attached, learning_rate tracks the scheduler's base_lr.
    scheduler = lr_scheduler.FactorScheduler(step=1)
    opt_sched = mx.optimizer.Optimizer(lr_scheduler=scheduler, learning_rate=0.3)
    assert opt_sched.learning_rate == 0.3
    opt_sched.lr_scheduler.base_lr = 0.4
    assert opt_sched.learning_rate == 0.4
@raises(UserWarning)
@with_seed()
def test_learning_rate_expect_user_warning():
    """set_learning_rate must raise a UserWarning when an lr_scheduler is attached."""
    scheduler = lr_scheduler.FactorScheduler(step=1)
    optimizer = mx.optimizer.Optimizer(lr_scheduler=scheduler, learning_rate=0.3)
    optimizer.set_learning_rate(0.5)
@with_seed()
def test_lr_wd_mult():
    """Check that per-symbol lr_mult/wd_mult attributes reach the optimizer and take effect.

    fc1 is frozen (lr_mult=0) except for its bias (explicit lr_mult=1.0);
    fc2 has its weight decay halved (wd_mult=0.5).
    """
    data = mx.sym.Variable('data')
    bias = mx.sym.Variable('fc1_bias', lr_mult=1.0)
    fc1 = mx.sym.FullyConnected(data=data, bias=bias, name='fc1', num_hidden=10, lr_mult=0)
    fc2 = mx.sym.FullyConnected(data=fc1, name='fc2', num_hidden=10, wd_mult=0.5)
    mod = mx.mod.Module(symbol=fc2, label_names=None, context=default_context())
    mod.bind(data_shapes=[('data', (5,10))])
    mod.init_params(initializer=mx.init.Uniform(1.0))
    mod.init_optimizer(optimizer_params={'learning_rate': 1.0})
    # Snapshot parameters before the update step.
    args1, _ = mod.get_params()
    args1 = {k: v.asnumpy() for k, v in args1.items()}
    # One forward/backward/update pass on random input.
    mod.forward(mx.io.DataBatch(data=[mx.random.uniform(low=-1.0, high=1.0, shape=(5,10))], label=None), is_train=True)
    mod.backward(mod.get_outputs())
    mod.update()
    args2, _ = mod.get_params()
    args2 = {k: v.asnumpy() for k, v in args2.items()}
    # The multipliers must have been propagated into the optimizer.
    assert mod._optimizer.lr_mult == {'fc1_bias': 1.0, 'fc1_weight': 0.0}
    assert mod._optimizer.wd_mult == {'fc2_bias': 0.5, 'fc2_weight': 0.5, 'fc1_bias': 0.0}
    # fc1_weight is frozen (unchanged); fc1_bias and fc2_weight must have moved.
    assert mx.test_utils.almost_equal(args1['fc1_weight'], args2['fc1_weight'], 1e-10)
    assert not mx.test_utils.almost_equal(args1['fc1_bias'], args2['fc1_bias'], 1e-1)
    assert not mx.test_utils.almost_equal(args1['fc2_weight'], args2['fc2_weight'], 1e-1)
def compare_ndarray_tuple(t1, t2, rtol=None, atol=None):
    """Recursively assert that two (possibly nested) tuples of NDArrays are
    element-wise almost equal. A pair where either side is None is skipped."""
    if t1 is None or t2 is None:
        return
    if isinstance(t1, tuple):
        for e1, e2 in zip(t1, t2):
            compare_ndarray_tuple(e1, e2, rtol, atol)
    else:
        assert_almost_equal(t1.asnumpy(), t2.asnumpy(), rtol=rtol, atol=atol)
def compare_optimizer(opt1, opt2, shape, dtype, w_stype='default', g_stype='default',
                      rtol=1e-4, atol=1e-5):
    """Run one update step of two optimizers on identical weight/gradient pairs
    and assert that their states and resulting weights match.

    opt1 always works on dense (default-storage) arrays; opt2 may receive
    row_sparse/csr storage for its weight (w_stype) and gradient (g_stype).
    """
    # Build the weight pair (w2 for opt2, dense copy w1 for opt1).
    if w_stype == 'default':
        w2 = mx.random.uniform(shape=shape, ctx=default_context(), dtype=dtype)
        w1 = w2.copyto(default_context())
    elif w_stype in ('row_sparse', 'csr'):
        w2 = rand_ndarray(shape, w_stype, density=1, dtype=dtype)
        w1 = w2.copyto(default_context()).tostype('default')
    else:
        raise Exception("type not supported yet")
    # Build the gradient pair the same way (note: no density=1 here).
    if g_stype == 'default':
        g2 = mx.random.uniform(shape=shape, ctx=default_context(), dtype=dtype)
        g1 = g2.copyto(default_context())
    elif g_stype in ('row_sparse', 'csr'):
        g2 = rand_ndarray(shape, g_stype, dtype=dtype)
        g1 = g2.copyto(default_context()).tostype('default')
    else:
        raise Exception("type not supported yet")
    # Initial optimizer states must already agree.
    state1 = opt1.create_state_multi_precision(0, w1)
    state2 = opt2.create_state_multi_precision(0, w2)
    compare_ndarray_tuple(state1, state2)
    # One update step each, then compare states and weights within tolerance.
    opt1.update_multi_precision(0, w1, g1, state1)
    opt2.update_multi_precision(0, w2, g2, state2)
    compare_ndarray_tuple(state1, state2, rtol=rtol, atol=atol)
    assert_almost_equal(w1.asnumpy(), w2.asnumpy(), rtol=rtol, atol=atol)
# SGD
class PySGD(mx.optimizer.Optimizer):
    """Python reference implementation of SGD (optionally with momentum and a
    float16 multi-precision mode), used to validate mx.optimizer.SGD."""
    def __init__(self, learning_rate=0.01, momentum=0.0, multi_precision=False, **kwargs):
        super(PySGD, self).__init__(learning_rate=learning_rate, **kwargs)
        self.momentum = momentum
        self.multi_precision = multi_precision
    def create_state(self, index, weight):
        """Create additional optimizer state: momentum.

        When multi_precision is enabled and the weight is float16, also keeps
        a float32 master copy of the weight and a float32 momentum buffer.

        Parameters
        ----------
        weight : NDArray
            The weight data
        """
        momentum = None
        weight_master_copy = None
        do_multi_precision = self.multi_precision and weight.dtype == np.float16
        if do_multi_precision:
            if self.momentum != 0.0:
                momentum = mx.nd.zeros(weight.shape, weight.context, dtype=np.float32)
            weight_master_copy = array(weight, ctx=weight.context, dtype=np.float32)
            # NOTE: state order is (momentum, master_weight); update() below
            # reads state[0] as momentum and state[1] as the float32 weight.
            # (PyNAG deliberately uses the opposite order.)
            return (momentum, weight_master_copy)
        else:
            if self.momentum != 0.0:
                momentum = mx.nd.zeros(weight.shape, weight.context, dtype=weight.dtype)
            return momentum
    def create_state_multi_precision(self, index, weight):
        # Multi-precision handling is already done inside create_state.
        return self.create_state(index, weight)
    def update(self, index, weight, grad, state):
        """Update the parameters.

        Parameters
        ----------
        index : int
            An unique integer key used to index the parameters
        weight : NDArray
            weight ndarray
        grad : NDArray
            grad ndarray
        state : NDArray or other objects returned by init_state
            The auxiliary state used in optimization.
        """
        lr = self._get_lr(index)
        wd = self._get_wd(index)
        self._update_count(index)
        # Multi-precision state is a (momentum, master_weight) tuple/list;
        # single-precision state is the bare momentum NDArray (or None).
        use_multi_precision = isinstance(state, list) or isinstance(state, tuple)
        if not use_multi_precision:
            if self.momentum == 0.0:
                # Vanilla SGD with weight decay folded into the update.
                if self.clip_gradient is not None:
                    weight[:] = ((1 - lr*wd)*weight -
                        lr*mx.nd.clip(grad*self.rescale_grad, -self.clip_gradient, self.clip_gradient))
                else:
                    weight[:] = (1 - lr*wd)*weight - lr*self.rescale_grad*grad
            else:
                # Momentum SGD: the momentum buffer accumulates the full step.
                mom = state
                if self.clip_gradient is not None:
                    mom[:] = (self.momentum*mom - lr*wd*weight -
                        lr*mx.nd.clip(grad*self.rescale_grad, -self.clip_gradient, self.clip_gradient))
                    weight += mom
                else:
                    mom[:] = self.momentum*mom - lr*wd*weight - lr*self.rescale_grad*grad
                    weight += mom
        else:
            # Compute entirely in float32, then cast back into the fp16 weight.
            grad32 = array(grad, ctx=grad.context, dtype=np.float32)
            mom = state[0]
            weight32 = state[1]
            if self.momentum == 0.0:
                if self.clip_gradient is not None:
                    weight32[:] = ((1 - lr*wd)*weight32 -
                        lr*mx.nd.clip(grad32*self.rescale_grad, -self.clip_gradient, self.clip_gradient))
                else:
                    weight32[:] = (1 - lr*wd)*weight32 - lr*self.rescale_grad*grad32
            else:
                if self.clip_gradient is not None:
                    mom[:] = (self.momentum*mom - lr*wd*weight32 -
                        lr*mx.nd.clip(grad32*self.rescale_grad, -self.clip_gradient, self.clip_gradient))
                    weight32 += mom
                else:
                    mom[:] = self.momentum*mom - lr*wd*weight32 - lr*self.rescale_grad*grad32
                    weight32 += mom
            tmp = weight32.astype(weight.dtype)
            tmp.copyto(weight)
    def update_multi_precision(self, index, weight, grad, state):
        self.update(index, weight, grad, state)
@unittest.skip("Test fails intermittently. Temporarily disabled until fixed. Tracked at https://github.com/apache/incubator-mxnet/issues/9000")
@with_seed()
def test_sgd():
    """Compare mx.optimizer.SGD against the PySGD reference over the full
    cross-product of momentum/clipping/rescale/wd/multi-precision options."""
    opt1 = PySGD
    opt2 = mx.optimizer.SGD
    shape = (3, 4, 5)
    mom_options = [{}, {'momentum': 0.9}]
    cg_options = [{}, {'clip_gradient': 0.4}, {'clip_gradient': 0.5}]
    rg_options = [{}, {'rescale_grad': 0.14}, {'rescale_grad': 0.8}]
    wd_options = [{}, {'wd': 0.03}, {'wd': 0.05}, {'wd': 0.07}]
    mp_options = [{}, {'multi_precision': False}, {'multi_precision': True}]
    for dtype in [np.float16, np.float32, np.float64]:
        # itertools.product replaces five nested option loops.
        for options in itertools.product(mom_options, cg_options, rg_options,
                                         wd_options, mp_options):
            kwarg = {}
            for option in options:
                kwarg.update(option)
            # fp16 only makes sense with multi-precision master weights.
            if dtype == np.float16 and not kwarg.get('multi_precision', False):
                continue
            compare_optimizer(opt1(**kwarg), opt2(**kwarg), shape, dtype)
            # test operator fallback on cpu
            if default_context() == mx.cpu():
                compare_optimizer(opt1(**kwarg), opt2(**kwarg), shape, dtype,
                                  g_stype='row_sparse')
                if dtype != np.float16:
                    compare_optimizer(opt1(**kwarg), opt2(**kwarg), shape[:2],
                                      dtype, w_stype='csr', g_stype='csr')
    # test optimizer with a big shape
    big_shape = (54686454, 1)
    kwarg = {'momentum': 0.9, 'wd': 0.05}
    compare_optimizer(opt1(**kwarg), opt2(**kwarg), big_shape, np.float32)
class PySparseSGD(mx.optimizer.Optimizer):
    """Python reference implementation of sparse SGD: rows whose gradient is
    entirely zero are skipped (lazy update), matching row_sparse semantics."""
    def __init__(self, learning_rate=0.01, momentum=0.0, **kwargs):
        super(PySparseSGD, self).__init__(learning_rate=learning_rate, **kwargs)
        self.momentum = momentum
    def create_state(self, index, weight):
        """Create additional optimizer state: momentum.

        Parameters
        ----------
        weight : NDArray
            The weight data
        """
        if self.momentum == 0.0:
            return None
        else:
            return mx.nd.zeros(weight.shape, weight.context, dtype=weight.dtype)
    def update(self, index, weight, grad, state):
        """Update the parameters.

        Parameters
        ----------
        index : int
            An unique integer key used to index the parameters
        weight : NDArray
            weight ndarray
        grad : NDArray
            grad ndarray
        state : NDArray or other objects returned by init_state
            The auxiliary state used in optimization.
        """
        lr = self._get_lr(index)
        wd = self._get_wd(index)
        self._update_count(index)
        num_rows = weight.shape[0]
        if self.momentum == 0.0:
            # Update on a per row basis, skip all-zero rows
            for row in range(num_rows):
                grad_row = grad[row].asnumpy()
                all_zeros = mx.test_utils.almost_equal(grad_row, np.zeros_like(grad_row))
                if all_zeros:
                    continue
                if self.clip_gradient is not None:
                    weight[row] = ((1 - lr*wd)*weight[row] -
                        lr*mx.nd.clip(grad[row]*self.rescale_grad,
                            -self.clip_gradient, self.clip_gradient))
                else:
                    weight[row] = (1 - lr*wd)*weight[row] - lr*self.rescale_grad*grad[row]
        else:
            # Momentum variant: the momentum buffer is also only touched for
            # rows with a non-zero gradient (lazy momentum).
            mom = state
            for row in range(num_rows):
                grad_row = grad[row].asnumpy()
                all_zeros = mx.test_utils.almost_equal(grad_row, np.zeros_like(grad_row))
                if all_zeros:
                    continue
                if self.clip_gradient is not None:
                    mom[row] = (self.momentum*mom[row] - lr*wd*weight[row] -
                        lr*mx.nd.clip(grad[row]*self.rescale_grad, -self.clip_gradient, self.clip_gradient))
                    weight[row] += mom[row]
                else:
                    mom[row] = self.momentum*mom[row] - lr*wd*weight[row] - lr*self.rescale_grad*grad[row]
                    weight[row] += mom[row]
@with_seed()
def test_sparse_sgd():
    """Compare mx.optimizer.SGD on row_sparse weights/gradients against the
    PySparseSGD reference over the full cross-product of options."""
    opt1 = PySparseSGD
    opt2 = mx.optimizer.SGD
    shape = (3, 4, 5)
    mom_options = [{}, {'momentum': 0.9}]
    cg_options = [{}, {'clip_gradient': 0.4}, {'clip_gradient': 0.5}]
    rg_options = [{}, {'rescale_grad': 0.14}, {'rescale_grad': 0.8}]
    wd_options = [{}, {'wd': 0.03}, {'wd': 0.05}, {'wd': 0.07}]
    mp_options = [{}, {'multi_precision': False}, {'multi_precision': True}]
    for dtype in [np.float32]:
        # itertools.product replaces five nested option loops.
        for options in itertools.product(mom_options, cg_options, rg_options,
                                         wd_options, mp_options):
            kwarg = {}
            for option in options:
                kwarg.update(option)
            compare_optimizer(opt1(**kwarg), opt2(**kwarg), shape, dtype,
                              w_stype='row_sparse', g_stype='row_sparse')
@with_seed(0)
def test_std_sparse_sgd():
    """Compare non-lazy (standard) SGD updates on row_sparse storage against
    the dense PySGD reference over the cross-product of options."""
    opt1 = PySGD
    opt2 = mx.optimizer.SGD
    shape = (3, 4, 5)
    mom_options = [{'momentum': 0.9}]
    cg_options = [{}, {'clip_gradient': 0.4}, {'clip_gradient': 0.5}]
    rg_options = [{}, {'rescale_grad': 0.14}, {'rescale_grad': 0.8}]
    wd_options = [{}, {'wd': 0.03}, {'wd': 0.05}, {'wd': 0.07}]
    for dtype in [np.float32]:
        # itertools.product replaces four nested option loops.
        for options in itertools.product(mom_options, cg_options, rg_options, wd_options):
            kwarg = {}
            for option in options:
                kwarg.update(option)
            # lazy_update=False forces std update, so the dense reference applies.
            compare_optimizer(opt1(**kwarg), opt2(lazy_update=False, **kwarg), shape, dtype,
                              w_stype='row_sparse', g_stype='row_sparse')
class PyNAG(PySGD):
    """Python reference implementation of Nesterov accelerated gradient (NAG),
    used to validate mx.optimizer.NAG."""
    def __init__(self, **kwargs):
        super(PyNAG, self).__init__(**kwargs)
    def create_state(self, index, weight):
        """Create additional optimizer state: momentum.

        Parameters
        ----------
        weight : NDArray
            The weight data
        """
        momentum = None
        weight_master_copy = None
        do_multi_precision = self.multi_precision and weight.dtype == np.float16
        if do_multi_precision:
            if self.momentum != 0.0:
                momentum = mx.nd.zeros(weight.shape, weight.context, dtype=np.float32)
            weight_master_copy = array(weight, ctx=weight.context, dtype=np.float32)
            # NOTE: state order is (master_weight, momentum) — the REVERSE of
            # PySGD; update() below reads state[0] as weight32, state[1] as mom.
            return (weight_master_copy, momentum)
        else:
            if self.momentum != 0.0:
                momentum = mx.nd.zeros(weight.shape, weight.context, dtype=weight.dtype)
            return momentum
    def create_state_multi_precision(self, index, weight):
        return self.create_state(index, weight)
    def update(self, index, weight, grad, state):
        """Update the parameters.

        Parameters
        ----------
        index : int
            An unique integer key used to index the parameters
        weight : NDArray
            weight ndarray
        grad : NDArray
            grad ndarray
        state : NDArray or other objects returned by init_state
            The auxiliary state used in optimization.
        """
        lr = self._get_lr(index)
        wd = self._get_wd(index)
        self._update_count(index)
        use_multi_precision = isinstance(state, list) or isinstance(state, tuple)
        if not use_multi_precision:
            # Rescale and clip first; wd is added into the gradient below.
            grad = grad * self.rescale_grad
            if self.clip_gradient is not None:
                grad = mx.nd.clip(grad, -self.clip_gradient, self.clip_gradient)
            if self.momentum == 0.0:
                weight[:] += -lr * (grad + wd * weight)
            else:
                # Nesterov step: the statement order below is load-bearing —
                # mom accumulates grad, then grad gets the look-ahead term.
                mom = state
                mom[:] *= self.momentum
                grad += wd * weight
                mom[:] += grad
                grad[:] += self.momentum * mom
                weight[:] += -lr * grad
        else:
            # Multi-precision path: same math carried out in float32.
            grad32 = array(grad, ctx=grad.context, dtype=np.float32)
            grad32 = grad32 * self.rescale_grad
            if self.clip_gradient is not None:
                grad32 = mx.nd.clip(grad32, -self.clip_gradient, self.clip_gradient)
            mom = state[1]
            weight32 = state[0]
            if self.momentum == 0.0:
                weight32[:] += -lr * (grad32 + wd * weight32)
            else:
                mom[:] *= self.momentum
                grad32 += wd * weight32
                mom[:] += grad32
                grad32[:] += self.momentum * mom
                weight32[:] += -lr * grad32
            tmp = weight32.astype(weight.dtype)
            tmp.copyto(weight)
@with_seed(0)
def test_nag():
    """Compare PyNAG against mx.optimizer.NAG across dtypes and options."""
    py_opt = PyNAG
    mx_opt = mx.optimizer.NAG
    shape = (3, 4, 5)
    option_groups = [
        [{}, {'momentum': 0.9}],
        [{}, {'clip_gradient': 0.4}, {'clip_gradient': 0.5}],
        [{}, {'rescale_grad': 0.14}, {'rescale_grad': 0.8}],
        [{}, {'wd': 0.03}, {'wd': 0.05}, {'wd': 0.07}],
        [{}, {'multi_precision': False}, {'multi_precision': True}],
    ]
    combos = [{}]
    for group in option_groups:
        combos = [dict(base, **extra) for base in combos for extra in group]
    for dtype in [np.float16, np.float32, np.float64]:
        for kwarg in combos:
            # fp16 is only meaningful with an fp32 master copy of the weights.
            if dtype == np.float16 and not kwarg.get('multi_precision', False):
                continue
            compare_optimizer(py_opt(**kwarg), mx_opt(**kwarg), shape, dtype)
# FTML
class PyFTML(mx.optimizer.Optimizer):
    """Python reference implementation of the FTML optimizer.

    Parameters
    ----------
    beta1 : float
        Decay rate used for the d/z accumulators and the weight term.
    beta2 : float
        Decay rate for the squared-gradient accumulator v.
    epsilon : float
        Small constant for numerical stability.
    """
    def __init__(self, beta1=0.6, beta2=0.999, epsilon=1e-8, **kwargs):
        super(PyFTML, self).__init__(**kwargs)
        self.beta1 = beta1
        self.beta2 = beta2
        self.epsilon = epsilon
    def create_state(self, index, weight):
        # State triple, all zero-initialized.
        return (mx.nd.zeros(weight.shape, weight.context, dtype=weight.dtype), # d_0
                mx.nd.zeros(weight.shape, weight.context, dtype=weight.dtype), # v_0
                mx.nd.zeros(weight.shape, weight.context, dtype=weight.dtype)) # z_0
    def update(self, index, weight, grad, state):
        """Apply one FTML step to ``weight`` in place, updating ``state``."""
        assert(isinstance(weight, mx.nd. NDArray))
        assert(isinstance(grad, mx.nd.NDArray))
        self._update_count(index)
        lr = self._get_lr(index)
        wd = self._get_wd(index)
        t = self._index_update_count[index]
        # rescale and apply weight decay to the raw gradient
        grad = grad * self.rescale_grad + wd * weight
        if self.clip_gradient is not None:
            grad = mx.nd.clip(grad, -self.clip_gradient, self.clip_gradient)
        # get previous states
        prev_d, prev_v, prev_z = state
        # compute states (bias-corrected via the pow(beta, t) terms)
        v_t = self.beta2 * prev_v + (1 - self.beta2) * mx.nd.square(grad)
        d_t = (1 - pow(self.beta1, t)) / lr * (mx.nd.sqrt(v_t / (1 - pow(self.beta2, t))) + self.epsilon)
        sigma_t = d_t - self.beta1 * prev_d
        z_t = self.beta1 * prev_z + (1 - self.beta1) * grad - sigma_t * weight
        # update weight
        weight[:] = - z_t / d_t
        # update states
        prev_d[:] = d_t
        prev_v[:] = v_t
        prev_z[:] = z_t
@with_seed(0)
def test_ftml():
    """Compare PyFTML against mx.optimizer.FTML over all option combinations."""
    py_opt = PyFTML
    mx_opt = mx.optimizer.FTML
    shape = (3, 4, 5)
    option_groups = [
        [{}, {'beta1': 0.5}, {'beta1': 0.7}],
        [{}, {'beta2': 0.8}, {'beta2': 0.9}],
        [{}, {'clip_gradient': 0.4}, {'clip_gradient': 0.5}],
        [{}, {'rescale_grad': 0.14}, {'rescale_grad': 0.8}],
        [{}, {'wd': 0.03}, {'wd': 0.05}, {'wd': 0.07}],
    ]
    combos = [{}]
    for group in option_groups:
        combos = [dict(base, **extra) for base in combos for extra in group]
    for dtype in [np.float32]:
        for kwarg in combos:
            compare_optimizer(py_opt(**kwarg), mx_opt(**kwarg), shape, dtype)
# ADAM
class PyAdam(mx.optimizer.Optimizer):
    """Python reference implementation of Adam.

    With ``sparse_update=True`` the row-wise loop skips rows whose gradient is
    entirely zero, mirroring the lazy row_sparse update path.
    """
    def __init__(self, learning_rate=0.001, beta1=0.9, beta2=0.999, epsilon=1e-8,
                 decay_factor=(1 - 1e-8), sparse_update=False, **kwargs):
        super(PyAdam, self).__init__(learning_rate=learning_rate, **kwargs)
        self.beta1 = beta1
        self.beta2 = beta2
        self.epsilon = epsilon
        self.decay_factor = decay_factor
        self.sparse_update = sparse_update
    def create_state(self, index, weight):
        """Create additional optimizer state: mean, variance
        Parameters
        ----------
        weight : NDArray
            The weight data
        """
        return (mx.nd.zeros(weight.shape, weight.context, dtype=weight.dtype), # mean
                mx.nd.zeros(weight.shape, weight.context, dtype=weight.dtype)) # variance
    def update(self, index, weight, grad, state):
        """Update the parameters.
        Parameters
        ----------
        index : int
            An unique integer key used to index the parameters
        weight : NDArray
            weight ndarray
        grad : NDArray
            grad ndarray
        state : NDArray or other objects returned by init_state
            The auxiliary state used in optimization.
        """
        lr = self._get_lr(index)
        self._update_count(index)
        t = self._index_update_count[index]
        mean, variance = state
        wd = self._get_wd(index)
        num_rows = weight.shape[0]
        coef1 = 1. - self.beta1**t
        coef2 = 1. - self.beta2**t
        # fold Adam's bias correction into the effective learning rate
        lr *= math.sqrt(coef2)/coef1
        for row in range(num_rows):
            # check row slices of all zeros
            all_zeros = mx.test_utils.almost_equal(grad[row].asnumpy(), np.zeros_like(grad[row].asnumpy()))
            # skip zeros during sparse update
            if all_zeros and self.sparse_update:
                continue
            grad[row] = grad[row] * self.rescale_grad + wd * weight[row]
            # clip gradients
            if self.clip_gradient is not None:
                mx.nd.clip(grad[row], -self.clip_gradient, self.clip_gradient, out=grad[row])
            # update mean
            mean[row] *= self.beta1
            mean[row] += grad[row] * (1. - self.beta1)
            # update variance; out=grad[row] clobbers the gradient row, which is
            # safe because grad[row] is not read again this iteration
            variance[row] *= self.beta2
            variance[row] += (1 - self.beta2) * mx.nd.square(grad[row], out=grad[row])
            # update weight
            weight[row] -= lr*mean[row]/(mx.nd.sqrt(variance[row]) + self.epsilon)
@with_seed()
def test_adam():
    """Compare PyAdam against mx.optimizer.Adam for dense, lazy-sparse and
    non-lazy sparse updates."""
    py_opt = PyAdam
    mx_opt = mx.optimizer.Adam
    shape = (3, 4, 5)
    option_groups = [
        [{}, {'clip_gradient': 0.4}, {'clip_gradient': 0.5}],
        [{}, {'rescale_grad': 0.14}, {'rescale_grad': 0.8}],
        [{}, {'wd': 0.03}, {'wd': 0.05}, {'wd': 0.07}],
        [{}, {'multi_precision': False}, {'multi_precision': True}],
    ]
    combos = [{}]
    for group in option_groups:
        combos = [dict(base, **extra) for base in combos for extra in group]
    for dtype in [np.float16, np.float32, np.float64]:
        for kwarg in combos:
            # fp16 is only meaningful with an fp32 master copy of the weights.
            if dtype == np.float16 and not kwarg.get('multi_precision', False):
                continue
            # atol 2e-5 needed to pass with seed 1248389097
            compare_optimizer(py_opt(**kwarg), mx_opt(**kwarg), shape, dtype,
                              rtol=1e-4, atol=2e-5)
            # atol 2e-5 needed to pass with seed 781809840
            compare_optimizer(py_opt(sparse_update=True, **kwarg), mx_opt(**kwarg), shape,
                              dtype, w_stype='row_sparse', g_stype='row_sparse',
                              rtol=1e-4, atol=2e-5)
            compare_optimizer(py_opt(**kwarg), mx_opt(lazy_update=False, **kwarg), shape,
                              dtype, w_stype='row_sparse', g_stype='row_sparse',
                              rtol=1e-4, atol=2e-5)
# Signum
class PySignum(mx.optimizer.Optimizer):
    """The python reference of Signum optimizer.
    The optimizer updates the weight by:
        rescaled_grad = rescale_grad * clip(grad, clip_gradient) + wd * weight
        state = momentum * state + (1-momentum)*rescaled_grad
        weight = (1 - lr * wd_lh) * weight - lr * sign(state)
    See the original paper at: https://jeremybernste.in/projects/amazon/signum.pdf
    For details of the update algorithm see
    :class:`~mxnet.ndarray.signsgd_update` and :class:`~mxnet.ndarray.signum_update`.
    This optimizer accepts the following parameters in addition to those accepted
    by :class:`.Optimizer`.
    Parameters
    ----------
    momentum : float, optional
       The momentum value.
    wd_lh : float, optitional
       The amount of decoupled weight decay regularization.
    """
    def __init__(self, learning_rate=0.01, momentum=0.9, wd_lh = 0.0, **kwargs):
        super(PySignum, self).__init__(learning_rate = learning_rate, **kwargs)
        self.momentum = momentum
        self.wd_lh = wd_lh
    def create_state(self, index, weight):
        # Momentum buffer is only allocated when momentum is enabled.
        momentum = None
        if self.momentum != 0.0:
            momentum = mx.nd.zeros(weight.shape, weight.context, dtype=weight.dtype, stype=weight.stype)
        return momentum
    def update(self, index, weight, grad, state):
        self._update_count(index)
        lr = self._get_lr(index)
        wd = self._get_wd(index)
        if state is not None:
            # Signum: momentum buffer of (negated) rescaled gradients, step by its sign.
            mom = state
            if self.clip_gradient is not None:
              mom[:] = (self.momentum*mom - (1-self.momentum)*(wd*weight +
                  mx.nd.clip(grad*self.rescale_grad, -self.clip_gradient, self.clip_gradient)))
            else:
              mom[:] = self.momentum*mom - (1-self.momentum)*wd*weight - (1-self.momentum)*self.rescale_grad*grad
            weight[:] = (1 - lr*self.wd_lh)*weight + lr*mx.nd.sign(mom)
        else:
            # SignSGD: no state, step by the sign of the gradient only.
            weight[:] = (1 - lr*(wd+self.wd_lh))*weight - lr*mx.nd.sign(grad)
@with_seed(0)
def test_signum():
    """Compare PySignum against mx.optimizer.Signum over all option combinations."""
    py_opt = PySignum
    mx_opt = mx.optimizer.Signum
    shape = (3, 4, 5)
    option_groups = [
        [{}, {'clip_gradient': 0.4}, {'clip_gradient': 0.5}],
        [{}, {'rescale_grad': 0.14}, {'rescale_grad': 0.8}],
        [{}, {'wd': 0.03}, {'wd': 0.05}, {'wd': 0.07}],
        [{}, {'wd_lh': 0.015}, {'wd_lh': 0.0}],
        [{'learning_rate': 0.05}, {'learning_rate': 0.01}],
        [{}, {'momentum': 0.9}],
    ]
    combos = [{}]
    for group in option_groups:
        combos = [dict(base, **extra) for base in combos for extra in group]
    for dtype in [np.float32, np.float64]:
        for kwarg in combos:
            compare_optimizer(py_opt(**kwarg), mx_opt(**kwarg), shape, dtype)
# RMSProp
class PyRMSProp(mx.optimizer.Optimizer):
    """RMSProp optimizer of Tieleman & Hinton, 2012,
    For centered=False, the code follows the version in
    http://www.cs.toronto.edu/~tijmen/csc321/slides/lecture_slides_lec6.pdf by
    Tieleman & Hinton, 2012
    For centered=True, the code follows the version in
    http://arxiv.org/pdf/1308.0850v5.pdf Eq(38) - Eq(45) by Alex Graves, 2013.
    Parameters
    ----------
    learning_rate : float, optional
        Step size.
        Default value is set to 0.001.
    gamma1: float, optional
        decay factor of moving average for gradient, gradient^2.
        Default value is set to 0.9.
    gamma2: float, optional
        "momentum" factor.
        Default value if set to 0.9.
        Only used if centered=True
    epsilon : float, optional
        Default value is set to 1e-8.
    centered : boolean, optional
        Use Graves or Tielemans & Hintons version of RMSProp
    wd : float, optional
        L2 regularization coefficient add to all the weights
    rescale_grad : float, optional
        rescaling factor of gradient.
    clip_gradient : float, optional
        clip gradient in range [-clip_gradient, clip_gradient]
    clip_weights : float, optional
        clip weights in range [-clip_weights, clip_weights]
    """
    def __init__(self, learning_rate=0.001, gamma1=0.9, gamma2=0.9,
                 epsilon=1e-8, centered=False, clip_weights=None, **kwargs):
        super(PyRMSProp, self).__init__(learning_rate=learning_rate, **kwargs)
        self.centered = centered
        self.gamma1 = gamma1
        self.gamma2 = gamma2
        self.epsilon = epsilon
        self.clip_weights = clip_weights
    def create_state(self, index, weight):
        """Create additional optimizer state.
        For centered=False: n
        For centered=True: n, g, delta
        Parameters
        ----------
        weight : NDArray
            The weight data
        """
        if self.centered:
            return (mx.nd.zeros(weight.shape, weight.context), # n
                    mx.nd.zeros(weight.shape, weight.context), # g
                    mx.nd.zeros(weight.shape, weight.context)) # delta
        else:
            return (mx.nd.zeros(weight.shape, weight.context), ) # n
    def update(self, index, weight, grad, state):
        """Update the parameters.
        Parameters
        ----------
        index : int
            An unique integer key used to index the parameters
        weight : NDArray
            weight ndarray
        grad : NDArray
            grad ndarray
        state : NDArray or other objects returned by init_state
            The auxiliary state used in optimization.
        """
        lr = self._get_lr(index)
        wd = self._get_wd(index)
        self._update_count(index)
        grad = grad * self.rescale_grad + wd * weight
        if not self.centered:
            (n, ) = state
            if self.clip_gradient is not None:
                grad = mx.nd.clip(grad, -self.clip_gradient, self.clip_gradient)
            # n is the running mean of squared gradients
            n[:] = (1 - self.gamma1) * (grad * grad) + self.gamma1 * n
            weight[:] -= lr * grad/(mx.nd.sqrt(n + self.epsilon))
        else:
            n, g, delta = state
            if self.clip_gradient is not None:
                grad = mx.nd.clip(grad, -self.clip_gradient, self.clip_gradient)
            # centered variant also tracks the running mean gradient g and uses
            # the variance estimate n - g*g in the denominator
            n[:] = (1 - self.gamma1) * (grad * grad) + self.gamma1 * n
            g[:] = (1 - self.gamma1) * grad + self.gamma1 * g
            delta[:] = (self.gamma2) * delta - lr * grad/(mx.nd.sqrt(n - g*g + self.epsilon))
            weight[:] += delta
        if self.clip_weights:
            mx.ndarray.clip(weight, -self.clip_weights, self.clip_weights, out=weight)
@unittest.skip("Test fails intermittently. Temporarily disabled until fixed. Tracked at https://github.com/apache/incubator-mxnet/issues/8230")
@with_seed(0)
def test_rms():
    """Compare PyRMSProp against mx.optimizer.RMSProp (plain and centered)."""
    py_opt = PyRMSProp
    mx_opt = mx.optimizer.RMSProp
    shape = (3, 4, 5)
    option_groups = [
        [{}, {'clip_weights': 0.01}],
        [{}, {'clip_gradient': 0.4}, {'clip_gradient': 0.5}],
        [{}, {'centered': False}, {'centered': True}],
        [{}, {'rescale_grad': 0.14}, {'rescale_grad': 0.8}],
        [{}, {'wd': 0.03}, {'wd': 0.05}, {'wd': 0.07}],
        [{}, {'multi_precision': False}, {'multi_precision': True}],
    ]
    combos = [{}]
    for group in option_groups:
        combos = [dict(base, **extra) for base in combos for extra in group]
    for dtype in [np.float16, np.float32]:
        for kwarg in combos:
            # fp16 is only meaningful with an fp32 master copy of the weights.
            if dtype == np.float16 and not kwarg.get('multi_precision', False):
                continue
            compare_optimizer(py_opt(**kwarg), mx_opt(**kwarg), shape, dtype)
            if (default_context() == mx.cpu()):
                compare_optimizer(py_opt(**kwarg), mx_opt(**kwarg), shape, dtype, g_stype='row_sparse')
class PyFtrl(mx.optimizer.Optimizer):
    """The Ftrl optimizer.
    Referenced from *Ad Click Prediction: a View from the Trenches*, available at
    http://dl.acm.org/citation.cfm?id=2488200.
    Parameters
    ----------
    lamda1 : float, optional
        L1 regularization coefficient.
    learning_rate : float, optional
        The initial learning rate.
    beta : float, optional
        Per-coordinate learning rate correlation parameter.
    eta :
        .. math::
           \\eta_{t,i} = \\frac{learningrate}{\\beta+\\sqrt{\\sum_{s=1}^tg_{s,i}^t}}
    """
    def __init__(self, lamda1=0.01, learning_rate=0.1, beta=1, sparse_update=False, **kwargs):
        super(PyFtrl, self).__init__(**kwargs)
        self.lamda1 = lamda1
        self.beta = beta
        self.lr = learning_rate
        self.sparse_update = sparse_update
    def create_state(self, index, weight):
        # State pair: dual accumulator dn and squared-gradient sum n.
        return (mx.nd.zeros(weight.shape, weight.context, dtype=weight.dtype), # dn
                mx.nd.zeros(weight.shape, weight.context, dtype=weight.dtype)) # n
    def update(self, index, weight, grad, state):
        """Apply one FTRL step row by row; in sparse mode, all-zero gradient
        rows are skipped."""
        self._update_count(index)
        wd = self._get_wd(index)
        lr = self._get_lr(index)
        num_rows = weight.shape[0]
        dn, n = state
        for row in range(num_rows):
            all_zeros = mx.test_utils.almost_equal(grad[row].asnumpy(), np.zeros_like(grad[row].asnumpy()))
            if all_zeros and self.sparse_update:
                continue
            grad[row] = grad[row] * self.rescale_grad
            if self.clip_gradient is not None:
                mx.nd.clip(grad[row], -self.clip_gradient, self.clip_gradient, out=grad[row])
            # update dn, n
            dn[row] += grad[row] - (mx.nd.sqrt(n[row] + grad[row] * grad[row]) - mx.nd.sqrt(n[row])) * weight[row] / lr
            n[row] += grad[row] * grad[row]
            # update weight: closed-form solution with L1 soft-thresholding
            # (the final comparison zeroes coordinates with |dn| <= lamda1)
            weight[row] = (mx.nd.sign(dn[row]) * self.lamda1 - dn[row]) / \
                ((self.beta + mx.nd.sqrt(n[row])) / lr + wd) * (mx.nd.abs(dn[row]) > self.lamda1)
@with_seed()
def test_ftrl():
    """Compare PyFtrl against mx.optimizer.Ftrl for dense and sparse updates."""
    py_opt = PyFtrl
    mx_opt = mx.optimizer.Ftrl
    shape = (3, 4, 5)
    all_kwargs = (
        {},
        {'clip_gradient': 0.5},
        {'clip_gradient': 0.4, 'rescale_grad': 0.14},
        {'rescale_grad': 0.8},
        {'clip_gradient': 0.5, 'wd': 0.07},
        {'clip_gradient': 0.4, 'rescale_grad': 0.14, 'wd': 0.03},
        {'rescale_grad': 0.8, 'wd': 0.05},
        {'rescale_grad': 0.8, 'wd': 0.05, 'lamda1': 0.01},
        {'clip_gradient': 0.5, 'wd': 0.07, 'lamda1': 1.0},
    )
    for kwarg in all_kwargs:
        # dense reference vs dense implementation
        compare_optimizer(py_opt(**kwarg), mx_opt(**kwarg), shape, np.float32)
        # sparse reference vs row_sparse implementation
        compare_optimizer(py_opt(sparse_update=True, **kwarg), mx_opt(**kwarg), shape,
                          np.float32, w_stype='row_sparse', g_stype='row_sparse')
@with_seed(1234)
def test_nadam():
    # Small MLP used as the regression model for the end-to-end fit below.
    def get_net(num_hidden, flatten=True):
        data = mx.symbol.Variable('data')
        fc1 = mx.symbol.FullyConnected(data, name='fc1', num_hidden=128, flatten=flatten)
        act1 = mx.symbol.Activation(fc1, name='relu1', act_type="relu")
        fc2 = mx.symbol.FullyConnected(act1, name = 'fc2', num_hidden = 64, flatten=flatten)
        act2 = mx.symbol.Activation(fc2, name='relu2', act_type="relu")
        fc3 = mx.symbol.FullyConnected(act2, name='fc3', num_hidden=num_hidden, flatten=flatten)
        return fc3
    # Random regression problem; fixed seed (1234) keeps the fit reproducible.
    N = 20
    data = mx.random.uniform(-1, 1, shape=(N, 10))
    label = mx.random.uniform(-1, 1, shape=(N, 1))
    data_iter = mx.io.NDArrayIter(data, label, batch_size=5, label_name='label', shuffle=True)
    output = get_net(1)
    l = mx.symbol.Variable('label')
    Loss = gluon.loss.L1Loss()
    loss = Loss(output, l)
    loss = mx.sym.make_loss(loss)
    mod = mx.mod.Module(loss, data_names=('data',), label_names=('label',))
    mod.fit(data_iter, num_epoch=60, optimizer_params={'learning_rate': 0.0005, 'wd': 0.0005},
            initializer=mx.init.Xavier(magnitude=2), eval_metric=mx.metric.Loss(),
            optimizer='nadam')
    # Training with nadam should drive the L1 loss below 0.1 on this toy data.
    assert mod.score(data_iter, eval_metric=mx.metric.Loss())[0][1] < 0.1
# AdaGrad
class PyAdaGrad(mx.optimizer.Optimizer):
    """The python reference of AdaGrad optimizer.
    This class implements the AdaGrad optimizer described in *Adaptive Subgradient
    Methods for Online Learning and Stochastic Optimization*, and available at
    http://www.jmlr.org/papers/volume12/duchi11a/duchi11a.pdf.
    Updates are applied by::
        rescaled_grad = clip(grad * rescale_grad, clip_gradient)
        history = history + square(rescaled_grad)
        w = w - learning_rate * (rescaled_grad / sqrt(history + epsilon) + wd * w)
    Note that weight decay is applied outside the adaptive scaling here (wd is
    NOT folded into the accumulated history).
    This optimizer accepts the following parameters in addition to those accepted
    by :class:`.Optimizer`.
    Parameters
    ----------
    eps: float, optional
        Small value to avoid division by 0.
    """
    def __init__(self, eps=1e-7, **kwargs):
        super(PyAdaGrad, self).__init__(**kwargs)
        self.float_stable_eps = eps
    def create_state(self, index, weight):
        # history: per-coordinate sum of squared (rescaled) gradients
        return mx.nd.zeros(weight.shape, weight.context, stype=weight.stype)
    def update(self, index, weight, grad, state):
        self._update_count(index)
        lr = self._get_lr(index)
        wd = self._get_wd(index)
        history = state
        grad = grad * self.rescale_grad
        if self.clip_gradient is not None:
            grad = mx.nd.clip(grad, -self.clip_gradient, self.clip_gradient)
        # accumulate squared gradients, then scale the step by 1/sqrt(history)
        history[:] += mx.nd.square(grad)
        div = grad / mx.nd.sqrt(history + self.float_stable_eps)
        weight[:] += (div + weight * wd) * -lr
@with_seed(0)
def test_adagrad():
    """Compare the reference PyAdaGrad with mx.optimizer.AdaGrad.

    Uses the file-wide ``@with_seed(0)`` decorator instead of a manual
    ``mx.random.seed(0)`` call so that seeding is handled consistently with
    every other optimizer test in this module.
    """
    opt1 = PyAdaGrad
    opt2 = mx.optimizer.AdaGrad
    shape = (3, 4, 5)
    eps_options = [{}, {'eps': 1e-8}]
    cg_options = [{}, {'clip_gradient': 0.4}, {'clip_gradient': 0.5}]
    rg_options = [{}, {'rescale_grad': 0.14}, {'rescale_grad': 0.8}]
    wd_options = [{}, {'wd': 0.0}]
    for dtype in [np.float32]:
        for eps_option in eps_options:
            for cg_option in cg_options:
                for rg_option in rg_options:
                    for wd_option in wd_options:
                        kwarg = {}
                        kwarg.update(eps_option)
                        kwarg.update(cg_option)
                        kwarg.update(rg_option)
                        kwarg.update(wd_option)
                        compare_optimizer(opt1(**kwarg), opt2(**kwarg), shape, dtype)
                        # Sparse comparison only with wd == 0.0 -- presumably
                        # nonzero wd would touch rows the lazy sparse update
                        # skips; confirm before widening this case.
                        if wd_option.get('wd', 0.0) == 0.0:
                            compare_optimizer(opt1(**kwarg), opt2(**kwarg), shape, dtype,
                                              w_stype='row_sparse', g_stype='row_sparse')
if __name__ == '__main__':
    # Allow running this test module directly: delegate to nose's runner.
    import nose
    nose.runmodule()
# NOTE: removed a dataset-extraction artifact (metadata row) that was not valid Python.
import numpy as np
import mxnet as mx
import mxnet.lr_scheduler as lr_scheduler
from mxnet import gluon
import unittest
from nose.tools import raises
import math
from mxnet.test_utils import *
from common import setup_module, with_seed
@with_seed()
def test_learning_rate():
    """learning_rate reflects set_learning_rate and the scheduler's base_lr."""
    opt_plain = mx.optimizer.Optimizer(learning_rate=0.01)
    opt_plain.set_learning_rate(0.2)
    assert opt_plain.learning_rate == 0.2
    scheduler = lr_scheduler.FactorScheduler(step=1)
    opt_sched = mx.optimizer.Optimizer(lr_scheduler=scheduler, learning_rate=0.3)
    assert opt_sched.learning_rate == 0.3
    opt_sched.lr_scheduler.base_lr = 0.4
    assert opt_sched.learning_rate == 0.4
@raises(UserWarning)
@with_seed()
def test_learning_rate_expect_user_warning():
    """set_learning_rate must warn when an lr_scheduler is attached."""
    scheduler = lr_scheduler.FactorScheduler(step=1)
    opt = mx.optimizer.Optimizer(lr_scheduler=scheduler, learning_rate=0.3)
    opt.set_learning_rate(0.5)
@with_seed()
def test_lr_wd_mult():
    # Two-layer net with per-parameter multipliers: fc1 weights frozen
    # (lr_mult=0), fc1 bias trainable, fc2 weight decay halved (wd_mult=0.5).
    data = mx.sym.Variable('data')
    bias = mx.sym.Variable('fc1_bias', lr_mult=1.0)
    fc1 = mx.sym.FullyConnected(data=data, bias=bias, name='fc1', num_hidden=10, lr_mult=0)
    fc2 = mx.sym.FullyConnected(data=fc1, name='fc2', num_hidden=10, wd_mult=0.5)
    mod = mx.mod.Module(symbol=fc2, label_names=None, context=default_context())
    mod.bind(data_shapes=[('data', (5,10))])
    mod.init_params(initializer=mx.init.Uniform(1.0))
    mod.init_optimizer(optimizer_params={'learning_rate': 1.0})
    args1, _ = mod.get_params()
    args1 = {k: v.asnumpy() for k, v in args1.items()}
    # One forward/backward/update pass on random data.
    mod.forward(mx.io.DataBatch(data=[mx.random.uniform(low=-1.0, high=1.0, shape=(5,10))], label=None), is_train=True)
    mod.backward(mod.get_outputs())
    mod.update()
    args2, _ = mod.get_params()
    args2 = {k: v.asnumpy() for k, v in args2.items()}
    # The optimizer must have picked up the multipliers from the symbol.
    assert mod._optimizer.lr_mult == {'fc1_bias': 1.0, 'fc1_weight': 0.0}
    assert mod._optimizer.wd_mult == {'fc2_bias': 0.5, 'fc2_weight': 0.5, 'fc1_bias': 0.0}
    # fc1_weight (lr_mult=0) must not move; the other parameters must change.
    assert mx.test_utils.almost_equal(args1['fc1_weight'], args2['fc1_weight'], 1e-10)
    assert not mx.test_utils.almost_equal(args1['fc1_bias'], args2['fc1_bias'], 1e-1)
    assert not mx.test_utils.almost_equal(args1['fc2_weight'], args2['fc2_weight'], 1e-1)
def compare_ndarray_tuple(t1, t2, rtol=None, atol=None):
    """Recursively assert that two (possibly nested-tuple) optimizer states
    are almost equal. Either argument being None makes this a no-op.
    """
    if t1 is None or t2 is None:
        return
    if isinstance(t1, tuple):
        for sub1, sub2 in zip(t1, t2):
            compare_ndarray_tuple(sub1, sub2, rtol, atol)
    else:
        assert_almost_equal(t1.asnumpy(), t2.asnumpy(), rtol=rtol, atol=atol)
def compare_optimizer(opt1, opt2, shape, dtype, w_stype='default', g_stype='default',
                      rtol=1e-4, atol=1e-5):
    """Apply one update step of two optimizers to identical weight/gradient
    data and assert that the resulting states and weights (almost) match.

    Parameters
    ----------
    opt1, opt2 : mx.optimizer.Optimizer
        The two optimizer instances to compare (typically a python reference
        and the production implementation).
    shape : tuple of int
        Shape of the weight/gradient arrays.
    dtype : numpy dtype
        Data type of the weight/gradient arrays.
    w_stype, g_stype : str
        Storage type of weight/gradient: 'default', 'row_sparse' or 'csr'.
        opt1 always receives dense ('default') copies of the same values.
    rtol, atol : float
        Tolerances for the almost-equal assertions.
    """
    if w_stype == 'default':
        w2 = mx.random.uniform(shape=shape, ctx=default_context(), dtype=dtype)
        w1 = w2.copyto(default_context())
    elif w_stype == 'row_sparse' or w_stype == 'csr':
        w2 = rand_ndarray(shape, w_stype, density=1, dtype=dtype)
        w1 = w2.copyto(default_context()).tostype('default')
    else:
        raise Exception("type not supported yet")
    if g_stype == 'default':
        g2 = mx.random.uniform(shape=shape, ctx=default_context(), dtype=dtype)
        g1 = g2.copyto(default_context())
    elif g_stype == 'row_sparse' or g_stype == 'csr':
        g2 = rand_ndarray(shape, g_stype, dtype=dtype)
        g1 = g2.copyto(default_context()).tostype('default')
    else:
        raise Exception("type not supported yet")
    state1 = opt1.create_state_multi_precision(0, w1)
    state2 = opt2.create_state_multi_precision(0, w2)
    compare_ndarray_tuple(state1, state2)
    opt1.update_multi_precision(0, w1, g1, state1)
    opt2.update_multi_precision(0, w2, g2, state2)
    compare_ndarray_tuple(state1, state2, rtol=rtol, atol=atol)
    assert_almost_equal(w1.asnumpy(), w2.asnumpy(), rtol=rtol, atol=atol)
class PySGD(mx.optimizer.Optimizer):
    """Python reference implementation of SGD with optional momentum and an
    optional fp32 master copy of fp16 weights (multi_precision), used to
    validate ``mx.optimizer.SGD``.
    """
    def __init__(self, learning_rate=0.01, momentum=0.0, multi_precision=False, **kwargs):
        super(PySGD, self).__init__(learning_rate=learning_rate, **kwargs)
        self.momentum = momentum
        self.multi_precision = multi_precision
    def create_state(self, index, weight):
        """Return the momentum buffer, or the pair (momentum, fp32 master
        weight) when multi-precision applies (fp16 weights only)."""
        momentum = None
        weight_master_copy = None
        do_multi_precision = self.multi_precision and weight.dtype == np.float16
        if do_multi_precision:
            if self.momentum != 0.0:
                momentum = mx.nd.zeros(weight.shape, weight.context, dtype=np.float32)
            weight_master_copy = array(weight, ctx=weight.context, dtype=np.float32)
            return (momentum, weight_master_copy)
        else:
            if self.momentum != 0.0:
                momentum = mx.nd.zeros(weight.shape, weight.context, dtype=weight.dtype)
            return momentum
    def create_state_multi_precision(self, index, weight):
        # Multi-precision handling is folded into create_state itself.
        return self.create_state(index, weight)
    def update(self, index, weight, grad, state):
        """Apply one SGD step to ``weight`` in place."""
        lr = self._get_lr(index)
        wd = self._get_wd(index)
        self._update_count(index)
        # A tuple/list state signals that an fp32 master weight is present.
        use_multi_precision = isinstance(state, list) or isinstance(state, tuple)
        if not use_multi_precision:
            if self.momentum == 0.0:
                if self.clip_gradient is not None:
                    weight[:] = ((1 - lr*wd)*weight -
                        lr*mx.nd.clip(grad*self.rescale_grad, -self.clip_gradient, self.clip_gradient))
                else:
                    weight[:] = (1 - lr*wd)*weight - lr*self.rescale_grad*grad
            else:
                mom = state
                if self.clip_gradient is not None:
                    mom[:] = (self.momentum*mom - lr*wd*weight -
                        lr*mx.nd.clip(grad*self.rescale_grad, -self.clip_gradient, self.clip_gradient))
                    weight += mom
                else:
                    mom[:] = self.momentum*mom - lr*wd*weight - lr*self.rescale_grad*grad
                    weight += mom
        else:
            # Compute in fp32 on the master copy, then cast back into weight.
            grad32 = array(grad, ctx=grad.context, dtype=np.float32)
            mom = state[0]
            weight32 = state[1]
            if self.momentum == 0.0:
                if self.clip_gradient is not None:
                    weight32[:] = ((1 - lr*wd)*weight32 -
                        lr*mx.nd.clip(grad32*self.rescale_grad, -self.clip_gradient, self.clip_gradient))
                else:
                    weight32[:] = (1 - lr*wd)*weight32 - lr*self.rescale_grad*grad32
            else:
                if self.clip_gradient is not None:
                    mom[:] = (self.momentum*mom - lr*wd*weight32 -
                        lr*mx.nd.clip(grad32*self.rescale_grad, -self.clip_gradient, self.clip_gradient))
                    weight32 += mom
                else:
                    mom[:] = self.momentum*mom - lr*wd*weight32 - lr*self.rescale_grad*grad32
                    weight32 += mom
            tmp = weight32.astype(weight.dtype)
            tmp.copyto(weight)
    def update_multi_precision(self, index, weight, grad, state):
        self.update(index, weight, grad, state)
@unittest.skip("Test fails intermittently. Temporarily disabled until fixed. Tracked at https://github.com/apache/incubator-mxnet/issues/9000")
@with_seed()
def test_sgd():
    """Compare PySGD against mx.optimizer.SGD on dense, row_sparse and csr arrays."""
    py_opt = PySGD
    mx_opt = mx.optimizer.SGD
    shape = (3, 4, 5)
    option_groups = [
        [{}, {'momentum': 0.9}],
        [{}, {'clip_gradient': 0.4}, {'clip_gradient': 0.5}],
        [{}, {'rescale_grad': 0.14}, {'rescale_grad': 0.8}],
        [{}, {'wd': 0.03}, {'wd': 0.05}, {'wd': 0.07}],
        [{}, {'multi_precision': False}, {'multi_precision': True}],
    ]
    combos = [{}]
    for group in option_groups:
        combos = [dict(base, **extra) for base in combos for extra in group]
    for dtype in [np.float16, np.float32, np.float64]:
        for kwarg in combos:
            # fp16 is only meaningful with an fp32 master copy of the weights.
            if dtype == np.float16 and not kwarg.get('multi_precision', False):
                continue
            compare_optimizer(py_opt(**kwarg), mx_opt(**kwarg), shape, dtype)
            if (default_context() == mx.cpu()):
                compare_optimizer(py_opt(**kwarg), mx_opt(**kwarg), shape, dtype,
                                  g_stype='row_sparse')
                if dtype != np.float16:
                    compare_optimizer(py_opt(**kwarg), mx_opt(**kwarg), shape[:2],
                                      dtype, w_stype='csr', g_stype='csr')
    # also exercise a single very large first-dimension shape once
    big_shape = (54686454, 1)
    kwarg = {'momentum': 0.9, 'wd': 0.05}
    compare_optimizer(py_opt(**kwarg), mx_opt(**kwarg), big_shape, np.float32)
class PySparseSGD(mx.optimizer.Optimizer):
    """Python reference implementation of SGD that skips rows whose gradient
    is entirely zero, matching the lazy sparse update path.
    """
    def __init__(self, learning_rate=0.01, momentum=0.0, **kwargs):
        super(PySparseSGD, self).__init__(learning_rate=learning_rate, **kwargs)
        self.momentum = momentum
    def create_state(self, index, weight):
        """Return the momentum buffer, or None when momentum is disabled."""
        if self.momentum == 0.0:
            return None
        else:
            return mx.nd.zeros(weight.shape, weight.context, dtype=weight.dtype)
    def update(self, index, weight, grad, state):
        """Apply one SGD step row by row, skipping all-zero gradient rows."""
        lr = self._get_lr(index)
        wd = self._get_wd(index)
        self._update_count(index)
        num_rows = weight.shape[0]
        if self.momentum == 0.0:
            for row in range(num_rows):
                grad_row = grad[row].asnumpy()
                all_zeros = mx.test_utils.almost_equal(grad_row, np.zeros_like(grad_row))
                if all_zeros:
                    continue
                if self.clip_gradient is not None:
                    weight[row] = ((1 - lr*wd)*weight[row] -
                        lr*mx.nd.clip(grad[row]*self.rescale_grad,
                            -self.clip_gradient, self.clip_gradient))
                else:
                    weight[row] = (1 - lr*wd)*weight[row] - lr*self.rescale_grad*grad[row]
        else:
            mom = state
            for row in range(num_rows):
                grad_row = grad[row].asnumpy()
                all_zeros = mx.test_utils.almost_equal(grad_row, np.zeros_like(grad_row))
                if all_zeros:
                    continue
                if self.clip_gradient is not None:
                    mom[row] = (self.momentum*mom[row] - lr*wd*weight[row] -
                        lr*mx.nd.clip(grad[row]*self.rescale_grad, -self.clip_gradient, self.clip_gradient))
                    weight[row] += mom[row]
                else:
                    mom[row] = self.momentum*mom[row] - lr*wd*weight[row] - lr*self.rescale_grad*grad[row]
                    weight[row] += mom[row]
@with_seed()
def test_sparse_sgd():
    """Compare PySparseSGD against mx.optimizer.SGD on row_sparse arrays."""
    py_opt = PySparseSGD
    mx_opt = mx.optimizer.SGD
    shape = (3, 4, 5)
    option_groups = [
        [{}, {'momentum': 0.9}],
        [{}, {'clip_gradient': 0.4}, {'clip_gradient': 0.5}],
        [{}, {'rescale_grad': 0.14}, {'rescale_grad': 0.8}],
        [{}, {'wd': 0.03}, {'wd': 0.05}, {'wd': 0.07}],
        [{}, {'multi_precision': False}, {'multi_precision': True}],
    ]
    combos = [{}]
    for group in option_groups:
        combos = [dict(base, **extra) for base in combos for extra in group]
    for dtype in [np.float32]:
        for kwarg in combos:
            compare_optimizer(py_opt(**kwarg), mx_opt(**kwarg), shape, dtype,
                              w_stype='row_sparse', g_stype='row_sparse')
@with_seed(0)
def test_std_sparse_sgd():
    """Compare PySGD against mx.optimizer.SGD with lazy_update disabled on
    row_sparse weights and gradients."""
    py_opt = PySGD
    mx_opt = mx.optimizer.SGD
    shape = (3, 4, 5)
    option_groups = [
        [{'momentum': 0.9}],
        [{}, {'clip_gradient': 0.4}, {'clip_gradient': 0.5}],
        [{}, {'rescale_grad': 0.14}, {'rescale_grad': 0.8}],
        [{}, {'wd': 0.03}, {'wd': 0.05}, {'wd': 0.07}],
    ]
    combos = [{}]
    for group in option_groups:
        combos = [dict(base, **extra) for base in combos for extra in group]
    for dtype in [np.float32]:
        for kwarg in combos:
            compare_optimizer(py_opt(**kwarg), mx_opt(lazy_update=False, **kwarg), shape, dtype,
                              w_stype='row_sparse', g_stype='row_sparse')
class PyNAG(PySGD):
    """Python reference implementation of the NAG (Nesterov accelerated
    gradient) optimizer, used to validate ``mx.optimizer.NAG``.

    NOTE(review): the multi-precision state tuple here is
    ``(weight_master_copy, momentum)`` -- the reverse of PySGD's ordering.
    """
    def __init__(self, **kwargs):
        super(PyNAG, self).__init__(**kwargs)
    def create_state(self, index, weight):
        """Return the momentum buffer, or the pair (fp32 master weight,
        momentum) when multi-precision applies (fp16 weights only)."""
        momentum = None
        weight_master_copy = None
        do_multi_precision = self.multi_precision and weight.dtype == np.float16
        if do_multi_precision:
            if self.momentum != 0.0:
                momentum = mx.nd.zeros(weight.shape, weight.context, dtype=np.float32)
            weight_master_copy = array(weight, ctx=weight.context, dtype=np.float32)
            return (weight_master_copy, momentum)
        else:
            if self.momentum != 0.0:
                momentum = mx.nd.zeros(weight.shape, weight.context, dtype=weight.dtype)
            return momentum
    def create_state_multi_precision(self, index, weight):
        # Multi-precision handling is folded into create_state itself.
        return self.create_state(index, weight)
    def update(self, index, weight, grad, state):
        """Apply one NAG step to ``weight`` in place."""
        lr = self._get_lr(index)
        wd = self._get_wd(index)
        self._update_count(index)
        # A tuple/list state signals that an fp32 master weight is present.
        use_multi_precision = isinstance(state, list) or isinstance(state, tuple)
        if not use_multi_precision:
            grad = grad * self.rescale_grad
            if self.clip_gradient is not None:
                grad = mx.nd.clip(grad, -self.clip_gradient, self.clip_gradient)
            if self.momentum == 0.0:
                weight[:] += -lr * (grad + wd * weight)
            else:
                mom = state
                mom[:] *= self.momentum
                grad += wd * weight
                mom[:] += grad
                # Nesterov lookahead: step uses grad + momentum * updated mom.
                grad[:] += self.momentum * mom
                weight[:] += -lr * grad
        else:
            # Compute in fp32 on the master copy, then cast back into weight.
            grad32 = array(grad, ctx=grad.context, dtype=np.float32)
            grad32 = grad32 * self.rescale_grad
            if self.clip_gradient is not None:
                grad32 = mx.nd.clip(grad32, -self.clip_gradient, self.clip_gradient)
            mom = state[1]
            weight32 = state[0]
            if self.momentum == 0.0:
                weight32[:] += -lr * (grad32 + wd * weight32)
            else:
                mom[:] *= self.momentum
                grad32 += wd * weight32
                mom[:] += grad32
                grad32[:] += self.momentum * mom
                weight32[:] += -lr * grad32
            tmp = weight32.astype(weight.dtype)
            tmp.copyto(weight)
@with_seed(0)
def test_nag():
    """Compare PyNAG against mx.optimizer.NAG across dtypes and options."""
    py_opt = PyNAG
    mx_opt = mx.optimizer.NAG
    shape = (3, 4, 5)
    option_groups = [
        [{}, {'momentum': 0.9}],
        [{}, {'clip_gradient': 0.4}, {'clip_gradient': 0.5}],
        [{}, {'rescale_grad': 0.14}, {'rescale_grad': 0.8}],
        [{}, {'wd': 0.03}, {'wd': 0.05}, {'wd': 0.07}],
        [{}, {'multi_precision': False}, {'multi_precision': True}],
    ]
    combos = [{}]
    for group in option_groups:
        combos = [dict(base, **extra) for base in combos for extra in group]
    for dtype in [np.float16, np.float32, np.float64]:
        for kwarg in combos:
            # fp16 is only meaningful with an fp32 master copy of the weights.
            if dtype == np.float16 and not kwarg.get('multi_precision', False):
                continue
            compare_optimizer(py_opt(**kwarg), mx_opt(**kwarg), shape, dtype)
class PyFTML(mx.optimizer.Optimizer):
    """Python reference implementation of the FTML optimizer.

    Per-weight state is the triple (prev_d, prev_v, prev_z): the adaptive
    denominator, the second-moment estimate, and the FTRL-style accumulated
    linear term.
    """
    def __init__(self, beta1=0.6, beta2=0.999, epsilon=1e-8, **kwargs):
        super(PyFTML, self).__init__(**kwargs)
        self.beta1 = beta1
        self.beta2 = beta2
        self.epsilon = epsilon
    def create_state(self, index, weight):
        # (prev_d, prev_v, prev_z), all zero-initialised.
        return (mx.nd.zeros(weight.shape, weight.context, dtype=weight.dtype),
                mx.nd.zeros(weight.shape, weight.context, dtype=weight.dtype),
                mx.nd.zeros(weight.shape, weight.context, dtype=weight.dtype))
    def update(self, index, weight, grad, state):
        """Apply one FTML step to `weight` in place."""
        assert(isinstance(weight, mx.nd. NDArray))
        assert(isinstance(grad, mx.nd.NDArray))
        self._update_count(index)
        lr = self._get_lr(index)
        wd = self._get_wd(index)
        t = self._index_update_count[index]  # 1-based step count for bias correction
        # Fold weight decay into the gradient, then rescale/clip.
        grad = grad * self.rescale_grad + wd * weight
        if self.clip_gradient is not None:
            grad = mx.nd.clip(grad, -self.clip_gradient, self.clip_gradient)
        prev_d, prev_v, prev_z = state
        # Bias-corrected second moment and adaptive denominator.
        v_t = self.beta2 * prev_v + (1 - self.beta2) * mx.nd.square(grad)
        d_t = (1 - pow(self.beta1, t)) / lr * (mx.nd.sqrt(v_t / (1 - pow(self.beta2, t))) + self.epsilon)
        sigma_t = d_t - self.beta1 * prev_d
        z_t = self.beta1 * prev_z + (1 - self.beta1) * grad - sigma_t * weight
        # The new weight is determined entirely by z_t and d_t.
        weight[:] = - z_t / d_t
        # Persist state in place so the next call sees the updated values.
        prev_d[:] = d_t
        prev_v[:] = v_t
        prev_z[:] = z_t
@with_seed(0)
def test_ftml():
    """Sweep FTML hyper-parameters and compare PyFTML with mx.optimizer.FTML."""
    shape = (3, 4, 5)
    beta1_options = [{}, {'beta1': 0.5}, {'beta1': 0.7}]
    beta2_options = [{}, {'beta2': 0.8}, {'beta2': 0.9}]
    cg_options = [{}, {'clip_gradient': 0.4}, {'clip_gradient': 0.5}]
    rg_options = [{}, {'rescale_grad': 0.14}, {'rescale_grad': 0.8}]
    wd_options = [{}, {'wd': 0.03}, {'wd': 0.05}, {'wd': 0.07}]
    for dtype in [np.float32]:
        for b1 in beta1_options:
            for b2 in beta2_options:
                for cg in cg_options:
                    for rg in rg_options:
                        for wd in wd_options:
                            kwarg = {}
                            for part in (b1, b2, cg, rg, wd):
                                kwarg.update(part)
                            compare_optimizer(PyFTML(**kwarg), mx.optimizer.FTML(**kwarg),
                                              shape, dtype)
class PyAdam(mx.optimizer.Optimizer):
    """Python reference implementation of Adam.

    With sparse_update=True, rows whose gradient is entirely zero are left
    untouched, mirroring MXNet's lazy row_sparse Adam update.
    """
    def __init__(self, learning_rate=0.001, beta1=0.9, beta2=0.999, epsilon=1e-8,
                 decay_factor=(1 - 1e-8), sparse_update=False, **kwargs):
        super(PyAdam, self).__init__(learning_rate=learning_rate, **kwargs)
        self.beta1 = beta1
        self.beta2 = beta2
        self.epsilon = epsilon
        self.decay_factor = decay_factor
        self.sparse_update = sparse_update
    def create_state(self, index, weight):
        # (mean, variance): first- and second-moment accumulators.
        return (mx.nd.zeros(weight.shape, weight.context, dtype=weight.dtype),
                mx.nd.zeros(weight.shape, weight.context, dtype=weight.dtype))
    def update(self, index, weight, grad, state):
        """Apply one (optionally lazy/row-sparse) Adam step in place."""
        lr = self._get_lr(index)
        self._update_count(index)
        t = self._index_update_count[index]
        mean, variance = state
        wd = self._get_wd(index)
        num_rows = weight.shape[0]
        # Fold the bias-correction terms into the learning rate.
        coef1 = 1. - self.beta1**t
        coef2 = 1. - self.beta2**t
        lr *= math.sqrt(coef2)/coef1
        for row in range(num_rows):
            # Lazy update: rows with an all-zero gradient are skipped.
            all_zeros = mx.test_utils.almost_equal(grad[row].asnumpy(), np.zeros_like(grad[row].asnumpy()))
            if all_zeros and self.sparse_update:
                continue
            grad[row] = grad[row] * self.rescale_grad + wd * weight[row]
            if self.clip_gradient is not None:
                mx.nd.clip(grad[row], -self.clip_gradient, self.clip_gradient, out=grad[row])
            mean[row] *= self.beta1
            mean[row] += grad[row] * (1. - self.beta1)
            variance[row] *= self.beta2
            # NOTE: out=grad[row] means this also overwrites grad[row] with
            # its square; mean[row] was already updated above, so that's safe.
            variance[row] += (1 - self.beta2) * mx.nd.square(grad[row], out=grad[row])
            weight[row] -= lr*mean[row]/(mx.nd.sqrt(variance[row]) + self.epsilon)
@with_seed()
def test_adam():
    """Sweep Adam settings; check dense, lazy-sparse and non-lazy sparse paths."""
    shape = (3, 4, 5)
    cg_options = [{}, {'clip_gradient': 0.4}, {'clip_gradient': 0.5}]
    rg_options = [{}, {'rescale_grad': 0.14}, {'rescale_grad': 0.8}]
    wd_options = [{}, {'wd': 0.03}, {'wd': 0.05}, {'wd': 0.07}]
    mp_options = [{}, {'multi_precision': False}, {'multi_precision': True}]
    for dtype in [np.float16, np.float32, np.float64]:
        for cg in cg_options:
            for rg in rg_options:
                for wd in wd_options:
                    for mp in mp_options:
                        kwarg = {}
                        for part in (cg, rg, wd, mp):
                            kwarg.update(part)
                        # fp16 is only exercised with multi-precision on.
                        if dtype == np.float16 and not kwarg.get('multi_precision', False):
                            continue
                        # Dense weights and gradients.
                        compare_optimizer(PyAdam(**kwarg), mx.optimizer.Adam(**kwarg), shape, dtype,
                                          rtol=1e-4, atol=2e-5)
                        # row_sparse with the (default) lazy update.
                        compare_optimizer(PyAdam(sparse_update=True, **kwarg), mx.optimizer.Adam(**kwarg),
                                          shape, dtype, w_stype='row_sparse', g_stype='row_sparse',
                                          rtol=1e-4, atol=2e-5)
                        # row_sparse with the standard (non-lazy) update.
                        compare_optimizer(PyAdam(**kwarg), mx.optimizer.Adam(lazy_update=False, **kwarg),
                                          shape, dtype, w_stype='row_sparse', g_stype='row_sparse',
                                          rtol=1e-4, atol=2e-5)
class PySignum(mx.optimizer.Optimizer):
    """Python reference implementation of Signum / SignSGD.

    With momentum state the update steps along the sign of an exponential
    moving average of the regularised gradient (Signum); with no state it
    reduces to SignSGD. `wd_lh` is a decoupled weight decay applied
    directly to the weight.
    """
    def __init__(self, learning_rate=0.01, momentum=0.9, wd_lh = 0.0, **kwargs):
        super(PySignum, self).__init__(learning_rate = learning_rate, **kwargs)
        self.momentum = momentum
        self.wd_lh = wd_lh
    def create_state(self, index, weight):
        momentum = None
        if self.momentum != 0.0:
            # Momentum buffer matches the weight's storage type (dense or sparse).
            momentum = mx.nd.zeros(weight.shape, weight.context, dtype=weight.dtype, stype=weight.stype)
        return momentum
    def update(self, index, weight, grad, state):
        """Apply one Signum/SignSGD step to `weight` in place."""
        self._update_count(index)
        lr = self._get_lr(index)
        wd = self._get_wd(index)
        if state is not None:
            # Signum: EMA of the (wd-regularised, rescaled, optionally
            # clipped) gradient, then step along its sign.
            mom = state
            if self.clip_gradient is not None:
                mom[:] = (self.momentum*mom - (1-self.momentum)*(wd*weight +
                    mx.nd.clip(grad*self.rescale_grad, -self.clip_gradient, self.clip_gradient)))
            else:
                mom[:] = self.momentum*mom - (1-self.momentum)*wd*weight - (1-self.momentum)*self.rescale_grad*grad
            weight[:] = (1 - lr*self.wd_lh)*weight + lr*mx.nd.sign(mom)
        else:
            # SignSGD: step along the sign of the raw gradient.
            weight[:] = (1 - lr*(wd+self.wd_lh))*weight - lr*mx.nd.sign(grad)
@with_seed(0)
def test_signum():
    """Sweep Signum/SignSGD hyper-parameters against the PySignum reference."""
    shape = (3, 4, 5)
    cg_options = [{}, {'clip_gradient': 0.4}, {'clip_gradient': 0.5}]
    rg_options = [{}, {'rescale_grad': 0.14}, {'rescale_grad': 0.8}]
    wd_options = [{}, {'wd': 0.03}, {'wd': 0.05}, {'wd': 0.07}]
    wd_lh_options = [{}, {'wd_lh': 0.015}, {'wd_lh': 0.0}]
    mom_options = [{}, {'momentum': 0.9}]
    lr_options = [{'learning_rate': 0.05}, {'learning_rate': 0.01}]
    for dtype in [np.float32, np.float64]:
        for cg in cg_options:
            for rg in rg_options:
                for wd in wd_options:
                    for wd_lh in wd_lh_options:
                        for lr in lr_options:
                            for mom in mom_options:
                                kwarg = {}
                                for part in (cg, rg, wd, wd_lh, lr, mom):
                                    kwarg.update(part)
                                compare_optimizer(PySignum(**kwarg), mx.optimizer.Signum(**kwarg),
                                                  shape, dtype)
class PyRMSProp(mx.optimizer.Optimizer):
    """Python reference implementation of RMSProp (plain and centered).

    gamma1 is the decay rate of the squared-gradient accumulator n; in the
    centered variant gamma1 also decays the mean-gradient g, and gamma2 is
    the momentum applied to the accumulated delta.
    """
    def __init__(self, learning_rate=0.001, gamma1=0.9, gamma2=0.9,
                 epsilon=1e-8, centered=False, clip_weights=None, **kwargs):
        super(PyRMSProp, self).__init__(learning_rate=learning_rate, **kwargs)
        self.centered = centered
        self.gamma1 = gamma1
        self.gamma2 = gamma2
        self.epsilon = epsilon
        self.clip_weights = clip_weights
    def create_state(self, index, weight):
        """Return (n,) for plain RMSProp, (n, g, delta) for centered."""
        if self.centered:
            return (mx.nd.zeros(weight.shape, weight.context),
                    mx.nd.zeros(weight.shape, weight.context),
                    mx.nd.zeros(weight.shape, weight.context))
        else:
            return (mx.nd.zeros(weight.shape, weight.context), )
    def update(self, index, weight, grad, state):
        """Apply one RMSProp step to `weight` in place."""
        lr = self._get_lr(index)
        wd = self._get_wd(index)
        self._update_count(index)
        # Fold weight decay into the gradient before accumulating.
        grad = grad * self.rescale_grad + wd * weight
        if not self.centered:
            (n, ) = state
            if self.clip_gradient is not None:
                grad = mx.nd.clip(grad, -self.clip_gradient, self.clip_gradient)
            n[:] = (1 - self.gamma1) * (grad * grad) + self.gamma1 * n
            weight[:] -= lr * grad/(mx.nd.sqrt(n + self.epsilon))
        else:
            n, g, delta = state
            if self.clip_gradient is not None:
                grad = mx.nd.clip(grad, -self.clip_gradient, self.clip_gradient)
            n[:] = (1 - self.gamma1) * (grad * grad) + self.gamma1 * n
            g[:] = (1 - self.gamma1) * grad + self.gamma1 * g
            # Centered variant normalises by an estimate of the gradient
            # variance, n - g^2, and carries momentum gamma2 on delta.
            delta[:] = (self.gamma2) * delta - lr * grad/(mx.nd.sqrt(n - g*g + self.epsilon))
            weight[:] += delta
        if self.clip_weights:
            mx.ndarray.clip(weight, -self.clip_weights, self.clip_weights, out=weight)
@unittest.skip("Test fails intermittently. Temporarily disabled until fixed. Tracked at https://github.com/apache/incubator-mxnet/issues/8230")
@with_seed(0)
def test_rms():
    """Sweep RMSProp settings (incl. centered and clip_weights) against PyRMSProp."""
    shape = (3, 4, 5)
    cw_options = [{}, {'clip_weights': 0.01}]
    cg_options = [{}, {'clip_gradient': 0.4}, {'clip_gradient': 0.5}]
    center_options = [{}, {'centered': False}, {'centered': True}]
    rg_options = [{}, {'rescale_grad': 0.14}, {'rescale_grad': 0.8}]
    wd_options = [{}, {'wd': 0.03}, {'wd': 0.05}, {'wd': 0.07}]
    mp_options = [{}, {'multi_precision': False}, {'multi_precision': True}]
    for dtype in [np.float16, np.float32]:
        for cw in cw_options:
            for cg in cg_options:
                for center in center_options:
                    for rg in rg_options:
                        for wd in wd_options:
                            for mp in mp_options:
                                kwarg = {}
                                for part in (cw, cg, center, rg, wd, mp):
                                    kwarg.update(part)
                                # fp16 is only exercised with multi-precision on.
                                if dtype == np.float16 and not kwarg.get('multi_precision', False):
                                    continue
                                compare_optimizer(PyRMSProp(**kwarg), mx.optimizer.RMSProp(**kwarg),
                                                  shape, dtype)
                                if default_context() == mx.cpu():
                                    compare_optimizer(PyRMSProp(**kwarg), mx.optimizer.RMSProp(**kwarg),
                                                      shape, dtype, g_stype='row_sparse')
class PyFtrl(mx.optimizer.Optimizer):
    """Python reference implementation of FTRL-Proximal.

    Per-weight state is (dn, n): the accumulated linear term and the
    accumulated squared gradient. With sparse_update=True, rows whose
    gradient is entirely zero are skipped (lazy update).
    """
    def __init__(self, lamda1=0.01, learning_rate=0.1, beta=1, sparse_update=False, **kwargs):
        super(PyFtrl, self).__init__(**kwargs)
        self.lamda1 = lamda1  # L1 regularisation strength
        self.beta = beta
        # NOTE(review): learning_rate is not forwarded to the base __init__;
        # this assignment overwrites whatever the base class set, and
        # _get_lr() below presumably reads it -- confirm against the base class.
        self.lr = learning_rate
        self.sparse_update = sparse_update
    def create_state(self, index, weight):
        # (dn, n), both zero-initialised.
        return (mx.nd.zeros(weight.shape, weight.context, dtype=weight.dtype),
                mx.nd.zeros(weight.shape, weight.context, dtype=weight.dtype))
    def update(self, index, weight, grad, state):
        """Apply one FTRL step row by row, in place."""
        self._update_count(index)
        wd = self._get_wd(index)
        lr = self._get_lr(index)
        num_rows = weight.shape[0]
        dn, n = state
        for row in range(num_rows):
            # Lazy update: rows with an all-zero gradient are skipped.
            all_zeros = mx.test_utils.almost_equal(grad[row].asnumpy(), np.zeros_like(grad[row].asnumpy()))
            if all_zeros and self.sparse_update:
                continue
            grad[row] = grad[row] * self.rescale_grad
            if self.clip_gradient is not None:
                mx.nd.clip(grad[row], -self.clip_gradient, self.clip_gradient, out=grad[row])
            dn[row] += grad[row] - (mx.nd.sqrt(n[row] + grad[row] * grad[row]) - mx.nd.sqrt(n[row])) * weight[row] / lr
            n[row] += grad[row] * grad[row]
            # Closed-form proximal step; the weight is zeroed whenever
            # |dn| <= lamda1 (the trailing comparison is the 0/1 mask).
            weight[row] = (mx.nd.sign(dn[row]) * self.lamda1 - dn[row]) / \
                ((self.beta + mx.nd.sqrt(n[row])) / lr + wd) * (mx.nd.abs(dn[row]) > self.lamda1)
@with_seed()
def test_ftrl():
    """Compare PyFtrl with mx.optimizer.Ftrl on dense and row_sparse data."""
    shape = (3, 4, 5)
    configs = [
        {},
        {'clip_gradient': 0.5},
        {'clip_gradient': 0.4, 'rescale_grad': 0.14},
        {'rescale_grad': 0.8},
        {'clip_gradient': 0.5, 'wd': 0.07},
        {'clip_gradient': 0.4, 'rescale_grad': 0.14, 'wd': 0.03},
        {'rescale_grad': 0.8, 'wd': 0.05},
        {'rescale_grad': 0.8, 'wd': 0.05, 'lamda1': 0.01},
        {'clip_gradient': 0.5, 'wd': 0.07, 'lamda1': 1.0},
    ]
    for config in configs:
        compare_optimizer(PyFtrl(**config), mx.optimizer.Ftrl(**config), shape, np.float32)
        # Sparse path uses the lazy PyFtrl update against the native kernel.
        compare_optimizer(PyFtrl(sparse_update=True, **config), mx.optimizer.Ftrl(**config),
                          shape, np.float32, w_stype='row_sparse', g_stype='row_sparse')
@with_seed(1234)
def test_nadam():
    """Smoke-test the 'nadam' optimizer: train a small MLP on random data
    and check the final L1 loss drops below 0.1 (i.e. it converges)."""
    def get_net(num_hidden, flatten=True):
        # Three fully-connected layers (128 -> 64 -> num_hidden) with ReLU.
        data = mx.symbol.Variable('data')
        fc1 = mx.symbol.FullyConnected(data, name='fc1', num_hidden=128, flatten=flatten)
        act1 = mx.symbol.Activation(fc1, name='relu1', act_type="relu")
        fc2 = mx.symbol.FullyConnected(act1, name = 'fc2', num_hidden = 64, flatten=flatten)
        act2 = mx.symbol.Activation(fc2, name='relu2', act_type="relu")
        fc3 = mx.symbol.FullyConnected(act2, name='fc3', num_hidden=num_hidden, flatten=flatten)
        return fc3
    # Tiny random regression problem: 20 samples, 10 features, 1 target.
    N = 20
    data = mx.random.uniform(-1, 1, shape=(N, 10))
    label = mx.random.uniform(-1, 1, shape=(N, 1))
    data_iter = mx.io.NDArrayIter(data, label, batch_size=5, label_name='label', shuffle=True)
    output = get_net(1)
    l = mx.symbol.Variable('label')
    Loss = gluon.loss.L1Loss()
    loss = Loss(output, l)
    loss = mx.sym.make_loss(loss)
    mod = mx.mod.Module(loss, data_names=('data',), label_names=('label',))
    mod.fit(data_iter, num_epoch=60, optimizer_params={'learning_rate': 0.0005, 'wd': 0.0005},
            initializer=mx.init.Xavier(magnitude=2), eval_metric=mx.metric.Loss(),
            optimizer='nadam')
    assert mod.score(data_iter, eval_metric=mx.metric.Loss())[0][1] < 0.1
class PyAdaGrad(mx.optimizer.Optimizer):
    """Python reference implementation of AdaGrad.

    State is the running sum of squared gradients ("history"); eps keeps
    the division numerically stable.
    """
    def __init__(self, eps=1e-7, **kwargs):
        super(PyAdaGrad, self).__init__(**kwargs)
        self.float_stable_eps = eps
    def create_state(self, index, weight):
        # History accumulator, matching the weight's storage type.
        return mx.nd.zeros(weight.shape, weight.context, stype=weight.stype)
    def update(self, index, weight, grad, state):
        """Apply one AdaGrad step to `weight` in place."""
        self._update_count(index)
        lr = self._get_lr(index)
        wd = self._get_wd(index)
        history = state
        grad = grad * self.rescale_grad
        if self.clip_gradient is not None:
            grad = mx.nd.clip(grad, -self.clip_gradient, self.clip_gradient)
        history[:] += mx.nd.square(grad)
        div = grad / mx.nd.sqrt(history + self.float_stable_eps)
        weight[:] += (div + weight * wd) * -lr
@with_seed(0)
def test_adagrad():
    """Compare PyAdaGrad with mx.optimizer.AdaGrad over eps/clip/rescale/wd.

    Uses the @with_seed(0) decorator like every other optimizer test in this
    file (the original seeded only MXNet via mx.random.seed(0) in the body,
    leaving numpy's RNG unseeded and the test inconsistent with its siblings).
    """
    opt1 = PyAdaGrad
    opt2 = mx.optimizer.AdaGrad
    shape = (3, 4, 5)
    eps_options = [{}, {'eps': 1e-8}]
    cg_options = [{}, {'clip_gradient': 0.4}, {'clip_gradient': 0.5}]
    rg_options = [{}, {'rescale_grad': 0.14}, {'rescale_grad': 0.8}]
    wd_options = [{}, {'wd': 0.0}]
    for dtype in [np.float32]:
        for eps_option in eps_options:
            for cg_option in cg_options:
                for rg_option in rg_options:
                    for wd_option in wd_options:
                        kwarg = {}
                        kwarg.update(eps_option)
                        kwarg.update(cg_option)
                        kwarg.update(rg_option)
                        kwarg.update(wd_option)
                        compare_optimizer(opt1(**kwarg), opt2(**kwarg), shape, dtype)
                        # The sparse path is only compared when weight decay is
                        # zero -- presumably the sparse kernel skips wd; confirm.
                        if wd_option.get('wd', 0.0) == 0.0:
                            compare_optimizer(opt1(**kwarg), opt2(**kwarg), shape, dtype,
                                              w_stype='row_sparse', g_stype='row_sparse')
if __name__ == '__main__':
    # Allow running this test module directly; the project uses nose as runner.
    import nose
    nose.runmodule()
| true | true |
f71e2c9e8f1cbeecfc7e957981439d2f7a58f3a4 | 648 | py | Python | Task1D.py | cued-ia-computing/flood-jdv24-ahw41 | b8d62b33281dfb01aec569998b88769ca6d06556 | [
"MIT"
] | null | null | null | Task1D.py | cued-ia-computing/flood-jdv24-ahw41 | b8d62b33281dfb01aec569998b88769ca6d06556 | [
"MIT"
] | null | null | null | Task1D.py | cued-ia-computing/flood-jdv24-ahw41 | b8d62b33281dfb01aec569998b88769ca6d06556 | [
"MIT"
] | null | null | null | from floodsystem.stationdata import build_station_list
from floodsystem.geo import rivers_with_station
from floodsystem.geo import stations_by_river
def run():
    """Requirements for Task 1D.

    Prints how many rivers have at least one monitoring station (with the
    first 10 river names alphabetically), then the sorted station names for
    three sample rivers.
    """
    stations = build_station_list()
    rivers = rivers_with_station(stations)
    print(len(rivers), "stations. First 10 -", sorted(rivers)[:10])
    # Build the river -> stations mapping once, instead of recomputing it
    # on every iteration of the loop below (the original rebuilt it 3x).
    river_map = stations_by_river(stations)
    for river in ["River Aire", "River Cam", "River Thames"]:
        names = [station.name for station in river_map[river]]
        print(sorted(names))


if __name__ == "__main__":
    print("*** Task 1D: CUED Part IA Flood Warning System ***")
    run()
| 30.857143 | 89 | 0.697531 | from floodsystem.stationdata import build_station_list
from floodsystem.geo import rivers_with_station
from floodsystem.geo import stations_by_river
def run():
stations = build_station_list()
riversWithStation = rivers_with_station(stations)
print(len(riversWithStation), "stations. First 10 -", sorted(riversWithStation)[:10])
for river in ["River Aire", "River Cam", "River Thames"]:
arr = []
for station in stations_by_river(stations)[river]:
arr.append(station.name)
print(sorted(arr))
if __name__ == "__main__":
print("*** Task 1D: CUED Part IA Flood Warning System ***")
run()
| true | true |
f71e2d970e1e3446eab8718d6147913ef60c7888 | 6,098 | py | Python | test/mount_efs_test/test_describe_availability_zone.py | Git0ffS3c/efs-utils | e5e7252f88992d0ef2cf0bdac56c1d41644f4c44 | [
"MIT"
] | null | null | null | test/mount_efs_test/test_describe_availability_zone.py | Git0ffS3c/efs-utils | e5e7252f88992d0ef2cf0bdac56c1d41644f4c44 | [
"MIT"
] | null | null | null | test/mount_efs_test/test_describe_availability_zone.py | Git0ffS3c/efs-utils | e5e7252f88992d0ef2cf0bdac56c1d41644f4c44 | [
"MIT"
] | null | null | null | # Copyright 2017-2018 Amazon.com, Inc. and its affiliates. All Rights Reserved.
#
# Licensed under the MIT License. See the LICENSE accompanying this file
# for the specific language governing permissions and limitations under
# the License.
import mount_efs
import pytest
from botocore.exceptions import ClientError, EndpointConnectionError, NoCredentialsError
from .. import utils
# Shared fixtures for the DescribeAvailabilityZones tests below.
MOCK_EC2_AGENT = "fake-client"  # stands in for the EC2 client object
AZ_NAME = "us-east-2b"
AZ_ID = "use2-az2"
OPERATION_NAME = "DescribeAvailabilityZones"
def _test_describe_availability_zones_response(
    mocker,
    dryrun_effect,
    response,
    expected_describe_time,
    desired_az_id=None,
    desired_exception=None,
    desired_message=None,
):
    """Drive get_az_id_by_az_name through a mocked describe-AZ helper.

    `dryrun_effect` is raised/returned on the first (DryRun) call and
    `response` on the second (real) call. When `desired_exception` is set,
    assert that a FallbackException containing `desired_message` is raised;
    otherwise assert the returned AZ id equals `desired_az_id`. In both
    cases, pin the number of helper invocations to `expected_describe_time`.
    """
    describe_mock = mocker.patch(
        "mount_efs.ec2_describe_availability_zones_helper",
        side_effect=[dryrun_effect, response],
    )
    if desired_exception:
        # An expected exception must come with a message to match against.
        # (Fixed: the original compared with `!= None` instead of `is not None`.)
        assert desired_message is not None
        with pytest.raises(mount_efs.FallbackException) as excinfo:
            mount_efs.get_az_id_by_az_name(MOCK_EC2_AGENT, AZ_NAME)
        assert desired_message in str(excinfo)
    else:
        az_id = mount_efs.get_az_id_by_az_name(MOCK_EC2_AGENT, AZ_NAME)
        assert az_id == desired_az_id
    utils.assert_called_n_times(describe_mock, expected_describe_time)
def test_describe_availability_zones_dryrun_succeed_return_correct(mocker):
    """Dry run raises DryRunOperation, the real call then maps name -> AZ id."""
    dryrun_error = ClientError(
        {"Error": {"Code": "DryRunOperation", "Message": "DryRunOperation"}},
        OPERATION_NAME,
    )
    az_description = {
        "Messages": [],
        "ZoneId": AZ_ID,
        "State": "available",
        "ZoneName": AZ_NAME,
        "RegionName": "us-east-2",
    }
    _test_describe_availability_zones_response(
        mocker,
        dryrun_error,
        {"AvailabilityZones": [az_description]},
        2,
        AZ_ID,
    )
def test_describe_availability_zones_dryrun_failed_unauthorized_operation(mocker):
    """UnauthorizedOperation on the dry run aborts with a FallbackException."""
    dryrun_error = ClientError(
        {"Error": {"Code": "UnauthorizedOperation", "Message": "UnauthorizedOperation"}},
        OPERATION_NAME,
    )
    _test_describe_availability_zones_response(
        mocker,
        dryrun_error,
        None,
        1,
        desired_exception=mount_efs.FallbackException,
        desired_message="Unauthorized to perform operation",
    )
def test_describe_availability_zones_dryrun_failed_invalid_az_name(mocker):
    """InvalidParameterValue on the dry run means the AZ name is invalid."""
    dryrun_error = ClientError(
        {"Error": {"Code": "InvalidParameterValue", "Message": "InvalidParameterValue"}},
        OPERATION_NAME,
    )
    _test_describe_availability_zones_response(
        mocker,
        dryrun_error,
        None,
        1,
        desired_exception=mount_efs.FallbackException,
        desired_message="Invalid availability zone",
    )
def test_describe_availability_zones_dryrun_failed_service_unavailable(mocker):
    """ServiceUnavailableException on the dry run falls back with a message."""
    dryrun_error = ClientError(
        {
            "Error": {
                "Code": "ServiceUnavailableException",
                "Message": "ServiceUnavailableException",
            }
        },
        OPERATION_NAME,
    )
    _test_describe_availability_zones_response(
        mocker,
        dryrun_error,
        None,
        1,
        desired_exception=mount_efs.FallbackException,
        desired_message="The ec2 service cannot",
    )
def test_describe_availability_zones_dryrun_failed_access_denied(mocker):
    """AccessDeniedException propagates the service's own message."""
    exception_message = "is not authorized to perform"
    dryrun_error = ClientError(
        {"Error": {"Code": "AccessDeniedException", "Message": exception_message}},
        OPERATION_NAME,
    )
    _test_describe_availability_zones_response(
        mocker,
        dryrun_error,
        None,
        1,
        desired_exception=mount_efs.FallbackException,
        desired_message=exception_message,
    )
def test_describe_availability_zones_dryrun_failed_unknown_exception(mocker):
    """An unrecognised ClientError code is reported as an unexpected error."""
    dryrun_error = ClientError(
        {"Error": {"Code": "UnknownException", "Message": "UnknownException"}},
        OPERATION_NAME,
    )
    _test_describe_availability_zones_response(
        mocker,
        dryrun_error,
        None,
        1,
        desired_exception=mount_efs.FallbackException,
        desired_message="Unexpected error",
    )
def test_describe_availability_zones_dryrun_failed_no_credential_error(mocker):
    """Missing AWS credentials during the dry run produce a helpful hint."""
    _test_describe_availability_zones_response(
        mocker,
        NoCredentialsError(),
        None,
        1,
        desired_exception=mount_efs.FallbackException,
        desired_message="confirm your aws credentials are properly configured",
    )
def test_describe_availability_zones_failed_unknown_error(mocker):
    """A non-botocore exception is surfaced as an unknown error."""
    _test_describe_availability_zones_response(
        mocker,
        Exception("Unknown"),
        None,
        1,
        desired_exception=mount_efs.FallbackException,
        desired_message="Unknown error",
    )
def test_describe_availability_zones_failed_endpoint_error(mocker):
    """An EndpointConnectionError falls back with a connectivity message.

    (Renamed from test_..._endpoing_error: typo fix; pytest discovers the
    test by its `test_` prefix, so no callers are affected.)
    """
    _test_describe_availability_zones_response(
        mocker,
        EndpointConnectionError(endpoint_url="https://efs.us-east-1.com"),
        None,
        1,
        desired_exception=mount_efs.FallbackException,
        desired_message="Could not connect to the endpoint",
    )
def test_describe_availability_zones_return_empty_az_info(mocker):
    """An empty AvailabilityZones list resolves to no AZ id (None)."""
    dryrun_error = ClientError(
        {"Error": {"Code": "DryRunOperation", "Message": "DryRunOperation"}},
        OPERATION_NAME,
    )
    _test_describe_availability_zones_response(
        mocker,
        dryrun_error,
        {"AvailabilityZones": []},
        2,
        None,
    )
def test_describe_availability_zones_return_none_az_info(mocker):
    """A None response body also resolves to no AZ id (None)."""
    dryrun_error = ClientError(
        {"Error": {"Code": "DryRunOperation", "Message": "DryRunOperation"}},
        OPERATION_NAME,
    )
    _test_describe_availability_zones_response(mocker, dryrun_error, None, 2, None)
| 30.954315 | 88 | 0.699738 |
import mount_efs
import pytest
from botocore.exceptions import ClientError, EndpointConnectionError, NoCredentialsError
from .. import utils
MOCK_EC2_AGENT = "fake-client"
AZ_NAME = "us-east-2b"
AZ_ID = "use2-az2"
OPERATION_NAME = "DescribeAvailabilityZones"
def _test_describe_availability_zones_response(
mocker,
dryrun_effect,
response,
expected_describe_time,
desired_az_id=None,
desired_exception=None,
desired_message=None,
):
describe_mock = mocker.patch(
"mount_efs.ec2_describe_availability_zones_helper",
side_effect=[dryrun_effect, response],
)
if desired_exception:
assert desired_message != None
with pytest.raises(mount_efs.FallbackException) as excinfo:
mount_efs.get_az_id_by_az_name(MOCK_EC2_AGENT, AZ_NAME)
assert desired_message in str(excinfo)
else:
az_id = mount_efs.get_az_id_by_az_name(MOCK_EC2_AGENT, AZ_NAME)
assert az_id == desired_az_id
utils.assert_called_n_times(describe_mock, expected_describe_time)
def test_describe_availability_zones_dryrun_succeed_return_correct(mocker):
dryrun_exception_response = {
"Error": {"Code": "DryRunOperation", "Message": "DryRunOperation"}
}
response = {
"AvailabilityZones": [
{
"Messages": [],
"ZoneId": AZ_ID,
"State": "available",
"ZoneName": AZ_NAME,
"RegionName": "us-east-2",
}
]
}
_test_describe_availability_zones_response(
mocker,
ClientError(dryrun_exception_response, OPERATION_NAME),
response,
2,
AZ_ID,
)
def test_describe_availability_zones_dryrun_failed_unauthorized_operation(mocker):
dryrun_exception_response = {
"Error": {"Code": "UnauthorizedOperation", "Message": "UnauthorizedOperation"}
}
_test_describe_availability_zones_response(
mocker,
ClientError(dryrun_exception_response, OPERATION_NAME),
None,
1,
desired_exception=mount_efs.FallbackException,
desired_message="Unauthorized to perform operation",
)
def test_describe_availability_zones_dryrun_failed_invalid_az_name(mocker):
dryrun_exception_response = {
"Error": {"Code": "InvalidParameterValue", "Message": "InvalidParameterValue"}
}
_test_describe_availability_zones_response(
mocker,
ClientError(dryrun_exception_response, OPERATION_NAME),
None,
1,
desired_exception=mount_efs.FallbackException,
desired_message="Invalid availability zone",
)
def test_describe_availability_zones_dryrun_failed_service_unavailable(mocker):
dryrun_exception_response = {
"Error": {
"Code": "ServiceUnavailableException",
"Message": "ServiceUnavailableException",
}
}
_test_describe_availability_zones_response(
mocker,
ClientError(dryrun_exception_response, OPERATION_NAME),
None,
1,
desired_exception=mount_efs.FallbackException,
desired_message="The ec2 service cannot",
)
def test_describe_availability_zones_dryrun_failed_access_denied(mocker):
exception_message = "is not authorized to perform"
dryrun_exception_response = {
"Error": {"Code": "AccessDeniedException", "Message": exception_message}
}
_test_describe_availability_zones_response(
mocker,
ClientError(dryrun_exception_response, OPERATION_NAME),
None,
1,
desired_exception=mount_efs.FallbackException,
desired_message=exception_message,
)
def test_describe_availability_zones_dryrun_failed_unknown_exception(mocker):
dryrun_exception_response = {
"Error": {"Code": "UnknownException", "Message": "UnknownException"}
}
_test_describe_availability_zones_response(
mocker,
ClientError(dryrun_exception_response, OPERATION_NAME),
None,
1,
desired_exception=mount_efs.FallbackException,
desired_message="Unexpected error",
)
def test_describe_availability_zones_dryrun_failed_no_credential_error(mocker):
_test_describe_availability_zones_response(
mocker,
NoCredentialsError(),
None,
1,
desired_exception=mount_efs.FallbackException,
desired_message="confirm your aws credentials are properly configured",
)
def test_describe_availability_zones_failed_unknown_error(mocker):
_test_describe_availability_zones_response(
mocker,
Exception("Unknown"),
None,
1,
desired_exception=mount_efs.FallbackException,
desired_message="Unknown error",
)
def test_describe_availability_zones_failed_endpoing_error(mocker):
_test_describe_availability_zones_response(
mocker,
EndpointConnectionError(endpoint_url="https://efs.us-east-1.com"),
None,
1,
desired_exception=mount_efs.FallbackException,
desired_message="Could not connect to the endpoint",
)
def test_describe_availability_zones_return_empty_az_info(mocker):
dryrun_exception_response = {
"Error": {"Code": "DryRunOperation", "Message": "DryRunOperation"}
}
response = {"AvailabilityZones": []}
_test_describe_availability_zones_response(
mocker,
ClientError(dryrun_exception_response, OPERATION_NAME),
response,
2,
None,
)
def test_describe_availability_zones_return_none_az_info(mocker):
dryrun_exception_response = {
"Error": {"Code": "DryRunOperation", "Message": "DryRunOperation"}
}
_test_describe_availability_zones_response(
mocker, ClientError(dryrun_exception_response, OPERATION_NAME), None, 2, None
)
| true | true |
f71e2e288d694a122eadc4d1b3f2262116c447ab | 7,293 | py | Python | rest_framework/tests/hyperlinkedserializers.py | asfaltboy/django-rest-framework | 6d3bb67aa654d5f4c555746655a312000422d474 | [
"Unlicense"
] | null | null | null | rest_framework/tests/hyperlinkedserializers.py | asfaltboy/django-rest-framework | 6d3bb67aa654d5f4c555746655a312000422d474 | [
"Unlicense"
] | null | null | null | rest_framework/tests/hyperlinkedserializers.py | asfaltboy/django-rest-framework | 6d3bb67aa654d5f4c555746655a312000422d474 | [
"Unlicense"
] | null | null | null | from django.conf.urls.defaults import patterns, url
from django.test import TestCase
from django.test.client import RequestFactory
from rest_framework import generics, status, serializers
from rest_framework.tests.models import Anchor, BasicModel, ManyToManyModel, BlogPost, BlogPostComment, Album, Photo
factory = RequestFactory()
class BlogPostCommentSerializer(serializers.ModelSerializer):
    """Serializer for BlogPostComment that exposes the parent post as a
    hyperlink ('blog_post_url') rather than a primary key."""
    text = serializers.CharField()
    blog_post_url = serializers.HyperlinkedRelatedField(source='blog_post', view_name='blogpost-detail')
    class Meta:
        model = BlogPostComment
        fields = ('text', 'blog_post_url')
class PhotoSerializer(serializers.Serializer):
    """Serializer exposing a Photo's description plus a slug-based
    hyperlink to its Album (matched on the 'title' URL kwarg)."""
    description = serializers.CharField()
    album_url = serializers.HyperlinkedRelatedField(source='album', view_name='album-detail', queryset=Album.objects.all(), slug_field='title', slug_url_kwarg='title')
    def restore_object(self, attrs, instance=None):
        """Create a new Photo, or apply `attrs` to an existing instance.

        The original always returned a new Photo and ignored `instance`,
        which broke update (PUT) semantics.
        """
        if instance is not None:
            for key, value in attrs.items():
                setattr(instance, key, value)
            return instance
        return Photo(**attrs)
# Minimal generic views exercised by the tests below; the *-list / *-detail
# view names are what the hyperlinked serializers reverse() into URLs.
class BasicList(generics.ListCreateAPIView):
    model = BasicModel
    model_serializer_class = serializers.HyperlinkedModelSerializer
class BasicDetail(generics.RetrieveUpdateDestroyAPIView):
    model = BasicModel
    model_serializer_class = serializers.HyperlinkedModelSerializer
class AnchorDetail(generics.RetrieveAPIView):
    model = Anchor
    model_serializer_class = serializers.HyperlinkedModelSerializer
class ManyToManyList(generics.ListAPIView):
    model = ManyToManyModel
    model_serializer_class = serializers.HyperlinkedModelSerializer
class ManyToManyDetail(generics.RetrieveAPIView):
    model = ManyToManyModel
    model_serializer_class = serializers.HyperlinkedModelSerializer
# Uses the explicit custom serializer rather than the hyperlinked default.
class BlogPostCommentListCreate(generics.ListCreateAPIView):
    model = BlogPostComment
    serializer_class = BlogPostCommentSerializer
class BlogPostDetail(generics.RetrieveAPIView):
    model = BlogPost
class PhotoListCreate(generics.ListCreateAPIView):
    model = Photo
    model_serializer_class = PhotoSerializer
class AlbumDetail(generics.RetrieveAPIView):
    model = Album
# URLconf used by the TestCases below (each sets `urls` to this module).
# Note the album route matches on a slug ('title'), not a pk.
urlpatterns = patterns('',
    url(r'^basic/$', BasicList.as_view(), name='basicmodel-list'),
    url(r'^basic/(?P<pk>\d+)/$', BasicDetail.as_view(), name='basicmodel-detail'),
    url(r'^anchor/(?P<pk>\d+)/$', AnchorDetail.as_view(), name='anchor-detail'),
    url(r'^manytomany/$', ManyToManyList.as_view(), name='manytomanymodel-list'),
    url(r'^manytomany/(?P<pk>\d+)/$', ManyToManyDetail.as_view(), name='manytomanymodel-detail'),
    url(r'^posts/(?P<pk>\d+)/$', BlogPostDetail.as_view(), name='blogpost-detail'),
    url(r'^comments/$', BlogPostCommentListCreate.as_view(), name='blogpostcomment-list'),
    url(r'^albums/(?P<title>\w[\w-]*)/$', AlbumDetail.as_view(), name='album-detail'),
    url(r'^photos/$', PhotoListCreate.as_view(), name='photo-list')
)
class TestBasicHyperlinkedView(TestCase):
    """Checks that HyperlinkedModelSerializer renders a 'url' field for
    BasicModel in both the list and detail views."""
    urls = 'rest_framework.tests.hyperlinkedserializers'
    def setUp(self):
        """
        Create 3 BasicModel instances and the serialized data we expect
        the views to return.
        """
        items = ['foo', 'bar', 'baz']
        for item in items:
            BasicModel(text=item).save()
        self.objects = BasicModel.objects
        self.data = [
            {'url': 'http://testserver/basic/%d/' % obj.id, 'text': obj.text}
            for obj in self.objects.all()
        ]
        self.list_view = BasicList.as_view()
        self.detail_view = BasicDetail.as_view()
    def test_get_list_view(self):
        """
        GET requests to ListCreateAPIView should return the list of objects,
        each carrying a hyperlinked 'url' field.
        """
        request = factory.get('/basic/')
        response = self.list_view(request).render()
        self.assertEquals(response.status_code, status.HTTP_200_OK)
        self.assertEquals(response.data, self.data)
    def test_get_detail_view(self):
        """
        GET requests to RetrieveUpdateDestroyAPIView should return a single
        object with a hyperlinked 'url' field.
        """
        request = factory.get('/basic/1')
        response = self.detail_view(request, pk=1).render()
        self.assertEquals(response.status_code, status.HTTP_200_OK)
        self.assertEquals(response.data, self.data[0])
class TestManyToManyHyperlinkedView(TestCase):
    """Checks that a ManyToMany relation is rendered as a list of
    hyperlinks ('rel') by the hyperlinked serializer."""
    urls = 'rest_framework.tests.hyperlinkedserializers'
    def setUp(self):
        """
        Create three Anchor instances and one ManyToManyModel relating
        them, plus the serialized data we expect the views to return.
        (The original docstring wrongly said "3 BasicModel intances".)
        """
        items = ['foo', 'bar', 'baz']
        anchors = []
        for item in items:
            anchor = Anchor(text=item)
            anchor.save()
            anchors.append(anchor)
        manytomany = ManyToManyModel()
        manytomany.save()
        manytomany.rel.add(*anchors)
        self.data = [{
            'url': 'http://testserver/manytomany/1/',
            'rel': [
                'http://testserver/anchor/1/',
                'http://testserver/anchor/2/',
                'http://testserver/anchor/3/',
            ]
        }]
        self.list_view = ManyToManyList.as_view()
        self.detail_view = ManyToManyDetail.as_view()
    def test_get_list_view(self):
        """
        GET requests to ListAPIView should return the list of objects,
        including the hyperlinked 'rel' entries.
        """
        request = factory.get('/manytomany/')
        response = self.list_view(request).render()
        self.assertEquals(response.status_code, status.HTTP_200_OK)
        self.assertEquals(response.data, self.data)
    def test_get_detail_view(self):
        """
        GET requests to RetrieveAPIView should return a single object with
        its hyperlinked 'rel' entries.
        """
        request = factory.get('/manytomany/1/')
        response = self.detail_view(request, pk=1).render()
        self.assertEquals(response.status_code, status.HTTP_200_OK)
        self.assertEquals(response.data, self.data[0])
class TestCreateWithForeignKeys(TestCase):
urls = 'rest_framework.tests.hyperlinkedserializers'
def setUp(self):
"""
Create a blog post
"""
self.post = BlogPost.objects.create(title="Test post")
self.create_view = BlogPostCommentListCreate.as_view()
def test_create_comment(self):
data = {
'text': 'A test comment',
'blog_post_url': 'http://testserver/posts/1/'
}
request = factory.post('/comments/', data=data)
response = self.create_view(request).render()
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertEqual(self.post.blogpostcomment_set.count(), 1)
self.assertEqual(self.post.blogpostcomment_set.all()[0].text, 'A test comment')
class TestCreateWithForeignKeysAndCustomSlug(TestCase):
urls = 'rest_framework.tests.hyperlinkedserializers'
def setUp(self):
"""
Create an Album
"""
self.post = Album.objects.create(title='test-album')
self.list_create_view = PhotoListCreate.as_view()
def test_create_photo(self):
data = {
'description': 'A test photo',
'album_url': 'http://testserver/albums/test-album/'
}
request = factory.post('/photos/', data=data)
response = self.list_create_view(request).render()
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertEqual(self.post.photo_set.count(), 1)
self.assertEqual(self.post.photo_set.all()[0].description, 'A test photo')
| 34.079439 | 167 | 0.669135 | from django.conf.urls.defaults import patterns, url
from django.test import TestCase
from django.test.client import RequestFactory
from rest_framework import generics, status, serializers
from rest_framework.tests.models import Anchor, BasicModel, ManyToManyModel, BlogPost, BlogPostComment, Album, Photo
factory = RequestFactory()
class BlogPostCommentSerializer(serializers.ModelSerializer):
text = serializers.CharField()
blog_post_url = serializers.HyperlinkedRelatedField(source='blog_post', view_name='blogpost-detail')
class Meta:
model = BlogPostComment
fields = ('text', 'blog_post_url')
class PhotoSerializer(serializers.Serializer):
description = serializers.CharField()
album_url = serializers.HyperlinkedRelatedField(source='album', view_name='album-detail', queryset=Album.objects.all(), slug_field='title', slug_url_kwarg='title')
def restore_object(self, attrs, instance=None):
return Photo(**attrs)
class BasicList(generics.ListCreateAPIView):
model = BasicModel
model_serializer_class = serializers.HyperlinkedModelSerializer
class BasicDetail(generics.RetrieveUpdateDestroyAPIView):
model = BasicModel
model_serializer_class = serializers.HyperlinkedModelSerializer
class AnchorDetail(generics.RetrieveAPIView):
model = Anchor
model_serializer_class = serializers.HyperlinkedModelSerializer
class ManyToManyList(generics.ListAPIView):
model = ManyToManyModel
model_serializer_class = serializers.HyperlinkedModelSerializer
class ManyToManyDetail(generics.RetrieveAPIView):
model = ManyToManyModel
model_serializer_class = serializers.HyperlinkedModelSerializer
class BlogPostCommentListCreate(generics.ListCreateAPIView):
model = BlogPostComment
serializer_class = BlogPostCommentSerializer
class BlogPostDetail(generics.RetrieveAPIView):
model = BlogPost
class PhotoListCreate(generics.ListCreateAPIView):
model = Photo
model_serializer_class = PhotoSerializer
class AlbumDetail(generics.RetrieveAPIView):
model = Album
urlpatterns = patterns('',
url(r'^basic/$', BasicList.as_view(), name='basicmodel-list'),
url(r'^basic/(?P<pk>\d+)/$', BasicDetail.as_view(), name='basicmodel-detail'),
url(r'^anchor/(?P<pk>\d+)/$', AnchorDetail.as_view(), name='anchor-detail'),
url(r'^manytomany/$', ManyToManyList.as_view(), name='manytomanymodel-list'),
url(r'^manytomany/(?P<pk>\d+)/$', ManyToManyDetail.as_view(), name='manytomanymodel-detail'),
url(r'^posts/(?P<pk>\d+)/$', BlogPostDetail.as_view(), name='blogpost-detail'),
url(r'^comments/$', BlogPostCommentListCreate.as_view(), name='blogpostcomment-list'),
url(r'^albums/(?P<title>\w[\w-]*)/$', AlbumDetail.as_view(), name='album-detail'),
url(r'^photos/$', PhotoListCreate.as_view(), name='photo-list')
)
class TestBasicHyperlinkedView(TestCase):
urls = 'rest_framework.tests.hyperlinkedserializers'
def setUp(self):
items = ['foo', 'bar', 'baz']
for item in items:
BasicModel(text=item).save()
self.objects = BasicModel.objects
self.data = [
{'url': 'http://testserver/basic/%d/' % obj.id, 'text': obj.text}
for obj in self.objects.all()
]
self.list_view = BasicList.as_view()
self.detail_view = BasicDetail.as_view()
def test_get_list_view(self):
request = factory.get('/basic/')
response = self.list_view(request).render()
self.assertEquals(response.status_code, status.HTTP_200_OK)
self.assertEquals(response.data, self.data)
def test_get_detail_view(self):
request = factory.get('/basic/1')
response = self.detail_view(request, pk=1).render()
self.assertEquals(response.status_code, status.HTTP_200_OK)
self.assertEquals(response.data, self.data[0])
class TestManyToManyHyperlinkedView(TestCase):
urls = 'rest_framework.tests.hyperlinkedserializers'
def setUp(self):
items = ['foo', 'bar', 'baz']
anchors = []
for item in items:
anchor = Anchor(text=item)
anchor.save()
anchors.append(anchor)
manytomany = ManyToManyModel()
manytomany.save()
manytomany.rel.add(*anchors)
self.data = [{
'url': 'http://testserver/manytomany/1/',
'rel': [
'http://testserver/anchor/1/',
'http://testserver/anchor/2/',
'http://testserver/anchor/3/',
]
}]
self.list_view = ManyToManyList.as_view()
self.detail_view = ManyToManyDetail.as_view()
def test_get_list_view(self):
request = factory.get('/manytomany/')
response = self.list_view(request).render()
self.assertEquals(response.status_code, status.HTTP_200_OK)
self.assertEquals(response.data, self.data)
def test_get_detail_view(self):
request = factory.get('/manytomany/1/')
response = self.detail_view(request, pk=1).render()
self.assertEquals(response.status_code, status.HTTP_200_OK)
self.assertEquals(response.data, self.data[0])
class TestCreateWithForeignKeys(TestCase):
urls = 'rest_framework.tests.hyperlinkedserializers'
def setUp(self):
self.post = BlogPost.objects.create(title="Test post")
self.create_view = BlogPostCommentListCreate.as_view()
def test_create_comment(self):
data = {
'text': 'A test comment',
'blog_post_url': 'http://testserver/posts/1/'
}
request = factory.post('/comments/', data=data)
response = self.create_view(request).render()
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertEqual(self.post.blogpostcomment_set.count(), 1)
self.assertEqual(self.post.blogpostcomment_set.all()[0].text, 'A test comment')
class TestCreateWithForeignKeysAndCustomSlug(TestCase):
urls = 'rest_framework.tests.hyperlinkedserializers'
def setUp(self):
self.post = Album.objects.create(title='test-album')
self.list_create_view = PhotoListCreate.as_view()
def test_create_photo(self):
data = {
'description': 'A test photo',
'album_url': 'http://testserver/albums/test-album/'
}
request = factory.post('/photos/', data=data)
response = self.list_create_view(request).render()
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertEqual(self.post.photo_set.count(), 1)
self.assertEqual(self.post.photo_set.all()[0].description, 'A test photo')
| true | true |
f71e2e36831ac465a1adbb26b80e5430c3a328c4 | 1,827 | py | Python | pymcxray/FileFormat/Results/test_Dump.py | drix00/pymcxray | bf650aa0f31c635040a6cb79fe1cb7ecf27b8990 | [
"Apache-2.0"
] | 1 | 2020-07-23T12:13:30.000Z | 2020-07-23T12:13:30.000Z | pymcxray/FileFormat/Results/test_Dump.py | drix00/pymcxray | bf650aa0f31c635040a6cb79fe1cb7ecf27b8990 | [
"Apache-2.0"
] | 3 | 2017-03-05T16:09:30.000Z | 2017-03-05T16:11:41.000Z | pymcxray/FileFormat/Results/test_Dump.py | drix00/pymcxray | bf650aa0f31c635040a6cb79fe1cb7ecf27b8990 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
"""
.. py:currentmodule:: FileFormat.Results.test_Dump
.. moduleauthor:: Hendrix Demers <hendrix.demers@mail.mcgill.ca>
Tests for module `Dump`
"""
# Script information for the file.
__author__ = "Hendrix Demers (hendrix.demers@mail.mcgill.ca)"
__version__ = ""
__date__ = ""
__copyright__ = "Copyright (c) 2012 Hendrix Demers"
__license__ = ""
# Subversion informations for the file.
__svnRevision__ = "$Revision$"
__svnDate__ = "$Date$"
__svnId__ = "$Id$"
# Standard library modules.
import unittest
import logging
import os
# Third party modules.
# Local modules.
# Project modules
import pymcxray.FileFormat.Results.Dump as Dump
# Globals and constants variables.
class TestDump(unittest.TestCase):
"""
TestCase class for the module `Dump`.
"""
def setUp(self):
"""
Setup method.
"""
unittest.TestCase.setUp(self)
self.testDataPath = os.path.abspath(os.path.join(os.path.dirname(__file__), "../../../test_data"))
def tearDown(self):
"""
Teardown method.
"""
unittest.TestCase.tearDown(self)
def testSkeleton(self):
"""
First test to check if the testcase is working with the testing framework.
"""
#self.fail("Test if the testcase is working.")
self.assert_(True)
def test_read(self):
"""
Tests for method `read`.
"""
filepath = os.path.join(self.testDataPath, "autoSavedFiles", "McXRayDUMP.txt")
dumpFile = Dump.Dump()
dumpFile.read(filepath)
#self.fail("Test if the testcase is working.")
if __name__ == '__main__': #pragma: no cover
logging.getLogger().setLevel(logging.DEBUG)
from pymcxray.Testings import runTestModuleWithCoverage
runTestModuleWithCoverage(__file__)
| 23.126582 | 106 | 0.654625 |
__author__ = "Hendrix Demers (hendrix.demers@mail.mcgill.ca)"
__version__ = ""
__date__ = ""
__copyright__ = "Copyright (c) 2012 Hendrix Demers"
__license__ = ""
__svnRevision__ = "$Revision$"
__svnDate__ = "$Date$"
__svnId__ = "$Id$"
import unittest
import logging
import os
import pymcxray.FileFormat.Results.Dump as Dump
class TestDump(unittest.TestCase):
def setUp(self):
unittest.TestCase.setUp(self)
self.testDataPath = os.path.abspath(os.path.join(os.path.dirname(__file__), "../../../test_data"))
def tearDown(self):
unittest.TestCase.tearDown(self)
def testSkeleton(self):
self.assert_(True)
def test_read(self):
filepath = os.path.join(self.testDataPath, "autoSavedFiles", "McXRayDUMP.txt")
dumpFile = Dump.Dump()
dumpFile.read(filepath)
if __name__ == '__main__':
logging.getLogger().setLevel(logging.DEBUG)
from pymcxray.Testings import runTestModuleWithCoverage
runTestModuleWithCoverage(__file__)
| true | true |
f71e2f73e8bfacab772375103ad09d6f547eb6cf | 2,226 | py | Python | eplaunch/interface/welcome_dialog.py | wta12/EP-Launch | ac62691cfbb595153f658b8757ce1e089861d456 | [
"BSD-3-Clause"
] | 7 | 2018-11-06T12:45:57.000Z | 2021-07-06T07:56:02.000Z | eplaunch/interface/welcome_dialog.py | wta12/EP-Launch | ac62691cfbb595153f658b8757ce1e089861d456 | [
"BSD-3-Clause"
] | 128 | 2018-07-16T16:09:40.000Z | 2021-12-16T17:13:42.000Z | eplaunch/interface/welcome_dialog.py | wta12/EP-Launch | ac62691cfbb595153f658b8757ce1e089861d456 | [
"BSD-3-Clause"
] | 3 | 2020-02-07T21:01:34.000Z | 2022-01-06T05:00:17.000Z | import webbrowser
import wx
from eplaunch import DOCS_URL, VERSION
# wx callbacks need an event argument even though we usually don't use it, so the next line disables that check
# noinspection PyUnusedLocal
class WelcomeDialog(wx.Dialog):
CLOSE_SIGNAL_OK = 0
def __init__(self, *args, **kwargs):
super(WelcomeDialog, self).__init__(*args, **kwargs)
self.SetTitle("EP-Launch")
this_border = 12
self.panel = wx.Panel(self, wx.ID_ANY)
title = wx.StaticText(self.panel, wx.ID_ANY, 'Welcome to EP-Launch ' + VERSION)
message = """
EP-Launch has been around for many years as a part of the EnergyPlus distribution.
Starting with the 3.0 release, it has changed drastically, completely redesigned and rewritten.
For full documentation or a quick start guide, click the "Open Docs" button below.
This dialog will only be shown once, but documentation is available in the Help menu.
"""
text_description = wx.StaticText(self.panel, wx.ID_ANY, message, style=wx.ALIGN_CENTRE_HORIZONTAL)
ok_button = wx.Button(self.panel, label='OK')
docs_button = wx.Button(self.panel, label='Open Docs')
self.Bind(wx.EVT_CLOSE, self.handle_close_ok)
ok_button.Bind(wx.EVT_BUTTON, self.handle_close_ok)
docs_button.Bind(wx.EVT_BUTTON, self.handle_open_docs)
button_row_sizer = wx.BoxSizer(wx.HORIZONTAL)
button_row_sizer.Add(ok_button, flag=wx.LEFT | wx.RIGHT | wx.ALIGN_CENTER_VERTICAL, border=this_border)
button_row_sizer.Add(docs_button, flag=wx.LEFT | wx.RIGHT | wx.ALIGN_CENTER_VERTICAL, border=this_border)
sizer_main_vertical = wx.BoxSizer(wx.VERTICAL)
sizer_main_vertical.Add(title, 0, wx.CENTER | wx.ALL, border=this_border)
sizer_main_vertical.Add(text_description, proportion=1, flag=wx.ALL | wx.EXPAND, border=this_border)
sizer_main_vertical.Add(button_row_sizer, flag=wx.ALL | wx.ALIGN_CENTER, border=this_border)
self.panel.SetSizer(sizer_main_vertical)
sizer_main_vertical.Fit(self)
def handle_open_docs(self, e):
webbrowser.open(DOCS_URL)
def handle_close_ok(self, e):
self.EndModal(WelcomeDialog.CLOSE_SIGNAL_OK)
| 43.647059 | 113 | 0.721923 | import webbrowser
import wx
from eplaunch import DOCS_URL, VERSION
# noinspection PyUnusedLocal
class WelcomeDialog(wx.Dialog):
CLOSE_SIGNAL_OK = 0
def __init__(self, *args, **kwargs):
super(WelcomeDialog, self).__init__(*args, **kwargs)
self.SetTitle("EP-Launch")
this_border = 12
self.panel = wx.Panel(self, wx.ID_ANY)
title = wx.StaticText(self.panel, wx.ID_ANY, 'Welcome to EP-Launch ' + VERSION)
message = """
EP-Launch has been around for many years as a part of the EnergyPlus distribution.
Starting with the 3.0 release, it has changed drastically, completely redesigned and rewritten.
For full documentation or a quick start guide, click the "Open Docs" button below.
This dialog will only be shown once, but documentation is available in the Help menu.
"""
text_description = wx.StaticText(self.panel, wx.ID_ANY, message, style=wx.ALIGN_CENTRE_HORIZONTAL)
ok_button = wx.Button(self.panel, label='OK')
docs_button = wx.Button(self.panel, label='Open Docs')
self.Bind(wx.EVT_CLOSE, self.handle_close_ok)
ok_button.Bind(wx.EVT_BUTTON, self.handle_close_ok)
docs_button.Bind(wx.EVT_BUTTON, self.handle_open_docs)
button_row_sizer = wx.BoxSizer(wx.HORIZONTAL)
button_row_sizer.Add(ok_button, flag=wx.LEFT | wx.RIGHT | wx.ALIGN_CENTER_VERTICAL, border=this_border)
button_row_sizer.Add(docs_button, flag=wx.LEFT | wx.RIGHT | wx.ALIGN_CENTER_VERTICAL, border=this_border)
sizer_main_vertical = wx.BoxSizer(wx.VERTICAL)
sizer_main_vertical.Add(title, 0, wx.CENTER | wx.ALL, border=this_border)
sizer_main_vertical.Add(text_description, proportion=1, flag=wx.ALL | wx.EXPAND, border=this_border)
sizer_main_vertical.Add(button_row_sizer, flag=wx.ALL | wx.ALIGN_CENTER, border=this_border)
self.panel.SetSizer(sizer_main_vertical)
sizer_main_vertical.Fit(self)
def handle_open_docs(self, e):
webbrowser.open(DOCS_URL)
def handle_close_ok(self, e):
self.EndModal(WelcomeDialog.CLOSE_SIGNAL_OK)
| true | true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.