content stringlengths 1 1.04M | input_ids listlengths 1 774k | ratio_char_token float64 0.38 22.9 | token_count int64 1 774k |
|---|---|---|---|
import logging
import unittest
import quantum_wavepacket
import numpy
import quantum_wavepacket.wavepacket as wp
logger = logging.getLogger(quantum_wavepacket.LOGGER_NAME)
if __name__ == "__main__":
logging.basicConfig(level=logging.DEBUG, format=quantum_wavepacket.LOG_FORMAT)
unittest.main() | [
11748,
18931,
198,
11748,
555,
715,
395,
198,
11748,
14821,
62,
19204,
8002,
316,
198,
11748,
299,
32152,
198,
11748,
14821,
62,
19204,
8002,
316,
13,
19204,
8002,
316,
355,
266,
79,
628,
198,
6404,
1362,
796,
18931,
13,
1136,
11187,
... | 2.850467 | 107 |
# Generated by Django 3.2.7 on 2021-10-23 14:07
from django.db import migrations, models
| [
2,
2980,
515,
416,
37770,
513,
13,
17,
13,
22,
319,
33448,
12,
940,
12,
1954,
1478,
25,
2998,
198,
198,
6738,
42625,
14208,
13,
9945,
1330,
15720,
602,
11,
4981,
628
] | 2.84375 | 32 |
#!/usr/bin/python
# Package: autoconf
# System: MacOS High Sierra
# Author: richard pct
"""autoconf"""
import fileinput, re
from subprocess import check_call, CalledProcessError
pkg_version = "2.69"
pkg_name = "autoconf-" + pkg_version
pkg_src = pkg_name + ".tar.gz"
pkg_url = "http://ftp.gnu.org/gnu/autoconf"
hash_type = "sha256"
pkg_sha256 = "954bd69b391edc12d6a4a51a2dd1476543da5c6bbf05a95b59dc0dd6fd4c2969"
def build(path_src, destdir):
"""configure and make"""
pattern = re.compile(r"libtoolize")
for line in fileinput.input("bin/autoreconf.in", inplace=True):
new_line = pattern.sub("glibtoolize", line)
print new_line.rstrip()
try:
check_call(["./configure", "--prefix=" + destdir])
except CalledProcessError:
print "[Error] configure"
exit(1)
try:
check_call(["make"])
except CalledProcessError:
print "[Error] make"
exit(1)
def install(path_src, destdir):
"""install"""
try:
check_call(["make", "install"])
except CalledProcessError:
print "[Error] make install"
exit(1)
| [
2,
48443,
14629,
14,
8800,
14,
29412,
198,
198,
2,
15717,
25,
1960,
36221,
69,
198,
2,
4482,
25,
4100,
2640,
3334,
17559,
198,
2,
6434,
25,
5527,
446,
279,
310,
198,
198,
37811,
2306,
36221,
69,
37811,
198,
198,
11748,
2393,
15414,
... | 2.3147 | 483 |
from .base import *
from .heterodyne import heterodyne, heterodyne_dag, heterodyne_merge
| [
6738,
764,
8692,
1330,
1635,
198,
6738,
764,
43332,
1118,
710,
1330,
14445,
1118,
710,
11,
14445,
1118,
710,
62,
67,
363,
11,
14445,
1118,
710,
62,
647,
469,
198
] | 2.966667 | 30 |
from fastapi.testclient import TestClient
from app.main import app
from app.models import Numbers
import pytest
client = TestClient(app)
| [
6738,
3049,
15042,
13,
9288,
16366,
1330,
6208,
11792,
198,
6738,
598,
13,
12417,
1330,
598,
198,
6738,
598,
13,
27530,
1330,
27797,
198,
11748,
12972,
9288,
198,
198,
16366,
796,
6208,
11792,
7,
1324,
8,
628
] | 3.756757 | 37 |
# coding: utf-8
# 模块状态信息的输出
from global_data import Bases, Trucks, Orders, Destinations
import numpy as np
from model.base_model.base import Base
from model.base_model.base_.type import Truck_status
from model.base_model.order import Order
from model.base_model.truck import Truck
from model.base_model.base_.data_record import Writer, model_time_to_date_time
import sys
reload(sys)
sys.setdefaultencoding('utf8')
| [
2,
19617,
25,
3384,
69,
12,
23,
198,
2,
10545,
101,
94,
161,
251,
245,
163,
232,
35050,
222,
223,
46479,
94,
162,
223,
107,
21410,
164,
122,
241,
49035,
118,
198,
6738,
3298,
62,
7890,
1330,
347,
1386,
11,
13616,
4657,
11,
30689,
... | 2.769737 | 152 |
# Copyright Alex Morais (thatsamorais@gmail.com) for perplexistential
"""The PubSub cog example."""
import os
from twitchio.ext import commands, pubsub
class Cog(commands.Cog):
"""Cog."""
def __init__(self, bot, data={}):
"""init."""
self.bot = bot
self.data = data
self.bot.pubsub = pubsub.PubSubPool(self.bot)
@commands.Cog.event("event_ready")
async def is_ready(self):
"""is_ready."""
print("pubsub cog is ready!")
bot = self.bot
@bot.event()
@bot.event()
@bot.event()
@bot.event()
# The following two events are mentioned in docs but not supported
@bot.event()
@bot.event()
for channel in self.bot.channels:
token = os.environ.get(f"{channel.upper()}_PUBSUB_TOKEN", "")
channel_details = await self.bot.fetch_channel(channel)
channel_id = channel_details.user.id
topics = []
for topic in self.data.get("topics", []):
if topic == "channel_points":
topics.append(pubsub.channel_points(token)[channel_id])
elif topic == "bits":
topics.append(pubsub.bits(token)[channel_id])
elif topic == "bits_badge":
topics.append(pubsub.bits_badge(token)[channel_id])
# This support is not yet ready in twitchio. maybe soon?
elif topic == "channel_subscriptions":
topics.append(
pubsub.channel_subscriptions(self.bot.access_token)[channel_id]
)
elif topic == "whispers":
topics.append(pubsub.whispers(self.bot.access_token))
for mod_id in os.environ.get("MODERATORS", "").strip().split(","):
if mod_id:
topics.append(
pubsub.moderation_user_action(token)[channel_id][mod_id]
)
self.bot.loop.create_task(self.bot.pubsub.subscribe_topics(topics))
def prepare(bot: commands.Bot, data={}):
"""Load our cog with this module."""
bot.add_cog(Cog(bot, data=data))
| [
2,
15069,
4422,
3461,
15152,
357,
400,
1381,
321,
5799,
271,
31,
14816,
13,
785,
8,
329,
35682,
396,
1843,
198,
198,
37811,
464,
8525,
7004,
43072,
1672,
526,
15931,
198,
198,
11748,
28686,
198,
6738,
37366,
952,
13,
2302,
1330,
9729,... | 2.002703 | 1,110 |
# Copyright 2015 Dell Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
'''Volume driver for Dell Storage Center.'''
from oslo_log import log as logging
from oslo_utils import excutils
from cinder import exception
from cinder.i18n import _, _LE
from cinder.volume.drivers.dell import dell_storagecenter_common
from cinder.volume.drivers import san
LOG = logging.getLogger(__name__)
class DellStorageCenterISCSIDriver(san.SanISCSIDriver,
dell_storagecenter_common.DellCommonDriver):
'''Implements commands for Dell StorageCenter ISCSI management.
To enable the driver add the following line to the cinder configuration:
volume_driver=cinder.volume.drivers.dell.DellStorageCenterISCSIDriver
'''
VERSION = '1.0.1'
| [
2,
220,
220,
220,
15069,
1853,
23617,
3457,
13,
198,
2,
198,
2,
220,
220,
220,
49962,
739,
262,
24843,
13789,
11,
10628,
362,
13,
15,
357,
1169,
366,
34156,
15341,
345,
743,
198,
2,
220,
220,
220,
407,
779,
428,
2393,
2845,
287,
... | 3.055944 | 429 |
from data import DataLoader
import os
TESTDIR = "C:/Users/Administrator/Desktop/neuron_factor_tree_test/test_1"
eval_path = TESTDIR + "/data/morpho_eval/L4_BC/C060998B-I1.CNG.nfss"
class_map = {"L4_BC": 1, "L4_PC": 2, "L4_SC": 3, "L5_MC": 4, "L5_PC": 5,
"L6_PC": 6, "L23_BC": 7, "L23_MC": 8, "L23_PC": 9}
# data_loader = DataLoader(data_dir= (TESTDIR+"/data"), batch_size=100, sequence_size=10, feature_size=19, mode="train")
data_loader = DataLoader(data_dir= eval_path, batch_size=-1, sequence_size=10, feature_size=19, mode="eval")
path_list = os.listdir(eval_path)
for p in path_list:
class_path = os.path.join(eval_path, p)
if os.path.isdir(class_path): # if is dir, dir's name is neuron class.
neuron_label = class_map[p]
print(p)
f_list = os.listdir(class_path)
for f in f_list:
if os.path.isfile(os.path.join(class_path, f)) and f[-4:]=="nfss" : # f
print(f) | [
6738,
1366,
1330,
6060,
17401,
201,
198,
11748,
28686,
201,
198,
201,
198,
51,
6465,
34720,
796,
366,
34,
14079,
14490,
14,
41862,
12392,
14,
36881,
14,
710,
44372,
62,
31412,
62,
21048,
62,
9288,
14,
9288,
62,
16,
1,
201,
198,
1820... | 2.026694 | 487 |
import re
def parse_gcov( gcovfile ):
"""Parses a .gcov file and returns a report array of line coverage
"""
report = []
ignore_block = False
with open( gcovfile ) as gcov:
for line in gcov:
tokens = line.split(':')
if len(tokens) < 2:
continue
count = tokens[0].strip()
line = int( tokens[1].strip() )
source = tokens[2]
if line == 0:
continue
if re.search( r'\bLCOV_EXCL_START\b', source ):
ignore_block = ignore_block + 1
elif re.search(r'\bLCOV_EXCL_END\b', source ):
ignore_block = ignore_block - 1
if count == '-':
report.append( None )
elif count == '#####':
if ( ignore_block > 0 or
source.strip().startswith( ( 'inline', 'static' ) ) or
source.strip() == '}' or
re.search( r'\bLCOV_EXCL_LINE\b', source ) ):
report.append( None )
else:
report.append( 0 )
elif count == '=====':
report.append( 0 )
else:
report.append( int( count ) )
return report
| [
198,
11748,
302,
198,
198,
4299,
21136,
62,
36484,
709,
7,
308,
66,
709,
7753,
15179,
198,
220,
37227,
47,
945,
274,
257,
764,
36484,
709,
2393,
290,
5860,
257,
989,
7177,
286,
1627,
5197,
198,
220,
37227,
628,
220,
989,
796,
17635,... | 2.134921 | 504 |
from tunipost import tunipost
| [
6738,
6278,
541,
455,
1330,
6278,
541,
455,
198
] | 3.333333 | 9 |
import os
from tensorflow.python.keras.callbacks import ReduceLROnPlateau
from epyseg.postprocess.filtermask import simpleFilter
from epyseg.postprocess.refine_v2 import RefineMaskUsingSeeds
os.environ['SM_FRAMEWORK'] = 'tf.keras' # set env var for changing the segmentation_model framework
import traceback
import matplotlib.pyplot as plt
from epyseg.img import Img
from epyseg.deeplearning.augmentation.generators.data import DataGenerator
import numpy as np
import tensorflow as tf
import urllib.request
import hashlib
import re
from epyseg.postprocess.refine import EPySegPostProcess
import segmentation_models as sm
# sm.set_framework('tf.keras') # alternative fix = changing framework on the fly
from epyseg.deeplearning.callbacks.saver import My_saver_callback
from epyseg.deeplearning.callbacks.stop import myStopCallback
from segmentation_models.metrics import *
from segmentation_models.losses import *
from skimage import exposure
# logging
from epyseg.tools.logger import TA_logger
logger = TA_logger()
class EZDeepLearning:
'''A class to handle deep learning models
'''
available_model_architectures = ['Unet', 'PSPNet', 'FPN', 'Linknet']
optimizers = ['adam', 'sgd', 'rmsprop', 'adagrad', 'adadelta', 'adamax', 'nadam']
available_sm_backbones = sm.get_available_backbone_names()
# TODO below are the pretrained models for 2D epithelia segmentation if None --> no pretrained model exist # maybe sort them by efficiency ???
# for each model do provide all the necessary parameters: 'model' 'model_weights' 'architecture' 'backbone' 'activation' 'classes' 'input_width' 'input_height' 'input_channels'
pretrained_models_2D_epithelia = {
'Unet-vgg19-sigmoid': None,
'Unet-vgg16-sigmoid': None,
'Unet-seresnext50-sigmoid': None,
'Unet-seresnext101-sigmoid': None,
'Unet-seresnet50-sigmoid': None,
'Unet-seresnet34-sigmoid': None,
'Unet-seresnet18-sigmoid': None,
'Unet-seresnet152-sigmoid': None,
'Unet-seresnet101-sigmoid': None,
'Unet-senet154-sigmoid': None,
'Unet-resnext50-sigmoid': None,
'Unet-resnext101-sigmoid': None,
'Unet-resnet50-sigmoid': None,
'Unet-resnet34-sigmoid': None,
'Unet-resnet18-sigmoid': None,
'Unet-resnet152-sigmoid': None,
'Unet-resnet101-sigmoid': None,
'Unet-mobilenetv2-sigmoid': None,
'Unet-mobilenet-sigmoid': None,
'Unet-inceptionv3-sigmoid': None,
'Unet-inceptionresnetv2-sigmoid': None,
'Unet-efficientnetb7-sigmoid': None,
'Unet-efficientnetb6-sigmoid': None,
'Unet-efficientnetb5-sigmoid': None,
'Unet-efficientnetb4-sigmoid': None,
'Unet-efficientnetb3-sigmoid': None,
'Unet-efficientnetb2-sigmoid': None,
'Unet-efficientnetb1-sigmoid': None,
'Unet-efficientnetb0-sigmoid': None,
'Unet-densenet201-sigmoid': None,
'Unet-densenet169-sigmoid': None,
'Unet-densenet121-sigmoid': None,
'PSPNet-vgg19-sigmoid': None,
'PSPNet-vgg16-sigmoid': None,
'PSPNet-seresnext50-sigmoid': None,
'PSPNet-seresnext101-sigmoid': None,
'PSPNet-seresnet50-sigmoid': None,
'PSPNet-seresnet34-sigmoid': None,
'PSPNet-seresnet18-sigmoid': None,
'PSPNet-seresnet152-sigmoid': None,
'PSPNet-seresnet101-sigmoid': None,
'PSPNet-senet154-sigmoid': None,
'PSPNet-resnext50-sigmoid': None,
'PSPNet-resnext101-sigmoid': None,
'PSPNet-resnet50-sigmoid': None,
'PSPNet-resnet34-sigmoid': None,
'PSPNet-resnet18-sigmoid': None,
'PSPNet-resnet152-sigmoid': None,
'PSPNet-resnet101-sigmoid': None,
'PSPNet-mobilenetv2-sigmoid': None,
'PSPNet-mobilenet-sigmoid': None,
'PSPNet-inceptionv3-sigmoid': None,
'PSPNet-inceptionresnetv2-sigmoid': None,
'PSPNet-efficientnetb7-sigmoid': None,
'PSPNet-efficientnetb6-sigmoid': None,
'PSPNet-efficientnetb5-sigmoid': None,
'PSPNet-efficientnetb4-sigmoid': None,
'PSPNet-efficientnetb3-sigmoid': None,
'PSPNet-efficientnetb2-sigmoid': None,
'PSPNet-efficientnetb1-sigmoid': None,
'PSPNet-efficientnetb0-sigmoid': None,
'PSPNet-densenet201-sigmoid': None,
'PSPNet-densenet169-sigmoid': None,
'PSPNet-densenet121-sigmoid': None,
'FPN-vgg19-sigmoid': None,
'FPN-vgg16-sigmoid': None,
'FPN-seresnext50-sigmoid': None,
'FPN-seresnext101-sigmoid': None,
'FPN-seresnet50-sigmoid': None,
'FPN-seresnet34-sigmoid': None,
'FPN-seresnet18-sigmoid': None,
'FPN-seresnet152-sigmoid': None,
'FPN-seresnet101-sigmoid': None,
'FPN-senet154-sigmoid': None,
'FPN-resnext50-sigmoid': None,
'FPN-resnext101-sigmoid': None,
'FPN-resnet50-sigmoid': None,
'FPN-resnet34-sigmoid': None,
'FPN-resnet18-sigmoid': None,
'FPN-resnet152-sigmoid': None,
'FPN-resnet101-sigmoid': None,
'FPN-mobilenetv2-sigmoid': None,
'FPN-mobilenet-sigmoid': None,
'FPN-inceptionv3-sigmoid': None,
'FPN-inceptionresnetv2-sigmoid': None,
'FPN-efficientnetb7-sigmoid': None,
'FPN-efficientnetb6-sigmoid': None,
'FPN-efficientnetb5-sigmoid': None,
'FPN-efficientnetb4-sigmoid': None,
'FPN-efficientnetb3-sigmoid': None,
'FPN-efficientnetb2-sigmoid': None,
'FPN-efficientnetb1-sigmoid': None,
'FPN-efficientnetb0-sigmoid': None,
'FPN-densenet201-sigmoid': None,
'FPN-densenet169-sigmoid': None,
'FPN-densenet121-sigmoid': None,
'Linknet-vgg19-sigmoid': None,
'Linknet-vgg16-sigmoid': {'url': 'https://gitlab.com/baigouy/models/raw/master/model_linknet-vgg16_shells.h5',
# TODO change this
'md5': '266ca9acd9d7a4fe74a473e17952fb6c',
'model': None,
'model_weights': None,
'architecture': 'Linknet',
'backbone': 'vgg16',
'activation': 'sigmoid',
'classes': 7,
'input_width': None,
'input_height': None,
'input_channels': 1,
'version':1},
'Linknet-vgg16-sigmoid-v2': {'url': 'https://gitlab.com/baigouy/models/raw/master/model_linknet-vgg16_shells_v2.h5',
'md5': '98c8a51f3365e77c07a4f9e95669c259',
'model': None,
'model_weights': None,
'architecture': 'Linknet',
'backbone': 'vgg16',
'activation': 'sigmoid',
'classes': 7,
'input_width': None,
'input_height': None,
'input_channels': 1,
'version': 1},
'Linknet-seresnext50-sigmoid': None,
# 'https://github.com/baigouy/models/raw/master/model_Linknet-seresnext101.h5'
'Linknet-seresnext101-sigmoid': {
'url': 'https://gitlab.com/baigouy/models/raw/master/model_Linknet-seresnext101.h5',
'md5': '209f3bf53f3e2f5aaeef62d517e8b8d8',
'model': None,
'model_weights': None,
'architecture': 'Linknet',
'backbone': 'seresnext101',
'activation': 'sigmoid',
'classes': 1,
'input_width': None,
'input_height': None,
'input_channels': 1},
'Linknet-seresnet50-sigmoid': None,
'Linknet-seresnet34-sigmoid': None,
'Linknet-seresnet18-sigmoid': None,
'Linknet-seresnet152-sigmoid': None,
'Linknet-seresnet101-sigmoid': None,
'Linknet-senet154-sigmoid': None,
'Linknet-resnext50-sigmoid': None,
'Linknet-resnext101-sigmoid': None,
'Linknet-resnet50-sigmoid': None,
'Linknet-resnet34-sigmoid': None,
'Linknet-resnet18-sigmoid': None,
'Linknet-resnet152-sigmoid': None,
'Linknet-resnet101-sigmoid': None,
'Linknet-mobilenetv2-sigmoid': None,
'Linknet-mobilenet-sigmoid': None,
'Linknet-inceptionv3-sigmoid': None,
'Linknet-inceptionresnetv2-sigmoid': None,
'Linknet-efficientnetb7-sigmoid': None,
'Linknet-efficientnetb6-sigmoid': None,
'Linknet-efficientnetb5-sigmoid': None,
'Linknet-efficientnetb4-sigmoid': None,
'Linknet-efficientnetb3-sigmoid': None,
'Linknet-efficientnetb2-sigmoid': None,
'Linknet-efficientnetb1-sigmoid': None,
'Linknet-efficientnetb0-sigmoid': None,
'Linknet-densenet201-sigmoid': None,
'Linknet-densenet169-sigmoid': None,
'Linknet-densenet121-sigmoid': None}
# https://www.tensorflow.org/api_docs/python/tf/keras/metrics
# TODO add smlosses iou... also add shortcuts
metrics = {'accuracy': 'accuracy', 'f1_score': f1_score, 'f2_score': f2_score,
'precision': precision, 'iou_score': iou_score,
'recall': recall, 'kullback_leibler_divergence': 'kullback_leibler_divergence',
'mean_absolute_error': 'mean_absolute_error',
'mean_absolute_percentage_error': 'mean_absolute_percentage_error',
'mean_squared_error': 'mean_squared_error', 'msle': 'msle',
'binary_accuracy': 'binary_accuracy', 'binary_crossentropy': 'binary_crossentropy',
'categorical_accuracy': 'categorical_accuracy', 'categorical_crossentropy': 'categorical_crossentropy',
'hinge': 'hinge', 'poisson': 'poisson', 'sparse_categorical_accuracy': 'sparse_categorical_accuracy',
'sparse_categorical_crossentropy': 'sparse_categorical_crossentropy',
'sparse_top_k_categorical_accuracy': 'sparse_top_k_categorical_accuracy',
'top_k_categorical_accuracy': 'top_k_categorical_accuracy', 'squared_hinge': 'squared_hinge',
'cosine_proximity': 'cosine_proximity'}
# https://keras.io/losses/
loss = {'mean_squared_error': 'mean_squared_error',
'mean_absolute_error': 'mean_absolute_error',
'jaccard_loss': jaccard_loss, 'binary_crossentropy': 'binary_crossentropy', 'dice_loss': dice_loss,
'binary_focal_loss': binary_focal_loss, 'categorical_focal_loss': categorical_focal_loss,
'binary_crossentropy': binary_crossentropy, 'categorical_crossentropy': categorical_crossentropy,
'bce_dice_loss': bce_dice_loss, 'bce_jaccard_loss': bce_jaccard_loss, 'cce_dice_loss': cce_dice_loss,
'cce_jaccard_loss': cce_jaccard_loss, 'binary_focal_dice_loss': binary_focal_dice_loss,
'binary_focal_jaccard_loss': binary_focal_jaccard_loss,
'categorical_focal_dice_loss': categorical_focal_dice_loss,
'categorical_focal_jaccard_loss': categorical_focal_jaccard_loss,
'mean_absolute_percentage_error': 'mean_absolute_percentage_error',
'mean_squared_logarithmic_error': 'mean_squared_logarithmic_error', 'squared_hinge': 'squared_hinge',
'hinge': 'hinge', 'categorical_hinge': 'categorical_hinge', 'logcosh': 'logcosh',
'huber_loss': 'huber_loss', 'categorical_crossentropy': 'categorical_crossentropy',
'sparse_categorical_crossentropy': 'sparse_categorical_crossentropy',
'kullback_leibler_divergence': 'kullback_leibler_divergence', 'poisson': 'poisson',
'cosine_proximity': 'cosine_proximity', 'is_categorical_crossentropy': 'is_categorical_crossentropy'}
# TODO explain activation layers
# https://keras.io/activations/
last_layer_activation = ['sigmoid', 'softmax', 'linear', 'relu', 'elu', 'tanh', 'selu', 'softplus', 'softsign',
'hard_sigmoid', 'exponential', 'None']
def __init__(self, use_cpu=False): # TODO handle backbone and type
'''class init
Parameters
----------
use_cpu : boolean
if set to True tf will use CPU (slow) instead of GPU
'''
# use_cpu = True # can be used to test a tf model easily using the CPU while the GPU is running.
print('Using tensorflow version ' + str(tf.__version__))
print('Using segmentation models version ' + sm.__version__)
if use_cpu:
# must be set before model is compiled
self.force_use_cpu()
print('Using CPU')
# gpu_options = tf.GPUOptions(allow_growth=True)
# session = tf.InteractiveSession(config=tf.ConfigProto(gpu_options=gpu_options))
try:
physical_devices = tf.config.list_physical_devices('GPU')
except:
# dirty hack for tf2.0 support for mac OS X anaconda
physical_devices = tf.config.experimental.list_physical_devices('GPU')
for physical_device in physical_devices:
try:
tf.config.experimental.set_memory_growth(physical_device, True)
except:
# Invalid device or cannot modify.
pass
self.stop_cbk = None
self.saver_cbk = None
self.model = None
# encoder_weights=None,
# @staticmethod
def stop_model_training_now(self):
'''Early stop for model training
'''
logger.warning('user stop received, model will stop training at epoch end, please wait...')
if self.stop_cbk is not None:
self.stop_cbk.stop_me = True
if self.saver_cbk is not None:
self.saver_cbk.stop_me = True
def load_model(self, model=None, skip_comile=False):
'''loads a model
Parameters
----------
model : string
path to the model
Returns
-------
model
a tf model or None if fails to load the model
'''
# bunch of custom objects to allow easy reload especially for the sm models
# TODO force it to be in sync with metrics and losses
# TODO should I add more ???
custom_objects = {"softmax": tf.nn.softmax, "iou_score": sm.metrics.iou_score,
'f1_score': f1_score, 'f2_score': f2_score, 'precision': precision,
'recall': recall, 'jaccard_loss': jaccard_loss, 'dice_loss': dice_loss,
'binary_focal_loss': binary_focal_loss, 'categorical_focal_loss': categorical_focal_loss,
'binary_crossentropy': binary_crossentropy, 'categorical_crossentropy': categorical_crossentropy,
'bce_dice_loss': bce_dice_loss, 'bce_jaccard_loss': bce_jaccard_loss,
'cce_dice_loss': cce_dice_loss, 'cce_jaccard_loss': cce_jaccard_loss,
'binary_focal_dice_loss': binary_focal_dice_loss,
'binary_focal_loss_plus_dice_loss': binary_focal_dice_loss,
'binary_focal_jaccard_loss': binary_focal_jaccard_loss,
'binary_crossentropy_plus_dice_loss': bce_dice_loss,
'binary_focal_plus_jaccard_loss': binary_focal_jaccard_loss,
'categorical_focal_dice_loss': categorical_focal_dice_loss,
'categorical_focal_jaccard_loss': categorical_focal_jaccard_loss,
'binary_crossentropy_plus_jaccard_loss': bce_jaccard_loss
}
if model is not None:
if not model.lower().endswith('.json'):
# load non JSON models
if skip_comile:
try:
model_binary = tf.keras.models.load_model(model, custom_objects=custom_objects,
compile=False)
return model_binary
except:
# failed to load the model
traceback.print_exc()
logger.error('Failed loading model')
return None
else:
try:
model_binary = tf.keras.models.load_model(model,
custom_objects=custom_objects)
return model_binary
except:
traceback.print_exc()
logger.error('failed loading model, retrying with compile=False')
try:
model_binary = tf.keras.models.load_model(model, custom_objects=custom_objects, compile=False)
return model_binary
except:
# failed to load the model
traceback.print_exc()
logger.error('Failed loading model')
return None
else:
# load model from a JSON file
with open(model, 'r') as f:
jsonString = f.read()
try:
model_binary = tf.keras.models.model_from_json(jsonString,
custom_objects=custom_objects)
return model_binary
except:
# failed to load the model
traceback.print_exc()
logger.error('failed loading model')
return None
# failed to load the model
return None
# TODO implement decay rate and allow to set learning rate from GUI
# from https://gist.github.com/jeremyjordan/86398d7c05c02396c24661baa4c88165
@staticmethod
def combine(model1, model2):
'''combine too models
Combines two sequential models into one, the output of the first model must be compatible with the input of the second model
Parameters
----------
model1 : model
model2 : model
Returns
-------
model
the combined model
'''
try:
return tf.keras.Model(model1.inputs, model2(model1(model1.inputs)))
except:
traceback.print_exc()
logger.error('models could not be combined sorry') # TODO add more info why that did not work
def compile(self, optimizer='Adam', loss='binary_crossentropy', metrics=['accuracy'],
**kwargs): # added kwargs to ignore extra args without a crash
'''Compile the model
Parameters
----------
optimizer : string or optimizer
the optimizer is the gradient descent algorithm
loss : string or loss
the loss function (to be minimized during training)
metrics : string, list and metrics
the human readable version of the loss
'''
if metrics:
if not isinstance(metrics, list):
metrics = [metrics]
if isinstance(loss, str):
if loss in self.loss:
loss = self.loss[loss]
for idx, metric in enumerate(metrics):
if isinstance(metric, str):
if metric in self.metrics:
metrics[idx] = self.metrics[metric]
self.model.compile(optimizer=optimizer, loss=loss, metrics=metrics)
def force_use_cpu(self):
'''Force tensorflow to use the CPU instead of GPU even if available
'''
# https://stackoverflow.com/questions/40690598/can-keras-with-tensorflow-backend-be-forced-to-use-cpu-or-gpu-at-will
import os
os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID" # see issue #152
os.environ["CUDA_VISIBLE_DEVICES"] = ""
def load_weights(self, weights):
'''Loads model weights
Parameters
----------
weights : model
path to model weights
'''
if weights is not None:
try:
logger.info("Loading weights ' " + str(weights) + "'")
self.model.load_weights(weights)
except:
try:
logger.error("Error --> try loading weights by name, i.e. model and weights don't match ???")
self.model.load_weights(weights, by_name=True)
except:
logger.error("Definitely failed loading weights, there is no match between weights and model!!!")
# TODO could even hack my summary to see the content of nested models, maybe add an option
def summary(self, line_length=150):
'''prints a summary of the model
Parameters
----------
line_length : int
increase the value if model columns appear truncated
'''
if self.model is not None:
logger.info(self.model.summary(line_length=line_length))
else:
logger.error('Please load a model first')
def plot_graph(self, save_name):
'''Draws the model as an image
requires pydot --> need install it
pip install pydot
and
sudo apt install python-pydot python-pydot-ng graphviz # python-pydot-ng isn't in the 14.04 repos
Parameters
----------
save_name : string
save the graph as save_name
'''
logger.info("Plotting model as .png")
try:
tf.keras.utils.plot_model(self.model, to_file=save_name, show_shapes=True, show_layer_names=True)
except:
logger.error('failed to save model layout as png')
def get_inputs_shape(self, remove_batch_size_from_shape=False):
'''Get model input shapes as a list of tuples
Parameters
----------
remove_batch_size_from_shape : boolean
if True removes the first/batch size value of the shape tuple
Returns
-------
list of tuples/shapes
model inputs shapes
'''
shapes = []
inputs = self._get_inputs()
for input in inputs:
shape = input.shape.as_list()
if remove_batch_size_from_shape:
shape = shape[1:]
shapes.append(tuple(shape))
return shapes
def get_outputs_shape(self, remove_batch_size_from_shape=False):
'''Get model output shapes as a list of tuples
Parameters
----------
remove_batch_size_from_shape : boolean
if True removes the first/batch size value of the shape tuple
Returns
-------
list of tuples/shapes
model inputs shapes
'''
shapes = []
outputs = self._get_outputs()
for output in outputs:
shape = output.shape.as_list()
if remove_batch_size_from_shape:
shape = shape[1:]
shapes.append(tuple(shape))
return shapes
# TODO allow recursive and allow index support maybe --> yes do so!!!
@staticmethod
def freeze(model, layer_names=None):
'''Allows to freeze (prevent training) of layers in the model
Parameters
----------
layer_names : string or regex pattern
layer name to freeze
Returns
-------
model
model with the frozen layers
'''
if layer_names is None:
for layer in model.layers:
layer.trainable = False
else:
for layer in model.layers:
try:
model.get_layer(layer_name).trainable = False
continue
except:
pass
for layer_name in layer_names:
# try match layer name using regex
# TODO maybe also check if layer is a model and then do things recursively in it
try:
p = re.compile(layer_name)
if p.match(layer.name):
layer.trainable = False
except:
logger.error('\'' + str(layer_name) + '\' could not be found in model')
pass
@staticmethod
def set_trainable(model, layer_names=None):
'''Set specified layers trainable
Parameters
----------
layer_names : string or regex pattern
layer name to freeze
Returns
-------
model
model with the trainable layers
'''
if layer_names is None:
for layer in model.layers:
layer.trainable = True
else:
for layer in model.layers:
try:
model.get_layer(layer_name).trainable = True
continue
except:
pass
for layer_name in layer_names:
# try match layer name using regex
# TODO maybe also check if layer is a model and then do things recursively in it
try:
p = re.compile(layer_name)
if p.match(layer.name):
layer.trainable = True
except:
logger.error('\'' + str(layer_name) + '\' could not be found in model')
pass
def get_predict_generator(self, input_shape=(512, 512, 1), output_shape=(512, 512, 1), inputs=None,
default_input_tile_width=256, default_input_tile_height=256, tile_width_overlap=32,
tile_height_overlap=32,
input_normalization={'method': 'Rescaling (min-max normalization)', 'range': [0, 1],
'individual_channels': True},
clip_by_frequency=0.05, **kwargs):
'''retruns a predict generator used by models for their predictions
Parameters
----------
input_shape : tuple or list of tuples
desired image shapes for model input
output_shape : tuple or list of tuples
desired output shapes
inputs : list of strings
path to input images/folder
default_input_tile_width : int
default tile width when None in input shape
default_input_tile_height : int
default tile height when None in input shape
tile_width_overlap : int
tile overlap along the x axis
tile_height_overlap : int
tile overlap along the y axis
input_normalization : dict
type of normalisation/standarization to apply to the input image
clip_by_frequency : float, list of floats or None
remove hot/cold pixels by intensity frequency
Returns
-------
generator
an image generator to be used for model predictions
'''
logger.debug('inputs for datagen ' + str(input_shape) + ' ' + str(output_shape) + ' ' + str(inputs) + ' ' + str(
default_input_tile_width) + ' ' + str(default_input_tile_height) + ' ' + str(
tile_width_overlap) + ' ' + str(tile_height_overlap) + ' ' + str(input_normalization) + ' ' + str(
clip_by_frequency) + ' ' + str(kwargs))
if inputs is None:
logger.error('Please specify a valid input folder to build a predict_generator')
return
predict_generator = DataGenerator(inputs=inputs, input_shape=input_shape,
output_shape=output_shape, input_normalization=input_normalization,
clip_by_frequency=clip_by_frequency, is_predict_generator=True,
default_input_tile_width=default_input_tile_width,
default_input_tile_height=default_input_tile_height,
overlap_x=tile_width_overlap,
overlap_y=tile_height_overlap,
**kwargs)
return predict_generator
# TODO check how I can save the settings in a smart way ????
def train(self, metagenerator, progress_callback=None, output_folder_for_models=None, keep_n_best=5,
steps_per_epoch=-1, epochs=100,
batch_size_auto_adjust=False, upon_train_completion_load='last', lr=None, reduce_lr_on_plateau=None, patience=10, **kwargs):
'''train the model
Parameters
----------
metagenerator : datagenerator
a generator yielding input images and ground/truth output to the loss
progress_callback : None or a progress displaying object
output_folder_for_models : string
path to a folder where model need be saved
keep_n_best : int
number of 'best' models to be saved (best = models with lower loss)
steps_per_epoch : int
nb of steps per epoch, if < 0 then run training on fullset
epochs : int
nb of train epochs
batch_size_auto_adjust : boolean
if True, batch size is divided by two every time train fails to run untill batch size reaches 0
'''
# try read model name and save right model name
name = "model"
if self.model._name is not None:
name = self.model._name
# try on an old untouched version
# this works --> where is the fucking bug ????
# DEBUG
# gener = metagenerator.train_generator(infinite=True)
# for inp, out in gener:
# print('saving')
# # en tout cas ça ne marche pas
# print(inp[0].shape, out[0].shape)
#
# print(isinstance(inp, tuple), type(inp))
# Img(inp[0], dimensions='dhwc').save('/home/aigouy/Bureau/trashme_inp.tif')
# Img(out[0], dimensions='dhwc').save('/home/aigouy/Bureau/trashme.tif')
# import sys
# sys.exit(0)
if lr is not None:
self.set_learning_rate(lr)
try:
validation_data = metagenerator.validation_generator(infinite=True)
validation_steps = metagenerator.get_validation_length(first_run=True) # use this to generate data
if reduce_lr_on_plateau is None:
validation_freq = 5 # checks on validation data every 5 steps # TODO set this as a parameter
else:
validation_freq = 1
# TODO IMPORTANT link on how to set the parameters https://segmentation-models.readthedocs.io/en/latest/api.html#unet
if validation_steps is None:
validation_steps = 0
# TODO VERY IMPORTANT need shuffle if not steps_per_epoch == -1 (fullset) --> TODO
if steps_per_epoch == -1:
run_steps_per_epoch = metagenerator.get_train_length(first_run=True)
logger.info('train dataset batches: ' + str(
run_steps_per_epoch) + '\nvalidation dataset batches: ' + str(validation_steps))
else:
# TODO VERY IMPORTANT need shuffle if not steps_per_epoch == -1 (fullset) --> TODO
run_steps_per_epoch = steps_per_epoch
train_data = metagenerator.train_generator(infinite=True)
except:
traceback.print_exc()
logger.error(
'Failed to create datagenerators (see log above), training is therefore impossible, sorry...')
return
# fake_crash = True
result = None
while result is None and metagenerator.batch_size > 0:
try:
# if fake_crash:
# fake_crash = False
# raise Exception('test crash')
self.stop_cbk = myStopCallback()
self.saver_cbk = My_saver_callback(name, self, epochs=epochs,
output_folder_for_models=output_folder_for_models,
keep_n_best=keep_n_best, progress_callback=progress_callback)
callbacks = [self.saver_cbk, self.stop_cbk]
if reduce_lr_on_plateau is not None and reduce_lr_on_plateau < 1:
logger.info('Reduce learning rate on plateau is enabled.')
monitor = "val_loss"
if validation_steps == 0:
monitor='loss'
logger.info('Reduce learning rate is monitoring "'+monitor+'"')
self.reduce_lr = ReduceLROnPlateau(monitor=monitor, factor=reduce_lr_on_plateau, patience=patience, verbose=1, cooldown=1)
# https://stackoverflow.com/questions/51889378/how-to-use-keras-reducelronplateau
callbacks.append(self.reduce_lr)
else:
logger.info('Reduce learning rate on plateau is disabled.')
# if 'reduce_learning_rate' in kwargs and kwargs['reduce_learning_rate']:
# # URGENT TODO add parameters such as decay and epoch
# reduce_learning_rate = self.step_decay_schedule(
# initial_lr=tf.keras.backend.eval(self.model.optimizer.lr))
# callbacks.append(reduce_learning_rate) # TODO not great --> change that soon
if validation_steps != 0:
if tf.__version__ <= "2.0.0":
# hack for tf 2.0.0 support for mac osX (weird bug in tf.keras somewhere)
# https://github.com/tensorflow/tensorflow/issues/31231#issuecomment-586630019
result = self.model.fit_generator(train_data,
validation_data=validation_data,
validation_steps=validation_steps,
validation_freq=validation_freq,
steps_per_epoch=run_steps_per_epoch, epochs=epochs,
callbacks=callbacks,
verbose=1)
else:
result = self.model.fit(train_data,
validation_data=validation_data,
validation_steps=validation_steps,
validation_freq=validation_freq,
steps_per_epoch=run_steps_per_epoch, epochs=epochs,
callbacks=callbacks,
verbose=1)
else:
# same as above without validation
if tf.__version__ <= "2.0.0":
# hack for tf 2.0.0 support for mac osX (weird bug in tf.keras somewhere)
# https://github.com/tensorflow/tensorflow/issues/31231#issuecomment-586630019
result = self.model.fit_generator(train_data,
steps_per_epoch=run_steps_per_epoch, epochs=epochs,
callbacks=callbacks,
verbose=1)
else:
result = self.model.fit(train_data,
steps_per_epoch=run_steps_per_epoch, epochs=epochs,
callbacks=callbacks,
verbose=1)
except:
traceback.print_exc()
if batch_size_auto_adjust:
metagenerator.batch_size = int(metagenerator.batch_size / 2)
if validation_steps != 0:
validation_steps = metagenerator.get_validation_length()
if steps_per_epoch == -1:
run_steps_per_epoch = metagenerator.get_train_length() # need recompute how many steps there will be because of the batch size reduction by 2
else:
traceback.print_exc()
# if user does not want batch size to be adjusted --> quit loop
break
logger.error(
'An error occurred but soft did not crash, most likely batch size is too big, giving rise to oom, reducing bacth size to ' + str(
metagenerator.batch_size))
self.clear_mem()
if result is None:
logger.error(
'Something went wrong during the training, if you get oom, you could try to reduce \'tile input width\' and \'tile input height\'')
else:
# load best or last model (by default last model is loaded...)
if upon_train_completion_load == 'best':
try:
path_to_best_model = self.saver_cbk.get_best_model()
if path_to_best_model is not None:
logger.info("Loading best model '" + str(path_to_best_model) + "'")
self.load_or_build(model=path_to_best_model)
else:
logger.error('No best model found, nothing to load')
if self.model is None:
logger.critical(
'Could not load best model, something wrong happened, please load or build a new model')
except:
traceback.print_exc()
logger.error('Failed to load best model upon training completion')
self.clear_mem()
def clear_mem(self):
'''attempt to clear mem on oom TODO test that it really works
'''
try:
tf.keras.backend.clear_session()
import gc
gc.collect()
# print(len(gc.get_objects())) # to see that it really works
except:
traceback.print_exc()
def predict(self, datagenerator, output_shapes, progress_callback=None, batch_size=1, predict_output_folder=None,
hq_predictions='mean', post_process_algorithm=None, append_this_to_save_name='', **kwargs):
'''run the model
Parameters
----------
datagenerator : datagenerator
a generator yielding model input images
progress_callback : None or a progress displaying object
batch_size : int
by setting it to one you do not really affect speed much but you really ensure that no oom occurs
predict_output_folder : string
path to a folder where model predictions should be saved
'''
logger.debug('hq_predictions mode' + str(hq_predictions))
predict_generator = datagenerator.predict_generator()
# bckup_predict_output_folder = predict_output_folder
TA_mode = False
if predict_output_folder == 'TA_mode':
TA_mode = True
if predict_output_folder is None:
predict_output_folder = ''
self.stop_cbk = myStopCallback()
for i, (files, crop_parameters) in enumerate(predict_generator):
try:
if progress_callback is not None:
progress_callback.emit((i / len(datagenerator.predict_inputs[0])) * 100)
else:
print(str((i / len(datagenerator.predict_inputs[0])) * 100) + '%')
except:
pass
# allow early stop
# do I need to do that ??? probably not...
if self.stop_cbk.stop_me:
return
# we will use this file name to generate the outputname if needed
filename0 = datagenerator._get_from(datagenerator.predict_inputs, i)[0]
filename0_without_path = os.path.basename(filename0)
filename0_without_ext = os.path.splitext(filename0_without_path)[0]
parent_dir_of_filename0 = os.path.dirname(filename0)
TA_output_filename = os.path.join(parent_dir_of_filename0, filename0_without_ext,
'epyseg_raw_predict.tif') # TODO allow custom names here to allow ensemble methods
non_TA_final_output_name = os.path.join(predict_output_folder, filename0_without_ext + '.tif')
filename_to_use_to_save = non_TA_final_output_name
if TA_mode:
filename_to_use_to_save = TA_output_filename
try:
results = self.model.predict(files, verbose=1, batch_size=batch_size)
if hq_predictions is not None:
results = self.get_HQ_predictions(files, results, batch_size=batch_size,
projection_method=hq_predictions)
except:
traceback.print_exc()
logger.error('Could not predict output for image \'' + str(
filename0_without_path) + '\', please check it manually. Prediction continues with the next image.')
continue
if results is None:
logger.warning('Prediction interrupted or failed. Stopping...')
if progress_callback is not None:
progress_callback.emit(100)
return
if isinstance(results, np.ndarray):
results = [results]
for j in range(len(crop_parameters)):
ordered_tiles = Img.linear_to_2D_tiles(results[j], crop_parameters[j])
output_shape = output_shapes[j]
if len(output_shape) == 4:
reconstructed_tile = Img.reassemble_tiles(ordered_tiles, crop_parameters[j])
# print('post_process_algorithm', post_process_algorithm)
# print(reconstructed_tile.dtype)
# print(reconstructed_tile.min())
# print(reconstructed_tile.max())
# print(reconstructed_tile[50,50])
# run post process directly on the image if available
if output_shape[-1]!=7 and (post_process_algorithm is not None and (isinstance(post_process_algorithm, str) and not ('imply' in post_process_algorithm or 'first' in post_process_algorithm))):
logger.error('Model is not compatible with epyseg and cannot be optimized, so the desired post processing cannot be applied, sorry...')
if isinstance(post_process_algorithm, str) and 'imply' in post_process_algorithm: # or output_shape[-1]!=7 # bug why did I put that ??? # if model is incompatible
# simply binarise all
reconstructed_tile = simpleFilter(Img(reconstructed_tile, dimensions='hwc'), **kwargs)
# print('oubsi 1')
Img(reconstructed_tile, dimensions='hwc').save(filename_to_use_to_save+append_this_to_save_name)
del reconstructed_tile
elif post_process_algorithm is not None:
try:
logger.info('post processing/refining mask, please wait...')
# print('post_process_algorithm', post_process_algorithm)
reconstructed_tile = self.run_post_process(Img(reconstructed_tile, dimensions='hwc'),
post_process_algorithm,
progress_callback=progress_callback, **kwargs)
if 'epyseg_raw_predict.tif' in filename_to_use_to_save:
filename_to_use_to_save = filename_to_use_to_save.replace('epyseg_raw_predict.tif',
'handCorrection.tif')
# print('oubsi 2')
# print('bug her"',reconstructed_tile.shape) # most likely not 2D
# Img(reconstructed_tile, dimensions='hw').save(filename_to_use_to_save)
Img(reconstructed_tile).save(filename_to_use_to_save+append_this_to_save_name) # TODO check if that fixes bugs
del reconstructed_tile
except:
logger.error('running post processing/refine mask failed')
traceback.print_exc()
else:
# import tifffile
# tifffile.imwrite('/home/aigouy/Bureau/201104_armGFP_different_lines_tila/predict/test_direct_save.tif', reconstructed_tile, imagej=True)
# print('oubsi 3')
Img(reconstructed_tile, dimensions='hwc').save(filename_to_use_to_save+append_this_to_save_name)
del reconstructed_tile
else:
reconstructed_tile = Img.reassemble_tiles(ordered_tiles, crop_parameters[j], three_d=True)
# run post process directly on the image if available
if output_shape[-1] != 7 and (post_process_algorithm is not None or (
isinstance(post_process_algorithm, str) and 'imply' in post_process_algorithm)):
logger.error(
'Model is not compatible with epyseg and cannot be optimized, so it will simply be thresholded according to selected options, sorry...')
if isinstance(post_process_algorithm, str) and 'imply' in post_process_algorithm: #or output_shape[-1]!=7 --> there was a bug here ...
# simply binarise all
# nb that will NOT WORK TODO FIX BUT OK FOR NOW
# reconstructed_tile = simpleFilter(Img(reconstructed_tile, dimensions='dhwc'), **kwargs)
logger.error('not supported yet please threshold outside the software')
Img(reconstructed_tile, dimensions='dhwc').save(filename_to_use_to_save+append_this_to_save_name)
del reconstructed_tile
elif post_process_algorithm is not None:
try:
logger.info('post processing/refining mask, please wait...')
reconstructed_tile = self.run_post_process(Img(reconstructed_tile, dimensions='dhwc'),
post_process_algorithm,
progress_callback=progress_callback, **kwargs)
if 'epyseg_raw_predict.tif' in filename_to_use_to_save:
filename_to_use_to_save = filename_to_use_to_save.replace('epyseg_raw_predict.tif',
'handCorrection.tif') # nb java TA does not support 3D masks yet --> maybe do that specifically for the python version
Img(reconstructed_tile, dimensions='dhw').save(filename_to_use_to_save+append_this_to_save_name)
del reconstructed_tile
except:
logger.error('running post processing/refine mask failed')
traceback.print_exc()
else:
Img(reconstructed_tile, dimensions='dhwc').save(filename_to_use_to_save+append_this_to_save_name)
del reconstructed_tile
logger.info('saving file as ' + str(filename_to_use_to_save))
del results
try:
if progress_callback is not None:
progress_callback.emit(100)
else:
print(str(100) + '%')
except:
pass
# TODO add median as avg_method
# TODO put this outside of the class
# TODO ask for a save path
# TODO ask for a save path
# TODO ask for a save path
# TODO put outside of the class and add a model parameter
# TODO make it generic so that it can loop on any generator even when they have mutiple inputs and outputs, could be really useful though
# TODO add nested model support and store outside of the class
def is_model_compiled(self):
'''returns True if model is compiled, False otherwise
'''
if self.model is None:
logger.error("Model not loaded, can't check its compilation status...")
return False
return self.model.optimizer is not None
def get_loaded_model_params(self):
'''prints model optimizer and its parameters
'''
try:
print(self.model.optimizer)
if self.model.optimizer is None:
print(
'No training configuration found in save file: the model was *not* compiled. Compile it manually.')
return
print(tf.keras.backend.eval((self.model.optimizer.lr)))
print('name', self.model.optimizer._name)
try:
# print learning rates, decay and total model iterations
print('lr', tf.keras.backend.eval(self.model.optimizer.lr))
print('lr2', tf.keras.backend.eval(self.model.optimizer.decay))
print('lr3', tf.keras.backend.eval(self.model.optimizer.iterations))
except:
pass
try:
print('lr4', tf.keras.backend.eval(self.model.optimizer.beta_1))
print('lr5', tf.keras.backend.eval(self.model.optimizer.beta_2))
except:
pass
print(self.model.optimizer) # prints the optimizer
print(
self.model.optimizer.__dict__) # this contains a lot of the model infos
print(self.model.optimizer._hyper)
print(self.model.optimizer._hyper['learning_rate'])
print('_iterations', tf.keras.backend.eval(
self.model.optimizer.__dict__['_iterations'])) # probably the total nb of iterations
print('learning_rate', tf.keras.backend.eval(self.model.optimizer._hyper['learning_rate']))
print('decay', tf.keras.backend.eval(self.model.optimizer._hyper['decay']))
print('beta_1', tf.keras.backend.eval(self.model.optimizer._hyper['beta_1']))
print('beta_2', tf.keras.backend.eval(self.model.optimizer._hyper['beta_2']))
except:
pass
if __name__ == '__main__':
deepTA = EZDeepLearning()
deepTA.load_or_build(architecture='Unet', backbone='vgg19', activation='sigmoid', classes=1)
# deepTA.load_or_build(model='/path/to/model.h5')
deepTA.get_loaded_model_params()
deepTA.summary()
print(deepTA._get_inputs())
print(deepTA._get_outputs())
print('input shapes', deepTA.get_inputs_shape())
print('output shapes', deepTA.get_outputs_shape())
input_shape = deepTA.get_inputs_shape()
output_shape = deepTA.get_outputs_shape()
input_normalization = {'method': 'Rescaling (min-max normalization)', 'range': [0, 1],
'individual_channels': True}
# metaAugmenter = MetaAugmenter.get_epithelia_data_augmenter()
#
# optimizer = 'adam' # 'adadelta' # 'adam' #Adam() #keras.optimizers.Adam() #Adam(lr=1e-4) #optimizer='rmsprop' #'sgd' #keras.optimizers.SGD(learning_rate=learning_rate_fn)
# loss = sm.losses.jaccard_loss #'binary_crossentropy' # 'binary_crossentropy' #'categorical_crossentropy' #'mean_squared_error'#'mean_squared_error' #sm.losses.bce_jaccard_loss #'binary_crossentropy' #'mean_squared_error'
# metrics = [sm.metrics.iou_score] # 'accuracy' # ['binary_accuracy'] #[sm.metrics.iou_score] #['accuracy'] ['binary_accuracy'] ['mae']
#
# # TRAIN SETTINGS
# if not deepTA.is_model_compiled():
# print('compiling model')
# deepTA.compile(optimizer=optimizer, loss=loss, metrics=metrics)
#
# NB_EPOCHS = 100 # 80 # 100 # 10
#
# deepTA.get_loaded_model_params()
# deepTA.train(metaAugmenter, epochs=NB_EPOCHS, batch_size_auto_adjust=True)
#
# deepTA.saveModel()
# deepTA.saveAsJsonWithWeights()
# deepTA.plot_graph(deepTA.model._name + '_graph.png')
default_input_width = 256 # 576 # 128 # 64
default_input_height = 256 # 576 # 128 # 64
predict_generator = deepTA.get_predict_generator(
inputs=['/home/aigouy/Bureau/last_model_not_sure_that_works/tmp/'], input_shape=input_shape,
output_shape=output_shape, default_input_tile_width=default_input_width,
default_input_tile_height=default_input_height,
tile_width_overlap=32,
tile_height_overlap=32, input_normalization=input_normalization, clip_by_frequency=0.05)
predict_output_folder = os.path.join('/D/datasets_deep_learning/keras_segmentation_dataset/TA_test_set/trash',
deepTA.model._name if deepTA.model._name is not None else 'model') # 'TA_mode'
deepTA.predict(predict_generator, output_shape, predict_output_folder=predict_output_folder,
batch_size=1) | [
11748,
28686,
198,
198,
6738,
11192,
273,
11125,
13,
29412,
13,
6122,
292,
13,
13345,
10146,
1330,
44048,
35972,
2202,
3646,
378,
559,
198,
198,
6738,
2462,
88,
325,
70,
13,
7353,
14681,
13,
10379,
4354,
2093,
1330,
2829,
22417,
198,
... | 1.975529 | 27,461 |
"""
Wrapper to send email
"""
# Import smtplib for the actual sending function
import smtplib
# Here are the email package modules we'll need
from email.mime.image import MIMEImage
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
def send_email(subject, from_, to_,
message_txt="",
message_HTML="",
images=[],
server="", port=465,
login="",
passwd="",
debug=0):
"""
allow some default values to reduce overhead
:return:
"""
COMMASPACE = ', '
# Create the container (outer) email message.
msg = MIMEMultipart()
msg['Subject'] = subject
msg['From'] = from_ # the sender's email address
msg['To'] = COMMASPACE.join(to_) # the list of all recipients' email addresses
if message_txt:
msg.attach(MIMEText(message_txt, 'plain'))
elif message_HTML:
msg.attach(MIMEText(message_HTML, 'html', _charset='utf-8'))
# Assume we know that the image files are all in PNG format
for file in images:
# Open the files in binary mode. Let the MIMEImage class automatically
# guess the specific image type.
with open(file, 'rb') as fp:
img = MIMEImage(fp.read())
img.add_header('Content-Disposition', 'attachment')
msg.attach(img)
if debug >= 3:
print(msg['From'])
print(msg['To'])
print(message_txt)
print(message_HTML)
print(server)
# Send the email via our own SMTP server.
s = smtplib.SMTP_SSL(server, port)
if debug >= 3:
print(s)
is_logged = s.login(login, passwd)
if debug >= 3:
print(is_logged)
#is_sent = s.send_message(msg)
is_sent = s.sendmail(msg['From'], msg['To'], msg.as_string())
if debug >= 3:
print(is_sent)
s.quit()
if __name__ == "__main__":
from model_db import Database
db = Database()
with open("/home/pi/zhima/Private/night_batch.log", "rt") as txt:
msg = txt.readlines()
send_email("XCJ Zhima night report",
from_=db.mailbox["username"],
to_ = ["ericgibert@yahoo.fr",],
message_txt="".join(msg),
# message_HTML = """
# From the <b>__main__</b> part of send_email.py module
# <p>網站有中、英文版本,也有繁、簡體版</p>
# """,
# images=[r"/home/pi/zhima/Private/night_batch.log"],
server=db.mailbox["server"], port=db.mailbox["port"],
login=db.mailbox["username"], passwd=db.mailbox["password"],
debug=3)
| [
37811,
198,
220,
220,
220,
27323,
2848,
284,
3758,
3053,
198,
37811,
198,
2,
17267,
895,
83,
489,
571,
329,
262,
4036,
7216,
2163,
198,
11748,
895,
83,
489,
571,
198,
198,
2,
3423,
389,
262,
3053,
5301,
13103,
356,
1183,
761,
198,
... | 2.101256 | 1,274 |
"""
Plotting module product data returned from keepa interface module
"""
import datetime
import warnings
import numpy as np
from keepaAPI import keepaTime, ParseCSV
try:
import matplotlib.pyplot as plt
plt_loaded = True
except BaseException as e:
plt_loaded = False
warnings.warn('keepaAPI plotting unavailable: %s' % str(e))
def PlotProduct(product, keys=['AMAZON', 'USED', 'COUNT_USED', 'SALES'],
price_limit=1000):
"""
Plots a product using matplotlib
Parameters
----------
product : list
Single product from keepaAPI.ProductQuery
keys : list, optional
Keys to plot. Defaults to ['AMAZON', 'USED', 'COUNT_USED', 'SALES']
price_limit : float, optional
Prices over this value will not be plotted. Used to ignore
extreme prices.
"""
if not plt_loaded:
raise Exception('Plotting not available. Install matplotlib with:\n' +
'pip install matplotlib')
if 'data' not in product:
product['data'] = ParseCSV[product['csv']]
# Use all keys if not specified
if not keys:
keys = product['data'].keys()
# Create three figures, one for price data, offers, and sales rank
pricefig, priceax = plt.subplots(figsize=(10, 5))
pricefig.canvas.set_window_title('Product Price Plot')
plt.title(product['title'])
plt.xlabel('Date')
plt.ylabel('Price')
pricelegend = []
offerfig, offerax = plt.subplots(figsize=(10, 5))
offerfig.canvas.set_window_title('Product Offer Plot')
plt.title(product['title'])
plt.xlabel('Date')
plt.ylabel('Listings')
offerlegend = []
salesfig, salesax = plt.subplots(figsize=(10, 5))
salesfig.canvas.set_window_title('Product Sales Rank Plot')
plt.title(product['title'])
plt.xlabel('Date')
plt.ylabel('Sales Rank')
saleslegend = []
# Add in last update time
lstupdate = keepaTime.KeepaMinutesToTime(product['lastUpdate'])
# Attempt to plot each key
for key in keys:
# Continue if key does not exist
if key not in product['data']:
print('%s not in product' % key)
continue
elif 'SALES' in key and 'time' not in key:
if product['data'][key].size == 1:
print('%s not in product' % key)
continue
x = np.append(product['data'][key + '_time'], lstupdate)
y = np.append(product['data'][key],
product['data'][key][-1]).astype(np.float)
ReplaceInvalid(y)
salesax.step(x, y, where='pre')
saleslegend.append(key)
elif 'COUNT_' in key and 'time' not in key:
x = np.append(product['data'][key + '_time'], lstupdate)
y = np.append(product['data'][key],
product['data'][key][-1]).astype(np.float)
ReplaceInvalid(y)
offerax.step(x, y, where='pre')
offerlegend.append(key)
elif 'time' not in key:
x = np.append(product['data'][key + '_time'], lstupdate)
y = np.append(product['data'][key],
product['data'][key][-1]).astype(np.float)
ReplaceInvalid(y, max_value=price_limit)
priceax.step(x, y, where='pre')
pricelegend.append(key)
# Add in legends or close figure
if pricelegend:
priceax.legend(pricelegend)
else:
plt.close(pricefig)
if offerlegend:
offerax.legend(offerlegend)
else:
plt.close(offerfig)
if not saleslegend:
plt.close(salesfig)
plt.show(block=True)
plt.draw()
def ReplaceInvalid(arr, max_value=None):
""" Replace invalid data with nan """
with np.warnings.catch_warnings():
np.warnings.filterwarnings('ignore')
arr[arr < 0.0] = np.nan
if max_value:
arr[arr > max_value] = np.nan
| [
37811,
198,
43328,
889,
8265,
1720,
1366,
4504,
422,
1394,
64,
7071,
8265,
198,
37811,
198,
11748,
4818,
8079,
198,
11748,
14601,
198,
11748,
299,
32152,
355,
45941,
198,
6738,
1394,
64,
17614,
1330,
1394,
64,
7575,
11,
2547,
325,
7902,... | 2.221847 | 1,776 |
# -*- coding: utf-8 -*-
""" This file contains view functions for Flask-User forms.
:copyright: (c) 2013 by Ling Thio
:author: Ling Thio (ling.thio@gmail.com)
:license: Simplified BSD License, see LICENSE.txt for more details."""
from datetime import datetime
from flask import current_app, flash, redirect, request, url_for
from flask_login import current_user, login_user, logout_user
from .decorators import confirm_email_required, login_required
from . import emails
from . import signals
from .models import User
from .translations import gettext as _
# Python version specific imports
from sys import version_info as py_version
is_py2 = (py_version[0] == 2) #: Python 2.x?
is_py3 = (py_version[0] == 3) #: Python 3.x?
if is_py2:
from urlparse import urlsplit, urlunsplit
from urllib import quote, unquote
if is_py3:
from urllib.parse import urlsplit, urlunsplit
from urllib.parse import quote, unquote
def confirm_email(token):
    """Validate an email-confirmation token and activate the account.

    On success the matching User (or UserEmail) record is stamped with the
    current UTC time, the account is activated, and the user is either
    auto-logged in or redirected to the login page, depending on the
    ``auto_login_after_confirm`` setting.
    """
    manager = current_app.user_manager
    adapter = manager.db_adapter

    # Decode the token and check it against the configured expiration window.
    is_valid, has_expired, object_id = manager.verify_token(
        token,
        manager.confirm_email_expiration)

    if has_expired:
        flash(_('Seu token de confirmacao expirou.'), 'error')
        return redirect(url_for('user.login'))

    if not is_valid:
        flash(_('Token de confirmacao invalido.'), 'error')
        return redirect(url_for('user.login'))

    # Confirm the email: either on a separate UserEmail record
    # (multi-email setups) or directly on the User record.
    user = None
    if adapter.UserEmailClass:
        user_email = manager.get_user_email_by_id(object_id)
        if user_email:
            user_email.confirmed_at = datetime.utcnow()
            user = user_email.user
    else:
        user_email = None
        user = manager.get_user_by_id(object_id)
        if user:
            user.confirmed_at = datetime.utcnow()

    if not user:  # pragma: no cover
        # Token decoded cleanly but no matching account exists.
        flash(_('Token de confirmacao invalido.'), 'error')
        return redirect(url_for('user.login'))

    user.set_active(True)
    adapter.commit()

    # Notify listeners that the email was confirmed.
    signals.user_confirmed_email.send(
        current_app._get_current_object(), user=user)

    # One-time success message.
    flash(_('Seu email foi confirmado.'), 'success')

    safe_next = _get_safe_next_param(
        'next', manager.after_confirm_endpoint)
    if manager.auto_login_after_confirm:
        # Sign the freshly confirmed user straight in.
        return _do_login_user(user, safe_next)
    # Otherwise bounce to the login page, preserving the 'next' target.
    return redirect(
        url_for('user.login') + '?next=' + quote(safe_next))
@login_required
@confirm_email_required
def change_password():
    """Show the change-password form and update the user's password.

    A valid POST hashes the new password, stores it, optionally emails a
    notification, fires the ``user_changed_password`` signal and redirects
    to the sanitized 'next' URL; GET or an invalid POST re-renders the form.
    """
    manager = current_app.user_manager
    adapter = manager.db_adapter

    # Build the form and seed its 'next' field with a safe redirect target.
    form = manager.change_password_form(request.form)
    form.next.data = _get_safe_next_param(
        'next', manager.after_change_password_endpoint)

    if request.method == 'POST' and form.validate():
        # Store the hash of the newly chosen password.
        new_hash = manager.hash_password(form.new_password.data)
        manager.update_password(current_user, new_hash)

        # Optionally notify the user by email.
        if manager.enable_email and manager.send_password_changed_email:
            emails.send_password_changed_email(current_user)

        # Let listeners know the password changed.
        signals.user_changed_password.send(
            current_app._get_current_object(), user=current_user)

        # One-time success message.
        flash(_('Sua senha foi modificada com sucesso.'), 'success')

        # Redirect to the sanitized 'next' URL.
        return redirect(manager.make_safe_url_function(form.next.data))

    # GET or invalid POST: show the form again.
    return render(manager.change_password_template, form=form)
@login_required
@confirm_email_required
def change_username():
    """Prompt for a new username and old password and change
    the user's username.

    A valid POST writes the new username (to the UserAuth record when the
    app uses one, otherwise to the User itself), optionally emails a
    notification, fires the ``user_changed_username`` signal and redirects
    to the sanitized 'next' URL; GET or an invalid POST re-renders the form.
    """
    # NOTE: the original stacked seven duplicate @login_required decorators;
    # one application is sufficient and behaviorally identical.
    user_manager = current_app.user_manager
    db_adapter = user_manager.db_adapter

    # Initialize form and seed its 'next' field with a safe redirect target.
    form = user_manager.change_username_form(request.form)
    safe_next = _get_safe_next_param(
        'next', user_manager.after_change_username_endpoint)
    form.next.data = safe_next

    # Process valid POST
    if request.method == 'POST' and form.validate():
        new_username = form.new_username.data

        # The username may live on a separate UserAuth record.
        if db_adapter.UserAuthClass and hasattr(current_user, 'user_auth'):
            user_auth = current_user.user_auth
        else:
            user_auth = current_user
        db_adapter.update_object(user_auth, username=new_username)
        db_adapter.commit()

        # Send 'username_changed' email
        if user_manager.enable_email and \
                user_manager.send_username_changed_email:
            emails.send_username_changed_email(current_user)

        # Send username_changed signal
        signals.user_changed_username.send(
            current_app._get_current_object(), user=current_user)

        # Prepare one-time system message
        flash(_(
            "Seu username foi modificado para '%(username)s'.",
            username=new_username), 'success'
        )

        # Redirect to 'next' URL
        safe_next = user_manager.make_safe_url_function(form.next.data)
        return redirect(safe_next)

    # Process GET or invalid POST
    return render(user_manager.change_username_template, form=form)
@login_required
@confirm_email_required
def email_action(id, action):
    """Apply *action* ('delete', 'make-primary' or 'confirm') to the
    UserEmail record identified by *id*.

    Users may only act on their own email addresses; any other access —
    including an unknown *action* or deleting the primary address —
    returns the unauthorized response.
    """
    manager = current_app.user_manager
    adapter = manager.db_adapter

    # Look up the UserEmail record by primary key.
    target = adapter.find_first_object(adapter.UserEmailClass, id=id)

    # A user may only operate on their own email addresses.
    if not target or target.user_id != int(current_user.get_id()):
        return unauthorized()

    if action == 'delete':
        # The primary address must never be removed.
        if target.is_primary:
            return unauthorized()
        adapter.delete_object(target)
        adapter.commit()
    elif action == 'make-primary':
        # Demote every address of this user, then promote the target.
        for other in adapter.find_all_objects(
                adapter.UserEmailClass, user_id=int(current_user.get_id())):
            if other.is_primary:
                other.is_primary = False
        target.is_primary = True
        adapter.commit()
    elif action == 'confirm':
        # Re-send the confirmation email for this address.
        _send_confirm_email(target.user, target)
    else:
        return unauthorized()

    return redirect(url_for('user.manage_emails'))
def forgot_password():
    """Ask for an email address and send a reset-password message to it.

    The confirmation flash is shown whether or not the address matches an
    account, so the form does not leak which emails are registered.
    """
    manager = current_app.user_manager
    adapter = manager.db_adapter

    # Build the form from the posted data (empty on GET).
    form = manager.forgot_password_form(request.form)

    if request.method == 'POST' and form.validate():
        email = form.email.data
        user, _user_email = manager.find_user_by_email(email)
        if user:
            manager.send_reset_password_email(email)

        # One-time message, shown regardless of whether the email matched.
        flash(_("Um email para resetar a senha foi enviado para '%(email)s'."
                " Siga as instrucoes do email para resetar sua senha.",
                email=email), 'success')

        # Back to the configured post-forgot-password page (login by default).
        return redirect(_endpoint_url(
            manager.after_forgot_password_endpoint))

    # GET or invalid POST: show the form again.
    return render(manager.forgot_password_template, form=form)
def login():
""" Prompt for username/email and password and sign the user in."""
user_manager = current_app.user_manager
db_adapter = user_manager.db_adapter
safe_next = _get_safe_next_param('next', user_manager.after_login_endpoint)
safe_reg_next = _get_safe_next_param(
'reg_next', user_manager.after_register_endpoint)
# Immediately redirect already logged in users
if _call_or_get(current_user.is_authenticated) and \
user_manager.auto_login_at_login:
return redirect(safe_next)
# Initialize form
login_form = user_manager.login_form(request.form) # for login.html
register_form = user_manager.register_form() # for login_or_register.html
if request.method != 'POST':
login_form.next.data = register_form.next.data = safe_next
login_form.reg_next.data = register_form.reg_next.data = safe_reg_next
# Process valid POST
if request.method == 'POST' and login_form.validate():
# Retrieve User
user = None
user_email = None
if user_manager.enable_username:
# Find user record by username
user = user_manager.find_user_by_username(login_form.username.data)
user_email = None
# Find primary user_email record
if user and db_adapter.UserEmailClass:
user_email = db_adapter.find_first_object(
db_adapter.UserEmailClass,
user_id=int(user.get_id()),
is_primary=True,
)
# Find user record by email (with form.username)
if not user and user_manager.enable_email:
user, user_email = user_manager.find_user_by_email(
login_form.username.data)
else:
# Find user by email (with form.email)
user, user_email = user_manager.find_user_by_email(
login_form.email.data)
if user:
# Log user in
safe_next = user_manager.make_safe_url_function(
login_form.next.data)
return _do_login_user(user, safe_next, login_form.remember_me.data)
# Process GET or invalid POST
return render(user_manager.login_template,
form=login_form,
login_form=login_form,
register_form=register_form)
def logout():
""" Sign the user out."""
user_manager = current_app.user_manager
# Send user_logged_out signal
signals.user_logged_out.send(
current_app._get_current_object(), user=current_user)
# Use Flask-Login to sign out user
logout_user()
# Prepare one-time system message
flash(_('Voce deslogou com sucesso.'), 'success')
# Redirect to logout_next endpoint or '/'
safe_next = _get_safe_next_param(
'next', user_manager.after_logout_endpoint)
return redirect(safe_next)
@login_required
@confirm_email_required
def register():
""" Display registration form and create new User."""
user_manager = current_app.user_manager
db_adapter = user_manager.db_adapter
safe_next = _get_safe_next_param('next', user_manager.after_login_endpoint)
safe_reg_next = _get_safe_next_param(
'reg_next', user_manager.after_register_endpoint)
# Initialize form
login_form = user_manager.login_form() # for login_or_register.html
register_form = user_manager.register_form(
request.form) # for register.html
# invite token used to determine validity of registeree
invite_token = request.values.get("token")
# require invite without a token should disallow the user from registering
if user_manager.require_invitation and not invite_token:
flash("Cadastro permitido apenas por convite.", "error")
return redirect(url_for('user.login'))
user_invite = None
if invite_token and db_adapter.UserInvitationClass:
user_invite = db_adapter.find_first_object(
db_adapter.UserInvitationClass, token=invite_token)
if user_invite:
register_form.invite_token.data = invite_token
else:
flash("Token de convite invalido.", "error")
return redirect(url_for('user.login'))
if request.method != 'POST':
login_form.next.data = register_form.next.data = safe_next
login_form.reg_next.data = register_form.reg_next.data = safe_reg_next
if user_invite:
register_form.email.data = user_invite.email
# Process valid POST
if request.method == 'POST' and register_form.validate():
""" Create a User object using Form fields
that have a corresponding User field"""
User = db_adapter.UserClass
user_class_fields = User.__dict__
user_fields = {}
""" Create a UserEmail object using Form fields
that have a corresponding UserEmail field"""
if db_adapter.UserEmailClass:
UserEmail = db_adapter.UserEmailClass
user_email_class_fields = UserEmail.__dict__
user_email_fields = {}
""" Create a UserAuth object using Form fields
that have a corresponding UserAuth field"""
if db_adapter.UserAuthClass:
UserAuth = db_adapter.UserAuthClass
user_auth_class_fields = UserAuth.__dict__
user_auth_fields = {}
# Enable user account
if db_adapter.UserProfileClass:
if hasattr(db_adapter.UserProfileClass, 'active'):
user_auth_fields['active'] = True
elif hasattr(db_adapter.UserProfileClass, 'is_enabled'):
user_auth_fields['is_enabled'] = True
else:
user_auth_fields['is_active'] = True
else:
if hasattr(db_adapter.UserClass, 'active'):
user_fields['active'] = True
elif hasattr(db_adapter.UserClass, 'is_enabled'):
user_fields['is_enabled'] = True
else:
user_fields['is_active'] = True
# For all form fields
for field_name, field_value in register_form.data.items():
# Hash password field
if field_name == 'password':
hashed_password = user_manager.hash_password(field_value)
if db_adapter.UserAuthClass:
user_auth_fields['password'] = hashed_password
else:
user_fields['password'] = hashed_password
else:
if field_name in user_class_fields:
user_fields[field_name] = field_value
if db_adapter.UserEmailClass:
if field_name in user_email_class_fields:
user_email_fields[field_name] = field_value
if db_adapter.UserAuthClass:
if field_name in user_auth_class_fields:
user_auth_fields[field_name] = field_value
# Add User record using named arguments 'user_fields'
user = db_adapter.add_object(User, **user_fields)
if db_adapter.UserProfileClass:
user_profile = user
# Add UserEmail record using named arguments 'user_email_fields'
if db_adapter.UserEmailClass:
user_email = db_adapter.add_object(
UserEmail,
user=user,
is_primary=True,
**user_email_fields
)
else:
user_email = None
# Add UserAuth record using named arguments 'user_auth_fields'
if db_adapter.UserAuthClass:
user_auth = db_adapter.add_object(UserAuth, **user_auth_fields)
if db_adapter.UserProfileClass:
user = user_auth
else:
user.user_auth = user_auth
require_email_confirmation = True
if user_invite:
if user_invite.email == register_form.email.data:
require_email_confirmation = False
db_adapter.update_object(user, confirmed_at=datetime.utcnow())
db_adapter.commit()
# Send 'registered' email and delete new User object if send fails
if user_manager.send_registered_email:
try:
# Send 'registered' email
_send_registered_email(
user, user_email, require_email_confirmation)
except Exception as e:
# delete new User object if send fails
db_adapter.delete_object(user)
db_adapter.commit()
raise
# Send user_registered signal
signals.user_registered.send(current_app._get_current_object(),
user=user,
user_invite=user_invite)
# Redirect if USER_ENABLE_CONFIRM_EMAIL is set
if user_manager.enable_confirm_email and require_email_confirmation:
safe_reg_next = user_manager.make_safe_url_function(
register_form.reg_next.data)
return redirect(safe_reg_next)
# Auto-login after register or redirect to login page
if 'reg_next' in request.args:
safe_reg_next = user_manager.make_safe_url_function(
register_form.reg_next.data)
else:
safe_reg_next = _endpoint_url(user_manager.after_confirm_endpoint)
if user_manager.auto_login_after_register:
return _do_login_user(user, safe_reg_next) # auto-login
else:
return redirect(
url_for('user.login')+'?next='+quote(safe_reg_next)
) # redirect to login page
# Process GET or invalid POST
return render(
user_manager.register_template,
form=register_form,
login_form=login_form,
register_form=register_form
)
@login_required
def invite():
""" Allows users to send invitations to register an account """
user_manager = current_app.user_manager
db_adapter = user_manager.db_adapter
invite_form = user_manager.invite_form(request.form)
if request.method == 'POST' and invite_form.validate():
email = invite_form.email.data
User = db_adapter.UserClass
user_class_fields = User.__dict__
user_fields = {
"email": email
}
user, user_email = user_manager.find_user_by_email(email)
if user:
flash("Usuario com este email ja foi cadastrado.", "error")
return redirect(url_for('user.invite'))
else:
user_invite = db_adapter \
.add_object(db_adapter.UserInvitationClass, **{
"email": email,
"invited_by_user_id": current_user.id
})
db_adapter.commit()
token = user_manager.generate_token(user_invite.id)
accept_invite_link = url_for('user.register',
token=token,
_external=True)
# Store token
if hasattr(db_adapter.UserInvitationClass, 'token'):
user_invite.token = token
db_adapter.commit()
try:
# Send 'invite' email
emails.send_invite_email(user_invite, accept_invite_link)
except Exception as e:
# delete new User object if send fails
db_adapter.delete_object(user_invite)
db_adapter.commit()
raise
signals \
.user_sent_invitation \
.send(current_app._get_current_object(), user_invite=user_invite,
form=invite_form)
flash(_('Convite enviado.'), 'success')
safe_next = _get_safe_next_param(
'next', user_manager.after_invite_endpoint)
return redirect(safe_next)
return render(user_manager.invite_template, form=invite_form)
def resend_confirm_email():
"""Prompt for email and re-send email conformation email."""
user_manager = current_app.user_manager
db_adapter = user_manager.db_adapter
# Initialize form
form = user_manager.resend_confirm_email_form(request.form)
# Process valid POST
if request.method == 'POST' and form.validate():
email = form.email.data
# Find user by email
user, user_email = user_manager.find_user_by_email(email)
if user:
_send_confirm_email(user, user_email)
# Redirect to the login page
return redirect(_endpoint_url(
user_manager.after_resend_confirm_email_endpoint))
# Process GET or invalid POST
return render(user_manager.resend_confirm_email_template, form=form)
def reset_password(token):
""" Verify the password reset token, Prompt for new password,
and set the user's password."""
# Verify token
user_manager = current_app.user_manager
db_adapter = user_manager.db_adapter
if _call_or_get(current_user.is_authenticated):
logout_user()
is_valid, has_expired, user_id = user_manager.verify_token(
token,
user_manager.reset_password_expiration)
if has_expired:
flash(_('Seu token para resetar a senha expirou.'), 'error')
return redirect(_endpoint_url(user_manager.login_endpoint))
if not is_valid:
flash(_('Seu token para resetar a senha e invalido.'), 'error')
return redirect(_endpoint_url(user_manager.login_endpoint))
user = user_manager.get_user_by_id(user_id)
# Mark email as confirmed
user_email = emails.get_primary_user_email(user)
user_email.confirmed_at = datetime.utcnow()
db_adapter.commit()
# Initialize form
form = user_manager.reset_password_form(request.form)
# Process valid POST
if request.method == 'POST' and form.validate():
# Change password
hashed_password = user_manager.hash_password(form.new_password.data)
if db_adapter.UserAuthClass and hasattr(user, 'user_auth'):
user_auth = user.user_auth
else:
user_auth = user
db_adapter.update_object(user_auth, password=hashed_password)
db_adapter.commit()
# Send 'password_changed' email
if user_manager.enable_email and \
user_manager.send_password_changed_email:
emails.send_password_changed_email(user)
# Prepare one-time system message
flash(_("Sua senha foi resetada com sucesso."), 'success')
# Auto-login after reset password or redirect to login page
safe_next = _get_safe_next_param(
'next', user_manager.after_reset_password_endpoint)
if user_manager.auto_login_after_reset_password:
return _do_login_user(user, safe_next) # auto-login
else:
return redirect(
url_for('user.login')+'?next='+quote(safe_next)
) # redirect to login page
# Process GET or invalid POST
return render(user_manager.reset_password_template, form=form)
def unconfirmed():
""" Prepare a Flash message and redirect to USER_UNCONFIRMED_ENDPOINT"""
# Prepare Flash message
url = request.script_root + request.path
flash(_(
"Voce deve confirmar seu email para acessar '%(url)s'.", url=url),
'error'
)
# Redirect to USER_UNCONFIRMED_EMAIL_ENDPOINT
user_manager = current_app.user_manager
return redirect(_endpoint_url(user_manager.unconfirmed_email_endpoint))
def unauthenticated():
""" Prepare a Flash message and redirect to
USER_UNAUTHENTICATED_ENDPOINT"""
user_manager = current_app.user_manager
# Prepare Flash message
url = request.url
flash(_(
"Voce deve estar logado para acessar '%(url)s'.", url=url), 'error')
# Redirect to USER_UNAUTHENTICATED_ENDPOINT
safe_next = user_manager.make_safe_url_function(url)
return redirect(_endpoint_url(
user_manager.unauthenticated_endpoint)+'?next='+quote(safe_next))
def unauthorized():
""" Prepare a Flash message and redirect to USER_UNAUTHORIZED_ENDPOINT"""
# Prepare Flash message
url = request.script_root + request.path
flash(_(
"Voce nao tem permissao para acessar '%(url)s'.", url=url), 'error')
# Redirect to USER_UNAUTHORIZED_ENDPOINT
user_manager = current_app.user_manager
return redirect(_endpoint_url(user_manager.unauthorized_endpoint))
@login_required
@confirm_email_required
""" Turns an usafe absolute URL into a safe relative URL by removing the
scheme and the hostname
Example: make_safe_url('http://hostname/path1/path2?q1=v1&q2=v2#fragment')
returns: '/path1/path2?q1=v1&q2=v2#fragment"""
# 'next' and 'reg_next' query parameters contain quoted (URL-encoded) URLs
# that may contain unsafe hostnames.
# Return the query parameter as a safe, unquoted URL
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
37811,
770,
2393,
4909,
1570,
5499,
329,
46947,
12,
12982,
5107,
13,
628,
220,
220,
220,
1058,
22163,
4766,
25,
357,
66,
8,
2211,
416,
25116,
536,
952,
198,
220,
220,
... | 2.32844 | 10,900 |
from django.db.models import Q
from rest_framework import viewsets
from rest_framework.authentication import SessionAuthentication
from rest_framework.permissions import IsAuthenticated
from rest_framework_jwt.authentication import JSONWebTokenAuthentication
from backend.models import PreSupport, Implement, AfterSupport, Service, ServiceProcess, FilesModel
from backend.serializers import ServiceDetailSerializer, ServiceCreateSerializer, ServiceSerializer, \
AfterSupportDetailSerializer, AfterSupportSerializer, AfterSupportCreateSerializer, ImplementSerializer, \
ImplementCreateSerializer, ImplementDetailSerializer, PreSupportDetailSerializer, PreSupportCreateSerializer, \
PreSupportSerializer, ServiceProcessSerializer, FilesSerializer
from customer.views import Pagination
from utils.permissions import IsOwnerOrReadOnly
class PreSupportViewset(viewsets.ModelViewSet):
""" 售前支持 """
permission_classes = (IsAuthenticated, IsOwnerOrReadOnly)
authentication_classes = (JSONWebTokenAuthentication, SessionAuthentication)
pagination_class = Pagination
class AllPreSupportViewset(viewsets.ModelViewSet):
""" 所有售前支持 """
permission_classes = (IsAuthenticated,)
authentication_classes = (JSONWebTokenAuthentication, SessionAuthentication)
pagination_class = Pagination
serializer_class = PreSupportSerializer
class ImplementViewset(viewsets.ModelViewSet):
""" 实施支持 """
permission_classes = (IsAuthenticated, IsOwnerOrReadOnly)
authentication_classes = (JSONWebTokenAuthentication, SessionAuthentication)
pagination_class = Pagination
class AllImplementViewset(viewsets.ModelViewSet):
""" 所有实施支持 """
permission_classes = (IsAuthenticated,)
authentication_classes = (JSONWebTokenAuthentication, SessionAuthentication)
pagination_class = Pagination
serializer_class = ImplementSerializer
class AfterSupportViewset(viewsets.ModelViewSet):
""" 售后支持 """
permission_classes = (IsAuthenticated, IsOwnerOrReadOnly)
authentication_classes = (JSONWebTokenAuthentication, SessionAuthentication)
pagination_class = Pagination
class AllAfterSupportViewset(viewsets.ModelViewSet):
""" 所有售后支持 """
permission_classes = (IsAuthenticated,)
authentication_classes = (JSONWebTokenAuthentication, SessionAuthentication)
pagination_class = Pagination
serializer_class = AfterSupportSerializer
class ServiceViewset(viewsets.ModelViewSet):
""" 维修支持 """
permission_classes = (IsAuthenticated, IsOwnerOrReadOnly)
authentication_classes = (JSONWebTokenAuthentication, SessionAuthentication)
pagination_class = Pagination
class AllServiceViewset(viewsets.ModelViewSet):
""" 所有维修支持 """
permission_classes = (IsAuthenticated,)
authentication_classes = (JSONWebTokenAuthentication, SessionAuthentication)
pagination_class = Pagination
serializer_class = ServiceSerializer
class ServiceProcessViewset(viewsets.ModelViewSet):
""" 维修过程 """
authentication_classes = (JSONWebTokenAuthentication, SessionAuthentication)
serializer_class = ServiceProcessSerializer
class FileViewset(viewsets.ModelViewSet):
""" 文件上传 """
authentication_classes = (JSONWebTokenAuthentication, SessionAuthentication)
queryset = FilesModel.objects.all()
serializer_class = FilesSerializer
| [
6738,
42625,
14208,
13,
9945,
13,
27530,
1330,
1195,
201,
198,
6738,
1334,
62,
30604,
1330,
5009,
1039,
201,
198,
6738,
1334,
62,
30604,
13,
41299,
3299,
1330,
23575,
47649,
3299,
201,
198,
6738,
1334,
62,
30604,
13,
525,
8481,
1330,
... | 3.037267 | 1,127 |
x = [1, 2, 3]
y = [a*a+1 for a in x]
assert y == [2, 5, 10]
z = [(b, c) for b in x for c in y]
# print(z)
assert z == [
(1, 2), (1, 5), (1, 10),
(2, 2), (2, 5), (2, 10),
(3, 2), (3, 5), (3, 10)]
v = {b * 2 for b in x}
# TODO: how to check set equality?
# assert v == {2, 6, 4}
u = {str(b): b-2 for b in x}
assert u['3'] == 1
assert u['1'] == -1
y = [a+2 for a in x if a % 2]
print(y)
assert y == [3, 5]
z = [(9,), (10,)]
w = [x for x, in z]
assert w == [9, 10]
| [
198,
87,
796,
685,
16,
11,
362,
11,
513,
60,
198,
198,
88,
796,
685,
64,
9,
64,
10,
16,
329,
257,
287,
2124,
60,
198,
30493,
331,
6624,
685,
17,
11,
642,
11,
838,
60,
198,
198,
89,
796,
47527,
65,
11,
269,
8,
329,
275,
287... | 1.878906 | 256 |
import datetime
import os
from django.test import TestCase
from django.core.urlresolvers import reverse
from schedule.models import Event, Rule, Occurrence, Calendar
from schedule.periods import Period, Month, Day
from schedule.utils import EventListManager
| [
11748,
4818,
8079,
198,
11748,
28686,
198,
198,
6738,
42625,
14208,
13,
9288,
1330,
6208,
20448,
198,
6738,
42625,
14208,
13,
7295,
13,
6371,
411,
349,
690,
1330,
9575,
198,
198,
6738,
7269,
13,
27530,
1330,
8558,
11,
14330,
11,
10775,
... | 3.925373 | 67 |
import random
if __name__ == "__main__":
test_cases = [
([3, 2, 1, 3], sorted([3, 2, 1, 3])),
# ([3, 2, 5, 4, 3, -2, 1], sorted([3, 2, 5, 4, 3, -2, 1])),
# ([1, -2, -5], sorted([1, -2, -5])),
# ([1], sorted([1])),
# ([1], sorted([1])),
]
for test_case, exp in test_cases:
print test_case
print partition(test_case, 0, 3)
print test_case
# print quick_select(test_case, 0)
| [
11748,
4738,
628,
628,
198,
361,
11593,
3672,
834,
6624,
366,
834,
12417,
834,
1298,
198,
220,
220,
220,
1332,
62,
33964,
796,
685,
198,
220,
220,
220,
220,
220,
220,
220,
29565,
18,
11,
362,
11,
352,
11,
513,
4357,
23243,
26933,
... | 1.902439 | 246 |
"""
This file contains pre-defined demand commodities.
"""
from pygenesys.commodity.commodity import DemandCommodity
ELC_DEMAND = DemandCommodity(comm_name='ELC_DEMAND',
comm_label='d',
units='GWh',
description='End-use electricity demand')
STM_DEMAND = DemandCommodity(comm_name='STM_DEMAND',
units='GWh(th)',
description='End-use steam demand')
CW_DEMAND = DemandCommodity(comm_name='CW_DEMAND',
units='Million ton-hours refrigeration',
description='End-use chilled water demand')
TRANSPORT = DemandCommodity(comm_name='TRANSPORT',
units='thousand gallon gasoline equivalent',
description='transportation demand')
if __name__ == '__main__':
print(ELC_DEMAND._db_entry())
print(ELC_DEMAND.demand)
print(STM_DEMAND._db_entry())
| [
37811,
198,
1212,
2393,
4909,
662,
12,
23211,
3512,
24392,
13,
198,
37811,
198,
198,
6738,
12972,
5235,
274,
893,
13,
785,
4666,
414,
13,
785,
4666,
414,
1330,
34479,
6935,
375,
414,
628,
198,
3698,
34,
62,
39429,
6981,
796,
34479,
... | 1.990099 | 505 |
#!/usr/bin/env python
import sys
import os.path as osp
this_dir = osp.dirname(__file__)
sys.path.insert(0, osp.join(this_dir, '../../external/caffe-mpi/build/install/python/'))
import caffe
import argparse
import cPickle
import numpy as np
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('def_file')
parser.add_argument('param_file')
parser.add_argument('save_file')
parser.add_argument('--box_layer',
help='Box regression layer to change.')
parser.add_argument('--cls_layer',
help='Classification layer to change.')
parser.add_argument('--length', type=int, default=3,
help='New length. Should be greater than 2. [3]')
args = parser.parse_args()
net = caffe.Net(args.def_file, args.param_file, caffe.TEST)
# bbox_regression layer
weight = net.params[args.box_layer][0].data
bias = net.params[args.box_layer][1].data
length = args.length - 1
# bias is direct repetition
new_bias = np.tile(bias, length)
net.params[args.box_layer][1].reshape(*new_bias.shape)
net.params[args.box_layer][1].data[...] = new_bias
# weight
feat_dim = weight.shape[1] / 2
frame1_weight = weight[:,:feat_dim]
frame2_weight = weight[:,feat_dim:]
new_weight = np.zeros((4 * length, feat_dim * (length + 1)))
for i in xrange(length):
new_weight[4*i:4*i+4,:feat_dim] = frame1_weight
new_weight[4*i:4*i+4,feat_dim+i*feat_dim:2*feat_dim+i*feat_dim] = frame2_weight
net.params[args.box_layer][0].reshape(*new_weight.shape)
net.params[args.box_layer][0].data[...] = new_weight
# classification layer
cls_weight = net.params[args.cls_layer][0].data
# cls_weight
feat_dim = cls_weight.shape[1] / 2
frame1_cls_weight = cls_weight[:,:feat_dim]
frame2_cls_weight = cls_weight[:,feat_dim:]
new_cls_weight = np.zeros((cls_weight.shape[0], feat_dim * (length + 1)))
new_cls_weight[:,:feat_dim] = frame1_cls_weight
for i in xrange(1,length+1):
new_cls_weight[:,i*feat_dim:feat_dim+i*feat_dim] = frame2_cls_weight / float(length)
net.params[args.cls_layer][0].reshape(*new_cls_weight.shape)
net.params[args.cls_layer][0].data[...] = new_cls_weight
net.save(args.save_file)
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
198,
11748,
25064,
198,
11748,
28686,
13,
6978,
355,
267,
2777,
198,
5661,
62,
15908,
796,
267,
2777,
13,
15908,
3672,
7,
834,
7753,
834,
8,
198,
17597,
13,
6978,
13,
28463,
7,
15,
... | 2.371608 | 958 |
# coding: utf-8
import functools
from decimal import Decimal
# import mock
import pytest
from prices import (
Money, TaxedMoney, MoneyRange, TaxedMoneyRange, percentage_discount)
from django_prices_openexchangerates import exchange_currency
from django_prices_openexchangerates.models import ConversionRate, get_rates
from django_prices_openexchangerates.templatetags import prices_multicurrency
RATES = {
'EUR': Decimal(2),
'GBP': Decimal(4),
'BTC': Decimal(10)}
@pytest.fixture
@pytest.fixture(autouse=True)
| [
2,
19617,
25,
3384,
69,
12,
23,
198,
11748,
1257,
310,
10141,
198,
6738,
32465,
1330,
4280,
4402,
198,
198,
2,
1330,
15290,
198,
11748,
12972,
9288,
198,
6738,
4536,
1330,
357,
198,
220,
220,
220,
12911,
11,
9241,
276,
26788,
11,
12... | 2.825641 | 195 |
"""
Mamy zdefiniowaną n-elementową tablicę liczb całkowitych. Proszę napisać funkcję zwracającą wartość
typu bool oznaczającą, czy w tablicy istnieje dokładnie jeden element najmniejszy i dokładnie jeden
element największy (liczba elementów najmniejszych oznacza liczbę takich elementów o tej samej wartości).
"""
from random import randint
n = int(input("Enter range of array: "))
x = int(input("Enter max array value: "))
t = [randint(-abs(x), abs(x)) for _ in range(n)]
print(t)
print(only_one_max_min_element(t, x))
| [
37811,
198,
44,
14814,
1976,
4299,
5362,
45197,
128,
227,
299,
12,
30854,
322,
128,
227,
7400,
677,
128,
247,
3476,
14969,
1275,
41615,
74,
322,
414,
354,
13,
27631,
89,
128,
247,
25422,
9160,
38325,
46212,
66,
73,
128,
247,
1976,
8... | 2.244635 | 233 |
import struct
from src.system.controller.python.messaging.messages import Message, welcome, header, NODE_TYPE_CONTROLLER, MESSAGE_WELCOME, ip_address
| [
11748,
2878,
198,
198,
6738,
12351,
13,
10057,
13,
36500,
13,
29412,
13,
37348,
3039,
13,
37348,
1095,
1330,
16000,
11,
7062,
11,
13639,
11,
399,
16820,
62,
25216,
62,
10943,
5446,
46,
3069,
1137,
11,
337,
1546,
4090,
8264,
62,
54,
... | 3.04 | 50 |
import numpy as np
from scipy.sparse import csc_matrix
from pytorch_widedeep.wdtypes import WideDeep
def create_explain_matrix(model: WideDeep) -> csc_matrix:
"""
Returns a sparse matrix used to compute the feature importances after
training
Parameters
----------
model: WideDeep
object of type ``WideDeep``
Examples
--------
>>> from pytorch_widedeep.models import TabNet, WideDeep
>>> from pytorch_widedeep.models.tabnet.tab_net_utils import create_explain_matrix
>>> embed_input = [("a", 4, 2), ("b", 4, 2), ("c", 4, 2)]
>>> cont_cols = ["d", "e"]
>>> column_idx = {k: v for v, k in enumerate(["a", "b", "c", "d", "e"])}
>>> deeptabular = TabNet(column_idx=column_idx, embed_input=embed_input, continuous_cols=cont_cols)
>>> model = WideDeep(deeptabular=deeptabular)
>>> reduce_mtx = create_explain_matrix(model)
>>> reduce_mtx.todense()
matrix([[1., 0., 0., 0., 0.],
[1., 0., 0., 0., 0.],
[0., 1., 0., 0., 0.],
[0., 1., 0., 0., 0.],
[0., 0., 1., 0., 0.],
[0., 0., 1., 0., 0.],
[0., 0., 0., 1., 0.],
[0., 0., 0., 0., 1.]])
"""
(
embed_input,
column_idx,
embed_and_cont_dim,
) = _extract_tabnet_params(model)
n_feat = len(column_idx)
col_embeds = {e[0]: e[2] - 1 for e in embed_input}
embed_colname = [e[0] for e in embed_input]
cont_colname = [c for c in column_idx.keys() if c not in embed_colname]
embed_cum_counter = 0
indices_trick = []
for colname, idx in column_idx.items():
if colname in cont_colname:
indices_trick.append([idx + embed_cum_counter])
elif colname in embed_colname:
indices_trick.append(
range( # type: ignore[arg-type]
idx + embed_cum_counter,
idx + embed_cum_counter + col_embeds[colname] + 1,
)
)
embed_cum_counter += col_embeds[colname]
reducing_matrix = np.zeros((embed_and_cont_dim, n_feat))
for i, cols in enumerate(indices_trick):
reducing_matrix[cols, i] = 1
return csc_matrix(reducing_matrix)
| [
11748,
299,
32152,
355,
45941,
198,
6738,
629,
541,
88,
13,
82,
29572,
1330,
269,
1416,
62,
6759,
8609,
198,
198,
6738,
12972,
13165,
354,
62,
86,
1384,
68,
538,
13,
16993,
19199,
1330,
23399,
29744,
628,
198,
4299,
2251,
62,
20676,
... | 2.07757 | 1,070 |
import os
import subprocess
adaq_home = '/gws/smf/j04/cedaproc/cedawps/adaq/src/adaq_toolbox-ADAQ_Python_v7.1'
| [
11748,
28686,
198,
11748,
850,
14681,
198,
198,
4763,
80,
62,
11195,
796,
31051,
70,
18504,
14,
5796,
69,
14,
73,
3023,
14,
771,
499,
12204,
14,
771,
707,
862,
14,
4763,
80,
14,
10677,
14,
4763,
80,
62,
25981,
3524,
12,
26853,
48,... | 2.132075 | 53 |
#!/usr/bin/env python
from __future__ import division
import time, sys, signal
import argparse
import numpy as np
import piplates.DAQCplate as DAQC
parser = argparse.ArgumentParser(description='Acquire Data from Pi Plate')
parser.add_argument('-f','--fsample', dest='sample_frequency',
metavar='fs', type=float,
default = 1, help='ADC sample frequency [Hz]')
args = parser.parse_args()
fs = args.sample_frequency
if __debug__:
print("Sample Frequency is " + str(fs) + " Hz")
if fs < 1e-5:
parser.error("Error: sample rate must be > 1e-5 Hz")
# this is the main loop
j = 0
while j < 10:
x = DAQC.getADC(0,1)
if __debug__: print x
j += 1
time.sleep(1/fs)
# this is the output when its done running
# should also catch the CTRL-C
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
198,
6738,
11593,
37443,
834,
1330,
7297,
198,
11748,
640,
11,
25064,
11,
6737,
198,
11748,
1822,
29572,
198,
198,
11748,
299,
32152,
355,
45941,
198,
198,
11748,
31028,
17041,
13,
46640... | 2.570513 | 312 |
import importlib
import unittest
import click
import click.testing
import mock
from treadmill.infra import constants, connection
| [
11748,
1330,
8019,
198,
11748,
555,
715,
395,
198,
11748,
3904,
198,
11748,
3904,
13,
33407,
198,
11748,
15290,
198,
198,
6738,
49246,
13,
10745,
430,
1330,
38491,
11,
4637,
628
] | 4.225806 | 31 |
# type declarations
from typing import Callable
from jax.numpy import DeviceArray
Model = Callable[[DeviceArray], DeviceArray]
Guide = Callable[[DeviceArray], None]
| [
2,
2099,
31713,
198,
6738,
19720,
1330,
4889,
540,
198,
198,
6738,
474,
897,
13,
77,
32152,
1330,
16232,
19182,
198,
198,
17633,
796,
4889,
540,
30109,
24728,
19182,
4357,
16232,
19182,
60,
198,
47889,
796,
4889,
540,
30109,
24728,
1918... | 3.711111 | 45 |
#import pysam
import bamnostic as bs
import numpy as np
import anndata as ad
import pandas as pd
from scipy.sparse import csc_matrix
MOUSE = ['1', '10', '11', '12', '13', '14', '15', '16', '17', '18', '19',
'2', '3', '4', '5', '6', '7', '8', '9','X', 'Y']
HUMAN = ['1', '10', '11', '12', '13', '14', '15', '16', '17', '18', '19', '20', '21', '22',
'2', '3', '4', '5', '6', '7', '8', '9','X', 'Y']
def bld_atac_mtx(list_bam_files, loaded_feat, output_file_name=None,
path=None, writing_option='a', header=None, mode='rb',
check_sq=True, chromosomes=HUMAN):
"""
Build a count matrix one set of features at a time. It is specific of ATAC-seq data.
It curently do not write down a sparse matrix. It writes down a regular count matrix
as a text file.
Parameters
----------
list_bam_files: input must be a list of bam file names. One for each cell to
build the count matrix for
loaded_feat: the features for which you want to build the count matrix
output_file_name: name of the output file. The count matrix that will be written
down in the current directory. If this parameter is not specified,
the output count amtrix will be named 'std_output_ct_mtx.txt'
path: path where to find the input file. The output file will be written down
in your current directory, it is not affected by this parameter.
writing_option: standard writing options for the output file. 'a' or 'w'
'a' to append to an already existing matrix. 'w' to overwrite any
previously exisiting matrix.
default: 'a'
header: if you want to write down the feature name specify this argument.
Input must be a list.
mode: bamnostic argument 'r' or 'w' for read and write 'b' and 's' for bam or sam
if only 'r' is specified, bamnostic will try to determine if the input is
either a bam or sam file.
check_sq: bamnostic argument. when reading, check if SQ entries are present in header
chromosomes: chromosomes of the species you are considering. default value
is the human genome (not including mitochondrial genome).
HUMAN = ['1', '10', '11', '12', '13', '14', '15', '16', '17', '18', '19', '20', '21', '22',
'2', '3', '4', '5', '6', '7', '8', '9','X', 'Y']
MOUSE = '1', '10', '11', '12', '13', '14', '15', '16', '17', '18', '19',
'2', '3', '4', '5', '6', '7', '8', '9','X', 'Y']
Return
------
It does not return any object. The function write down the desired count
matrix in a txt file
"""
if output_file_name==None:
output_file_name='std_output_ct_mtx.txt'
if path==None:
path=''
# open file to write
output_file = open(path+output_file_name, writing_option)
# write header if specified
if header != None:
output_file.write('sample_name\t')
for feature in header:
output_file.write(feature)
output_file.write('\t')
output_file.write('\n')
# close file to write
output_file.close()
# start going through the bam files
for name_file in list_bam_files[0:]:
## important variables for output
index_feat = {key: 0 for key in chromosomes}
val_feat = {key: [0 for x in range(len(loaded_feat[key]))] for key in chromosomes}
## PART 1 read the bam file
keep_lines = []
#samfile = bs.AlignmentFile(path+output_file_name, mode="rb", check_sq=False)
samfile = bs.AlignmentFile(path+name_file, mode="rb", check_sq=False)
#for read in samfile.fetch(until_eof=True):
for read in samfile:
line = str(read).split('\t')
if line[2][3:] in chromosomes:
keep_lines.append(line[2:4])
### print -- output
print(name_file, len(keep_lines), 'mapped reads')
samfile.close()
## PART2 reads that fall into
for element in keep_lines:
## 2 things per line:
chrom = element[0][3:]
read_pos = int(element[1])
max_value_index = len(loaded_feat[chrom])
## I want to check if the read map to a feature in the same chrom
pointer_feat_pos = index_feat[chrom]
for feat_pos in loaded_feat[chrom][pointer_feat_pos:]:
pointer_feat_pos += 1
# Go through all features that are smaller than the read position
if read_pos > feat_pos[1]:
continue
# if read_pos fall in a feature
elif read_pos > feat_pos[0]:
# Update the pointer for the next read if the pointer isn't out of range
if pointer_feat_pos < max_value_index:
index_feat[chrom] = pointer_feat_pos
val_feat[chrom][pointer_feat_pos] += 1
else:
index_feat[chrom] = max_value_index
# Check the following features without updating the pointer.
break
else:
break
for feat_pos in loaded_feat[chrom][pointer_feat_pos:]:
# +1 if reads fall into more than one feature
if feat_pos[0] < read_pos:
val_feat[chrom][pointer_feat_pos] += 1
pointer_feat_pos += 1
# if read_pos > start position of the new feature break
elif read_pos < feat_pos[0]:
break
else:
print('error')
break
# PART 3
# open
output_file = open(path+output_file_name, 'a')
# write down the result of the cell
output_file.write(name_file)
output_file.write('\t')
for chrom in chromosomes:
output_file.write('\t'.join([str(p) for p in val_feat[chrom]]))
output_file.write('\t')
output_file.write('\n')
#close
output_file.close()
def read_mtx_bed(file_name, path='', omic='ATAC'):
"""
read this specific matrix format. It is the standard output of bedtools when you merge bam files.
"""
peak_name = []
cell_matrix = []
with open(path+file_name) as f:
head = f.readline().split('\t')
head[len(head)-1] = head[len(head)-1].split("\n")[0]
for line in f:
line = line.split('\t')
line[len(line)-1] = line[len(line)-1].split("\n")[0]
peak_name.append(line[3]) # for some reason it has rownames
cell_matrix.append([int(x) for x in line[4:]])
cell_names = head[4:]
cell_matrix=np.matrix(cell_matrix)
cell_matrix = cell_matrix.transpose()
adata = ad.AnnData(cell_matrix,
obs=pd.DataFrame(index=cell_names),
var=pd.DataFrame(index=peak_name))
if omic != None:
adata.uns['omic'] = omic
return(adata)
def save_sparse_mtx(initial_matrix, output_file='.h5ad', path='', omic='ATAC', bed=False, save=True):
"""
Convert regular atac matrix into a sparse Anndata:
Parameters
----------
initial_matrix: initial dense count matrix to load and convert into a sparse matrix
output_file: name of the output file for the AnnData object.
Default output is the name of the input file with .h5ad extension
path: path to the input count matrix. The AnnData object is written in the current directory,
not the location specified in path.
omic: 'ATAC', 'RNA' or 'methylation' are the 3 currently recognised omics in epiScanpy.
However, other omic name can be accepted but are not yet recognised in other functions.
default: 'ATAC'
bed: boolean. If True it consider another input format (bedtools output format for count matrices)
save: boolean. If True, the sparse matrix is saved as h5ad file. Otherwise it is simply return.
Return
------
It returns the loaded matrix as an AnnData object.
"""
head = None
data = []
cell_names = []
# choice between 2 different input count matrix formats
if bed == True:
adata = read_mtx_bed(file_name, path, omic)
else:
# reading the non sparse file
with open(path+initial_matrix) as f:
first_line = f.readline()
first_line = first_line[:-3].split('\t')
if first_line[0] == 'sample_name':
head = first_line[:-1]
else:
cell_names.append(first_line[0])
data = [[int(l) for l in first_line[1:-1]]]
file = f.readlines()
for line in file:
line = line[:-3].split('\t')
cell_names.append(line[0])
data.append([int(l) for l in line[1:-1]])
# convert into an AnnData object
if head != None:
adata = ad.AnnData(csc_matrix(data), obs=pd.DataFrame(index=cell_names), var=pd.DataFrame(index=head[1:]))
else:
adata = ad.AnnData(csc_matrix(data), obs=pd.DataFrame(index=cell_names))
if omic != None:
adata.uns['omic'] = omic
# writing the file as h5ad --> sparse matrix with minimum annotations
if save ==True:
if output_file=='.h5ad':
output_file = "".join([initial_matrix.split('.')[0], output_file])
adata.write(output_file)
return(adata) | [
2,
11748,
279,
893,
321,
198,
11748,
275,
34684,
15132,
355,
275,
82,
198,
11748,
299,
32152,
355,
45941,
198,
11748,
281,
358,
1045,
355,
512,
198,
11748,
19798,
292,
355,
279,
67,
198,
6738,
629,
541,
88,
13,
82,
29572,
220,
1330,... | 2.227633 | 4,292 |
import numpy as np
| [
11748,
299,
32152,
355,
45941,
198
] | 3.166667 | 6 |
from __future__ import absolute_import, unicode_literals
from celery import shared_task
from django.contrib.auth.models import User
from mood import util
# Celery task file - defines tasks which can be run asyncronously or on timers
@shared_task | [
6738,
11593,
37443,
834,
1330,
4112,
62,
11748,
11,
28000,
1098,
62,
17201,
874,
198,
198,
6738,
18725,
1924,
1330,
4888,
62,
35943,
198,
198,
6738,
42625,
14208,
13,
3642,
822,
13,
18439,
13,
27530,
1330,
11787,
198,
198,
6738,
10038,
... | 3.623188 | 69 |
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
"""
@Author : Collapsar-G
@License : (C) Copyright 2021-*
@Contact : gjf840513468@gmail.com
@File : $Visualization.py
@Time : $2021.4.8 $16:50
@Desc : 工具人
"""
import json
import numpy as np
import torch
from miscc.config import cfg
def data_loat_att(type_data, split=""):
"""
返回从attributes中的数据分析
:param type_data: "dvd" 或 "video"
:param split: "train" 或 "test" 或 ""
:return:
"""
path = "./DATA/attributes/attributes_%s_sparse" % type_data
if split != "":
path += '_%s' % split
path += ".json"
with open(path, 'r') as f:
load_dict = json.load(f)
return load_dict
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
198,
2,
532,
9,
12,
21004,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
198,
37811,
198,
31,
13838,
220,
1058,
220,
220,
7778,
1686,
283,
12,
38,
198,
198,
31,
34156,
1058,
220,
220,
... | 2.008523 | 352 |
import os, sys, math, random, itertools, functools
from collections import namedtuple
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.utils.checkpoint import checkpoint as util_checkpoint
from torchvision import models
from utils import *
from models import TrainableModel, DataParallelModel
from task_configs import get_task, task_map, Task, RealityTask, ImageTask
from modules.percep_nets import DenseNet, Dense1by1Net, DenseKernelsNet, DeepNet, BaseNet, WideNet, PyramidNet
from modules.depth_nets import UNetDepth
from modules.unet import UNet, UNetOld, UNetOld2, UNetReshade
from modules.resnet import ResNetClass
from fire import Fire
import IPython
import pdb
pretrained_transfers = {
('normal', 'principal_curvature'):
(lambda: Dense1by1Net(), f"{OLD_MODELS_DIR}/normal2curvature_dense_1x1.pth"),
('normal', 'depth_zbuffer'):
(lambda: UNetDepth(), f"{OLD_MODELS_DIR}/normal2zdepth_unet_v4.pth"),
('normal', 'sobel_edges'):
(lambda: UNet(out_channels=1, downsample=4).cuda(), f"{OLD_MODELS_DIR}/normal2edges2d_sobel_unet4.pth"),
('normal', 'reshading'):
(lambda: UNetReshade(downsample=5), f"{OLD_MODELS_DIR}/normal2reshade_unet5.pth"),
('normal', 'keypoints3d'):
(lambda: UNet(downsample=5, out_channels=1), f"{OLD_MODELS_DIR}/normal2keypoints3d.pth"),
('normal', 'keypoints2d'):
(lambda: UNet(downsample=5, out_channels=1), f"{OLD_MODELS_DIR}/normal2keypoints2d_new.pth"),
('normal', 'edge_occlusion'):
(lambda: UNet(downsample=5, out_channels=1), f"{OLD_MODELS_DIR}/normal2edge_occlusion.pth"),
('depth_zbuffer', 'normal'):
(lambda: UNet(in_channels=1, downsample=6), f"{OLD_MODELS_DIR}/depth2normal_unet6.pth"),
('depth_zbuffer', 'sobel_edges'):
(lambda: UNet(downsample=4, in_channels=1, out_channels=1).cuda(), f"{OLD_MODELS_DIR}/depth_zbuffer2sobel_edges.pth"),
('depth_zbuffer', 'principal_curvature'):
(lambda: UNet(downsample=4, in_channels=1), f"{OLD_MODELS_DIR}/depth_zbuffer2principal_curvature.pth"),
('depth_zbuffer', 'reshading'):
(lambda: UNetReshade(downsample=5, in_channels=1), f"{OLD_MODELS_DIR}/depth_zbuffer2reshading.pth"),
('depth_zbuffer', 'keypoints3d'):
(lambda: UNet(downsample=5, in_channels=1, out_channels=1), f"{OLD_MODELS_DIR}/depth_zbuffer2keypoints3d.pth"),
('depth_zbuffer', 'keypoints2d'):
(lambda: UNet(downsample=5, in_channels=1, out_channels=1), f"{OLD_MODELS_DIR}/depth_zbuffer2keypoints2d.pth"),
('depth_zbuffer', 'edge_occlusion'):
(lambda: UNet(downsample=5, in_channels=1, out_channels=1), f"{OLD_MODELS_DIR}/depth_zbuffer2edge_occlusion.pth"),
('reshading', 'depth_zbuffer'):
(lambda: UNetReshade(downsample=5, out_channels=1), f"{OLD_MODELS_DIR}/reshading2depth_zbuffer.pth"),
('reshading', 'keypoints2d'):
(lambda: UNet(downsample=5, out_channels=1), f"{OLD_MODELS_DIR}/reshading2keypoints2d_new.pth"),
('reshading', 'edge_occlusion'):
(lambda: UNet(downsample=5, out_channels=1), f"{OLD_MODELS_DIR}/reshading2edge_occlusion.pth"),
('reshading', 'normal'):
(lambda: UNet(downsample=4), f"{OLD_MODELS_DIR}/reshading2normal.pth"),
('reshading', 'keypoints3d'):
(lambda: UNet(downsample=5, out_channels=1), f"{OLD_MODELS_DIR}/reshading2keypoints3d.pth"),
('reshading', 'sobel_edges'):
(lambda: UNet(downsample=5, out_channels=1), f"{OLD_MODELS_DIR}/reshading2sobel_edges.pth"),
('reshading', 'principal_curvature'):
(lambda: UNet(downsample=5), f"{OLD_MODELS_DIR}/reshading2principal_curvature.pth"),
('rgb', 'sobel_edges'):
(lambda: SobelKernel(), None),
('rgb', 'principal_curvature'):
(lambda: UNet(downsample=5), f"{OLD_MODELS_DIR}/rgb2principal_curvature.pth"),
('rgb', 'keypoints2d'):
(lambda: UNet(downsample=3, out_channels=1), f"{OLD_MODELS_DIR}/rgb2keypoints2d_new.pth"),
('rgb', 'keypoints3d'):
(lambda: UNet(downsample=5, out_channels=1), f"{OLD_MODELS_DIR}/rgb2keypoints3d.pth"),
('rgb', 'edge_occlusion'):
(lambda: UNet(downsample=5, out_channels=1), f"{OLD_MODELS_DIR}/rgb2edge_occlusion.pth"),
('rgb', 'normal'):
(lambda: UNet(), f"{OLD_MODELS_DIR}/unet_baseline_standardval.pth"),
('rgb', 'reshading'):
(lambda: UNetReshade(downsample=5), f"{OLD_MODELS_DIR}/rgb2reshade.pth"),
('rgb', 'depth_zbuffer'):
(lambda: UNet(downsample=6, out_channels=1), f"{OLD_MODELS_DIR}/rgb2zdepth_buffer.pth"),
('normal', 'imagenet'):
(lambda: ResNetClass().cuda(), None),
('depth_zbuffer', 'imagenet'):
(lambda: ResNetClass().cuda(), None),
('reshading', 'imagenet'):
(lambda: ResNetClass().cuda(), None),
('principal_curvature', 'sobel_edges'):
(lambda: UNet(downsample=4, out_channels=1), f"{OLD_MODELS_DIR}/principal_curvature2sobel_edges.pth"),
('sobel_edges', 'depth_zbuffer'):
(lambda: UNet(downsample=6, in_channels=1, out_channels=1), f"{OLD_MODELS_DIR}/sobel_edges2depth_zbuffer.pth"),
('keypoints2d', 'normal'):
(lambda: UNet(downsample=5, in_channels=1), f"{OLD_MODELS_DIR}/keypoints2d2normal_new.pth"),
('keypoints3d', 'normal'):
(lambda: UNet(downsample=5, in_channels=1), f"{OLD_MODELS_DIR}/keypoints3d2normal.pth"),
('principal_curvature', 'normal'):
(lambda: UNetOld2(), None),
('sobel_edges', 'normal'):
(lambda: UNet(in_channels=1, downsample=5).cuda(), f"{OLD_MODELS_DIR}/sobel_edges2normal.pth"),
('edge_occlusion', 'normal'):
(lambda: UNet(in_channels=1, downsample=5), f"{OLD_MODELS_DIR}/edge_occlusion2normal.pth"),
}
| [
198,
11748,
28686,
11,
25064,
11,
10688,
11,
4738,
11,
340,
861,
10141,
11,
1257,
310,
10141,
198,
6738,
17268,
1330,
3706,
83,
29291,
198,
11748,
299,
32152,
355,
45941,
198,
198,
11748,
28034,
198,
11748,
28034,
13,
20471,
355,
299,
... | 2.210405 | 2,595 |
# Generated by Django 2.2.2 on 2019-06-20 06:36
from django.db import migrations, models
import django.db.models.deletion
| [
2,
2980,
515,
416,
37770,
362,
13,
17,
13,
17,
319,
13130,
12,
3312,
12,
1238,
9130,
25,
2623,
198,
198,
6738,
42625,
14208,
13,
9945,
1330,
15720,
602,
11,
4981,
198,
11748,
42625,
14208,
13,
9945,
13,
27530,
13,
2934,
1616,
295,
... | 2.818182 | 44 |
#
# PySNMP MIB module DATALINK-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/DATALINK-MIB
# Produced by pysmi-0.3.4 at Wed May 1 12:37:02 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
# Pull the standard ASN.1/SMI base types and macros out of the core MIB
# modules already loaded by the MIB loader.  `mibBuilder` is injected into
# this module's namespace by the pysnmp loader at execution time; it is not
# defined or imported in this file.
ObjectIdentifier, OctetString, Integer = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "OctetString", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsUnion, SingleValueConstraint, ConstraintsIntersection, ValueSizeConstraint, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsUnion", "SingleValueConstraint", "ConstraintsIntersection", "ValueSizeConstraint", "ValueRangeConstraint")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
# NOTE(review): "NotificationType" appears twice in the tuple below; this is
# harmless duplication emitted by the pysmi code generator — the second
# binding simply rebinds the same object.
private, ModuleIdentity, ObjectIdentity, internet, mgmt, TimeTicks, Unsigned32, MibScalar, MibTable, MibTableRow, MibTableColumn, MibIdentifier, enterprises, NotificationType, ObjectName, Bits, Integer32, IpAddress, Gauge32, iso, NotificationType, Counter32, Counter64 = mibBuilder.importSymbols("SNMPv2-SMI", "private", "ModuleIdentity", "ObjectIdentity", "internet", "mgmt", "TimeTicks", "Unsigned32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "MibIdentifier", "enterprises", "NotificationType", "ObjectName", "Bits", "Integer32", "IpAddress", "Gauge32", "iso", "NotificationType", "Counter32", "Counter64")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
# ---------------------------------------------------------------------------
# OID registration tree.
# Root is the Asentria enterprise arc 1.3.6.1.4.1.3052; everything in this
# MIB lives under .3052.1 (datalink).  First the top-level functional
# subtrees (.1 .. .18, .99), then deeper intermediate nodes.
# ---------------------------------------------------------------------------
asentria = MibIdentifier((1, 3, 6, 1, 4, 1, 3052))
datalink = MibIdentifier((1, 3, 6, 1, 4, 1, 3052, 1))
productIds = MibIdentifier((1, 3, 6, 1, 4, 1, 3052, 1, 1))
productConfig = MibIdentifier((1, 3, 6, 1, 4, 1, 3052, 1, 2))
unitIds = MibIdentifier((1, 3, 6, 1, 4, 1, 3052, 1, 3))
serialPorts = MibIdentifier((1, 3, 6, 1, 4, 1, 3052, 1, 4))
# NOTE(review): generated name `time` shadows the stdlib `time` module name
# inside this module's namespace; harmless here since the module is exec'd by
# the MIB loader and does not import the stdlib module.
time = MibIdentifier((1, 3, 6, 1, 4, 1, 3052, 1, 5))
snmpsetup = MibIdentifier((1, 3, 6, 1, 4, 1, 3052, 1, 6))
passwords = MibIdentifier((1, 3, 6, 1, 4, 1, 3052, 1, 7))
ftpsetup = MibIdentifier((1, 3, 6, 1, 4, 1, 3052, 1, 8))
databases = MibIdentifier((1, 3, 6, 1, 4, 1, 3052, 1, 9))
alarms = MibIdentifier((1, 3, 6, 1, 4, 1, 3052, 1, 10))
actions = MibIdentifier((1, 3, 6, 1, 4, 1, 3052, 1, 11))
controls = MibIdentifier((1, 3, 6, 1, 4, 1, 3052, 1, 12))
alarmhistory = MibIdentifier((1, 3, 6, 1, 4, 1, 3052, 1, 13))
realtimesocket = MibIdentifier((1, 3, 6, 1, 4, 1, 3052, 1, 14))
iprestrictions = MibIdentifier((1, 3, 6, 1, 4, 1, 3052, 1, 15))
ipsetup = MibIdentifier((1, 3, 6, 1, 4, 1, 3052, 1, 16))
pppsetup = MibIdentifier((1, 3, 6, 1, 4, 1, 3052, 1, 17))
ccode = MibIdentifier((1, 3, 6, 1, 4, 1, 3052, 1, 18))
techsupport = MibIdentifier((1, 3, 6, 1, 4, 1, 3052, 1, 99))
# Intermediate nodes nested below the top-level subtrees.
hardware = MibIdentifier((1, 3, 6, 1, 4, 1, 3052, 1, 2, 4))
factorysetup = MibIdentifier((1, 3, 6, 1, 4, 1, 3052, 1, 2, 5))
commandPassword = MibIdentifier((1, 3, 6, 1, 4, 1, 3052, 1, 7, 5))
entireDatabase = MibIdentifier((1, 3, 6, 1, 4, 1, 3052, 1, 9, 1))
databaseStatus = MibIdentifier((1, 3, 6, 1, 4, 1, 3052, 1, 9, 1, 1))
databaseFiles = MibIdentifier((1, 3, 6, 1, 4, 1, 3052, 1, 9, 2))
filesetup = MibIdentifier((1, 3, 6, 1, 4, 1, 3052, 1, 9, 2, 1))
nodataAlarms = MibIdentifier((1, 3, 6, 1, 4, 1, 3052, 1, 10, 3))
nodataAlarmHolidays = MibIdentifier((1, 3, 6, 1, 4, 1, 3052, 1, 10, 3, 3))
actionsBuzzer = MibIdentifier((1, 3, 6, 1, 4, 1, 3052, 1, 11, 1))
actionsTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 3052, 1, 11, 5))
opSettings = MibIdentifier((1, 3, 6, 1, 4, 1, 3052, 1, 12, 1))
auxportMode = MibIdentifier((1, 3, 6, 1, 4, 1, 3052, 1, 12, 1, 6))
inlineHskMode = MibIdentifier((1, 3, 6, 1, 4, 1, 3052, 1, 12, 1, 6, 4))
modemSettings = MibIdentifier((1, 3, 6, 1, 4, 1, 3052, 1, 12, 2))
dataRelease = MibIdentifier((1, 3, 6, 1, 4, 1, 3052, 1, 12, 3))
otherControls = MibIdentifier((1, 3, 6, 1, 4, 1, 3052, 1, 12, 4))
ftpPush = MibIdentifier((1, 3, 6, 1, 4, 1, 3052, 1, 8, 3))
actionQueue = MibIdentifier((1, 3, 6, 1, 4, 1, 3052, 1, 13, 1))
actionHistory = MibIdentifier((1, 3, 6, 1, 4, 1, 3052, 1, 13, 2))
ipCurrent = MibIdentifier((1, 3, 6, 1, 4, 1, 3052, 1, 16, 1))
ipNew = MibIdentifier((1, 3, 6, 1, 4, 1, 3052, 1, 16, 2))
pppIdentification = MibIdentifier((1, 3, 6, 1, 4, 1, 3052, 1, 17, 1))
# ---------------------------------------------------------------------------
# productIds (.1) and productConfig (.2) scalars.
# Each managed object follows the same generated three-line pattern:
#   1. construct the MibScalar/column with its OID and SMI syntax,
#   2. conditionally attach STATUS,
#   3. conditionally attach DESCRIPTION.
# The `mibBuilder.loadTexts` guards skip the textual metadata unless the
# loader was asked to keep MIB texts.
# ---------------------------------------------------------------------------
datalinkThisProduct = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 1, 1), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: datalinkThisProduct.setStatus('mandatory')
if mibBuilder.loadTexts: datalinkThisProduct.setDescription('This is a factory-configured string for the product name')
productname = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 2, 1), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: productname.setStatus('mandatory')
if mibBuilder.loadTexts: productname.setDescription('A second string which may also contain name/version info')
systemversion = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 2, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemversion.setStatus('mandatory')
if mibBuilder.loadTexts: systemversion.setDescription('system rom version number')
appversion = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 2, 3), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: appversion.setStatus('mandatory')
if mibBuilder.loadTexts: appversion.setDescription('application version')
# hardware (.2.4): read-only inventory of what the firmware detected at boot.
numberports = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 2, 4, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: numberports.setStatus('mandatory')
if mibBuilder.loadTexts: numberports.setDescription('number of RS232 ports found')
netcard = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 2, 4, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: netcard.setStatus('mandatory')
if mibBuilder.loadTexts: netcard.setDescription('0 if no net card, 1 if net card found')
modems = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 2, 4, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: modems.setStatus('mandatory')
if mibBuilder.loadTexts: modems.setDescription('0 if no modem, 1 if modem was found')
networkenabled = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 2, 4, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: networkenabled.setStatus('mandatory')
if mibBuilder.loadTexts: networkenabled.setDescription('0 if not enabled, 1 if enabled')
memorysize = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 2, 4, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: memorysize.setStatus('mandatory')
if mibBuilder.loadTexts: memorysize.setDescription('memory size in K')
# factorysetup (.2.5): read-write factory/modem provisioning values.
modemreport = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 2, 5, 1), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: modemreport.setStatus('mandatory')
if mibBuilder.loadTexts: modemreport.setDescription('5-char string, speed to report for modem speed')
modemportspeed = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 2, 5, 2), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: modemportspeed.setStatus('mandatory')
if mibBuilder.loadTexts: modemportspeed.setDescription('modem port baud rate 38400, 19200, etc. ')
modemsetupstring = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 2, 5, 3), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: modemsetupstring.setStatus('mandatory')
if mibBuilder.loadTexts: modemsetupstring.setDescription('modem setup string, e.g., ATe0v0s0=1')
modemcddelay = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 2, 5, 4), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: modemcddelay.setStatus('mandatory')
if mibBuilder.loadTexts: modemcddelay.setDescription('seconds after CD before sending answer string')
modemtype = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 2, 5, 5), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: modemtype.setStatus('mandatory')
if mibBuilder.loadTexts: modemtype.setDescription('number factory-assigned to this particular modem, manufacturer, etc.')
serialnumber = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 2, 5, 6), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: serialnumber.setStatus('mandatory')
if mibBuilder.loadTexts: serialnumber.setDescription('up to 10 chars for factory-assigned serial number')
dateofmanufacture = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 2, 5, 7), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dateofmanufacture.setStatus('mandatory')
if mibBuilder.loadTexts: dateofmanufacture.setDescription('up to 8 chars for factory-assigned date of manufacture')
databasemode = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 2, 6), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: databasemode.setStatus('mandatory')
if mibBuilder.loadTexts: databasemode.setDescription('database compatibility mode, 1 -normal, 2 commandset2, etc.')
# ---------------------------------------------------------------------------
# unitIds (.3): global site identifier plus a per-port site-ID table.
# ---------------------------------------------------------------------------
datalinkSiteId = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 3, 1), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: datalinkSiteId.setStatus('mandatory')
if mibBuilder.loadTexts: datalinkSiteId.setDescription('Site ID string')
# idByPortTable: one row per serial port, indexed by siteindex.
idByPortTable = MibTable((1, 3, 6, 1, 4, 1, 3052, 1, 3, 2), )
if mibBuilder.loadTexts: idByPortTable.setStatus('mandatory')
if mibBuilder.loadTexts: idByPortTable.setDescription('an id for type of data by port')
sitebyport = MibTableRow((1, 3, 6, 1, 4, 1, 3052, 1, 3, 2, 1), ).setIndexNames((0, "DATALINK-MIB", "siteindex"))
if mibBuilder.loadTexts: sitebyport.setStatus('mandatory')
if mibBuilder.loadTexts: sitebyport.setDescription('entry for table')
siteindex = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 3, 2, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: siteindex.setStatus('mandatory')
if mibBuilder.loadTexts: siteindex.setDescription('index for which port')
siteID = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 3, 2, 1, 2), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: siteID.setStatus('mandatory')
if mibBuilder.loadTexts: siteID.setDescription('site id or type of data by port')
# ---------------------------------------------------------------------------
# serialPorts (.4): port count and the per-port setup table
# (serial parameters, data-capture options, pass-through state).
# ---------------------------------------------------------------------------
numberPorts = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 4, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: numberPorts.setStatus('mandatory')
if mibBuilder.loadTexts: numberPorts.setDescription('number of RS232 ports found. ')
portSetupTable = MibTable((1, 3, 6, 1, 4, 1, 3052, 1, 4, 2), )
if mibBuilder.loadTexts: portSetupTable.setStatus('mandatory')
if mibBuilder.loadTexts: portSetupTable.setDescription('port setup table, serial params, collect data, etc.')
portSetupEntry = MibTableRow((1, 3, 6, 1, 4, 1, 3052, 1, 4, 2, 1), ).setIndexNames((0, "DATALINK-MIB", "portIndex"))
if mibBuilder.loadTexts: portSetupEntry.setStatus('mandatory')
if mibBuilder.loadTexts: portSetupEntry.setDescription('port setup table, serial params, collect data, etc.')
portIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 4, 2, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: portIndex.setStatus('mandatory')
if mibBuilder.loadTexts: portIndex.setDescription('index for table')
portBaud = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 4, 2, 1, 2), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portBaud.setStatus('mandatory')
if mibBuilder.loadTexts: portBaud.setDescription('baud rate, 19200, 9600, etc.')
portWord = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 4, 2, 1, 3), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portWord.setStatus('mandatory')
if mibBuilder.loadTexts: portWord.setDescription('word length, must be 7 or 8')
portParity = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 4, 2, 1, 4), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portParity.setStatus('mandatory')
if mibBuilder.loadTexts: portParity.setDescription('a single-char string with values of N E or O')
portStopbits = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 4, 2, 1, 5), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portStopbits.setStatus('mandatory')
if mibBuilder.loadTexts: portStopbits.setDescription('number of stop bits, must be 1')
portDataStore = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 4, 2, 1, 6), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portDataStore.setStatus('mandatory')
if mibBuilder.loadTexts: portDataStore.setDescription('0 data is not stored, 1 data is stored from this port')
portBinaryMode = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 4, 2, 1, 7), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portBinaryMode.setStatus('mandatory')
if mibBuilder.loadTexts: portBinaryMode.setDescription('0 data is ASCII, 1 data is binary')
portWrapMode = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 4, 2, 1, 8), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portWrapMode.setStatus('mandatory')
if mibBuilder.loadTexts: portWrapMode.setDescription('0 oldest data not overwritten, 1 older data is overwritten')
portHskMode = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 4, 2, 1, 9), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portHskMode.setStatus('mandatory')
if mibBuilder.loadTexts: portHskMode.setDescription('HSK mode to use when buffer close to full, 0 none, 1 xon, 2 DTR, 3 DTR and Xon')
portDateTimeStampMode = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 4, 2, 1, 10), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portDateTimeStampMode.setStatus('mandatory')
if mibBuilder.loadTexts: portDateTimeStampMode.setDescription('Date/time stamp mode to use,bit mapped bit 0 - do date stamp bit 1 - include year bit 2 - include year 19xx or 20xx bit 3 - include day of week bit 4 - space after date bit 5 - include time bit 6 - include seconds bit 7 - space after time')
portPTMode = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 4, 2, 1, 11), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portPTMode.setStatus('mandatory')
if mibBuilder.loadTexts: portPTMode.setDescription('pass-through access mode. 0=none, 1=by modem, 2=by network any write kills the passthrough connection. any write requires private community name')
portPTTime = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 4, 2, 1, 12), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: portPTTime.setStatus('mandatory')
if mibBuilder.loadTexts: portPTTime.setDescription('pass-through access mode time of this connection, in seconds')
portStoreFile = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 4, 2, 1, 13), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portStoreFile.setStatus('mandatory')
if mibBuilder.loadTexts: portStoreFile.setDescription('selects which data file data from this port is stored into')
portPtStripOutputLfs = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 4, 2, 1, 14), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portPtStripOutputLfs.setStatus('mandatory')
if mibBuilder.loadTexts: portPtStripOutputLfs.setDescription('0/1 no/yes in pass-through, strip LFs going to device on this port')
portPtStripInputLfs = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 4, 2, 1, 15), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portPtStripInputLfs.setStatus('mandatory')
if mibBuilder.loadTexts: portPtStripInputLfs.setDescription('0/1 no/yes in pass-through, strip LFs coming from device on this port')
portlowDTR = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 4, 2, 1, 16), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portlowDTR.setStatus('mandatory')
if mibBuilder.loadTexts: portlowDTR.setDescription('0/1 no/yes set DTR low and only raise it on SysAdmin & bypass connections')
# ---------------------------------------------------------------------------
# time (.5): device clock and DST behavior.
# ---------------------------------------------------------------------------
currenttime = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 5, 1), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: currenttime.setStatus('mandatory')
if mibBuilder.loadTexts: currenttime.setDescription('Text string for date and time: SUN 01/02/98 12:34:27')
autoDstAdjust = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 5, 2), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: autoDstAdjust.setStatus('mandatory')
if mibBuilder.loadTexts: autoDstAdjust.setDescription('0 no adjust, 1 adjust')
# ---------------------------------------------------------------------------
# snmpsetup (.6): trap enable flag, trap-destination manager table,
# auto-repeat interval, and a write-triggered test trap.
# ---------------------------------------------------------------------------
snmpTrapsEnabled = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 6, 1), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: snmpTrapsEnabled.setStatus('mandatory')
if mibBuilder.loadTexts: snmpTrapsEnabled.setDescription('0 do not send any traps, 1 do send traps')
snmpManagerTable = MibTable((1, 3, 6, 1, 4, 1, 3052, 1, 6, 2), )
if mibBuilder.loadTexts: snmpManagerTable.setStatus('mandatory')
if mibBuilder.loadTexts: snmpManagerTable.setDescription('management station names and addresses')
snmpTableEntry = MibTableRow((1, 3, 6, 1, 4, 1, 3052, 1, 6, 2, 1), ).setIndexNames((0, "DATALINK-MIB", "snmpMgrIndex"))
if mibBuilder.loadTexts: snmpTableEntry.setStatus('mandatory')
if mibBuilder.loadTexts: snmpTableEntry.setDescription('entry for snmp table')
snmpMgrIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 6, 2, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: snmpMgrIndex.setStatus('mandatory')
if mibBuilder.loadTexts: snmpMgrIndex.setDescription('index for table')
snmpManagerIp = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 6, 2, 1, 2), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: snmpManagerIp.setStatus('mandatory')
if mibBuilder.loadTexts: snmpManagerIp.setDescription('the ip address of a manager')
snmpManagerName = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 6, 2, 1, 3), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: snmpManagerName.setStatus('mandatory')
if mibBuilder.loadTexts: snmpManagerName.setDescription('the name of a manager, up to 80 chars')
snmpTrapsAutoRepeatTime = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 6, 3), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: snmpTrapsAutoRepeatTime.setStatus('mandatory')
if mibBuilder.loadTexts: snmpTrapsAutoRepeatTime.setDescription('0 do not repeat, else number of minutes to repeat')
snmpSendTestTrap = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 6, 4), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: snmpSendTestTrap.setStatus('mandatory')
if mibBuilder.loadTexts: snmpSendTestTrap.setDescription('0 on read, any Set sends test trap to all managers in table')
modemPasswords = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 7, 1), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: modemPasswords.setStatus('mandatory')
if mibBuilder.loadTexts: modemPasswords.setDescription('0 no modem passwords required, 1 modem passwords are required write requires private community name')
tcpPasswords = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 7, 2), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: tcpPasswords.setStatus('mandatory')
if mibBuilder.loadTexts: tcpPasswords.setDescription('0 no telnet/tcp passwords required, 1 passwords are required write requires private community name')
ftpPasswords = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 7, 3), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ftpPasswords.setStatus('mandatory')
if mibBuilder.loadTexts: ftpPasswords.setDescription('0 no ftp passwords required, 1 passwords are required write requires private community name')
promptPasswords = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 7, 4), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: promptPasswords.setStatus('mandatory')
if mibBuilder.loadTexts: promptPasswords.setDescription('0 no Password: prompt, 1 -> show Password: prompt')
commandNeedsPassword = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 7, 5, 1), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: commandNeedsPassword.setStatus('mandatory')
if mibBuilder.loadTexts: commandNeedsPassword.setDescription('0 not needed, 1 is needed write requires private community name')
commandPasswordTimeout = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 7, 5, 2), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: commandPasswordTimeout.setStatus('mandatory')
if mibBuilder.loadTexts: commandPasswordTimeout.setDescription('1-99, number of minutes of no activity which auto logs user out')
passwordTable = MibTable((1, 3, 6, 1, 4, 1, 3052, 1, 7, 6), )
if mibBuilder.loadTexts: passwordTable.setStatus('mandatory')
if mibBuilder.loadTexts: passwordTable.setDescription('Table of password entries, r-w only with private comm. name')
passwordTableEntry = MibTableRow((1, 3, 6, 1, 4, 1, 3052, 1, 7, 6, 1), ).setIndexNames((0, "DATALINK-MIB", "passwordIndex"))
if mibBuilder.loadTexts: passwordTableEntry.setStatus('mandatory')
if mibBuilder.loadTexts: passwordTableEntry.setDescription('entry to password table')
passwordIndex = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 7, 6, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: passwordIndex.setStatus('mandatory')
if mibBuilder.loadTexts: passwordIndex.setDescription('index to password table')
passwordCommand = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 7, 6, 1, 2), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: passwordCommand.setStatus('mandatory')
if mibBuilder.loadTexts: passwordCommand.setDescription('password for command access')
passwordAccess = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 7, 6, 1, 3), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: passwordAccess.setStatus('mandatory')
if mibBuilder.loadTexts: passwordAccess.setDescription('password for pass-through access')
# --- FTP configuration (subtree ...3052.1.8) ---
# General FTP behaviour: auto-delete on read, and data compression mode.
ftpAutoDelete = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 8, 1), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ftpAutoDelete.setStatus('mandatory')
if mibBuilder.loadTexts: ftpAutoDelete.setDescription('0 files not autodeleted, 1 deleted on reading')
ftpDataMode = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 8, 2), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ftpDataMode.setStatus('mandatory')
if mibBuilder.loadTexts: ftpDataMode.setDescription('0 normal, 1 compression mode 1, etc.')
# FTP "push" feature (...3052.1.8.3): the device periodically uploads its
# data files to a configured FTP server.  Scalars below hold the enable
# flag, period, running timer, and the target server's login details.
ftpPushEnabled = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 8, 3, 1), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ftpPushEnabled.setStatus('mandatory')
if mibBuilder.loadTexts: ftpPushEnabled.setDescription('0-no, 1-yes, enables ftp data push')
ftpPushTiming = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 8, 3, 2), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ftpPushTiming.setStatus('mandatory')
if mibBuilder.loadTexts: ftpPushTiming.setDescription('how often data is pushed, 2-255 minutes')
ftpPushTimer = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 8, 3, 3), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ftpPushTimer.setStatus('mandatory')
if mibBuilder.loadTexts: ftpPushTimer.setDescription('timer which counts to ftpPushTiming')
ftpPushIPAddress = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 8, 3, 4), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ftpPushIPAddress.setStatus('mandatory')
if mibBuilder.loadTexts: ftpPushIPAddress.setDescription('ip address of ftp server to which we push the data')
ftpPushUser = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 8, 3, 5), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ftpPushUser.setStatus('mandatory')
if mibBuilder.loadTexts: ftpPushUser.setDescription('text string to send for the user id')
# NOTE(review): ftpPushPass is readable plain-text credential storage per
# the generated MIB — a property of the device's MIB design, not this file.
ftpPushPass = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 8, 3, 6), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ftpPushPass.setStatus('mandatory')
if mibBuilder.loadTexts: ftpPushPass.setDescription('text string to send for the ftp server password')
ftpPushAcct = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 8, 3, 7), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ftpPushAcct.setStatus('mandatory')
if mibBuilder.loadTexts: ftpPushAcct.setDescription('text string to send for the account, if used')
ftpPushDir = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 8, 3, 8), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ftpPushDir.setStatus('mandatory')
if mibBuilder.loadTexts: ftpPushDir.setDescription('text string to send for the directory we CWD to')
# ftppushTable: one ftppushEnable flag per data file, indexed by
# ftppushIndex (file number).
ftppushTable = MibTable((1, 3, 6, 1, 4, 1, 3052, 1, 8, 3, 9), )
if mibBuilder.loadTexts: ftppushTable.setStatus('mandatory')
if mibBuilder.loadTexts: ftppushTable.setDescription('Table of ftp push enables')
ftppushTableEntry = MibTableRow((1, 3, 6, 1, 4, 1, 3052, 1, 8, 3, 9, 1), ).setIndexNames((0, "DATALINK-MIB", "ftppushIndex"))
if mibBuilder.loadTexts: ftppushTableEntry.setStatus('mandatory')
if mibBuilder.loadTexts: ftppushTableEntry.setDescription('entry to ftp push table')
ftppushIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 8, 3, 9, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ftppushIndex.setStatus('mandatory')
if mibBuilder.loadTexts: ftppushIndex.setDescription('index to ftp push table')
ftppushEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 8, 3, 9, 1, 2), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ftppushEnable.setStatus('mandatory')
if mibBuilder.loadTexts: ftppushEnable.setDescription('enable for ftp push, indexed by file')
# Remaining push options: alarms-file push, attempt counter, status-file
# mode, and server hostname.
ftpPushAlarms = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 8, 3, 10), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ftpPushAlarms.setStatus('mandatory')
if mibBuilder.loadTexts: ftpPushAlarms.setDescription('0-no, 1-yes, do we push the ALARMS file')
ftpPushCount = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 8, 3, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ftpPushCount.setStatus('mandatory')
if mibBuilder.loadTexts: ftpPushCount.setDescription('number of ftp data pushes tried since reboot')
ftpPushStatusMode = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 8, 3, 12), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ftpPushStatusMode.setStatus('mandatory')
if mibBuilder.loadTexts: ftpPushStatusMode.setDescription('0-none, 1-append, 2-replace, status file modes')
ftpPushServerName = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 8, 3, 13), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ftpPushServerName.setStatus('mandatory')
if mibBuilder.loadTexts: ftpPushServerName.setDescription('Name of the FTP Push Targer Server')
# --- Database storage statistics (subtree ...3052.1.9.1.1) ---
# Read-only snapshot of the on-device data store: percent full, total
# size in bytes, and record counts (available vs. deleted).
databasePfull = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 9, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: databasePfull.setStatus('mandatory')
if mibBuilder.loadTexts: databasePfull.setDescription('percentage full of all database')
databaseSize = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 9, 1, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: databaseSize.setStatus('mandatory')
if mibBuilder.loadTexts: databaseSize.setDescription('Size of Data Storage Area, in bytes')
databaseRecordsAvailable = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 9, 1, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: databaseRecordsAvailable.setStatus('mandatory')
if mibBuilder.loadTexts: databaseRecordsAvailable.setDescription('Records which are available to read, total in all files')
databaseRecordsDeleted = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 9, 1, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: databaseRecordsDeleted.setStatus('mandatory')
if mibBuilder.loadTexts: databaseRecordsDeleted.setDescription('Records which are deleted, total in all files')
# databaseAlarmTable: three severity rows (index 1-3); each row carries a
# percent-full threshold plus bitmask/enum columns selecting the actions
# (beeper, serial messages, pagers, modem callouts, traps) fired at it.
databaseAlarmTable = MibTable((1, 3, 6, 1, 4, 1, 3052, 1, 9, 1, 2), )
if mibBuilder.loadTexts: databaseAlarmTable.setStatus('mandatory')
if mibBuilder.loadTexts: databaseAlarmTable.setDescription('table for levels 1 2 3 of all database alarms and actions')
databaseAlarmEntry = MibTableRow((1, 3, 6, 1, 4, 1, 3052, 1, 9, 1, 2, 1), ).setIndexNames((0, "DATALINK-MIB", "databaseAlarmIndex"))
if mibBuilder.loadTexts: databaseAlarmEntry.setStatus('mandatory')
if mibBuilder.loadTexts: databaseAlarmEntry.setDescription('entry for database alarm config and actions table')
databaseAlarmIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 9, 1, 2, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: databaseAlarmIndex.setStatus('mandatory')
if mibBuilder.loadTexts: databaseAlarmIndex.setDescription('Index for table, 1 2 or 3')
databaseAlarmActive = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 9, 1, 2, 1, 2), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: databaseAlarmActive.setStatus('mandatory')
if mibBuilder.loadTexts: databaseAlarmActive.setDescription('0/1, 1 = active')
databaseAlarmThreshold = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 9, 1, 2, 1, 3), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: databaseAlarmThreshold.setStatus('mandatory')
if mibBuilder.loadTexts: databaseAlarmThreshold.setDescription('1-99, percentage full threshold level')
databaseAlarmBeeperActions = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 9, 1, 2, 1, 4), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: databaseAlarmBeeperActions.setStatus('mandatory')
if mibBuilder.loadTexts: databaseAlarmBeeperActions.setDescription('0 1 2, -> none, 1/10 or 10/10')
databaseAlarmSerialActions = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 9, 1, 2, 1, 5), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: databaseAlarmSerialActions.setStatus('mandatory')
if mibBuilder.loadTexts: databaseAlarmSerialActions.setDescription('bits 0-7 show which messages 1-8 are sent')
databaseAlarmPagerActions = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 9, 1, 2, 1, 6), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: databaseAlarmPagerActions.setStatus('mandatory')
if mibBuilder.loadTexts: databaseAlarmPagerActions.setDescription('bits 0-7 show which pagers 1-8 are used')
databaseAlarmCalloutActions = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 9, 1, 2, 1, 7), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: databaseAlarmCalloutActions.setStatus('mandatory')
if mibBuilder.loadTexts: databaseAlarmCalloutActions.setDescription('bits 0-7 show which modem callouts 1-8 are used')
databaseAlarmTrapActions = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 9, 1, 2, 1, 8), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: databaseAlarmTrapActions.setStatus('mandatory')
if mibBuilder.loadTexts: databaseAlarmTrapActions.setDescription('0/1 for traps are sent or not')
# ALARMS-file logging controls: whether alarms are stored, and the file's
# maximum size in kilobytes.
databaseAlarmFileStore = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 9, 1, 3), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: databaseAlarmFileStore.setStatus('mandatory')
if mibBuilder.loadTexts: databaseAlarmFileStore.setDescription('0-no, 1-yes, store alarms in the ALARMS file')
databaseAlarmFileMaxSize = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 9, 1, 4), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: databaseAlarmFileMaxSize.setStatus('mandatory')
if mibBuilder.loadTexts: databaseAlarmFileMaxSize.setDescription('in K, max size for alarms file 4-32k')
# --- Record collection settings (subtree ...3052.1.9.2.1) ---
# Character masking and record-framing parameters used while capturing
# serial data into records.
charmaskEnabled = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 9, 2, 1, 1), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: charmaskEnabled.setStatus('mandatory')
if mibBuilder.loadTexts: charmaskEnabled.setDescription('0/1 char masking enabled')
# 32 bytes of hex ASCII = a 256-bit mask, one bit per character code.
charmask = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 9, 2, 1, 2), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: charmask.setStatus('mandatory')
if mibBuilder.loadTexts: charmask.setDescription('32-byte hex ascii for character masking')
maxRecordChars = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 9, 2, 1, 3), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: maxRecordChars.setStatus('mandatory')
if mibBuilder.loadTexts: maxRecordChars.setDescription('max characters in an ASCII record')
binRecordBlocking = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 9, 2, 1, 4), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: binRecordBlocking.setStatus('mandatory')
if mibBuilder.loadTexts: binRecordBlocking.setDescription('# chars max to block binary records into')
recordCollectionTimeout = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 9, 2, 1, 5), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: recordCollectionTimeout.setStatus('mandatory')
if mibBuilder.loadTexts: recordCollectionTimeout.setDescription('# seconds to allow before terminating a record automatically')
# fileTable: directory of the device's data files, indexed by
# fileTableIndex; per-file name, type tag, sizes and record counts.
fileTable = MibTable((1, 3, 6, 1, 4, 1, 3052, 1, 9, 2, 2), )
if mibBuilder.loadTexts: fileTable.setStatus('mandatory')
if mibBuilder.loadTexts: fileTable.setDescription('table of directory entries')
fileTableEntry = MibTableRow((1, 3, 6, 1, 4, 1, 3052, 1, 9, 2, 2, 1), ).setIndexNames((0, "DATALINK-MIB", "fileTableIndex"))
if mibBuilder.loadTexts: fileTableEntry.setStatus('mandatory')
if mibBuilder.loadTexts: fileTableEntry.setDescription('entry for table')
fileTableIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 9, 2, 2, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fileTableIndex.setStatus('mandatory')
if mibBuilder.loadTexts: fileTableIndex.setDescription('index for the table')
fileName = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 9, 2, 2, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fileName.setStatus('mandatory')
if mibBuilder.loadTexts: fileName.setDescription('name of the file')
fileType = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 9, 2, 2, 1, 3), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fileType.setStatus('mandatory')
if mibBuilder.loadTexts: fileType.setDescription('type of data, up to 24 chars')
fileSize = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 9, 2, 2, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fileSize.setStatus('mandatory')
if mibBuilder.loadTexts: fileSize.setDescription('file size in bytes')
fileRecords = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 9, 2, 2, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fileRecords.setStatus('mandatory')
if mibBuilder.loadTexts: fileRecords.setDescription('file size in records')
fileRecordsAvailable = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 9, 2, 2, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fileRecordsAvailable.setStatus('mandatory')
if mibBuilder.loadTexts: fileRecordsAvailable.setDescription('# recs available')
fileRecordsDeleted = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 9, 2, 2, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fileRecordsDeleted.setStatus('mandatory')
if mibBuilder.loadTexts: fileRecordsDeleted.setDescription('# recs deleted')
filePercentNow = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 9, 2, 2, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: filePercentNow.setStatus('mandatory')
if mibBuilder.loadTexts: filePercentNow.setDescription('% of all of memory this file is, right now')
# --- Per-file alarm table (subtree ...3052.1.9.2.3) ---
# fileAlarms: two-dimensional table indexed by (file number, threshold
# number); each row is one alarm definition with a percent threshold and
# the same action-selection columns used by the database alarm table.
fileAlarms = MibTable((1, 3, 6, 1, 4, 1, 3052, 1, 9, 2, 3), )
if mibBuilder.loadTexts: fileAlarms.setStatus('mandatory')
if mibBuilder.loadTexts: fileAlarms.setDescription('file alarms, indexed by file and threshold')
# NOTE(review): the row's index names are "fileAlarmFileIndex" and
# "fileAlarmThreshold", while the second index column below is named
# fileAlarmThresholdIndex — apparent mismatch in the source MIB; kept
# byte-identical to the generated output.
fileAlarmEntry = MibTableRow((1, 3, 6, 1, 4, 1, 3052, 1, 9, 2, 3, 1), ).setIndexNames((0, "DATALINK-MIB", "fileAlarmFileIndex"), (0, "DATALINK-MIB", "fileAlarmThreshold"))
if mibBuilder.loadTexts: fileAlarmEntry.setStatus('mandatory')
if mibBuilder.loadTexts: fileAlarmEntry.setDescription('entry to the table')
fileAlarmFileIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 9, 2, 3, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fileAlarmFileIndex.setStatus('mandatory')
if mibBuilder.loadTexts: fileAlarmFileIndex.setDescription('index for filenumber')
fileAlarmThresholdIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 9, 2, 3, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fileAlarmThresholdIndex.setStatus('mandatory')
if mibBuilder.loadTexts: fileAlarmThresholdIndex.setDescription('index for filenumber threshold')
fileAlarmActive = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 9, 2, 3, 1, 3), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fileAlarmActive.setStatus('mandatory')
if mibBuilder.loadTexts: fileAlarmActive.setDescription('0/1 this file alarm active')
fileAlarmThreshold = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 9, 2, 3, 1, 4), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fileAlarmThreshold.setStatus('mandatory')
if mibBuilder.loadTexts: fileAlarmThreshold.setDescription('1-99, threshold level')
# Action-selection columns (same encoding as databaseAlarm* actions).
fileAlarmBeeperActions = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 9, 2, 3, 1, 5), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fileAlarmBeeperActions.setStatus('mandatory')
if mibBuilder.loadTexts: fileAlarmBeeperActions.setDescription('0 1 2, none, 1/10 or 10/10')
fileAlarmSerialActions = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 9, 2, 3, 1, 6), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fileAlarmSerialActions.setStatus('mandatory')
if mibBuilder.loadTexts: fileAlarmSerialActions.setDescription('bits 0-7 show which messages 1-8 are sent')
fileAlarmPagerActions = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 9, 2, 3, 1, 7), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fileAlarmPagerActions.setStatus('mandatory')
if mibBuilder.loadTexts: fileAlarmPagerActions.setDescription('bits 0-7 show which pagers 1-8 are used')
fileAlarmCalloutActions = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 9, 2, 3, 1, 8), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fileAlarmCalloutActions.setStatus('mandatory')
if mibBuilder.loadTexts: fileAlarmCalloutActions.setDescription('bits 0-7 show which modem callouts 1-8 are used')
fileAlarmTrapActions = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 9, 2, 3, 1, 9), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fileAlarmTrapActions.setStatus('mandatory')
if mibBuilder.loadTexts: fileAlarmTrapActions.setDescription('0/1 for traps are sent or not')
# --- Data (string-match) alarm table (subtree ...3052.1.10.1) ---
# dataAlarmTable: mostly read-only view of configured data alarms,
# indexed by dataAlarmIndex.  Columns expose the alarm's counter,
# threshold, clearing rules, action selections, and the last matched
# string/port.  Only dataAlarmAcked is writable (acknowledge).
dataAlarmTable = MibTable((1, 3, 6, 1, 4, 1, 3052, 1, 10, 1), )
if mibBuilder.loadTexts: dataAlarmTable.setStatus('mandatory')
if mibBuilder.loadTexts: dataAlarmTable.setDescription('table of read-only items for data alarm setup')
dataAlarmEntry = MibTableRow((1, 3, 6, 1, 4, 1, 3052, 1, 10, 1, 1), ).setIndexNames((0, "DATALINK-MIB", "dataAlarmIndex"))
if mibBuilder.loadTexts: dataAlarmEntry.setStatus('mandatory')
if mibBuilder.loadTexts: dataAlarmEntry.setDescription('Data alarm table entry')
dataAlarmIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 10, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dataAlarmIndex.setStatus('mandatory')
if mibBuilder.loadTexts: dataAlarmIndex.setDescription('index for data alarms')
dataAlarmActive = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 10, 1, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dataAlarmActive.setStatus('mandatory')
if mibBuilder.loadTexts: dataAlarmActive.setDescription('0/1 active')
dataAlarmName = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 10, 1, 1, 3), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dataAlarmName.setStatus('mandatory')
if mibBuilder.loadTexts: dataAlarmName.setDescription('name of alarm')
dataAlarmCounter = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 10, 1, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dataAlarmCounter.setStatus('mandatory')
if mibBuilder.loadTexts: dataAlarmCounter.setDescription('counter for this alarm')
dataAlarmThreshold = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 10, 1, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dataAlarmThreshold.setStatus('mandatory')
if mibBuilder.loadTexts: dataAlarmThreshold.setDescription('threshold for this alarm')
dataAlarmClearMode = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 10, 1, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dataAlarmClearMode.setStatus('mandatory')
if mibBuilder.loadTexts: dataAlarmClearMode.setDescription('code for clearing mode')
dataAlarmClearTime = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 10, 1, 1, 7), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dataAlarmClearTime.setStatus('mandatory')
if mibBuilder.loadTexts: dataAlarmClearTime.setDescription('time of day, e.g., 01:20')
# Writable acknowledge: reads as 0, any Set acknowledges the alarm.
dataAlarmAcked = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 10, 1, 1, 8), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dataAlarmAcked.setStatus('mandatory')
if mibBuilder.loadTexts: dataAlarmAcked.setDescription('0 on read, any set to ack this alarm')
dataAlarmBeeperActions = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 10, 1, 1, 9), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dataAlarmBeeperActions.setStatus('mandatory')
if mibBuilder.loadTexts: dataAlarmBeeperActions.setDescription('0 1 2, none, 1/10 or 10/10')
dataAlarmSerialActions = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 10, 1, 1, 10), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dataAlarmSerialActions.setStatus('mandatory')
if mibBuilder.loadTexts: dataAlarmSerialActions.setDescription('bits 0-7 show which messages 1-8 are sent')
dataAlarmPagerActions = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 10, 1, 1, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dataAlarmPagerActions.setStatus('mandatory')
if mibBuilder.loadTexts: dataAlarmPagerActions.setDescription('bits 0-7 show which pagers 1-8 are used')
dataAlarmCalloutActions = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 10, 1, 1, 12), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dataAlarmCalloutActions.setStatus('mandatory')
if mibBuilder.loadTexts: dataAlarmCalloutActions.setDescription('bits 0-7 show which modem callouts 1-8 are used')
dataAlarmTrapActions = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 10, 1, 1, 13), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dataAlarmTrapActions.setStatus('mandatory')
if mibBuilder.loadTexts: dataAlarmTrapActions.setDescription('0/1 for traps are sent or not')
dataAlarmString = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 10, 1, 1, 14), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dataAlarmString.setStatus('mandatory')
if mibBuilder.loadTexts: dataAlarmString.setDescription('last data alarm string for this alarm')
# NOTE(review): dataAlarmPort is declared MibScalar (not MibTableColumn)
# at a column OID in the generated output — kept byte-identical.
dataAlarmPort = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 10, 1, 1, 15), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dataAlarmPort.setStatus('mandatory')
if mibBuilder.loadTexts: dataAlarmPort.setDescription('port number for last data alarm string for this alarm')
dataAlarmAutoClear = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 10, 1, 1, 16), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dataAlarmAutoClear.setStatus('mandatory')
if mibBuilder.loadTexts: dataAlarmAutoClear.setDescription('0/1 disabled/enabled to auto clear counter when it reached threshold')
# --- Sensor (contact-closure) alarm table (subtree ...3052.1.10.2) ---
# sensorAlarmTable: configuration and state of physical sensor alarms,
# indexed by sensorAlarmIndex.  Unlike the data alarm table, most columns
# here are read-write.
sensorAlarmTable = MibTable((1, 3, 6, 1, 4, 1, 3052, 1, 10, 2), )
if mibBuilder.loadTexts: sensorAlarmTable.setStatus('mandatory')
if mibBuilder.loadTexts: sensorAlarmTable.setDescription('table of read-only items for sensor alarm setup')
sensorAlarmEntry = MibTableRow((1, 3, 6, 1, 4, 1, 3052, 1, 10, 2, 1), ).setIndexNames((0, "DATALINK-MIB", "sensorAlarmIndex"))
if mibBuilder.loadTexts: sensorAlarmEntry.setStatus('mandatory')
if mibBuilder.loadTexts: sensorAlarmEntry.setDescription('sensor alarm table entry')
sensorAlarmIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 10, 2, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sensorAlarmIndex.setStatus('mandatory')
if mibBuilder.loadTexts: sensorAlarmIndex.setDescription('index for sensor alarms')
sensorAlarmActive = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 10, 2, 1, 2), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sensorAlarmActive.setStatus('mandatory')
if mibBuilder.loadTexts: sensorAlarmActive.setDescription('0/1 active')
sensorAlarmName = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 10, 2, 1, 3), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sensorAlarmName.setStatus('mandatory')
if mibBuilder.loadTexts: sensorAlarmName.setDescription('name of alarm')
# Polarity: which contact state (open/closed) counts as "active".
sensorAlarmMode = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 10, 2, 1, 4), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sensorAlarmMode.setStatus('mandatory')
if mibBuilder.loadTexts: sensorAlarmMode.setDescription('0 - open active, 1 - closed active')
sensorAlarmCounter = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 10, 2, 1, 5), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sensorAlarmCounter.setStatus('mandatory')
if mibBuilder.loadTexts: sensorAlarmCounter.setDescription('counter for this alarm')
sensorAlarmThreshold = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 10, 2, 1, 6), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sensorAlarmThreshold.setStatus('mandatory')
if mibBuilder.loadTexts: sensorAlarmThreshold.setDescription('threshold for this alarm')
# Acknowledge column: reads as 0, any Set acknowledges the alarm.
sensorAlarmAcked = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 10, 2, 1, 7), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sensorAlarmAcked.setStatus('mandatory')
if mibBuilder.loadTexts: sensorAlarmAcked.setDescription('0 on read, any set to ack this alarm')
# Action-selection columns (same encoding as the other alarm tables).
sensorAlarmBeeperActions = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 10, 2, 1, 8), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sensorAlarmBeeperActions.setStatus('mandatory')
if mibBuilder.loadTexts: sensorAlarmBeeperActions.setDescription('0 1 2, none, 1/10 or 10/10')
sensorAlarmSerialActions = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 10, 2, 1, 9), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sensorAlarmSerialActions.setStatus('mandatory')
if mibBuilder.loadTexts: sensorAlarmSerialActions.setDescription('bits 0-7 show which messages 1-8 are sent')
sensorAlarmPagerActions = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 10, 2, 1, 10), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sensorAlarmPagerActions.setStatus('mandatory')
if mibBuilder.loadTexts: sensorAlarmPagerActions.setDescription('bits 0-7 show which pagers 1-8 are used')
sensorAlarmCalloutActions = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 10, 2, 1, 11), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sensorAlarmCalloutActions.setStatus('mandatory')
if mibBuilder.loadTexts: sensorAlarmCalloutActions.setDescription('bits 0-7 show which modem callouts 1-8 are used')
sensorAlarmTrapActions = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 10, 2, 1, 12), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sensorAlarmTrapActions.setStatus('mandatory')
if mibBuilder.loadTexts: sensorAlarmTrapActions.setDescription('0/1 for traps are sent or not')
# Live contact state, read-only.
sensorAlarmState = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 10, 2, 1, 13), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sensorAlarmState.setStatus('mandatory')
if mibBuilder.loadTexts: sensorAlarmState.setDescription('0-> open 1-> closed for current state')
# --- No-data alarm group (subtree ...3052.1.10.3) ---
# Alarms that fire when a port receives no data for a configured period.
# nodataAlarmStatus: per-alarm runtime status (counter + acknowledge).
nodataAlarmStatus = MibTable((1, 3, 6, 1, 4, 1, 3052, 1, 10, 3, 1), )
if mibBuilder.loadTexts: nodataAlarmStatus.setStatus('mandatory')
if mibBuilder.loadTexts: nodataAlarmStatus.setDescription('no data status table')
nodataAlarmStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 3052, 1, 10, 3, 1, 1), ).setIndexNames((0, "DATALINK-MIB", "nodataAlarmStatusIndex"))
if mibBuilder.loadTexts: nodataAlarmStatusEntry.setStatus('mandatory')
if mibBuilder.loadTexts: nodataAlarmStatusEntry.setDescription('status table entry')
nodataAlarmStatusIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 10, 3, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nodataAlarmStatusIndex.setStatus('mandatory')
if mibBuilder.loadTexts: nodataAlarmStatusIndex.setDescription('index for table')
nodataAlarmStatusCounter = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 10, 3, 1, 1, 2), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nodataAlarmStatusCounter.setStatus('mandatory')
if mibBuilder.loadTexts: nodataAlarmStatusCounter.setDescription('the nodata counter')
nodataAlarmStatusAcked = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 10, 3, 1, 1, 3), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nodataAlarmStatusAcked.setStatus('mandatory')
if mibBuilder.loadTexts: nodataAlarmStatusAcked.setDescription('reads as 0, any write acks this alarm')
# nodataTable: alarm definitions, triple-indexed by (port, schedule,
# level).  Each row holds an active flag, an hh:mm-hh:mm schedule window,
# a minutes-without-data threshold, and action selections.
nodataTable = MibTable((1, 3, 6, 1, 4, 1, 3052, 1, 10, 3, 2), )
if mibBuilder.loadTexts: nodataTable.setStatus('mandatory')
if mibBuilder.loadTexts: nodataTable.setDescription('nodata table')
nodataTableEntry = MibTableRow((1, 3, 6, 1, 4, 1, 3052, 1, 10, 3, 2, 1), ).setIndexNames((0, "DATALINK-MIB", "nodataTablePortIndex"), (0, "DATALINK-MIB", "nodataTableScheduleIndex"), (0, "DATALINK-MIB", "nodataTableLevelIndex"))
if mibBuilder.loadTexts: nodataTableEntry.setStatus('mandatory')
if mibBuilder.loadTexts: nodataTableEntry.setDescription('nodata defn. table entry')
# Index columns (no setMaxAccess call in the generated output).
nodataTablePortIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 10, 3, 2, 1, 1), Integer32())
if mibBuilder.loadTexts: nodataTablePortIndex.setStatus('mandatory')
if mibBuilder.loadTexts: nodataTablePortIndex.setDescription('index by port')
nodataTableScheduleIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 10, 3, 2, 1, 2), Integer32())
if mibBuilder.loadTexts: nodataTableScheduleIndex.setStatus('mandatory')
if mibBuilder.loadTexts: nodataTableScheduleIndex.setDescription('index by schedule')
nodataTableLevelIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 10, 3, 2, 1, 3), Integer32())
if mibBuilder.loadTexts: nodataTableLevelIndex.setStatus('mandatory')
if mibBuilder.loadTexts: nodataTableLevelIndex.setDescription('index by level')
nodataTableActive = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 10, 3, 2, 1, 4), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nodataTableActive.setStatus('mandatory')
if mibBuilder.loadTexts: nodataTableActive.setDescription('0/1 , enabled or not')
nodataTableSchedule = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 10, 3, 2, 1, 5), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nodataTableSchedule.setStatus('mandatory')
if mibBuilder.loadTexts: nodataTableSchedule.setDescription('schedule, format is hh:mm-hh:mm')
nodataTableThreshold = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 10, 3, 2, 1, 6), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nodataTableThreshold.setStatus('mandatory')
if mibBuilder.loadTexts: nodataTableThreshold.setDescription('#minutes for no data for alarm')
# Action-selection columns (same encoding as the other alarm tables).
nodataTableBeeperActions = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 10, 3, 2, 1, 7), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nodataTableBeeperActions.setStatus('mandatory')
if mibBuilder.loadTexts: nodataTableBeeperActions.setDescription('0 1 2, none, 1/10 or 10/10')
nodataTableSerialActions = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 10, 3, 2, 1, 8), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nodataTableSerialActions.setStatus('mandatory')
if mibBuilder.loadTexts: nodataTableSerialActions.setDescription('bits 0-7 show which messages 1-8 are sent')
nodataTablePagerActions = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 10, 3, 2, 1, 9), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nodataTablePagerActions.setStatus('mandatory')
if mibBuilder.loadTexts: nodataTablePagerActions.setDescription('bits 0-7 show which pagers 1-8 are used')
nodataTableCalloutActions = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 10, 3, 2, 1, 10), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nodataTableCalloutActions.setStatus('mandatory')
if mibBuilder.loadTexts: nodataTableCalloutActions.setDescription('bits 0-7 show which modem callouts 1-8 are used')
nodataTableTrapActions = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 10, 3, 2, 1, 11), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nodataTableTrapActions.setStatus('mandatory')
if mibBuilder.loadTexts: nodataTableTrapActions.setDescription('0/1 for traps are sent or not')
# Holiday handling (...3052.1.10.3.3): days on which no-data schedules
# are suppressed.  Read-only list plus add/delete/clear write hooks that
# take "MM/DD" strings.
nodataNumberHolidays = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 10, 3, 3, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nodataNumberHolidays.setStatus('mandatory')
if mibBuilder.loadTexts: nodataNumberHolidays.setDescription('number of nodata holidays defined')
nodataHolidayTable = MibTable((1, 3, 6, 1, 4, 1, 3052, 1, 10, 3, 3, 2), )
if mibBuilder.loadTexts: nodataHolidayTable.setStatus('mandatory')
if mibBuilder.loadTexts: nodataHolidayTable.setDescription('holiday table')
nodataHolidayTableEntry = MibTableRow((1, 3, 6, 1, 4, 1, 3052, 1, 10, 3, 3, 2, 1), ).setIndexNames((0, "DATALINK-MIB", "nodataHolidayIndex"))
if mibBuilder.loadTexts: nodataHolidayTableEntry.setStatus('mandatory')
if mibBuilder.loadTexts: nodataHolidayTableEntry.setDescription('holiday table entry')
nodataHolidayIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 10, 3, 3, 2, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nodataHolidayIndex.setStatus('mandatory')
if mibBuilder.loadTexts: nodataHolidayIndex.setDescription('index for holiday list')
nodataHolidayItem = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 10, 3, 3, 2, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nodataHolidayItem.setStatus('mandatory')
if mibBuilder.loadTexts: nodataHolidayItem.setDescription('holiday list item, format is mm/dd')
nodataHolidayAdd = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 10, 3, 3, 3), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nodataHolidayAdd.setStatus('mandatory')
if mibBuilder.loadTexts: nodataHolidayAdd.setDescription('null on read, set with holiday to add MM/DD')
nodataHolidayDelete = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 10, 3, 3, 4), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nodataHolidayDelete.setStatus('mandatory')
if mibBuilder.loadTexts: nodataHolidayDelete.setDescription('null on read, set with holiday to delete MM/DD')
nodataHolidayClear = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 10, 3, 3, 5), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nodataHolidayClear.setStatus('mandatory')
if mibBuilder.loadTexts: nodataHolidayClear.setDescription('read returns 0, write requires private community name. used to clear the holiday list')
# scheduleAlarmTable (...3052.1.10.4) — row/column definitions continue
# past this chunk.
scheduleAlarmTable = MibTable((1, 3, 6, 1, 4, 1, 3052, 1, 10, 4), )
if mibBuilder.loadTexts: scheduleAlarmTable.setStatus('mandatory')
if mibBuilder.loadTexts: scheduleAlarmTable.setDescription('scheduled alarm table')
scheduleAlarmEntry = MibTableRow((1, 3, 6, 1, 4, 1, 3052, 1, 10, 4, 1), ).setIndexNames((0, "DATALINK-MIB", "scheduleIndex"))
if mibBuilder.loadTexts: scheduleAlarmEntry.setStatus('mandatory')
if mibBuilder.loadTexts: scheduleAlarmEntry.setDescription('schedule table entry')
scheduleIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 10, 4, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: scheduleIndex.setStatus('mandatory')
if mibBuilder.loadTexts: scheduleIndex.setDescription('day index')
scheduleActive = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 10, 4, 1, 2), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: scheduleActive.setStatus('mandatory')
if mibBuilder.loadTexts: scheduleActive.setDescription('if active or not')
scheduleTime = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 10, 4, 1, 3), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: scheduleTime.setStatus('mandatory')
if mibBuilder.loadTexts: scheduleTime.setDescription('time of day format is: hh:mm')
scheduleAcked = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 10, 4, 1, 4), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: scheduleAcked.setStatus('mandatory')
if mibBuilder.loadTexts: scheduleAcked.setDescription('reads 0, any set acks alarm')
scheduleBeeperActions = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 10, 4, 1, 5), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: scheduleBeeperActions.setStatus('mandatory')
if mibBuilder.loadTexts: scheduleBeeperActions.setDescription('0 1 2, none, 1/10 or 10/10')
scheduleSerialActions = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 10, 4, 1, 6), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: scheduleSerialActions.setStatus('mandatory')
if mibBuilder.loadTexts: scheduleSerialActions.setDescription('bits 0-7 show which messages 1-8 are sent')
schedulePagerActions = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 10, 4, 1, 7), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: schedulePagerActions.setStatus('mandatory')
if mibBuilder.loadTexts: schedulePagerActions.setDescription('bits 0-7 show which pagers 1-8 are used')
scheduleCalloutActions = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 10, 4, 1, 8), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: scheduleCalloutActions.setStatus('mandatory')
if mibBuilder.loadTexts: scheduleCalloutActions.setDescription('bits 0-7 show which modem callouts 1-8 are used')
scheduleTrapActions = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 10, 4, 1, 9), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: scheduleTrapActions.setStatus('mandatory')
if mibBuilder.loadTexts: scheduleTrapActions.setDescription('0/1 for traps are sent or not')
actionsBuzzerState = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 11, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: actionsBuzzerState.setStatus('mandatory')
if mibBuilder.loadTexts: actionsBuzzerState.setDescription('current buzzer state 0.1.2')
actionsSerialTable = MibTable((1, 3, 6, 1, 4, 1, 3052, 1, 11, 2), )
if mibBuilder.loadTexts: actionsSerialTable.setStatus('mandatory')
if mibBuilder.loadTexts: actionsSerialTable.setDescription('serial message table')
actionsSerialTableEntry = MibTableRow((1, 3, 6, 1, 4, 1, 3052, 1, 11, 2, 1), ).setIndexNames((0, "DATALINK-MIB", "serialTableIndex"))
if mibBuilder.loadTexts: actionsSerialTableEntry.setStatus('mandatory')
if mibBuilder.loadTexts: actionsSerialTableEntry.setDescription('serial table entry')
serialTableIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 11, 2, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: serialTableIndex.setStatus('mandatory')
if mibBuilder.loadTexts: serialTableIndex.setDescription('serial table index')
serialTableMessage = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 11, 2, 1, 2), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: serialTableMessage.setStatus('mandatory')
if mibBuilder.loadTexts: serialTableMessage.setDescription('serial table string')
actionsPagerTable = MibTable((1, 3, 6, 1, 4, 1, 3052, 1, 11, 3), )
if mibBuilder.loadTexts: actionsPagerTable.setStatus('mandatory')
if mibBuilder.loadTexts: actionsPagerTable.setDescription('pager table')
actionsPagerTableEntry = MibTableRow((1, 3, 6, 1, 4, 1, 3052, 1, 11, 3, 1), ).setIndexNames((0, "DATALINK-MIB", "pagerTableIndex"))
if mibBuilder.loadTexts: actionsPagerTableEntry.setStatus('mandatory')
if mibBuilder.loadTexts: actionsPagerTableEntry.setDescription('pager table entry')
pagerTableIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 11, 3, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: pagerTableIndex.setStatus('mandatory')
if mibBuilder.loadTexts: pagerTableIndex.setDescription('table index')
pagerType = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 11, 3, 1, 2), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: pagerType.setStatus('mandatory')
if mibBuilder.loadTexts: pagerType.setDescription('0-numeric, 1-alpha')
pagerPhonenumber = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 11, 3, 1, 3), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: pagerPhonenumber.setStatus('mandatory')
if mibBuilder.loadTexts: pagerPhonenumber.setDescription('phone number to call for pager')
pagerID = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 11, 3, 1, 4), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: pagerID.setStatus('mandatory')
if mibBuilder.loadTexts: pagerID.setDescription('ID or 2nd number to dial')
pagerDialDelay = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 11, 3, 1, 5), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: pagerDialDelay.setStatus('mandatory')
if mibBuilder.loadTexts: pagerDialDelay.setDescription('# seconds on numeric to delay between dial and send pagerID or message')
pagerHangupDelay = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 11, 3, 1, 6), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: pagerHangupDelay.setStatus('mandatory')
if mibBuilder.loadTexts: pagerHangupDelay.setDescription('# seconds on numeric to delay between messages and before hangup')
pagerMessage = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 11, 3, 1, 7), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: pagerMessage.setStatus('mandatory')
if mibBuilder.loadTexts: pagerMessage.setDescription('message, either alpha or numeric')
pagerSendId = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 11, 3, 1, 8), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: pagerSendId.setStatus('mandatory')
if mibBuilder.loadTexts: pagerSendId.setDescription('0/1 send unit ID or not to pager')
pagerSendReason = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 11, 3, 1, 9), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: pagerSendReason.setStatus('mandatory')
if mibBuilder.loadTexts: pagerSendReason.setDescription('0/1 send reason for page or not to pager')
pagerMaxAttempts = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 11, 3, 1, 10), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: pagerMaxAttempts.setStatus('mandatory')
if mibBuilder.loadTexts: pagerMaxAttempts.setDescription('max tries to be successful')
pagerAttempts = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 11, 3, 1, 11), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: pagerAttempts.setStatus('mandatory')
if mibBuilder.loadTexts: pagerAttempts.setDescription('current number of tries')
pagerAttemptDelay = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 11, 3, 1, 12), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: pagerAttemptDelay.setStatus('mandatory')
if mibBuilder.loadTexts: pagerAttemptDelay.setDescription('# minutes between attempts')
pagerRepeat = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 11, 3, 1, 13), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: pagerRepeat.setStatus('mandatory')
if mibBuilder.loadTexts: pagerRepeat.setDescription('0/1 do we repeat successful')
pagerRepeatDelay = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 11, 3, 1, 14), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: pagerRepeatDelay.setStatus('mandatory')
if mibBuilder.loadTexts: pagerRepeatDelay.setDescription('# minutes between repeats, if used')
actionsCalloutTable = MibTable((1, 3, 6, 1, 4, 1, 3052, 1, 11, 4), )
if mibBuilder.loadTexts: actionsCalloutTable.setStatus('mandatory')
if mibBuilder.loadTexts: actionsCalloutTable.setDescription('callout table')
actionsCalloutTableEntry = MibTableRow((1, 3, 6, 1, 4, 1, 3052, 1, 11, 4, 1), ).setIndexNames((0, "DATALINK-MIB", "calloutTableIndex"))
if mibBuilder.loadTexts: actionsCalloutTableEntry.setStatus('mandatory')
if mibBuilder.loadTexts: actionsCalloutTableEntry.setDescription('callout table entry')
calloutTableIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 11, 4, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: calloutTableIndex.setStatus('mandatory')
if mibBuilder.loadTexts: calloutTableIndex.setDescription('table index')
calloutPhonenumber = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 11, 4, 1, 2), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: calloutPhonenumber.setStatus('mandatory')
if mibBuilder.loadTexts: calloutPhonenumber.setDescription('phone number to call for callout')
calloutMaxConnecttime = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 11, 4, 1, 3), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: calloutMaxConnecttime.setStatus('mandatory')
if mibBuilder.loadTexts: calloutMaxConnecttime.setDescription('# seconds to wait for carrier')
calloutMessage = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 11, 4, 1, 4), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: calloutMessage.setStatus('mandatory')
if mibBuilder.loadTexts: calloutMessage.setDescription('message to send')
calloutSendId = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 11, 4, 1, 5), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: calloutSendId.setStatus('mandatory')
if mibBuilder.loadTexts: calloutSendId.setDescription('0/1 send unit ID or not to callout')
calloutSendReason = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 11, 4, 1, 6), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: calloutSendReason.setStatus('mandatory')
if mibBuilder.loadTexts: calloutSendReason.setDescription('0/1 send reason for page or not to callout')
calloutCommandWait = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 11, 4, 1, 7), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: calloutCommandWait.setStatus('mandatory')
if mibBuilder.loadTexts: calloutCommandWait.setDescription('#seconds to wait for a command on a callout before hangup')
calloutMaxAttempts = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 11, 4, 1, 8), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: calloutMaxAttempts.setStatus('mandatory')
if mibBuilder.loadTexts: calloutMaxAttempts.setDescription('max tries to be successful')
calloutAttempts = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 11, 4, 1, 9), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: calloutAttempts.setStatus('mandatory')
if mibBuilder.loadTexts: calloutAttempts.setDescription('current number of tries')
calloutAttemptDelay = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 11, 4, 1, 10), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: calloutAttemptDelay.setStatus('mandatory')
if mibBuilder.loadTexts: calloutAttemptDelay.setDescription('# minutes between attempts')
calloutRepeat = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 11, 4, 1, 11), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: calloutRepeat.setStatus('mandatory')
if mibBuilder.loadTexts: calloutRepeat.setDescription('0/1 do we repeat successful')
calloutRepeatDelay = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 11, 4, 1, 12), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: calloutRepeatDelay.setStatus('mandatory')
if mibBuilder.loadTexts: calloutRepeatDelay.setDescription('# minutes between repeats, if used')
actionsTrapsEntSpecific = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 11, 5, 1), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: actionsTrapsEntSpecific.setStatus('mandatory')
if mibBuilder.loadTexts: actionsTrapsEntSpecific.setDescription('0/1 enterprise specific traps enabled')
actionsTrapsEntSpecCount = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 11, 5, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: actionsTrapsEntSpecCount.setStatus('mandatory')
if mibBuilder.loadTexts: actionsTrapsEntSpecCount.setDescription('number of enterprise specific traps sent since last re-boot')
linefeeds = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 12, 1, 1), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: linefeeds.setStatus('mandatory')
if mibBuilder.loadTexts: linefeeds.setDescription('0/1 are linefeeds added to CRs on command responses?')
duplex = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 12, 1, 2), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: duplex.setStatus('mandatory')
if mibBuilder.loadTexts: duplex.setDescription('0-half 1-full')
response = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 12, 1, 3), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: response.setStatus('mandatory')
if mibBuilder.loadTexts: response.setDescription('0-codes 1-words')
datafilterEnabled = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 12, 1, 4), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: datafilterEnabled.setStatus('mandatory')
if mibBuilder.loadTexts: datafilterEnabled.setDescription('0/1 off/on')
alarmfilterEnabled = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 12, 1, 5), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: alarmfilterEnabled.setStatus('mandatory')
if mibBuilder.loadTexts: alarmfilterEnabled.setDescription('0/1 off/on')
operatingMode = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 12, 1, 6, 1), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: operatingMode.setStatus('mandatory')
if mibBuilder.loadTexts: operatingMode.setDescription('1 command 2 input/access 3 unused 4 inline 5 extmodem')
inlineMode = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 12, 1, 6, 2), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: inlineMode.setStatus('mandatory')
if mibBuilder.loadTexts: inlineMode.setDescription('1,2,3 if inline, mode 1 (N->2) mode 2 (1->2, 3->4) mode 3 (1->2, 3->4, 5->6)')
inlineSource = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 12, 1, 6, 3), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: inlineSource.setStatus('mandatory')
if mibBuilder.loadTexts: inlineSource.setDescription('if inline and inlineMode==1, source of I/O2 inline')
inlineHsk2 = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 12, 1, 6, 4, 1), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: inlineHsk2.setStatus('mandatory')
if mibBuilder.loadTexts: inlineHsk2.setDescription('handshake mode 0-3 for inline port I/O 2')
inlineHsk4 = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 12, 1, 6, 4, 2), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: inlineHsk4.setStatus('mandatory')
if mibBuilder.loadTexts: inlineHsk4.setDescription('handshake mode 0-3 for inline port I/O 4')
inlineHsk6 = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 12, 1, 6, 4, 3), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: inlineHsk6.setStatus('mandatory')
if mibBuilder.loadTexts: inlineHsk6.setDescription('handshake mode 0-3 for inline port I/O 6')
sureEnabled = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 12, 1, 7), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sureEnabled.setStatus('mandatory')
if mibBuilder.loadTexts: sureEnabled.setDescription('0/1 off/on')
commandTcpipTimeout = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 12, 1, 8), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: commandTcpipTimeout.setStatus('mandatory')
if mibBuilder.loadTexts: commandTcpipTimeout.setDescription('0-none, else number of no-activity minutes -> tcpip command to drop')
sysadminTcpipTimeout = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 12, 1, 9), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sysadminTcpipTimeout.setStatus('mandatory')
if mibBuilder.loadTexts: sysadminTcpipTimeout.setDescription('0-none, else number of no-activity minutes -> tcpip sysadmin to drop')
bypassEndchar = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 12, 1, 10), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: bypassEndchar.setStatus('mandatory')
if mibBuilder.loadTexts: bypassEndchar.setDescription('ascii value for character to exit bypass mode. default ->27')
routerAutoPing = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 12, 1, 11), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: routerAutoPing.setStatus('mandatory')
if mibBuilder.loadTexts: routerAutoPing.setDescription('0/1 = no/yes, default is 0, do we ping the default router every 10 minutes?')
modemParity = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 12, 2, 1), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: modemParity.setStatus('mandatory')
if mibBuilder.loadTexts: modemParity.setDescription('1 7E 2 7O 3 8N')
modemUserSetup = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 12, 2, 2), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: modemUserSetup.setStatus('mandatory')
if mibBuilder.loadTexts: modemUserSetup.setDescription('sent to modem on init before the factory setup string')
modemTapSetup = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 12, 2, 3), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: modemTapSetup.setStatus('mandatory')
if mibBuilder.loadTexts: modemTapSetup.setDescription('sent to modem on just before doing TAP (alpha pager) protocol')
modemAnswerString = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 12, 2, 4), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: modemAnswerString.setStatus('mandatory')
if mibBuilder.loadTexts: modemAnswerString.setDescription('sent when modem makes connection')
modemExtSetup = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 12, 2, 5), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: modemExtSetup.setStatus('mandatory')
if mibBuilder.loadTexts: modemExtSetup.setDescription('sent to ext. modem for setup string')
modemExtSetupTime = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 12, 2, 6), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: modemExtSetupTime.setStatus('mandatory')
if mibBuilder.loadTexts: modemExtSetupTime.setDescription('# minutes of idle time between sending ext. modem setup string')
modemInactivityTimer = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 12, 2, 7), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: modemInactivityTimer.setStatus('mandatory')
if mibBuilder.loadTexts: modemInactivityTimer.setDescription('# minutes of no transmit which aborts a connection')
modemAutoexecString = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 12, 2, 8), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: modemAutoexecString.setStatus('mandatory')
if mibBuilder.loadTexts: modemAutoexecString.setDescription('command string which auto-executes after modem connection if no other command within 10 seconds')
modemAutoexecEnabled = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 12, 2, 9), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: modemAutoexecEnabled.setStatus('mandatory')
if mibBuilder.loadTexts: modemAutoexecEnabled.setDescription('0/1 autoexec enabled')
modemTimeBetweenOutbound = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 12, 2, 10), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: modemTimeBetweenOutbound.setStatus('mandatory')
if mibBuilder.loadTexts: modemTimeBetweenOutbound.setDescription('# seconds (minimum) between outbound call attempts')
releaseMode = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 12, 3, 1), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: releaseMode.setStatus('mandatory')
if mibBuilder.loadTexts: releaseMode.setDescription('1-Line 3-CBB 4-Xmodem')
autodeleteEnable = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 12, 3, 2), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: autodeleteEnable.setStatus('mandatory')
if mibBuilder.loadTexts: autodeleteEnable.setDescription('1/2 off/on autodelete for CBB and Xmodem')
releaseCompressed = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 12, 3, 3), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: releaseCompressed.setStatus('mandatory')
if mibBuilder.loadTexts: releaseCompressed.setDescription('1-compressed 2-decompressed')
waitMode = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 12, 4, 1), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: waitMode.setStatus('mandatory')
if mibBuilder.loadTexts: waitMode.setDescription('1/2 off/on wait for 02 after 01 on rlmodes')
tagMode = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 12, 4, 2), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: tagMode.setStatus('mandatory')
if mibBuilder.loadTexts: tagMode.setDescription('1/2 off/on Line/Block tag enabled')
crcMode = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 12, 4, 3), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: crcMode.setStatus('mandatory')
if mibBuilder.loadTexts: crcMode.setDescription('1/2 off/on add CRC to ascii releases')
dleMode = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 12, 4, 4), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dleMode.setStatus('mandatory')
if mibBuilder.loadTexts: dleMode.setDescription('1/2 off/on use DLE stuffing on CBB')
cbbRetransmits = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 12, 4, 5), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cbbRetransmits.setStatus('mandatory')
if mibBuilder.loadTexts: cbbRetransmits.setDescription('# times a block retransmitted in CBB mode')
cbbTimeout = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 12, 4, 6), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cbbTimeout.setStatus('mandatory')
if mibBuilder.loadTexts: cbbTimeout.setDescription('# seconds to wait for an ack before retransmit in CBB mode')
activeDatabase = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 12, 5), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: activeDatabase.setStatus('mandatory')
if mibBuilder.loadTexts: activeDatabase.setDescription('selects a file. ports 2001-2006 auto select this variable')
actionCount = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 13, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: actionCount.setStatus('mandatory')
if mibBuilder.loadTexts: actionCount.setDescription('number of active items in action table, 0-nn')
actionTable = MibTable((1, 3, 6, 1, 4, 1, 3052, 1, 13, 1, 2), )
if mibBuilder.loadTexts: actionTable.setStatus('mandatory')
if mibBuilder.loadTexts: actionTable.setDescription('action queue table')
actionTableEntry = MibTableRow((1, 3, 6, 1, 4, 1, 3052, 1, 13, 1, 2, 1), ).setIndexNames((0, "DATALINK-MIB", "actionTableIndex"))
if mibBuilder.loadTexts: actionTableEntry.setStatus('mandatory')
if mibBuilder.loadTexts: actionTableEntry.setDescription('action queue entry')
actionTableIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 13, 1, 2, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: actionTableIndex.setStatus('mandatory')
if mibBuilder.loadTexts: actionTableIndex.setDescription('action table entry')
actionAcked = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 13, 1, 2, 1, 2), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: actionAcked.setStatus('mandatory')
if mibBuilder.loadTexts: actionAcked.setDescription('reads 0, any set removes (acks) this action')
actionReason = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 13, 1, 2, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: actionReason.setStatus('mandatory')
if mibBuilder.loadTexts: actionReason.setDescription('code reason for action')
actionReasonID = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 13, 1, 2, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: actionReasonID.setStatus('mandatory')
if mibBuilder.loadTexts: actionReasonID.setDescription('which of the (reasons) e.g, alarm 3 vs. alarm 4')
actionReasonLevel = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 13, 1, 2, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: actionReasonLevel.setStatus('mandatory')
if mibBuilder.loadTexts: actionReasonLevel.setDescription('which of the levels for alarms which have 1-3 levels')
actionType = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 13, 1, 2, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: actionType.setStatus('mandatory')
if mibBuilder.loadTexts: actionType.setDescription('type of action being taken (page, callout, etc.)')
actionTypeID = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 13, 1, 2, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: actionTypeID.setStatus('mandatory')
if mibBuilder.loadTexts: actionTypeID.setDescription('which of the actions e.g, pager 3 vs. pager 4')
actionRepeatTime = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 13, 1, 2, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: actionRepeatTime.setStatus('mandatory')
if mibBuilder.loadTexts: actionRepeatTime.setDescription('#minutes between repeats of attempts of this action')
actionAttempts = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 13, 1, 2, 1, 9), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: actionAttempts.setStatus('mandatory')
if mibBuilder.loadTexts: actionAttempts.setDescription('# of attempts to try this action so far')
actionNextAttempt = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 13, 1, 2, 1, 10), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: actionNextAttempt.setStatus('mandatory')
if mibBuilder.loadTexts: actionNextAttempt.setDescription('# minutes until the next attempt of this action')
actionTimeStamp = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 13, 1, 2, 1, 11), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: actionTimeStamp.setStatus('mandatory')
if mibBuilder.loadTexts: actionTimeStamp.setDescription('date and time string: 02/34 12:34, or text message if no items')
historyCount = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 13, 2, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: historyCount.setStatus('mandatory')
if mibBuilder.loadTexts: historyCount.setDescription('number of history items in history table, 0-nn')
historyTable = MibTable((1, 3, 6, 1, 4, 1, 3052, 1, 13, 2, 2), )
if mibBuilder.loadTexts: historyTable.setStatus('mandatory')
if mibBuilder.loadTexts: historyTable.setDescription('action history table')
historyTableEntry = MibTableRow((1, 3, 6, 1, 4, 1, 3052, 1, 13, 2, 2, 1), ).setIndexNames((0, "DATALINK-MIB", "historyTableIndex"))
if mibBuilder.loadTexts: historyTableEntry.setStatus('mandatory')
if mibBuilder.loadTexts: historyTableEntry.setDescription('history queue entry')
historyTableIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 13, 2, 2, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: historyTableIndex.setStatus('mandatory')
if mibBuilder.loadTexts: historyTableIndex.setDescription('history table entry')
historyEntryType = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 13, 2, 2, 1, 2), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: historyEntryType.setStatus('mandatory')
if mibBuilder.loadTexts: historyEntryType.setDescription('type of entry (e.g., modem fail, pager pass, etc.)')
historyReason = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 13, 2, 2, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: historyReason.setStatus('mandatory')
if mibBuilder.loadTexts: historyReason.setDescription('code reason for history')
historyReasonID = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 13, 2, 2, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: historyReasonID.setStatus('mandatory')
if mibBuilder.loadTexts: historyReasonID.setDescription('which of the (reasons) e.g, alarm 3 vs. alarm 4')
historyReasonLevel = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 13, 2, 2, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: historyReasonLevel.setStatus('mandatory')
if mibBuilder.loadTexts: historyReasonLevel.setDescription('which of the levels for alarms which have 1-3 levels')
historyType = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 13, 2, 2, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: historyType.setStatus('mandatory')
if mibBuilder.loadTexts: historyType.setDescription('type of history being taken (page, callout, etc.)')
historyTypeID = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 13, 2, 2, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: historyTypeID.setStatus('mandatory')
if mibBuilder.loadTexts: historyTypeID.setDescription('which of the historys e.g, pager 3 vs. pager 4')
historyTimeStamp = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 13, 2, 2, 1, 8), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: historyTimeStamp.setStatus('mandatory')
if mibBuilder.loadTexts: historyTimeStamp.setDescription('date and time string: 02/34 12:34, or text message if no items')
historyClearLog = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 13, 2, 2, 1, 9), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: historyClearLog.setStatus('mandatory')
if mibBuilder.loadTexts: historyClearLog.setDescription('reads 0, any set clears all history log items')
lastCalloutPageReason = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 13, 3), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: lastCalloutPageReason.setStatus('mandatory')
if mibBuilder.loadTexts: lastCalloutPageReason.setDescription('the reason string for the last callout or page, or NONE if never used')
rtsShowAnswer = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 14, 1), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rtsShowAnswer.setStatus('deprecated')
if mibBuilder.loadTexts: rtsShowAnswer.setDescription('0-no 1-yes, show answer string on connection (deprecated)')
rtsNeedPassword = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 14, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rtsNeedPassword.setStatus('deprecated')
if mibBuilder.loadTexts: rtsNeedPassword.setDescription('0-no 1-yes, need password on RTS connection (deprecated)')
rtsWaitXon = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 14, 3), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rtsWaitXon.setStatus('deprecated')
if mibBuilder.loadTexts: rtsWaitXon.setDescription('0-no 1-yes, wait for Xon after connection before sending data (deprecated)')
rtsIdleTimeout = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 14, 4), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rtsIdleTimeout.setStatus('deprecated')
if mibBuilder.loadTexts: rtsIdleTimeout.setDescription('0-255, 0-none, 1-255 #idle minutes no data = shutdown socket (deprecated)')
rtsEmptyClose = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 14, 5), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rtsEmptyClose.setStatus('deprecated')
if mibBuilder.loadTexts: rtsEmptyClose.setDescription('0->no, 1-> yes, when file empty close socket (polling, not rt data) (deprecated)')
rtsTable = MibTable((1, 3, 6, 1, 4, 1, 3052, 1, 14, 6), )
if mibBuilder.loadTexts: rtsTable.setStatus('mandatory')
if mibBuilder.loadTexts: rtsTable.setDescription('real time socket table')
rtsTableEntry = MibTableRow((1, 3, 6, 1, 4, 1, 3052, 1, 14, 6, 1), ).setIndexNames((0, "DATALINK-MIB", "rtsTableIndex"))
if mibBuilder.loadTexts: rtsTableEntry.setStatus('mandatory')
if mibBuilder.loadTexts: rtsTableEntry.setDescription('rts table entry')
rtsTableIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 14, 6, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rtsTableIndex.setStatus('mandatory')
if mibBuilder.loadTexts: rtsTableIndex.setDescription('rts table entry index')
rtsNoStore = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 14, 6, 1, 2), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rtsNoStore.setStatus('mandatory')
if mibBuilder.loadTexts: rtsNoStore.setDescription("0-allow storage, 1-don't store data when RTS socket not connected")
rtsDenied = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 14, 6, 1, 3), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rtsDenied.setStatus('mandatory')
if mibBuilder.loadTexts: rtsDenied.setDescription("0-don't allow, 1=yes allow this rts socket to connect")
rtsSocketState = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 14, 6, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rtsSocketState.setStatus('mandatory')
if mibBuilder.loadTexts: rtsSocketState.setDescription('0-closed, 1-wait for pass, 2-wait for xon, 3=open for data')
rtsPortShowAnswer = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 14, 6, 1, 5), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rtsPortShowAnswer.setStatus('mandatory')
if mibBuilder.loadTexts: rtsPortShowAnswer.setDescription('0-no 1-yes, show answer string on connection')
rtsPortNeedPassword = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 14, 6, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rtsPortNeedPassword.setStatus('mandatory')
if mibBuilder.loadTexts: rtsPortNeedPassword.setDescription('0-no 1-yes, need password on RTS connection')
rtsPortWaitXon = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 14, 6, 1, 7), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rtsPortWaitXon.setStatus('mandatory')
if mibBuilder.loadTexts: rtsPortWaitXon.setDescription('0-no 1-yes, wait for Xon after connection before sending data')
rtsPortIdleTimeout = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 14, 6, 1, 8), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rtsPortIdleTimeout.setStatus('mandatory')
if mibBuilder.loadTexts: rtsPortIdleTimeout.setDescription('0-255, 0-none, 1-255 #idle minutes no data = shutdown socket')
rtsPortEmptyClose = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 14, 6, 1, 9), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rtsPortEmptyClose.setStatus('mandatory')
if mibBuilder.loadTexts: rtsPortEmptyClose.setDescription('0->no, 1-> yes, when file empty close socket (polling, not rt data)')
iprestrictTable = MibTable((1, 3, 6, 1, 4, 1, 3052, 1, 15, 1), )
if mibBuilder.loadTexts: iprestrictTable.setStatus('mandatory')
if mibBuilder.loadTexts: iprestrictTable.setDescription('ip restrictions table')
iprestrictTableEntry = MibTableRow((1, 3, 6, 1, 4, 1, 3052, 1, 15, 1, 1), ).setIndexNames((0, "DATALINK-MIB", "iprestrictTableIndex"))
if mibBuilder.loadTexts: iprestrictTableEntry.setStatus('mandatory')
if mibBuilder.loadTexts: iprestrictTableEntry.setDescription('ip restriction table entry')
iprestrictTableIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 15, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: iprestrictTableIndex.setStatus('mandatory')
if mibBuilder.loadTexts: iprestrictTableIndex.setDescription('ip restrict table entry index')
iprestrictIpAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 3052, 1, 15, 1, 1, 2), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: iprestrictIpAddress.setStatus('mandatory')
if mibBuilder.loadTexts: iprestrictIpAddress.setDescription('an ip address which forces a restriction or allowance for an ip range')
suspendIPRestrictions = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 15, 2), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: suspendIPRestrictions.setStatus('mandatory')
if mibBuilder.loadTexts: suspendIPRestrictions.setDescription('read returns 0, writing requires the private community name. default is 0 set to 1 to suspend IP restrictions while loading the list set back to 0 to allow the restrictions to be used.')
killIPRestrictions = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 15, 3), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: killIPRestrictions.setStatus('mandatory')
if mibBuilder.loadTexts: killIPRestrictions.setDescription('read returns 0, writing requires the private community name. any set removes all entries from the IP restrcition list.')
addIPRestrictions = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 15, 4), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: addIPRestrictions.setStatus('mandatory')
if mibBuilder.loadTexts: addIPRestrictions.setDescription('read returns 0, writing requires the private community name. any set adds an entry to the IP restriction list note that list is no re-sorted, so must add in order')
ipCurrentStatic = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 16, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ipCurrentStatic.setStatus('mandatory')
if mibBuilder.loadTexts: ipCurrentStatic.setDescription('1=static, 0=dynamic')
ipCurrentAddress = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 16, 1, 2), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ipCurrentAddress.setStatus('mandatory')
if mibBuilder.loadTexts: ipCurrentAddress.setDescription('current IP address')
ipCurrentSubnetMask = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 16, 1, 3), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ipCurrentSubnetMask.setStatus('mandatory')
if mibBuilder.loadTexts: ipCurrentSubnetMask.setDescription('current subnet mask')
ipCurrentDefaultRouter = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 16, 1, 4), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ipCurrentDefaultRouter.setStatus('mandatory')
if mibBuilder.loadTexts: ipCurrentDefaultRouter.setDescription('current default router')
ipNewStatic = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 16, 2, 1), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipNewStatic.setStatus('mandatory')
if mibBuilder.loadTexts: ipNewStatic.setDescription('1=static, 0=dynamic. write requires private community name.')
ipNewAddress = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 16, 2, 2), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipNewAddress.setStatus('mandatory')
if mibBuilder.loadTexts: ipNewAddress.setDescription('read=current new address, write requires private community name.')
ipNewSubnetMask = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 16, 2, 3), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipNewSubnetMask.setStatus('mandatory')
if mibBuilder.loadTexts: ipNewSubnetMask.setDescription('read=current new subnet mask, write requires private community name.')
ipNewDefaultRouter = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 16, 2, 4), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipNewDefaultRouter.setStatus('mandatory')
if mibBuilder.loadTexts: ipNewDefaultRouter.setDescription('read=current new default router, write requires private community name.')
ipNewSetup = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 16, 2, 5), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipNewSetup.setStatus('mandatory')
if mibBuilder.loadTexts: ipNewSetup.setDescription('read=0. write requires private community name. any write causes the current object values for ipNewStatic, ipNewAddress, ipNewSubnetMask and ipNewDefaultRouter to be used. Causes the unit to re-initialize its network stacks with these new values. Changes to ipNewStatic, ipNewAddress, ipNewSubnetMask and ipNewDefaultRouter do not affect the network stack until ipNewSetup is written with some value:')
pppIDString = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 17, 1, 1), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: pppIDString.setStatus('mandatory')
if mibBuilder.loadTexts: pppIDString.setDescription('sent in ppp up trap to provide host identification string')
pppIPAddress = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 17, 1, 2), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: pppIPAddress.setStatus('mandatory')
if mibBuilder.loadTexts: pppIPAddress.setDescription('sent in ppp up trap to provide host identification by IP address')
ccodeLoaded = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 18, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ccodeLoaded.setStatus('mandatory')
if mibBuilder.loadTexts: ccodeLoaded.setDescription('0/1, no/yes, is ccode loaded')
ccodeRunning = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 18, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ccodeRunning.setStatus('mandatory')
if mibBuilder.loadTexts: ccodeRunning.setDescription('0/1, no/yes, is ccode running')
ccodeStackMainWas = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 18, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ccodeStackMainWas.setStatus('mandatory')
if mibBuilder.loadTexts: ccodeStackMainWas.setDescription('# of bytes of stack used by main app, last time run')
ccodeStackMainNow = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 18, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ccodeStackMainNow.setStatus('mandatory')
if mibBuilder.loadTexts: ccodeStackMainNow.setDescription('# of bytes of stack used by main app, this time run')
ccodeStackT2Was = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 18, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ccodeStackT2Was.setStatus('mandatory')
if mibBuilder.loadTexts: ccodeStackT2Was.setDescription('# of bytes of stack used by 2nd task of app, last time run')
ccodeStackT2Was2 = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 18, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ccodeStackT2Was2.setStatus('mandatory')
if mibBuilder.loadTexts: ccodeStackT2Was2.setDescription('# of bytes of stack used by 2nd task of app, this time run')
techsupportInt1 = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 99, 1), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: techsupportInt1.setStatus('mandatory')
if mibBuilder.loadTexts: techsupportInt1.setDescription('a debugging integer for technical support use only. Do not use')
techsupportInt2 = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 99, 2), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: techsupportInt2.setStatus('mandatory')
if mibBuilder.loadTexts: techsupportInt2.setDescription('a debugging integer for technical support use only. Do not use')
techsupportInt3 = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 99, 3), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: techsupportInt3.setStatus('mandatory')
if mibBuilder.loadTexts: techsupportInt3.setDescription('a debugging integer for technical support use only. Do not use')
techsupportInt4 = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 99, 4), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: techsupportInt4.setStatus('mandatory')
if mibBuilder.loadTexts: techsupportInt4.setDescription('a debugging integer for technical support use only. Do not use')
techsupportInt5 = MibScalar((1, 3, 6, 1, 4, 1, 3052, 1, 99, 5), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: techsupportInt5.setStatus('mandatory')
if mibBuilder.loadTexts: techsupportInt5.setDescription('a debugging integer for technical support use only. Do not use')
datalinkDbasePfullTrap = NotificationType((1, 3, 6, 1, 4, 1, 3052) + (0,501)).setObjects(("DATALINK-MIB", "databaseAlarmIndex"), ("DATALINK-MIB", "databasePfull"))
if mibBuilder.loadTexts: datalinkDbasePfullTrap.setDescription('The datalinkDbasePfullTrap is issued when the database reaches a pre-determined threshold level, which causes a trap.')
datalinkFilePfullTrap = NotificationType((1, 3, 6, 1, 4, 1, 3052) + (0,502)).setObjects(("DATALINK-MIB", "fileAlarmFileIndex"), ("DATALINK-MIB", "fileAlarmThresholdIndex"), ("DATALINK-MIB", "filePercentNow"))
if mibBuilder.loadTexts: datalinkFilePfullTrap.setDescription('The datalinkFilePfullTrap is issued when one of the data files reaches a pre-determined threshold level, which causes a trap.')
datalinkDataAlarmTrap = NotificationType((1, 3, 6, 1, 4, 1, 3052) + (0,503)).setObjects(("DATALINK-MIB", "dataAlarmIndex"), ("DATALINK-MIB", "dataAlarmName"), ("DATALINK-MIB", "dataAlarmString"), ("DATALINK-MIB", "dataAlarmPort"))
if mibBuilder.loadTexts: datalinkDataAlarmTrap.setDescription('The datalinkDataAlarmTrap is issued when one of the data alarms reaches a pre-determined threshold level, which causes a trap.')
datalinkSensorAlarmTrap = NotificationType((1, 3, 6, 1, 4, 1, 3052) + (0,504)).setObjects(("DATALINK-MIB", "sensorAlarmIndex"), ("DATALINK-MIB", "sensorAlarmName"), ("DATALINK-MIB", "sensorAlarmState"))
if mibBuilder.loadTexts: datalinkSensorAlarmTrap.setDescription('The datalinkSensorAlarmTrap is issued when one of the External Sensors is triggered for a pre-determined threshold amount of time, which causes a trap.')
datalinkNoDataAlarmTrap = NotificationType((1, 3, 6, 1, 4, 1, 3052) + (0,505)).setObjects(("DATALINK-MIB", "nodataTablePortIndex"), ("DATALINK-MIB", "nodataTableScheduleIndex"), ("DATALINK-MIB", "nodataTableLevelIndex"), ("DATALINK-MIB", "nodataAlarmStatusCounter"), ("DATALINK-MIB", "nodataTableThreshold"))
if mibBuilder.loadTexts: datalinkNoDataAlarmTrap.setDescription('The datalinkNoDataAlarmTrap is issued when one of the ports receives no input data for a pre-determined threshold amount of time, which causes a trap.')
datalinkSchedTrap = NotificationType((1, 3, 6, 1, 4, 1, 3052) + (0,506)).setObjects(("DATALINK-MIB", "scheduleIndex"))
if mibBuilder.loadTexts: datalinkSchedTrap.setDescription('The datalinkSchedTrap is issued when a scheduled event causes a trap.')
datalinkImmediateTrap = NotificationType((1, 3, 6, 1, 4, 1, 3052) + (0,507))
if mibBuilder.loadTexts: datalinkImmediateTrap.setDescription('The datalinkImmediateTrap is issued when the dotrap command is used to issue a test trap to all snmp managers')
datalinkPPPupTrap = NotificationType((1, 3, 6, 1, 4, 1, 3052) + (0,509)).setObjects(("DATALINK-MIB", "pppIDString"), ("DATALINK-MIB", "pppIPAddress"))
if mibBuilder.loadTexts: datalinkPPPupTrap.setDescription('The datalinkPPPupTrap is issued when the PPP interface is brought up and the ppp connection has been established')
mibBuilder.exportSymbols("DATALINK-MIB", cbbRetransmits=cbbRetransmits, rtsWaitXon=rtsWaitXon, sensorAlarmTable=sensorAlarmTable, actionHistory=actionHistory, scheduleActive=scheduleActive, alarmfilterEnabled=alarmfilterEnabled, rtsNoStore=rtsNoStore, pagerTableIndex=pagerTableIndex, modemTapSetup=modemTapSetup, actionTable=actionTable, ccodeStackMainWas=ccodeStackMainWas, modemExtSetup=modemExtSetup, ftpsetup=ftpsetup, rtsTable=rtsTable, dleMode=dleMode, netcard=netcard, releaseCompressed=releaseCompressed, nodataAlarmStatus=nodataAlarmStatus, modemTimeBetweenOutbound=modemTimeBetweenOutbound, calloutRepeatDelay=calloutRepeatDelay, productname=productname, calloutSendReason=calloutSendReason, rtsSocketState=rtsSocketState, ftpPushEnabled=ftpPushEnabled, pagerHangupDelay=pagerHangupDelay, systemversion=systemversion, ftpPushCount=ftpPushCount, portParity=portParity, calloutMessage=calloutMessage, dataAlarmString=dataAlarmString, fileRecordsAvailable=fileRecordsAvailable, dataAlarmName=dataAlarmName, databaseAlarmCalloutActions=databaseAlarmCalloutActions, calloutTableIndex=calloutTableIndex, techsupport=techsupport, fileTableIndex=fileTableIndex, databaseSize=databaseSize, actionCount=actionCount, rtsNeedPassword=rtsNeedPassword, scheduleAlarmTable=scheduleAlarmTable, actionTableIndex=actionTableIndex, calloutSendId=calloutSendId, modems=modems, dataAlarmAcked=dataAlarmAcked, modemAutoexecString=modemAutoexecString, datalinkPPPupTrap=datalinkPPPupTrap, ftpAutoDelete=ftpAutoDelete, nodataNumberHolidays=nodataNumberHolidays, historyTypeID=historyTypeID, filesetup=filesetup, sitebyport=sitebyport, pagerMaxAttempts=pagerMaxAttempts, historyReasonLevel=historyReasonLevel, techsupportInt3=techsupportInt3, autodeleteEnable=autodeleteEnable, binRecordBlocking=binRecordBlocking, charmask=charmask, autoDstAdjust=autoDstAdjust, databaseAlarmThreshold=databaseAlarmThreshold, factorysetup=factorysetup, bypassEndchar=bypassEndchar, modemUserSetup=modemUserSetup, 
datalinkFilePfullTrap=datalinkFilePfullTrap, ftpPasswords=ftpPasswords, commandPassword=commandPassword, pagerPhonenumber=pagerPhonenumber, nodataTableLevelIndex=nodataTableLevelIndex, ftpPush=ftpPush, nodataAlarmStatusCounter=nodataAlarmStatusCounter, sensorAlarmEntry=sensorAlarmEntry, datalinkSiteId=datalinkSiteId, memorysize=memorysize, scheduleAcked=scheduleAcked, snmpManagerTable=snmpManagerTable, nodataTableSerialActions=nodataTableSerialActions, portSetupEntry=portSetupEntry, ftpPushUser=ftpPushUser, inlineHskMode=inlineHskMode, ipNewDefaultRouter=ipNewDefaultRouter, fileAlarmBeeperActions=fileAlarmBeeperActions, nodataHolidayClear=nodataHolidayClear, modemExtSetupTime=modemExtSetupTime, fileName=fileName, nodataTablePortIndex=nodataTablePortIndex, snmpSendTestTrap=snmpSendTestTrap, pagerID=pagerID, dataAlarmPort=dataAlarmPort, nodataTableThreshold=nodataTableThreshold, duplex=duplex, nodataAlarmHolidays=nodataAlarmHolidays, pagerAttemptDelay=pagerAttemptDelay, sensorAlarmActive=sensorAlarmActive, databasemode=databasemode, portWord=portWord, tcpPasswords=tcpPasswords, nodataTableEntry=nodataTableEntry, modemsetupstring=modemsetupstring, rtsIdleTimeout=rtsIdleTimeout, filePercentNow=filePercentNow, rtsTableEntry=rtsTableEntry, sensorAlarmMode=sensorAlarmMode, scheduleCalloutActions=scheduleCalloutActions, datafilterEnabled=datafilterEnabled, maxRecordChars=maxRecordChars, dataAlarmTable=dataAlarmTable, ftpPushIPAddress=ftpPushIPAddress, actionAcked=actionAcked, nodataAlarms=nodataAlarms, modemcddelay=modemcddelay, ftpPushAlarms=ftpPushAlarms, sensorAlarmState=sensorAlarmState, fileAlarmFileIndex=fileAlarmFileIndex, inlineHsk4=inlineHsk4, historyClearLog=historyClearLog, scheduleIndex=scheduleIndex, unitIds=unitIds, snmpMgrIndex=snmpMgrIndex, historyType=historyType, dataAlarmBeeperActions=dataAlarmBeeperActions, operatingMode=operatingMode, realtimesocket=realtimesocket, actionsPagerTable=actionsPagerTable, calloutRepeat=calloutRepeat, 
snmpTrapsAutoRepeatTime=snmpTrapsAutoRepeatTime, fileAlarmSerialActions=fileAlarmSerialActions, actionsBuzzer=actionsBuzzer, datalinkThisProduct=datalinkThisProduct, fileRecords=fileRecords, nodataTableCalloutActions=nodataTableCalloutActions, fileType=fileType, pagerType=pagerType, pagerMessage=pagerMessage, killIPRestrictions=killIPRestrictions, dataAlarmClearTime=dataAlarmClearTime, sensorAlarmSerialActions=sensorAlarmSerialActions, ipCurrentAddress=ipCurrentAddress, rtsShowAnswer=rtsShowAnswer, schedulePagerActions=schedulePagerActions, snmpManagerName=snmpManagerName, entireDatabase=entireDatabase, modemSettings=modemSettings, fileAlarmThreshold=fileAlarmThreshold, sensorAlarmCalloutActions=sensorAlarmCalloutActions, scheduleAlarmEntry=scheduleAlarmEntry, ipNew=ipNew, fileTableEntry=fileTableEntry, passwordTableEntry=passwordTableEntry, modemreport=modemreport, portDateTimeStampMode=portDateTimeStampMode, sensorAlarmTrapActions=sensorAlarmTrapActions, alarmhistory=alarmhistory, passwordCommand=passwordCommand, historyReason=historyReason, dataAlarmIndex=dataAlarmIndex, serialTableIndex=serialTableIndex, ftpPushAcct=ftpPushAcct, actionType=actionType, fileAlarmEntry=fileAlarmEntry, ftpPushStatusMode=ftpPushStatusMode, commandPasswordTimeout=commandPasswordTimeout, dataRelease=dataRelease, databaseAlarmFileMaxSize=databaseAlarmFileMaxSize, ftpPushTiming=ftpPushTiming, modemtype=modemtype, portSetupTable=portSetupTable, time=time, ipCurrent=ipCurrent, pppIDString=pppIDString, techsupportInt2=techsupportInt2, fileAlarmPagerActions=fileAlarmPagerActions, currenttime=currenttime, siteindex=siteindex, inlineSource=inlineSource, rtsTableIndex=rtsTableIndex, modemAutoexecEnabled=modemAutoexecEnabled, dataAlarmEntry=dataAlarmEntry, historyTimeStamp=historyTimeStamp, iprestrictTableEntry=iprestrictTableEntry, actionQueue=actionQueue, serialnumber=serialnumber, rtsPortNeedPassword=rtsPortNeedPassword, actionsTrapsEntSpecCount=actionsTrapsEntSpecCount, 
scheduleTrapActions=scheduleTrapActions, ftpPushDir=ftpPushDir, ccodeRunning=ccodeRunning, fileAlarms=fileAlarms, snmpTableEntry=snmpTableEntry, dataAlarmActive=dataAlarmActive, portWrapMode=portWrapMode, productIds=productIds, snmpTrapsEnabled=snmpTrapsEnabled, asentria=asentria, actionAttempts=actionAttempts, sysadminTcpipTimeout=sysadminTcpipTimeout, dataAlarmSerialActions=dataAlarmSerialActions, passwordIndex=passwordIndex, fileAlarmThresholdIndex=fileAlarmThresholdIndex, sensorAlarmBeeperActions=sensorAlarmBeeperActions, otherControls=otherControls, nodataAlarmStatusIndex=nodataAlarmStatusIndex, datalink=datalink, historyTableIndex=historyTableIndex, portPtStripOutputLfs=portPtStripOutputLfs, dataAlarmCalloutActions=dataAlarmCalloutActions, dataAlarmClearMode=dataAlarmClearMode, serialTableMessage=serialTableMessage, portlowDTR=portlowDTR, calloutCommandWait=calloutCommandWait, appversion=appversion, actionRepeatTime=actionRepeatTime, databaseAlarmTable=databaseAlarmTable, response=response, rtsDenied=rtsDenied, commandNeedsPassword=commandNeedsPassword, modemInactivityTimer=modemInactivityTimer, nodataTableTrapActions=nodataTableTrapActions, ftppushEnable=ftppushEnable, iprestrictTableIndex=iprestrictTableIndex, actionsTraps=actionsTraps, actionsSerialTableEntry=actionsSerialTableEntry, datalinkNoDataAlarmTrap=datalinkNoDataAlarmTrap, modemParity=modemParity, opSettings=opSettings, actionsCalloutTable=actionsCalloutTable, databaseAlarmEntry=databaseAlarmEntry, portIndex=portIndex, nodataTableBeeperActions=nodataTableBeeperActions, actionReasonID=actionReasonID, portStoreFile=portStoreFile, passwords=passwords, databaseAlarmBeeperActions=databaseAlarmBeeperActions, ftpPushTimer=ftpPushTimer, calloutMaxConnecttime=calloutMaxConnecttime, calloutAttemptDelay=calloutAttemptDelay, fileRecordsDeleted=fileRecordsDeleted, databaseAlarmActive=databaseAlarmActive, waitMode=waitMode, actions=actions, snmpManagerIp=snmpManagerIp, snmpsetup=snmpsetup, 
actionTypeID=actionTypeID, passwordAccess=passwordAccess, datalinkImmediateTrap=datalinkImmediateTrap, addIPRestrictions=addIPRestrictions, sensorAlarmThreshold=sensorAlarmThreshold, databaseStatus=databaseStatus, nodataHolidayItem=nodataHolidayItem, dataAlarmPagerActions=dataAlarmPagerActions, actionNextAttempt=actionNextAttempt, inlineHsk2=inlineHsk2, nodataAlarmStatusAcked=nodataAlarmStatusAcked, ccode=ccode, ipNewStatic=ipNewStatic, datalinkDataAlarmTrap=datalinkDataAlarmTrap)
mibBuilder.exportSymbols("DATALINK-MIB", ipNewSetup=ipNewSetup, databaseAlarmFileStore=databaseAlarmFileStore, pppsetup=pppsetup, fileTable=fileTable, tagMode=tagMode, databaseRecordsAvailable=databaseRecordsAvailable, ipCurrentStatic=ipCurrentStatic, portPTTime=portPTTime, recordCollectionTimeout=recordCollectionTimeout, sensorAlarmPagerActions=sensorAlarmPagerActions, datalinkSensorAlarmTrap=datalinkSensorAlarmTrap, nodataTableSchedule=nodataTableSchedule, activeDatabase=activeDatabase, pagerSendReason=pagerSendReason, databases=databases, rtsPortEmptyClose=rtsPortEmptyClose, ipsetup=ipsetup, nodataAlarmStatusEntry=nodataAlarmStatusEntry, sensorAlarmAcked=sensorAlarmAcked, rtsPortShowAnswer=rtsPortShowAnswer, nodataTableScheduleIndex=nodataTableScheduleIndex, datalinkDbasePfullTrap=datalinkDbasePfullTrap, scheduleSerialActions=scheduleSerialActions, iprestrictIpAddress=iprestrictIpAddress, numberPorts=numberPorts, ftpDataMode=ftpDataMode, ipNewSubnetMask=ipNewSubnetMask, sureEnabled=sureEnabled, fileAlarmCalloutActions=fileAlarmCalloutActions, rtsPortWaitXon=rtsPortWaitXon, calloutAttempts=calloutAttempts, fileAlarmActive=fileAlarmActive, ftppushIndex=ftppushIndex, actionTimeStamp=actionTimeStamp, modemAnswerString=modemAnswerString, scheduleTime=scheduleTime, siteID=siteID, inlineMode=inlineMode, numberports=numberports, historyReasonID=historyReasonID, databaseFiles=databaseFiles, pagerRepeatDelay=pagerRepeatDelay, ccodeLoaded=ccodeLoaded, actionReason=actionReason, actionsBuzzerState=actionsBuzzerState, passwordTable=passwordTable, alarms=alarms, releaseMode=releaseMode, techsupportInt4=techsupportInt4, pagerSendId=pagerSendId, rtsEmptyClose=rtsEmptyClose, nodataTable=nodataTable, charmaskEnabled=charmaskEnabled, nodataHolidayTableEntry=nodataHolidayTableEntry, suspendIPRestrictions=suspendIPRestrictions, lastCalloutPageReason=lastCalloutPageReason, actionsSerialTable=actionsSerialTable, databaseAlarmTrapActions=databaseAlarmTrapActions, portPTMode=portPTMode, 
ccodeStackMainNow=ccodeStackMainNow, portStopbits=portStopbits, controls=controls, databasePfull=databasePfull, nodataTableActive=nodataTableActive, databaseRecordsDeleted=databaseRecordsDeleted, ipNewAddress=ipNewAddress, historyCount=historyCount, historyTableEntry=historyTableEntry, serialPorts=serialPorts, fileAlarmTrapActions=fileAlarmTrapActions, nodataHolidayIndex=nodataHolidayIndex, datalinkSchedTrap=datalinkSchedTrap, nodataTablePagerActions=nodataTablePagerActions, techsupportInt1=techsupportInt1, nodataHolidayTable=nodataHolidayTable, rtsPortIdleTimeout=rtsPortIdleTimeout, cbbTimeout=cbbTimeout, actionsTrapsEntSpecific=actionsTrapsEntSpecific, nodataHolidayDelete=nodataHolidayDelete, pagerRepeat=pagerRepeat, ccodeStackT2Was2=ccodeStackT2Was2, ftppushTableEntry=ftppushTableEntry, ipCurrentSubnetMask=ipCurrentSubnetMask, crcMode=crcMode, pagerAttempts=pagerAttempts, routerAutoPing=routerAutoPing, historyEntryType=historyEntryType, fileSize=fileSize, actionsCalloutTableEntry=actionsCalloutTableEntry, inlineHsk6=inlineHsk6, portPtStripInputLfs=portPtStripInputLfs, ftpPushPass=ftpPushPass, dataAlarmThreshold=dataAlarmThreshold, databaseAlarmSerialActions=databaseAlarmSerialActions, databaseAlarmIndex=databaseAlarmIndex, modemportspeed=modemportspeed, hardware=hardware, iprestrictions=iprestrictions, actionTableEntry=actionTableEntry, auxportMode=auxportMode, dataAlarmTrapActions=dataAlarmTrapActions, calloutPhonenumber=calloutPhonenumber, actionReasonLevel=actionReasonLevel, idByPortTable=idByPortTable, dataAlarmCounter=dataAlarmCounter, portDataStore=portDataStore, portHskMode=portHskMode, databaseAlarmPagerActions=databaseAlarmPagerActions, modemPasswords=modemPasswords, techsupportInt5=techsupportInt5, actionsPagerTableEntry=actionsPagerTableEntry, linefeeds=linefeeds, portBaud=portBaud, ftpPushServerName=ftpPushServerName, ipCurrentDefaultRouter=ipCurrentDefaultRouter, iprestrictTable=iprestrictTable, dateofmanufacture=dateofmanufacture, 
calloutMaxAttempts=calloutMaxAttempts, commandTcpipTimeout=commandTcpipTimeout, pppIdentification=pppIdentification, portBinaryMode=portBinaryMode, promptPasswords=promptPasswords, ftppushTable=ftppushTable, historyTable=historyTable, sensorAlarmIndex=sensorAlarmIndex, pagerDialDelay=pagerDialDelay, sensorAlarmName=sensorAlarmName, sensorAlarmCounter=sensorAlarmCounter, pppIPAddress=pppIPAddress, networkenabled=networkenabled, ccodeStackT2Was=ccodeStackT2Was, productConfig=productConfig, nodataHolidayAdd=nodataHolidayAdd, dataAlarmAutoClear=dataAlarmAutoClear, scheduleBeeperActions=scheduleBeeperActions)
| [
2,
198,
2,
9485,
15571,
7378,
337,
9865,
8265,
360,
1404,
1847,
17248,
12,
8895,
33,
357,
4023,
1378,
16184,
76,
489,
8937,
13,
785,
14,
79,
893,
11632,
8,
198,
2,
7054,
45,
13,
16,
2723,
2393,
1378,
14,
14490,
14,
67,
615,
4756... | 2.840418 | 39,873 |
import logging
import time
import RPi.GPIO as GPIO
| [
11748,
18931,
198,
11748,
640,
198,
198,
11748,
25812,
72,
13,
16960,
9399,
355,
50143,
628
] | 3.3125 | 16 |
import greens.functions as gf
import pdb
import numpy as np
main() | [
11748,
30966,
13,
12543,
2733,
355,
308,
69,
198,
11748,
279,
9945,
198,
11748,
299,
32152,
355,
45941,
198,
198,
12417,
3419
] | 3.045455 | 22 |
import rhinoscriptsyntax as rs
from os import popen
def NewScriptAtom():
"""Create a new Python script in Atom.app for Rhino."""
file_name = rs.SaveFileName ("Save", "Text Files (*.py)|*.py|All Files (*.*)|*.*||")
if not file_name:return
py_template = """# Built-in
# Other Libs
# Rhinoceros
import rhinoscriptsyntax as rs
# import scriptcontext as sc
# import Rhino.DocObjects as rd
# import Rhino.Geometry as rg
# import Rhino as RH
# Grasshopper
# import Grasshopper as GH
# import ghpythonlib.components as ghcomp
# import ghpythonlib.treehelpers as th
def new_script():
pass
if __name__ == "__main__":
new_script()"""
with open(file_name, "w+") as f:
print f.writelines(py_template)
atom_app = r"/Applications/Atom.app/Contents/MacOS/Atom"
popen("{} {}".format(atom_app, file_name))
rs.Command("_StartAtomEditorListener")
if __name__ == "__main__":
NewScriptAtom()
| [
11748,
9529,
11996,
6519,
1837,
41641,
355,
44608,
198,
198,
6738,
28686,
1330,
1461,
268,
628,
198,
4299,
968,
7391,
2953,
296,
33529,
198,
220,
220,
220,
37227,
16447,
257,
649,
11361,
4226,
287,
33102,
13,
1324,
329,
47759,
526,
1593... | 2.695402 | 348 |
from celery import Celery
from celery.schedules import crontab
from celery.utils.log import get_task_logger
logger = get_task_logger(__name__)
# Array of all URLs for partner api endpoints
urls = ["https://f21c7154-fa77-4f81-89f1-2f254714f45c.mock.pstmn.io/api"]
app = Celery("tasks")
app.conf.broker_url = "redis://localhost:6379/0"
app.conf.result_backend = "redis://localhost:6379/0"
app.conf.beat_schedule = {
"refresh": {
"task": "refresh",
"schedule": crontab(hour="*/12"),
"args": ([urls]),
}
}
app.conf.result_backend_transport_options = {"retry_policy": {"timeout": 5.0}}
| [
6738,
18725,
1924,
1330,
15248,
1924,
198,
6738,
18725,
1924,
13,
1416,
704,
5028,
1330,
1067,
756,
397,
198,
6738,
18725,
1924,
13,
26791,
13,
6404,
1330,
651,
62,
35943,
62,
6404,
1362,
628,
198,
6404,
1362,
796,
651,
62,
35943,
62,... | 2.334586 | 266 |
"""
Plugin manager module that provides functionality to add, modify and delete plugins to the
plugins django app.
"""
import os
import sys
import json
from argparse import ArgumentParser
if "DJANGO_SETTINGS_MODULE" not in os.environ:
# django needs to be loaded (eg. when this script is run from the command line)
sys.path.append(os.path.join(os.path.dirname(__file__), '../../'))
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.local")
import django
django.setup()
from django.core.files.base import ContentFile
from django.contrib.auth.models import User
from plugins.models import Plugin
from plugins.serializers import PluginSerializer
# ENTRYPOINT
if __name__ == "__main__":
manager = PluginManager()
manager.run()
| [
37811,
198,
37233,
4706,
8265,
326,
3769,
11244,
284,
751,
11,
13096,
290,
12233,
20652,
284,
262,
198,
37390,
42625,
14208,
598,
13,
198,
37811,
198,
198,
11748,
28686,
198,
11748,
25064,
198,
11748,
33918,
198,
6738,
1822,
29572,
1330,
... | 3.129555 | 247 |
from .LoopProjectFile import CreateBasic, Get, Set, OpenProjectFile, CheckFileValid, \
faultEventType, foldEventType, discontinuityEventType, foliationEventType, \
faultObservationType, foldObservationType, foliationObservationType, discontinuityObservationType, \
stratigraphicLayerType, stratigraphicObservationType, contactObservationType, \
eventRelationshipType, ConvertDataFrame
from .Permutations import Event, perm, ApproxPerm, CalcPermutation, checkBrokenRules, checkBrokenEventRules
from .LoopProjectFileUtils import ToCsv, FromCsv, ElementToCsv, ElementFromCsv
from .Version import LoopVersion
| [
6738,
764,
39516,
16775,
8979,
1330,
13610,
26416,
11,
3497,
11,
5345,
11,
4946,
16775,
8979,
11,
6822,
8979,
47139,
11,
3467,
198,
220,
220,
220,
220,
220,
220,
220,
8046,
9237,
6030,
11,
5591,
9237,
6030,
11,
19936,
14834,
9237,
603... | 3.37037 | 189 |
salario = float(input("\033[1;32mInforme o salario R$: \033[m"))
if salario > 1250:
aumento = 10
else:
aumento = 15
salario_novo = salario + salario*aumento/100
print("\033[1;31mSalario antigo\033[m \033[1;32mR$: {0:.2f} reais\033[m\033[1;31m.\033[m".format(salario))
print("\033[1;31mSalario com\033[m \033[1;30m{0}%\033[m \033[1;31mde aumento.\033[m \033[1;32mR$ {1:.2f} reais\033["
"m\033[1;31m.\033[m".format(aumento, salario_novo))
| [
21680,
4982,
796,
12178,
7,
15414,
7203,
59,
44427,
58,
16,
26,
2624,
76,
818,
687,
68,
267,
3664,
4982,
371,
3,
25,
3467,
44427,
58,
76,
48774,
198,
198,
361,
3664,
4982,
1875,
1105,
1120,
25,
198,
220,
220,
220,
257,
1713,
78,
... | 1.891667 | 240 |
# azureml-core of version 1.0.72 or higher is required
# azureml-dataprep[pandas] of version 1.1.34 or higher is required
from azureml.core import Workspace, Dataset
# upload the local file to a datastore on the cloud
subscription_id = 'b3ec17a5-8d95-4801-9a7e-9ee6a85637c7'
resource_group = 'D_RG_Data_Sandbox'
workspace_name = 'dev_mls'
workspace = Workspace(subscription_id, resource_group, workspace_name)
print(workspace)
# get the datastore to upload prepared data
datastore = workspace.get_default_datastore()
local_path = "C:\\Users\\rdholakia\\Documents\\Project\\MLOpsVector\\MlOps\\data"
# upload the local file from src_dir to the target_path in datastore
datastore.upload(src_dir=local_path, target_path='vector_data')
dataset = Dataset.Tabular.from_delimited_files(path =[(datastore, ('vector_data/IndexationValuesSQL.csv'))])
dataset = dataset.register(workspace = workspace,
name = 'vector_indexation_values_sql')
# List all datastores registered in the current workspace
datastores = workspace.datastores
for name, datastore in datastores.items():
print(name, datastore.datastore_type)
vector_ds = vector_ds.register(workspace=workspace,
name='vector_ds',
description='vector training data')
from azure.storage.blob import BlobServiceClient, BlobClient, ContainerClient
connect_str = <connectionstring>
blob_service_client = BlobServiceClient.from_connection_string(connect_str)
container_name="dummy"
container_client=blob_service_client.get_container_client(container_name)
blob_list = container_client.list_blobs(name_starts_with="dir1/")
for blob in blob_list:
print("\t" + blob.name) | [
198,
2,
35560,
495,
4029,
12,
7295,
286,
2196,
352,
13,
15,
13,
4761,
393,
2440,
318,
2672,
198,
2,
35560,
495,
4029,
12,
19608,
499,
7856,
58,
79,
392,
292,
60,
286,
2196,
352,
13,
16,
13,
2682,
393,
2440,
318,
2672,
198,
198,
... | 2.644377 | 658 |
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.http import HttpResponseRedirect
from django.shortcuts import redirect, render, get_object_or_404, resolve_url
from django.template.loader import render_to_string
from account.functions import user_can_edit_check
from common.functions import get_success_message, camelcase_to_underscore
from party.models import Party
from relation.forms import ExperienceEditForm, ReceivedFundingEditForm, InviteTestifyEditForm
from relation.functions import experience_render_reference, received_funding_render_reference
from relation.models import UserExperienceOrganization, PartyReceivedFundingParty, PartyInviteTestifyParty, \
PartyReceivedInvestingParty
@login_required
@login_required
@login_required
@login_required
@login_required
| [
6738,
42625,
14208,
13,
3642,
822,
1330,
6218,
198,
6738,
42625,
14208,
13,
3642,
822,
13,
18439,
13,
12501,
273,
2024,
1330,
17594,
62,
35827,
198,
6738,
42625,
14208,
13,
4023,
1330,
367,
29281,
31077,
7738,
1060,
198,
6738,
42625,
14... | 3.76 | 225 |
#!/usr/bin/env python3
"""
# loadguard.project.datasets
This file is a part of LoadGuard Runner.
(c) 2021, Deepnox SAS.
This module provides utilities to manage datasets.
"""
import csv
import os
CURRENT_PATH = os.path.dirname(__file__)
LG_HOME = os.path.abspath(os.path.join(CURRENT_PATH, '..', '..'))
LG_RESOURCES_DATASET_DIR = os.path.join(LG_HOME, 'resources', 'datasets')
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
18,
198,
37811,
198,
2,
3440,
14864,
13,
16302,
13,
19608,
292,
1039,
198,
198,
1212,
2393,
318,
257,
636,
286,
8778,
24502,
21529,
13,
198,
198,
7,
66,
8,
33448,
11,
10766,
35420,
35516,... | 2.594595 | 148 |
# Copyright 2021 Open Source Robotics Foundation, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from geometry_msgs.msg import TransformStamped
import rclpy
from rclpy.node import Node
from tf2_ros import TransformBroadcaster
import tf_transformations
from turtlesim.msg import Pose
| [
2,
15069,
33448,
4946,
8090,
47061,
5693,
11,
3457,
13,
198,
2,
198,
2,
49962,
739,
262,
24843,
13789,
11,
10628,
362,
13,
15,
357,
1169,
366,
34156,
15341,
198,
2,
345,
743,
407,
779,
428,
2393,
2845,
287,
11846,
351,
262,
13789,
... | 3.799043 | 209 |
import sys
from src.model.deck import (Hand, DealerHand)
| [
11748,
25064,
198,
198,
6738,
12351,
13,
19849,
13,
35875,
1330,
357,
12885,
11,
44480,
12885,
8,
628,
628,
198
] | 3.1 | 20 |
"""
Copyright 2008,2013 Free Software Foundation, Inc.
This file is part of GNU Radio
GNU Radio Companion is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
GNU Radio Companion is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
"""
try:
import pygtk
pygtk.require('2.0')
import gtk
_COLORMAP = gtk.gdk.colormap_get_system() #create all of the colors
HIGHLIGHT_COLOR = get_color('#00FFFF')
BORDER_COLOR = get_color('#444444')
# missing blocks stuff
MISSING_BLOCK_BACKGROUND_COLOR = get_color('#FFF2F2')
MISSING_BLOCK_BORDER_COLOR = get_color('red')
#param entry boxes
ENTRYENUM_CUSTOM_COLOR = get_color('#EEEEEE')
#flow graph color constants
FLOWGRAPH_BACKGROUND_COLOR = get_color('#FFFFFF')
COMMENT_BACKGROUND_COLOR = get_color('#F3F3F3')
FLOWGRAPH_EDGE_COLOR = COMMENT_BACKGROUND_COLOR
#block color constants
BLOCK_ENABLED_COLOR = get_color('#F1ECFF')
BLOCK_DISABLED_COLOR = get_color('#CCCCCC')
BLOCK_BYPASSED_COLOR = get_color('#F4FF81')
#connection color constants
CONNECTION_ENABLED_COLOR = get_color('black')
CONNECTION_DISABLED_COLOR = get_color('#BBBBBB')
CONNECTION_ERROR_COLOR = get_color('red')
except:
print 'Unable to import Colors'
DEFAULT_DOMAIN_COLOR_CODE = '#777777'
| [
37811,
198,
15269,
3648,
11,
6390,
3232,
10442,
5693,
11,
3457,
13,
198,
1212,
2393,
318,
636,
286,
22961,
8829,
198,
198,
16630,
52,
8829,
30653,
318,
1479,
3788,
26,
345,
460,
17678,
4163,
340,
290,
14,
273,
198,
4666,
1958,
340,
... | 2.897476 | 634 |
# -*- coding:utf-8 -*-
# Created by Hans-Thomas on 2011-05-11.
#=============================================================================
# parser.py --- Parse features
#=============================================================================
from __future__ import print_function, unicode_literals
import os.path
import re
import sys
import yaml
import six
from io import StringIO
from .compat import combinations_with_replacement
#.............................................................................
# parser.py
| [
2,
532,
9,
12,
19617,
25,
40477,
12,
23,
532,
9,
12,
198,
2,
15622,
416,
13071,
12,
22405,
319,
2813,
12,
2713,
12,
1157,
13,
198,
2,
23926,
25609,
28,
198,
2,
220,
220,
30751,
13,
9078,
11420,
2547,
325,
3033,
198,
2,
23926,
... | 4.713043 | 115 |
"""
Django settings for server project.
Generated by 'django-admin startproject' using Django 3.2.5.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.2/ref/settings/
"""
import random
import string
from pathlib import Path
import json
import os
from module.manager.internal_database_concurrency_manager import InternalDatabaseConcurrencyManager
from module.specification.System_config import SystemConfig
config = None
# Load Config File
try:
with open('server/config.json', 'r') as f:
config = json.load(f)
except Exception as e:
print("Config data is not setting, please back to `bin` directory and run command `perl install.pl install` ")
exit(0)
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# 해당 어플리케이션은 사용작 직접 NAS에 설치하게 되므로 50자 랜덤으로 설정한다.
chars = ''.join([string.ascii_letters, string.digits, string.punctuation]). \
replace('\'', '').replace('"', '').replace('\\', '')
SECRET_KEY = ''.join([random.SystemRandom().choice(chars) for i in range(50)])
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
# 'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'app',
'corsheaders'
]
MIDDLEWARE = [
'corsheaders.middleware.CorsMiddleware',
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
# 'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'corsheaders.middleware.CorsMiddleware'
]
ROOT_URLCONF = 'server.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
# 'DIRS': [os.path.join(BASE_DIR, 'templates')],
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
"""
STATICFILES_DIRS = [
os.path.join(BASE_DIR, 'templates', 'static')
]
"""
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles')
WSGI_APPLICATION = 'server.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
DATABASES = None
# 타입에 따라 다름
try:
if config["database"]["rdbms"]["type"] == "sqlite":
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
}
}
elif config["database"]["rdbms"]["type"] == "mysql":
DATABASES = {
'default': {
'ENGINE': config["database"]["rdbms"]["engine"],
'NAME': config["database"]["rdbms"]["name"],
'USER': config["database"]["rdbms"]["user"],
'PASSWORD': config["database"]["rdbms"]["password"],
'HOST': config["database"]["rdbms"]["host"],
'PORT': str(config["database"]["rdbms"]["port"])
}
}
except Exception as e:
print(e)
print("config data has illeagal data")
print("please back to bin directory and run `perl install.pl install` again ")
exit(0)
# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = False
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/
STATIC_URL = '/static/'
# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
# CORS
CORS_ORIGIN_ALLOW_ALL = True
CORS_ALLOW_CREDENTIALS = True
ALLOWED_HOSTS = [
'localhost',
'127.0.0.1',
'0.0.0.0',
'[::1]',
]
"""
ALLOWED_HOSTS = [
config['system']['host'],
"0.0.0.0",
"[::1]"
]
"""
CORS_ALLOW_HEADERS = [
'Set-Cookie'
]
REST_FRAMEWORK = {
# datetime format 지정
'DATETIME_FORMAT': "%Y-%m-%d %H:%M:%S.%f%z",
}
SYSTEM_CONFIG: SystemConfig = SystemConfig()
INTERNAL_DATABASE_MANAGER: InternalDatabaseConcurrencyManager = \
InternalDatabaseConcurrencyManager(SYSTEM_CONFIG)
| [
37811,
198,
35,
73,
14208,
6460,
329,
4382,
1628,
13,
198,
198,
8645,
515,
416,
705,
28241,
14208,
12,
28482,
923,
16302,
6,
1262,
37770,
513,
13,
17,
13,
20,
13,
198,
198,
1890,
517,
1321,
319,
428,
2393,
11,
766,
198,
5450,
1378... | 2.26009 | 2,453 |
import numpy as np
def create_hist(array):
"""
Create Histogram from 2D Numpy Array
"""
# Histogram as a dictionary
hist = {}
rows, cols = array.shape
# Iterate through the array
for r in range(rows):
for c in range(cols):
if hist.get(array[r][c]) is None:
hist[array[r][c]] = 1
else:
hist[array[r][c]] += 1
return hist
if __name__ == "__main__":
h = {
6: 6,
10: 2,
17: 7,
88: 1
}
print(optimal_threshold(h)) | [
11748,
299,
32152,
355,
45941,
198,
198,
4299,
2251,
62,
10034,
7,
18747,
2599,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
13610,
5590,
21857,
422,
362,
35,
399,
32152,
15690,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
1303,
... | 1.893688 | 301 |
from data_collection.management.commands import BaseXpressDemocracyClubCsvImporter
| [
6738,
1366,
62,
43681,
13,
27604,
13,
9503,
1746,
1330,
7308,
55,
8439,
11522,
17818,
42350,
34,
21370,
3546,
26634,
628
] | 4 | 21 |
"""
Module defining a first order allpass filter with modulating coefficients.
"""
import matplotlib.pyplot as plt
import numpy as np
from scipy import signal
from wavetable.oscillators import StandardOscillator, RealTimeResamplingOscillator
from wavetable.wavetable import WaveType
class AllpassFilter:
"""
First-order allpass filter class with modulating coefficients. Also
maintains the constraint that the b0 coefficient always equals the a1
coefficient.
Parameters
offset : [0.0, 1.0] : The initial value for the a1 coefficient, and the
value about which the modulating operator oscillates.
amplitude : [0.0, 1.0] : The amplitude of the modulating signal.
Effectively the "amount" of modulation.
rate : [0, 96000] : The rate (Hz) of the modulating signal.
"""
if __name__ == '__main__':
# Show the frequency response as we move the cutoff frequency.
_, (ax1, ax2) = plt.subplots(2, sharex=True)
for i in np.linspace(0.0, 0.8, 4):
apf = AllpassFilter(0.5, 1.0, 64000)
apf.plot(ax1, ax2, 'c', i / 0.8)
plt.show()
# Now we'll take a look at how this allpass filter affects the waveform
# of an input signal so we can compare it with how the drop-sample
# interpolation filter of the ResamplingOscillator.
fs = 44100
duration = 1
size = fs * duration
x = np.linspace(0, size, size)
ss = np.zeros(size, dtype='d')
StandardOscillator(WaveType.SAWTOOTH, 43.65, 3.0, 1.0).render(ss)
rs = np.zeros(size, dtype='d')
RealTimeResamplingOscillator(WaveType.SAWTOOTH, 43.65, 3.0, 1.0).render(rs)
ap = np.zeros(size, dtype='d')
apf = AllpassFilter(0.5, 1.0, 64000)
apf.process_block(ss, ap)
plt.figure()
plt.subplot(211)
plt.plot(x, rs - ss)
plt.subplot(212)
plt.plot(x, ap - ss)
plt.show()
| [
37811,
198,
26796,
16215,
257,
717,
1502,
477,
6603,
8106,
351,
953,
8306,
44036,
13,
198,
37811,
198,
198,
11748,
2603,
29487,
8019,
13,
9078,
29487,
355,
458,
83,
198,
11748,
299,
32152,
355,
45941,
198,
198,
6738,
629,
541,
88,
133... | 2.501976 | 759 |
# Disparity threshold experiment
#
# Copyright (C) 2010-2013 Huang Xin
#
# See LICENSE.TXT that came with this file.
from StimControl.LightStim.FrameControl import FrameSweep
from StimControl.LightStim.LightData import dictattr
from StimControl.LightStim.Core import DefaultScreen
from StimControl.LightStim.Target import Fixation
from StimControl.LightStim.RandomDots import RandomDots, StereoDisc
from StimControl.LightStim.Text import Hint
import random
import pygame
import VisionEgg
import VisionEgg.ParameterTypes as ve_types
from VisionEgg.DaqKeyboard import KeyboardInput
from VisionEgg.ResponseControl import KeyboardResponseController
DefaultScreen(['left','right'],bgcolor=(0.5,0.5,0.5))
class LeftRightKeyResponse(KeyboardResponseController):
"""Use the keyboard to collect responses during a presentation is running."""
| [
2,
3167,
1845,
414,
11387,
6306,
198,
2,
198,
2,
15069,
357,
34,
8,
3050,
12,
6390,
31663,
25426,
198,
2,
220,
198,
2,
4091,
38559,
24290,
13,
51,
25010,
326,
1625,
351,
428,
2393,
13,
198,
198,
6738,
41669,
15988,
13,
15047,
1273... | 3.56962 | 237 |
# Functions for venmo classification project
import pandas as pd
import numpy as np
import psycopg2
import pymongo
import json
import datetime
import pickle
import requests
import matplotlib.pyplot as plt
from gensim.models.doc2vec import Doc2Vec, TaggedDocument
import emoji
import regex
import nltk
from nltk import FreqDist
from nltk import word_tokenize
from nltk.corpus import stopwords, wordnet
import string
from emoji.unicode_codes import UNICODE_EMOJI as ue
from nltk.stem.wordnet import WordNetLemmatizer
from nltk.cluster import cosine_distance
from nltk.cluster.kmeans import KMeansClusterer
import gensim
from gensim.utils import simple_preprocess
from gensim import corpora
import itertools
# Functions to extract data from the Mongo DB database
def collection():
"Function that returns a collection from a MongoDB"
# Instantiate a MongoClient and inspect the database names
mc = pymongo.MongoClient()
# Create a db from the test database in MongoClient
mydb = mc['test']
# Accessing the venmo collection in the test database
venmo = mydb['venmo']
return venmo
def initial_25pct(collection):
"""Function that returns a list of dictionaries with the initial 5% of
transactions
:input param - collection: MongoDB collection containing all transactions
:ouput param - initial_10pct: returns initial 5% of transactions as a
list of dictionaries
"""
_25pct = round(0.25 * (collection.count()))
cur = collection.find({})[:_25pct]
transactions = [transaction for transaction in cur]
# with open('initial_5pct_transactions.pkl', 'wb') as f:
# pickle.dump(transactions, f)
return transactions
# Function to extract and store transaction specific info into the venmo db
def get_transaction_specific_information(json_list_of_transactions):
"""Function that extracts transaction specific information and
stores each it in a table in the venmo transactions database."""
transactions = []
weird_transactions= []
# Not including in _id because that is the object id from Venmo's db
keys = ['note', 'type', 'date_updated', 'id', 'date_created', 'audience']
subkeys = ['mentions', 'likes', 'comments']
for details in json_list_of_transactions:
transaction = {}
for key in keys:
transaction[key] = details.get(key)
for key in subkeys:
transaction[f'{key}_count'] = details[key].get('count')
# Count determines if users interacted with transactions
if transaction[f'{key}_count'] > 0:
details[key].get('data')
transaction[f'{key}_data'] = []
# Getting the ids of users than interacted with transactions
for inter in details[f'{key}']['data']:
try:
transaction[f'{key}_data'].append(inter['user']['id'])
except:
transaction[f'{key}_data'].append(inter['id'])
else:
transaction[f'{key}_data'] = None
try:
transaction['payment_id'] = details['payment'].get('id')
transaction['payment_actor_id'] = details['payment']['actor'].get('id')
except:
weird_transactions.append(transaction.copy())
# Rename col id to transaction_id for easier recognition in the db
transaction['transaction_id'] = transaction.pop('id')
transactions.append(transaction.copy())
return transactions, weird_transactions
# Function to extract payment data and store it into the venmo database
def get_payment_info(json_list_of_transactions):
"""Function that extracts payment specific information and identifies
whether payers have made settled, unsettled or both types of payments."""
payments = []
# Keys in the payment dictionary that have the same structure
keys = (['note', 'action', 'status', 'date_created', 'id',
'audience', 'date_completed'])
settled_payer_id = set() # Set of actor_ids that have settled payments
unsettled_payer_id = set() # Set of actor_ids that have unsettled payments
for transaction in json_list_of_transactions:
if transaction['id'] == '2541220786958500195':
continue
else:
payment = {}
payment_details = transaction['payment']
for key in keys:
payment[key] = payment_details.get(key)
payment['target_type'] = payment_details['target'].get('type')
try:
payment['target_user_id'] = payment_details['target']['user']['id']
settled_payer_id.add(transaction['payment']['actor']['id'])
except TypeError:
# Identify payers who have pending or cancelled transactions
unsettled_payer_id.add(transaction['payment']['actor']['id'])
payment['actor_id'] = payment_details['actor'].get('id')
# Rename col id to payment_id for easier recognition in the db
payment['payment_id'] = payment.pop('id')
# Transforming the date created col into datetime object
payment['date_created'] = datetime.datetime.strptime(
payment['date_created'], '%Y-%m-%dT%H:%M:%S')
payments.append(payment.copy())
#settled_and_unsettled_payer_ids = settled_payer_id.intersection(
# unsettled_payer_id)
#unsettled_payer_ids = unsettled_payer_id - settled_payer_id
return payments
def get_true_and_false_transactions_from_settled_transactions(json_list_of_transactions):
"""Function that returns a set of successful and duplicated payment ids.
Payments are deemed as duplicates if a successful payments has happened
within 10 minutes before or after the unsuccessful transaction occured."""
payments_df, settled_and_unsettled_payer_ids, unsettled_payer_ids = get_payment_info(json_list_of_transactions)
duplicated_transaction_ids = set()
non_duplicated_transaction_ids = set()
for actor in settled_and_unsettled_payer_ids:
#Creating actor specific dataframes
settled_and_unsettled_trans_df = payments_df.loc[payments_df['actor_id'] == f'{actor}']
transaction_dates = [date for date in settled_and_unsettled_trans_df['date_created']]
#Separating the dates of created payments for each user
for i in range(len(transaction_dates)-1):
time_diff = transaction_dates[i+1] - transaction_dates[i]
time_diff = time_diff.total_seconds()
#If the payments are made within 10 minutes then identify those transactions
if time_diff < 600: #WHY 10 MINUTES THOUGH?
date_tuple = (transaction_dates[i], transaction_dates[i+1])
#Create a new dataframe for each user that contains transactions made within 10 minute of each other
transaction_within_10 = (
settled_and_unsettled_trans_df.loc[settled_and_unsettled_trans_df['date_created'].isin(date_tuple)])
#Extract the status' of both transactions
for status in transaction_within_10['status']:
#If one of the status' is settled it means that the rest are duplicates
if status != 'settled':
duplicated_id = transaction_within_10.loc[transaction_within_10['status'] == status]['payment_id']
for _id in duplicated_id:
duplicated_transaction_ids.add(_id)
else:
date_tuple = (transaction_dates[i], transaction_dates[i+1])
#Create a new dataframe for each user that contains transactions made within 10 minute of each other
transaction_within_10 = (
settled_and_unsettled_trans_df.loc[settled_and_unsettled_trans_df['date_created'].isin(date_tuple)])
#Extract the status' of both transactions
for status in transaction_within_10['status']:
#If one of the status' is settled it means that the rest are duplicates
if status != 'settled':
non_duplicated_id = transaction_within_10.loc[transaction_within_10['status'] == status]['payment_id']
for _id in non_duplicated_id:
non_duplicated_transaction_ids.add(_id)
return duplicated_transaction_ids, non_duplicated_transaction_ids
def get_true_and_false_transactions_from_unsettled_transactions(json_list_of_transactions):
"""Function that returns a set of duplicated payment ids from unsettled transactions.
Payments are deemed as duplicates if another unsuccessfull payment has happened
10 minutes before the unsuccessful transaction occured."""
payments_df, settled_and_unsettled_payer_ids, unsettled_payer_ids = get_payment_info(json_list_of_transactions)
# Select the transactions which users with unsettled payments have made within 10 minutes of each other.
duplicated_unsettled_transaction_ids = set()
non_duplicated_unsettled_transaction_ids = set()
for actor in unsettled_payer_ids:
#Creating actor specific dataframes
unsettled_trans_df = payments_df.loc[payments_df['actor_id'] == f'{actor}']
#Separating the dates of created payments for each user
transaction_dates = [date for date in unsettled_trans_df['date_created']]
if len(transaction_dates) == 1:
tran_id = (
unsettled_trans_df.loc[unsettled_trans_df['date_created'] == transaction_dates[0]]['payment_id'])
non_duplicated_unsettled_transaction_ids.add(tran_id.any())
else:
first_trans_date = None
for i in range(len(transaction_dates)-1):
time_diff = transaction_dates[i+1] - transaction_dates[i]
time_diff = time_diff.total_seconds()
#If the payments are made within 10 minutes then identify those transactions
if time_diff < 600: #WHY 10 MINUTES THOUGH?
date_tuple = (transaction_dates[i], transaction_dates[i+1])
trans_ids_for_date_tuple = (
unsettled_trans_df.loc[unsettled_trans_df['date_created'] == transaction_dates[i]]['payment_id'])
if trans_ids_for_date_tuple.all() in duplicated_unsettled_transaction_ids:
duplicated_trans_id = (
unsettled_trans_df.loc[unsettled_trans_df['date_created'] == transaction_dates[i+1]]['payment_id'])
duplicated_unsettled_transaction_ids.add(duplicated_trans_id.any())
else:
first_trans_id = (
unsettled_trans_df.loc[unsettled_trans_df['date_created'] == transaction_dates[i]]['payment_id'])
non_duplicated_unsettled_transaction_ids.add(first_trans_id.any())
duplicated_trans_id = (
unsettled_trans_df.loc[unsettled_trans_df['date_created'] == transaction_dates[i+1]]['payment_id'])
duplicated_unsettled_transaction_ids.add(duplicated_trans_id.any())
else:
if transaction_dates[i+1] == transaction_dates[-1]:
date_tuple = (transaction_dates[i], transaction_dates[i+1])
non_duplicated_transaction_id = (
unsettled_trans_df.loc[unsettled_trans_df['date_created'].isin(date_tuple)]['payment_id'])
for _id in non_duplicated_transaction_id:
non_duplicated_unsettled_transaction_ids.add(_id)
else:
non_duplicated_transaction_id = (
unsettled_trans_df.loc[unsettled_trans_df['date_created'] == transaction_dates[i]]['payment_id'])
non_duplicated_unsettled_transaction_ids.add(non_duplicated_transaction_id.any())
return duplicated_unsettled_transaction_ids, non_duplicated_unsettled_transaction_ids
def diff_between_true_and_false_payments(json_list_of_transactions):
"Function that adds columns to differentiate between true and false payments"
duplicated_transaction_ids, non_duplicated_transaction_ids = (
get_true_and_false_transactions_from_settled_transactions(json_list_of_transactions)
)
duplicated_unsettled_transaction_ids, non_duplicated_unsettled_transaction_ids = (
get_true_and_false_transactions_from_unsettled_transactions(json_list_of_transactions))
payments_df, settled_and_unsettled_payer_ids, unsettled_payer_ids = (
get_payment_info(json_list_of_transactions))
settled_payment_ids = set(payments_df.loc[payments_df['status'] == 'settled']['payment_id'])
#Create new columns to identify between the two types of transactions
payments_df['true_transactions'] = ([1 if _id in settled_payment_ids else 1
if _id in non_duplicated_transaction_ids else 1
if _id in non_duplicated_unsettled_transaction_ids
else 0 for _id in payments_df['payment_id']])
payments_df['false_transactions'] = ([1 if _id in duplicated_transaction_ids else 1
if _id in duplicated_unsettled_transaction_ids
else 0 for _id in payments_df['payment_id']])
return payments_df
def get_payments_df_with_differentiated_payments(json_list_of_transactions):
"""Function that perform final manipulation on the payments df prior to dumping
the data in the venmo database"""
payments_df = diff_between_true_and_false_payments(json_list_of_transactions)
# Unpack the merchant split type into two diff cols
payments_df = payments_df.drop('merchant_split_purchase', 1).assign(**payments_df['merchant_split_purchase']
.dropna().apply(pd.Series))
# Rename to miror the json structure
payments_df = payments_df.rename(columns={"authorization_id": "merchant_authorization_id"})
# Same process with the target_redeemable_target_col
payments_df = payments_df.drop('target_redeemable_target', 1).assign(**payments_df['target_redeemable_target']
.dropna().apply(pd.Series))
# Rename to miror the json structure
payments_df = payments_df.rename(columns = {"display_name": "target_redeemable_target_display_name",
"type": "target_redeemable_target_type"})
return payments_df
# Function to extract unique user data and store it into the venmo database
def get_payer_information(json_list_of_transactions):
    """Function that returns payer specific information from each transaction
    and adds columns relating to user settings."""
    # Fields worth keeping; first/last name are skipped because they are
    # already contained in display_name.
    wanted = (["username", "is_active", "display_name", "is_blocked", "about",
               "profile_picture_url", "id", "date_joined", "is_group"])
    # Stock "about" strings observed while exploring the data in eda_venmo.ipynb
    stock_bios = ([' ', 'No Short Bio', 'No short bio', '\n', ' \n', ' ',
                   'No Short Bio\n'])
    payers = []
    payer_ids = set()  # only unique payer ids are collected
    for transaction in json_list_of_transactions:
        # Known bad record -- skip it outright.
        if transaction['id'] == '2541220786958500195':
            continue
        actor = transaction['payment']['actor']
        actor_id = actor['id']
        # Already recorded this payer; nothing new to extract.
        if actor_id in payer_ids:
            continue
        payer_ids.add(actor_id)
        record = {field: actor.get(field) for field in wanted}
        # Binary flag: 1 when the bio deviates from the stock defaults.
        record['about_personalised'] = 0 if record['about'] in stock_bios else 1
        record['user_id'] = record.pop('id')
        payers.append(record)
    # Note, there is a case where a user has no about, date_joined or username.
    # They have, however, previously made a transaction so we will not drop.
    return payers, payer_ids
def get_payee_information(json_list_of_transactions):
    """Function that returns payee specific information from each transaction
    and adds columns relating to user settings."""
    wanted = (["username", "is_active", "display_name", "is_blocked", "about",
               "profile_picture_url", "id", "date_joined", "is_group"])
    # Stock "about" strings observed while exploring the data in eda_venmo.ipynb
    stock_bios = ([' ', 'No Short Bio', 'No short bio', '\n', ' \n', ' ',
                   'No Short Bio\n'])
    payees = []
    payee_ids = set()  # only unique payee ids are collected
    for transaction in json_list_of_transactions:
        # Known bad record -- skip it outright.
        if transaction['id'] == '2541220786958500195':
            continue
        user = transaction['payment']['target']['user']
        # Unsettled transactions never reach the targeted payee; in that
        # case ``user`` is not subscriptable and raises TypeError.
        try:
            user_id = user['id']
        except TypeError:
            continue
        if user_id in payee_ids:
            continue
        payee_ids.add(user_id)
        record = {field: user.get(field) for field in wanted}
        # Binary flag: 1 when the bio deviates from the stock defaults.
        record['about_personalised'] = 0 if record['about'] in stock_bios else 1
        record['user_id'] = record.pop('id')
        payees.append(record)
    return payees, payee_ids
def get_unique_user_table(json_list_of_transactions):
    """Function that returns unique user information from the combination
    of payer details and payee details."""
    # Gather both sides of every transaction.
    payers, payer_ids = get_payer_information(json_list_of_transactions)
    payees, payee_ids = get_payee_information(json_list_of_transactions)
    # Users who only ever received money -- everyone else is already in payers.
    payee_only_ids = payee_ids - payer_ids
    receivers_only = [payee for payee in payees
                      if payee['user_id'] in payee_only_ids]
    # Payers plus receive-only users form the unique user table.
    payers.extend(receivers_only)
    return payers
# Function to extract and store different apps into the venmo database
def get_app_specific_information(json_list_of_transactions):
    """Extract the application through which each venmo transaction was made
    (ie iPhone app, desktop, etc).

    :param json_list_of_transactions: raw list of transaction dicts, each
        carrying an ``app`` sub-dict.
    :return: list of one dict per distinct app (deduplicated on ``id``),
        ready to be loaded into the apps table of the venmo database.
    """
    apps = []
    app_ids = set()
    for transaction in json_list_of_transactions:
        app_details = transaction['app']
        app_id = app_details['id']
        # There are only a handful of distinct apps, so deduplicating on the
        # id makes the pass much less computationally expensive.
        if app_id in app_ids:
            continue
        app_ids.add(app_id)
        # Shallow-copy every field of the app sub-dict (same behavior as the
        # previous key-by-key loop; the unused subkey whitelist was removed).
        apps.append(dict(app_details))
    return apps
# Functions to vectorize text for each user
def get_notes_into_unicode(notes):
    """Replace every emoji in each note with its unicode-escape form.

    :param notes: iterable of note strings.
    :return: tuple of (list of recomposed note strings,
        dict mapping emoji grapheme -> lower-cased unicode escape).
    """
    emoji_dict = {}
    recomposed_note = []
    for note in notes:
        note_text = []
        # \X matches full grapheme clusters so multi-codepoint emojis
        # stay together (requires the third-party ``regex`` module).
        data = regex.findall(r'\X', note)
        for word in data:
            if any(char in emoji.UNICODE_EMOJI for char in word):
                unicode_emoji = word.encode('unicode-escape').decode('ASCII')
                # NOTE(review): the dict stores the lower-cased escape while
                # the note text keeps the original casing -- downstream
                # matching in turn_emoji_unicode_to_text relies on these
                # agreeing; confirm escapes are already lower-case here.
                emoji_dict[word] = unicode_emoji.lower()
                # Trailing space keeps the escape a separate token.
                note_text.append(unicode_emoji+' ')
            else:
                note_text.append(word)
        recomposed_note.append(''.join(note_text))
    return recomposed_note, emoji_dict
def get_clean_text_pattern(recomposed_note):
    """Tokenize each note with a regexp, lower-case the tokens and drop
    stopwords/punctuation plus venmo-specific slang.

    :param recomposed_note: list of note strings (emojis already escaped).
    :return: list of token lists, one per note.
    """
    # Word characters/backslashes plus optional apostrophe suffix ('s, n't).
    pattern = "([a-zA-Z0-9\\\]+(?:'[a-z]+)?)"
    recomposed_note_raw = []
    recomposed_note_raw = (
        [nltk.regexp_tokenize(note, pattern) for note in recomposed_note])
    # Create a list of stopwords and remove them from our corpus
    stopwords_list = stopwords.words('english')
    stopwords_list += list(string.punctuation)
    # additional slang and informal versions of the original words had to be added to the corpus.
    stopwords_list += (["im", "ur", "u", "'s", "n", "z", "n't", "brewskies", "mcd’s", "Ty$",
                        "Diploooooo", "thx", "Clothessss", "K2", "B", "Comida", "yo", "jobby",
                        "F", "jus", "bc", "queso", "fil", "Lol", "EZ", "RF", "기프트카드", "감사합니다",
                        "Bts", "youuuu", "X’s", "bday", "WF", "Fooooood", "Yeeeeehaw", "temp",
                        "af", "Chipoodle", "Hhuhhyhy", "Yummmmers", "MGE", "O", "Coook", "wahoooo",
                        "Cuz", "y", "Cutz", "Lax", "LisBnB", "vamanos", "vroom", "Para", "el", "8==",
                        "bitchhh", "¯\\_(ツ)_/¯", "Ily", "CURRYYYYYYY", "Depósito", "Yup", "Shhhhh"])
    # NOTE(review): tokens are lower-cased *before* the stopword test, but the
    # slang list contains capitalised entries ("Lol", "EZ") -- those can never
    # match; confirm whether they should be lower-cased too.
    recomposed_note_stopped = (
        [[w.lower() for w in note if w not in stopwords_list] for note in recomposed_note_raw])
    return recomposed_note_stopped
def get_wordnet_pos(word):
    """Map POS tag to first character lemmatize() accepts"""
    # First letter of the Penn Treebank tag (J/N/V/R) picks the wordnet POS.
    first_letter = nltk.pos_tag([word])[0][1][0].upper()
    pos_lookup = {"J": wordnet.ADJ,
                  "N": wordnet.NOUN,
                  "V": wordnet.VERB,
                  "R": wordnet.ADV}
    # Anything unrecognised defaults to a noun.
    return pos_lookup.get(first_letter, wordnet.NOUN)
def lemmatize_notes(recomposed_note_stopped):
    """Lemmatize every token of every note.

    :param recomposed_note_stopped: list of notes; each note is a list of
        already-tokenized words.
    :return: flat-ish list of lemmas -- see NOTE below on the except paths.
    """
    # Init Lemmatizer
    lemmatizer = WordNetLemmatizer()
    lemmatized_notes = []
    for sentence in recomposed_note_stopped:
        # Notes have unicode to represent emojis and those can't be lemmatized
        # NOTE(review): ``sentence`` is a *list* here, so word_tokenize raises
        # and the outer except appends the whole list unchanged -- the
        # tokenize branch only runs when a plain string slips through.
        # The bare excepts are load-bearing; narrowing them would change
        # behavior, so they are documented rather than fixed.
        try:
            for word in nltk.word_tokenize(sentence):
                # Notes that combine emojis and text
                try:
                    lem = lemmatizer.lemmatize(word, get_wordnet_pos(word))
                    lemmatized_notes.append(lem)
                except:
                    lemmatized_notes.append(word)
        except:
            lemmatized_notes.append(sentence)
    return lemmatized_notes
def turn_emoji_unicode_to_text(lemmatized_notes, emoji_dict):
    """Convert unicode escapes back into their emoji characters.

    ``emoji_dict`` maps emoji -> unicode escape; tokens that look like an
    escape (leading backslash) are swapped for every matching emoji, and
    escapes with no match are silently dropped.
    """
    restored_notes = []
    for tokens in lemmatized_notes:
        converted = []
        for token in tokens:
            if token.startswith('\\'):
                # Reverse lookup: append each emoji whose escape matches.
                converted.extend(char for char, escape in emoji_dict.items()
                                 if token == escape)
            else:
                converted.append(token)
        restored_notes.append(converted)
    return restored_notes
def emojis_to_text(notes_list):
    """Replace every emoji with its ``:colon:`` text name.

    :param notes_list: list of notes; each note is a list of token strings.
    :return: same nesting, with emojis demojized via the ``emoji`` package.
    """
    recomposed_note = []
    for notes in notes_list:
        note_list = []
        for note in notes:
            note_text = []
            # Grapheme-cluster split keeps multi-codepoint emojis intact.
            data = regex.findall(r'\X', note)
            for word in data:
                if any(char in emoji.UNICODE_EMOJI for char in word):
                    # e.g. 😀 -> :grinning_face:
                    note_text.append(emoji.demojize(f'{word}'))
                else:
                    note_text.append(word)
            note_list.append(''.join(note_text))
        recomposed_note.append(note_list)
    return recomposed_note
def train_doc2vec_vectorizer(fully_recomposed_notes):
    """Train a Doc2Vec model over the whole note corpus and persist it.

    :param fully_recomposed_notes: list of notes, each a list of tokens.
    :return: the TaggedDocument list used for training.

    Side effect: writes the trained model to ``d2v.model``.
    NOTE(review): ``size=`` and ``.iter`` belong to gensim < 4; gensim 4+
    renamed them to ``vector_size=`` / ``epochs`` -- pin the dependency.
    """
    # In order to train the Doc2Vec model all the words need to be in the same list
    # Combine the notes into a single corpus
    whole_corpus_notes = [' '.join(note) for note in fully_recomposed_notes]
    # One TaggedDocument per note, tagged with its position in the corpus.
    tagged_data = [TaggedDocument(words=w.split(' '), tags=[str(i)]) for i, w in enumerate(whole_corpus_notes)]
    # Select model hyperparameters
    max_epochs = 10
    vec_size = 20
    alpha = 0.025
    min_alpha=0.00025
    min_count=1
    dm =1
    # Input hyparameters into the model
    vectorizer = Doc2Vec(size=vec_size, alpha=alpha, min_alpha=min_alpha,
                         min_count=min_count, dm =dm)
    # Build vocab of the notes with tagged data
    vectorizer.build_vocab(tagged_data)
    # Train the model for the range of epochs specified
    for epoch in range(max_epochs):
        print('iteration {0}'.format(epoch))
        vectorizer.train(tagged_data,
                         total_examples=vectorizer.corpus_count,
                         epochs=vectorizer.iter)
        # decrease the learning rate
        vectorizer.alpha -= 0.0002
        # fix the learning rate, no decay
        vectorizer.min_alpha = vectorizer.alpha
    # Save the model
    vectorizer.save("d2v.model")
    return tagged_data
def get_aggregated_user_note_vector(username, password, train_window_end):
    """Turn each user's transaction notes into one averaged n-dim vector.

    Runs the full text pipeline (unicode escape -> tokenize -> lemmatize ->
    emoji restore), infers a vector per note with the persisted doc2vec
    model, then averages the vectors per user.

    :param username: database user name.
    :param password: database password.
    :param train_window_end: upper timestamp bound for the training window.
    :return: DataFrame indexed by ``user_id`` with one mean-vector per user.
    """
    # Load functions to generate a list of fully composed notes
    user_notes = extract_user_notes(username, password, train_window_end)
    notes = user_notes['note']
    recomposed_note, emoji_dict = get_notes_into_unicode(notes)
    recomposed_note_stopped = get_clean_text_pattern(recomposed_note)
    lemmatized_notes = lemmatize_notes(recomposed_note_stopped)
    recomposed_note_stopped_em = turn_emoji_unicode_to_text(lemmatized_notes, emoji_dict)
    fully_recomposed_notes = emojis_to_text(recomposed_note_stopped_em)
    # Combine the notes into a single corpus
    whole_corpus_notes = [' '.join(note) for note in fully_recomposed_notes]
    # Load the model (persisted by train_doc2vec_vectorizer)
    vectorizer= Doc2Vec.load("d2v.model")
    # Find the vectors for each note in the whole note corpus
    _vectrs = []
    for note in whole_corpus_notes:
        v = np.array(vectorizer.infer_vector(note))
        _vectrs.append(v)
    _vectrs_df = pd.DataFrame(_vectrs)
    # Each payment note vectorized for each user; positional concat assumes
    # _vectrs_df rows line up 1:1 with user_notes rows.
    user_notes.drop('note', axis=1, inplace=True)
    user_vectrs_df = pd.concat([user_notes, _vectrs_df], axis=1)
    # Calculate the mean for users with multiple transactions (multiple notes)
    user_vectrs_df = user_vectrs_df.groupby('user_id').mean()
    return user_vectrs_df
# Functions to generate the relevant user statistics
def extracting_cursor(username, password):
    """Open a connection to the venmo_transactions database and hand back a
    cursor on it (the connection stays open for the cursor's lifetime)."""
    conn = psycopg2.connect(user=f'{username}',
                            password=f'{password}',
                            database='venmo_transactions')
    return conn.cursor()
def user_info(username, password, train_window_end):
    """ Function that returns the time period since the user opened the
    account and whether or not they have a personalised bio.

    Also counts the user's pending/cancelled (unsettled) payments.
    NOTE(review): the window bound is interpolated into the SQL via an
    f-string -- safe only for trusted internal inputs; a parameterized
    query would be more robust. The ``.columns`` assignment below fails if
    the query returns zero rows.
    """
    cursor = extracting_cursor(username, password)
    q = f"""SELECT u.user_id, u.about_personalised as personalised_bio,
            SUM(CAST('{train_window_end}' AS timestamp) -
            CAST(u.date_joined AS timestamp)) as time_since_account_inception,
            COUNT(CASE WHEN p.status = 'pending' OR p.status = 'cancelled' THEN 1 END) as unsettled
            FROM users u
            INNER JOIN payments p ON p.actor_id=u.user_id
            GROUP BY (user_id, about_personalised);"""
    cursor.execute(q)
    user_info_df = pd.DataFrame(cursor.fetchall())
    # Column names come straight from the cursor metadata.
    user_info_df.columns = [x[0] for x in cursor.description]
    return user_info_df
def get_user_interaction(username, password, train_window_end):
    """Function that extract whether a user has interacted with the venmo
    platform in the form of mentions, likes or comments.

    :return: users DataFrame with three new binary columns
        (``mentions``, ``likes``, ``comments``).
    """
    cursor = extracting_cursor(username, password)
    # Extract the mentions, likes and comments data
    q = f"""SELECT t.transaction_id, t.mentions_data,
            t.likes_data, t.comments_data
            FROM transactions t
            WHERE t.date_created <= CAST('{train_window_end}' AS timestamp);"""
    cursor.execute(q)
    interactions_data = pd.DataFrame(cursor.fetchall())
    interactions_data.columns = [x[0] for x in cursor.description]
    # Extract the ids in the different cols.
    # note[1:-1] strips the surrounding braces/brackets of the stored
    # id-list string -- assumes the "{id1,id2}" storage format; confirm.
    mentions = (
        [note[1:-1] for note in interactions_data['mentions_data'] if note is not None])
    likes = (
        [note[1:-1] for note in interactions_data['likes_data'] if note is not None])
    comments = (
        [note[1:-1] for note in interactions_data['comments_data'] if note is not None])
    # Split and chain users in transactions with multiple interactors
    mentions = [mention.split(',') for mention in mentions]
    mentions = list(itertools.chain.from_iterable(mentions))
    likes = [like.split(',') for like in likes]
    likes = list(itertools.chain.from_iterable(likes))
    comments = [comment.split(',') for comment in comments]
    comments = list(itertools.chain.from_iterable(comments))
    # Turn them into sets for easier manipulation
    mentions_set = set(mention for mention in mentions)
    likes_set = set(like for like in likes)
    comments_set = set(comment for comment in comments)
    # Select whole users in the db
    q = f"""SELECT user_id
            FROM users u;"""
    cursor.execute(q)
    users = pd.DataFrame(cursor.fetchall())
    users.columns = [x[0] for x in cursor.description]
    # Turn the ids into a set
    user_set = set(user for user in users['user_id'])
    # Find those users that have interacted
    user_mentions = user_set.intersection(mentions_set)
    user_likes = user_set.intersection(likes_set)
    user_comments = user_set.intersection(comments_set)
    # Create new binary cols for the different interactions in the user df
    users['mentions'] = [1 if u in user_mentions else 0 for u in users['user_id']]
    users['likes'] = [1 if u in user_likes else 0 for u in users['user_id']]
    users['comments'] = [1 if u in user_comments else 0 for u in users['user_id']]
    return users
def payed_transactions(username, password, train_window_end):
    """ Function that returns the total number of transactions made during a
    given period and the mean, max of the previous transactions made.

    ``diff_time`` is the gap to the user's next payment (window function),
    so MAX/AVG summarise the user's payment cadence up to the window end.
    """
    cursor = extracting_cursor(username, password)
    q = f"""SELECT DISTINCT u.user_id, MAX(p1.diff_time) as max_time_diff_made_trans,
            AVG(p1.diff_time) as mean_time_diff_made_trans,
            COUNT (DISTINCT p1.payment_id) as n_transactions_made,
            COUNT (DISTINCT p1.target_user_id) as n_trans_made_to_diff_users
            FROM (SELECT p.actor_id, p.payment_id, p.target_user_id,
            (LEAD(p.date_created, 1) OVER (PARTITION BY p.actor_id ORDER BY p.date_created)
            - p.date_created) as diff_time
            FROM payments p
            WHERE p.date_created <= CAST('{train_window_end}' AS timestamp)) as p1
            INNER JOIN users u ON u.user_id = p1.actor_id
            GROUP BY (u.user_id);"""
    cursor.execute(q)
    payed_transactions_df = pd.DataFrame(cursor.fetchall())
    # Column names come straight from the cursor metadata.
    payed_transactions_df.columns = [x[0] for x in cursor.description]
    return payed_transactions_df
def received_transactions(username, password, train_window_end):
    """ Function that returns the total number of transactions received during a given period and
    the mean, max of the previous transactions received.

    Mirror image of ``payed_transactions``: partitions by the receiving
    (``target_user_id``) side instead of the payer.
    """
    cursor = extracting_cursor(username, password)
    q = f"""SELECT DISTINCT u.user_id, MAX(p1.diff_time) as max_time_diff_received_trans,
            AVG(p1.diff_time) as mean_time_diff_received_trans,
            COUNT (DISTINCT p1.payment_id) as n_transactions_received,
            COUNT (DISTINCT p1.actor_id) as trans_rec_from_diff_users
            FROM (SELECT p.target_user_id, p.payment_id, p.actor_id,
            (LEAD(p.date_created, 1) OVER (PARTITION BY p.target_user_id ORDER BY p.date_created)
            - p.date_created) as diff_time
            FROM payments p
            WHERE p.date_created <= CAST('{train_window_end}' AS timestamp)) as p1
            INNER JOIN users u ON u.user_id = p1.target_user_id
            GROUP BY (u.user_id);"""
    cursor.execute(q)
    received_transactions_df = pd.DataFrame(cursor.fetchall())
    # Column names come straight from the cursor metadata.
    received_transactions_df.columns = [x[0] for x in cursor.description]
    return received_transactions_df
def transactions_made_weekdays(username, password, train_window_end):
    """Function that calculates the number of transactions made during the week
    for each user.

    DOW 0 = Sunday and 6 = Saturday in Postgres, hence NOT IN (0, 6).
    """
    cursor = extracting_cursor(username, password)
    q = f"""SELECT u.user_id, COUNT (DISTINCT p.payment_id) as trans_made_week
            FROM payments p
            INNER JOIN users u ON u.user_id = p.actor_id
            WHERE EXTRACT (DOW FROM p.date_created) NOT IN (0, 6)
            AND p.date_created <= CAST('{train_window_end}' AS timestamp)
            GROUP BY (u.user_id);"""
    cursor.execute(q)
    trans_made_week = pd.DataFrame(cursor.fetchall())
    trans_made_week.columns = [x[0] for x in cursor.description]
    return trans_made_week
def transactions_made_weekends(username, password, train_window_end):
    """Function that calculates the number of transactions made during the
    weekend for each user.

    DOW 0 = Sunday and 6 = Saturday in Postgres.
    """
    cursor = extracting_cursor(username, password)
    q = f"""SELECT u.user_id, COUNT (DISTINCT p.payment_id) as trans_made_weeknd
            FROM payments p
            INNER JOIN users u ON u.user_id = p.actor_id
            WHERE EXTRACT (DOW FROM p.date_created) IN (0, 6)
            AND p.date_created <= CAST('{train_window_end}' AS timestamp)
            GROUP BY (u.user_id);"""
    cursor.execute(q)
    trans_made_weeknd = pd.DataFrame(cursor.fetchall())
    trans_made_weeknd.columns = [x[0] for x in cursor.description]
    return trans_made_weeknd
def transactions_made_previous_day(username, password, previous_day_start,
                                   train_window_end):
    """ Function that returns the total number of transactions made the
    previous day to our testing time frame.

    Counts payments in [previous_day_start, train_window_end] per payer.
    """
    cursor = extracting_cursor(username, password)
    q = f"""SELECT u.user_id, COUNT (DISTINCT p.payment_id) as n_trans_made_yest
            FROM payments p
            INNER JOIN users u ON u.user_id = p.actor_id
            WHERE p.date_created >= CAST('{previous_day_start}' AS timestamp)
            AND p.date_created <= CAST('{train_window_end}' AS timestamp)
            GROUP BY (u.user_id);"""
    cursor.execute(q)
    trans_made_yest_df = pd.DataFrame(cursor.fetchall())
    trans_made_yest_df.columns = [x[0] for x in cursor.description]
    return trans_made_yest_df
def transactions_rec_weekdays(username, password, train_window_end):
    """Function that calculates the number of transactions received during the
    week for each user.

    Receiver-side twin of ``transactions_made_weekdays``.
    """
    cursor = extracting_cursor(username, password)
    q = f"""SELECT u.user_id, COUNT (DISTINCT p.payment_id) as trans_rec_week
            FROM payments p
            INNER JOIN users u ON u.user_id = p.target_user_id
            WHERE EXTRACT (DOW FROM p.date_created) NOT IN (0, 6)
            AND p.date_created <= CAST('{train_window_end}' AS timestamp)
            GROUP BY (u.user_id);"""
    cursor.execute(q)
    trans_rec_week = pd.DataFrame(cursor.fetchall())
    trans_rec_week.columns = [x[0] for x in cursor.description]
    return trans_rec_week
def transactions_rec_weekends(username, password, train_window_end):
    """Function that calculates the number of transactions received during
    the weekend for each user.

    Receiver-side twin of ``transactions_made_weekends``.
    """
    cursor = extracting_cursor(username, password)
    q = f"""SELECT u.user_id, COUNT (DISTINCT p.payment_id) as trans_rec_weeknd
            FROM payments p
            INNER JOIN users u ON u.user_id = p.target_user_id
            WHERE EXTRACT (DOW FROM p.date_created) IN (0, 6)
            AND p.date_created <= CAST('{train_window_end}' AS timestamp)
            GROUP BY (u.user_id);"""
    cursor.execute(q)
    trans_rec_weeknd = pd.DataFrame(cursor.fetchall())
    trans_rec_weeknd.columns = [x[0] for x in cursor.description]
    return trans_rec_weeknd
def transactions_rec_previous_day(username, password, previous_day_start,
                                  train_window_end):
    """ Function that returns the total number of transactions received the
    previous day to our testing time frame.

    Receiver-side twin of ``transactions_made_previous_day``.
    """
    cursor = extracting_cursor(username, password)
    q = f"""SELECT u.user_id, COUNT (DISTINCT p.payment_id) as n_trans_rec_yest
            FROM payments p
            INNER JOIN users u ON u.user_id = p.target_user_id
            WHERE p.date_created >= CAST('{previous_day_start}' AS timestamp)
            AND p.date_created <= CAST('{train_window_end}' AS timestamp)
            GROUP BY (u.user_id);"""
    cursor.execute(q)
    trans_rec_yest_df = pd.DataFrame(cursor.fetchall())
    trans_rec_yest_df.columns = [x[0] for x in cursor.description]
    return trans_rec_yest_df
def made(username, password, previous_day_start, train_window_end):
    "Function that returns a dataframe with combined statistics for payers"
    payed_transactions_df = payed_transactions(username, password,
                                               train_window_end)
    transactions_made_previous_day_df = (
        transactions_made_previous_day(username, password, previous_day_start,
                                       train_window_end)
    )
    transactions_made_weekdays_df = (
        transactions_made_weekdays(username, password, train_window_end)
    )
    transactions_made_weekends_df = (
        transactions_made_weekends(username, password, train_window_end)
    )
    # Outer join because not everyone who has previously made a transaction
    # necessarily made one yesterday
    payed_and_previous_day = pd.merge(payed_transactions_df,
                                      transactions_made_previous_day_df,
                                      'outer', on='user_id')
    # Weekday vs weekend counts, also outer-joined for the same reason.
    dow = pd.merge(transactions_made_weekdays_df,
                   transactions_made_weekends_df, 'outer', on='user_id')
    # Inner join because every user in either df would have made a transaction
    trans_made = pd.merge(payed_and_previous_day, dow, 'inner', on='user_id')
    # Filling with 0s the null values that arise when users have made a
    # transaction but not yesterday
    trans_made.fillna(0, inplace=True)
    return trans_made
def received(username, password, previous_day_start, train_window_end):
    "Function that returns a dataframe with combined statistics for payees"
    received_transactions_df = received_transactions(username, password,
                                                     train_window_end)
    transactions_rec_previous_day_df = (
        transactions_rec_previous_day(username, password, previous_day_start,
                                      train_window_end)
    )
    transactions_rec_weekdays_df = (
        transactions_rec_weekdays(username, password, train_window_end)
    )
    transactions_rec_weekends_df = (
        transactions_rec_weekends(username, password, train_window_end)
    )
    # Outer join because not everyone who has previously received a transaction
    # necessarily received one yesterday
    rec_and_previous_day = pd.merge(received_transactions_df,
                                    transactions_rec_previous_day_df,
                                    'outer', on='user_id')
    # Weekday vs weekend counts, also outer-joined for the same reason.
    dow = pd.merge(transactions_rec_weekdays_df, transactions_rec_weekends_df,
                   'outer', on='user_id')
    # Inner join because every user in either df would have received a transaction
    trans_rec = pd.merge(rec_and_previous_day, dow, 'inner', on='user_id')
    # Filling with 0s the null values that arise when users have received a
    # transaction but not yesterday
    trans_rec.fillna(0, inplace=True)
    return trans_rec
def transactions(username, password, previous_day_start, train_window_end):
    """Combine the made/received statistics into one dataframe keyed by
    user_id."""
    sent_stats = made(username, password, previous_day_start, train_window_end)
    received_stats = received(username, password, previous_day_start,
                              train_window_end)
    # Outer join: not everyone who made a transaction also received one,
    # and vice versa.
    combined = pd.merge(sent_stats, received_stats, 'outer', on='user_id')
    # Users present on only one side get zeros instead of NaNs.
    combined.fillna(0, inplace=True)
    return combined
def get_aggregated_user_statistics(username, password, previous_day_start,
                                   train_window_end):
    """Function that returns a dataframe with aggregated user statistics and
    personal user information statistics.

    Joins account info, platform interactions, note vectors and transaction
    counts on ``user_id``; timedeltas are converted to float seconds so the
    table is model-ready.
    """
    user_df = user_info(username, password, train_window_end)
    user_interactios = get_user_interaction(username, password, train_window_end)
    user_vectrs_df = get_aggregated_user_note_vector(username, password,
                                                     train_window_end)
    user_info_and_inters = pd.merge(user_df, user_interactios, 'inner',
                                    on='user_id')
    combined_user_info = pd.merge(user_info_and_inters, user_vectrs_df,
                                  'inner', on='user_id')
    trans_df = transactions(username, password, previous_day_start,
                            train_window_end)
    # Inner join because all users should have either made or received a
    # transaction, so they will have a user_id
    agg_table = pd.merge(combined_user_info, trans_df, 'inner', on='user_id')
    # Timedelta columns are flattened to seconds for downstream modelling.
    time_delta_cols = (['time_since_account_inception',
                        'max_time_diff_made_trans',
                        'max_time_diff_received_trans',
                        'mean_time_diff_made_trans',
                        'mean_time_diff_received_trans'])
    for col in time_delta_cols:
        agg_table[f'{col}'] = [diff.total_seconds() for diff in agg_table[f'{col}']]
    return agg_table
def extract_target(username, password, test_window_start, test_window_end):
    """Function that returns the target variable (whether someone made a
    transaction during a given time period) or not.

    Only users with at least one payment in the window appear in the
    result, so every row's target collapses to 1; absent users are the
    implicit negative class.
    """
    cursor = extracting_cursor(username, password)
    q = f"""SELECT u.user_id,
            COUNT (DISTINCT p.payment_id) as n_trans_made_in_measured_period
            FROM payments p
            INNER JOIN users u ON u.user_id = p.actor_id
            WHERE p.date_created >= CAST('{test_window_start}' AS timestamp)
            AND p.date_created <= CAST('{test_window_end}' AS timestamp)
            GROUP BY (u.user_id);"""
    cursor.execute(q)
    tran_or_not_df = pd.DataFrame(cursor.fetchall())
    tran_or_not_df.columns = [x[0] for x in cursor.description]
    # Binarize: any positive count becomes the label 1.
    tran_or_not_df['n_trans_made_in_measured_period'] = (
        [1 for trans in tran_or_not_df['n_trans_made_in_measured_period']]
    )
    return tran_or_not_df
# Formulas for currency analysis
def get_fx_rates(api_key, exchange_currency, desired_currency):
    """Query Alpha Vantage for the daily FX rate history (~100 days) of
    ``exchange_currency`` quoted in ``desired_currency`` and return the raw
    HTTP response."""
    endpoint = 'https://www.alphavantage.co/query?'
    # Assemble the query string piecewise; the result is byte-identical to
    # the documented FX_DAILY request format.
    query = (f'function=FX_DAILY'
             f'&from_symbol={exchange_currency}'
             f'&to_symbol={desired_currency}'
             f'&apikey={api_key}')
    return requests.get(endpoint + query)
def get_adjusted_rate(response_json):
    """Convert the Alpha Vantage response into a DataFrame of historic
    adjusted close prices, oldest date first."""
    daily_quotes = response_json.json()['Time Series FX (Daily)']
    # Keep only the closing rate, parsed to float, keyed by date.
    closes = {day: float(quote['4. close'])
              for day, quote in daily_quotes.items()}
    rates = pd.DataFrame.from_dict(closes, orient='index')
    rates.columns = ['Adj Close Price']
    # API returns newest-first; flip to chronological order.
    return rates.reindex(index=rates.index[::-1])
def get_bollinger_bands(response_df):
    """Add Bollinger band columns (20-sample rolling mean/std, +/- 2 std)
    for the exchange rate in question; mutates and returns the frame.

    Column names keep the historical "30 Day" labels used by callers.
    """
    rolling_rate = response_df['Adj Close Price'].rolling(window=20)
    response_df['30 Day MA'] = rolling_rate.mean()
    response_df['30 Day STD'] = rolling_rate.std()
    response_df['Upper Band'] = response_df['30 Day MA'] + 2 * response_df['30 Day STD']
    response_df['Lower Band'] = response_df['30 Day MA'] - 2 * response_df['30 Day STD']
    return response_df
def get_graphical_view(response_df, exchange_currency, desired_currency, today):
    """Plot the last month of the exchange rate with its Bollinger bands and
    print a simple buy-timing suggestion.

    :param response_df: frame produced by ``get_bollinger_bands``.
    :param today: ``datetime.date`` for the right edge of the plot window.
    :return: ``plt.show()``'s return value.

    Fixes: the previous-month date is clamped so short months (e.g. 31 Mar
    -> Feb) no longer raise ValueError, and the bogus ``plt.savefig("")``
    call (empty filename raises) was removed.
    """
    # We only want to show the previous month, therefore subset the dataframe.
    end_of_prev_month = today.replace(day=1) - datetime.timedelta(days=1)
    one_month_ago = end_of_prev_month.replace(
        day=min(today.day, end_of_prev_month.day)).strftime("%Y-%m-%d")
    date_15_days_ago = (today - datetime.timedelta(days=15)).strftime("%Y-%m-%d")
    response_df = response_df.loc[(response_df.index >= one_month_ago) &
                                  (response_df.index <= today.strftime("%Y-%m-%d"))]
    # set style, empty figure and axes
    plt.style.use('fivethirtyeight')
    fig = plt.figure(figsize=(12, 6), facecolor='w')
    ax = fig.add_subplot(111)
    # Get index values for the X axis for exchange rate DataFrame
    x_axis = response_df.index
    # Plot shaded Bollinger Band for exchange rate
    ax.fill_between(x_axis, response_df['Upper Band'], response_df['Lower Band'], color='white')
    # Plot Adjusted Closing Price and the band edges
    ax.plot(x_axis, response_df['Adj Close Price'], color='blue', lw=2)
    ax.plot(x_axis, response_df['Upper Band'], color='green', lw=2)
    ax.plot(x_axis, response_df['Lower Band'], color='red', lw=2)
    ax.set_xticks([one_month_ago, date_15_days_ago, today.strftime("%Y-%m-%d")])
    # Set Title & Show the Image
    ax.set_title(f'30 Day Bollinger Band For {exchange_currency}/{desired_currency} rate')
    ax.set_ylabel(f'Value of 1 {exchange_currency} in {desired_currency}')
    ax.legend(['Adj Close Price', f'Strong {exchange_currency}', f'Weak {exchange_currency}'])
    # Compare today's rate against the band boundaries and advise.
    compare = response_df.loc[response_df.index == today.strftime("%Y-%m-%d")]
    if compare['Adj Close Price'].values > compare['Upper Band'].values:
        print(f'The {exchange_currency} is strong, consider making your international transaction today.')
    elif compare['Adj Close Price'].values > compare['Lower Band'].values:
        print(f"The {exchange_currency} is currently trading according to its boundaries.")
    else:
        print(f"The {exchange_currency} is weak, consider making your international transaction another day.")
    return plt.show()
# Functions to calculate clusters
def get_distortion_plot(whole_corpus_notes):
    """Plot the SSE (distortion) elbow curve for k = 1..9 k-means clusterings
    of the note vectors, using cosine distance.

    :param whole_corpus_notes: iterable of notes to vectorize.
    :return: ``plt.show()``'s return value.
    """
    # Load the persisted doc2vec model explicitly instead of relying on a
    # ``vectorizer`` global (which made this function raise NameError when
    # run outside the training notebook) -- mirrors
    # get_aggregated_user_note_vector.
    vectorizer = Doc2Vec.load("d2v.model")
    # Calculate the vectors and store them in a list of arrays
    _vectrs = [np.array(vectorizer.infer_vector(note))
               for note in whole_corpus_notes]
    # Calculate the distortion with the vector arrays
    distortions = []
    for k in range(1, 10):
        kclusterer = KMeansClusterer(k, distance=cosine_distance)
        assigned_clusters = kclusterer.cluster(_vectrs, assign_clusters=True)
        sum_of_squares = 0
        for current_cluster, centroid in enumerate(kclusterer.means()):
            for page_index, cluster_of_page in enumerate(assigned_clusters):
                if cluster_of_page == current_cluster:
                    # SSE contribution of this vector to its centroid
                    sum_of_squares += np.sum((centroid - _vectrs[page_index]) ** 2)
        distortions.append(round(sum_of_squares))
    # Plot values of SSE
    plt.figure(figsize=(15, 8))
    plt.subplot(121, title='Elbow curve')
    plt.xlabel('k')
    plt.plot(range(1, 10), distortions)
    plt.grid(True)
    return plt.show()
def get_cluster_topics_with_LDA(recomposed_note_stopped_em):
    """Train an LDA topic model over the notes and print its topics.

    :param recomposed_note_stopped_em: notes as token lists (emojis still in
        unicode-escape form; converted to :colon: text before modelling).
    :return: the fitted ``LdaMulticore`` model.
    """
    # Retrieve the different documents
    fully_recomposed_notes = emojis_to_text(recomposed_note_stopped_em)
    # Create the Inputs of LDA model: Dictionary and Corpus
    dct = corpora.Dictionary(fully_recomposed_notes)
    corpus = [dct.doc2bow(note) for note in fully_recomposed_notes]
    # Train the LDA model (hyperparameters chosen empirically).
    lda_model = LdaMulticore(corpus=corpus, id2word=dct, random_state=100,
                             num_topics=6, passes=10, chunksize=500,
                             batch=False, offset=64, eval_every=0, iterations=100,
                             gamma_threshold=0.001)
    # Log each discovered topic with its top words.
    for idx, topic in lda_model.print_topics():
        print('Topic: {} Word: {}'.format(idx, topic))
    return lda_model
# Add extra text for commit
| [
2,
40480,
329,
8710,
5908,
17923,
1628,
198,
11748,
19798,
292,
355,
279,
67,
198,
11748,
299,
32152,
355,
45941,
198,
11748,
17331,
22163,
70,
17,
198,
11748,
279,
4948,
25162,
198,
11748,
33918,
198,
11748,
4818,
8079,
198,
11748,
229... | 2.366137 | 21,634 |
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
""" Sample environment for testing """
from collections import OrderedDict
from collections import namedtuple
from rainman2.lib.environment import environment_template
__author__ = 'Ari Saha (arisaha@icloud.com)'
__date__ = 'Friday, April 6th 2018, 11:18:56 am'
# Discrete action ids exposed by the sample environment.
ACTIONS = {
    0: 'UP',
    1: 'DOWN',
    2: 'LEFT',
    3: 'RIGHT'
}
# Ordered attribute names; order defines the STATE namedtuple fields.
STATE_ATTRIBUTES = OrderedDict(
    attr1=None,
    attr2=None,
    attr3=None,
)
# Lightweight state record built from the attribute names above.
STATE = namedtuple(
    'STATE',
    STATE_ATTRIBUTES.keys())
# Fixed episode script: the env walks these states in order; 'last' ends it.
STATES_LIST = [STATE('initial', 1, 1),
               STATE('next', 0.5, 0.5),
               STATE('last', 0.2, 0.9)]
class SampleGeneralEnv(environment_template.Base):
    """
    Sample environment that replays a fixed, scripted sequence of states
    (STATES_LIST) for testing agents against the Base interface.
    """

    def __init__(self, env_config):
        """
        Initialize sample env

        :param env_config: opaque config dict stored for the Base template.
        """
        self.env_config = env_config
        # Iterator over the scripted episode; rebuilt on every reset.
        self.states = iter(STATES_LIST)

    def get_next_state(self):
        """
        Fetches next state (None once the script is exhausted)
        """
        return next(self.states, None)

    @property
    def _actions(self):
        """
        Sample actions allowed in the env
        """
        return ACTIONS

    @property
    def _state_dim(self):
        """
        Sample state dim (number of attributes per state tuple)
        """
        return len(STATE_ATTRIBUTES)

    def _reset(self):
        """
        Sample action to reset the env; restarts the scripted sequence
        and returns its first state.
        """
        self.states = iter(STATES_LIST)
        return self.get_next_state()

    def _step(self, state, action):
        """
        Sample action to take a step.

        Ignores ``state``/``action`` and advances the script; constant
        reward of 1, episode stops when the 'last' state is reached.
        """
        next_state = self.get_next_state()
        stop = False
        reward = 1
        if next_state.attr1 == 'last':
            stop = True
        return next_state, reward, stop
| [
2,
0,
1220,
14629,
14,
8800,
14,
24330,
21015,
18,
198,
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
198,
37811,
27565,
2858,
329,
4856,
37227,
198,
198,
6738,
17268,
1330,
14230,
1068,
35,
713,
198,
6738,
17268,
... | 2.15528 | 805 |
# Contar de trás pra frente
for c in range(6, 0, -1):
print(c)
# Pulando de 2 em 2
for c in range(0, 10, 2):
print(c)
# somar valores
s = 0
for c in range(0, 4):
n = int(input('Digite um valor: '))
s += n
print('Fim') | [
2,
2345,
283,
390,
491,
40138,
7201,
277,
1156,
68,
201,
198,
1640,
269,
287,
2837,
7,
21,
11,
657,
11,
532,
16,
2599,
201,
198,
220,
220,
220,
3601,
7,
66,
8,
201,
198,
2,
21624,
25440,
390,
362,
795,
362,
201,
198,
1640,
269... | 1.984 | 125 |
#%% train sac
import os
import gym
import numpy as np
from stable_baselines import SAC
from stable_baselines.sac.policies import MlpPolicy
from stable_baselines.sac.policies import LnMlpPolicy
from stable_baselines.common.noise import NormalActionNoise
from stable_baselines.bench import Monitor
from stable_baselines.common.callbacks import BaseCallback
import Arm2DEnv as ae
from utils import SaveOnBestTrainingRewardCallback, snap_code
#%% sac
log_dir_root = './sandbox/sac/'
os.makedirs(log_dir_root, exist_ok=True)
# create a snapshot of code
log_dir = snap_code(log_dir_root)
env = ae.ArmModel()
env = Monitor(env, log_dir)
model = SAC(LnMlpPolicy, env, buffer_size=int(5E5), batch_size=128, gamma=0.98, learning_rate = 0.001, tau = 0.01, verbose=1)
callback = SaveOnBestTrainingRewardCallback(check_freq=int(5E4), log_dir=log_dir)
model.learn(total_timesteps=int(3E6), log_interval=10, callback=callback)
# %%
model.save("twolink_arm_sac") | [
2,
16626,
4512,
5360,
198,
11748,
28686,
198,
11748,
11550,
198,
11748,
299,
32152,
355,
45941,
198,
6738,
8245,
62,
12093,
20655,
1330,
311,
2246,
198,
6738,
8245,
62,
12093,
20655,
13,
30584,
13,
79,
4160,
444,
1330,
337,
34431,
36727... | 2.802941 | 340 |
# Generated by Django 3.2.6 on 2021-08-12 18:36
import ckeditor.fields
from django.db import migrations
| [
2,
2980,
515,
416,
37770,
513,
13,
17,
13,
21,
319,
33448,
12,
2919,
12,
1065,
1248,
25,
2623,
198,
198,
11748,
269,
9091,
2072,
13,
25747,
198,
6738,
42625,
14208,
13,
9945,
1330,
15720,
602,
628
] | 2.864865 | 37 |
from rest_framework import serializers
from core.models import Tag, Information, Recipe
class TagSerializer(serializers.ModelSerializer):
"""Serializer for tag object"""
class InformationSerializer(serializers.ModelSerializer):
"""Serializer for an Information object"""
class RecipeSerializer(serializers.ModelSerializer):
"""Serialize a recipe"""
information = serializers.PrimaryKeyRelatedField(
many=True,
queryset=Information.objects.all()
)
tags = serializers.PrimaryKeyRelatedField(
many=True,
queryset=Tag.objects.all()
)
class RecipeDetailSerializer(RecipeSerializer):
"""Serialize a recipe detail"""
information = InformationSerializer(many=True, read_only=True)
tags = TagSerializer(many=True, read_only=True)
class RecipeImageSerializer(serializers.ModelSerializer):
"""Serializer for uploading images to recipe"""
| [
6738,
1334,
62,
30604,
1330,
11389,
11341,
198,
6738,
4755,
13,
27530,
1330,
17467,
11,
6188,
11,
26694,
628,
198,
4871,
17467,
32634,
7509,
7,
46911,
11341,
13,
17633,
32634,
7509,
2599,
198,
220,
220,
220,
37227,
32634,
7509,
329,
762... | 3.214789 | 284 |
# -*- coding: utf-8 -*-
from .doyle import cli
__author__ = 'Martin Garcia'
__email__ = 'newluxfero@gmail.com'
__version__ = '0.1.0'
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
198,
6738,
764,
67,
19802,
1330,
537,
72,
198,
198,
834,
9800,
834,
796,
705,
24778,
18555,
6,
198,
834,
12888,
834,
796,
705,
3605,
22564,
2232,
78,
31,
14816,
13,
... | 2.25 | 60 |
#!/usr/bin/env python
'''
BackdoorFactory (BDF) v3 - FOUNTAINPATCH
Many thanks to Ryan O'Neill --ryan 'at' codeslum <d ot> org--
Without him, I would still be trying to do stupid things
with the elf format.
Also thanks to Silvio Cesare with his 1998 paper
(http://vxheaven.org/lib/vsc01.html) which these ELF patching
techniques are based on.
Special thanks to Travis Morrow for poking holes in my ideas.
Copyright (c) 2013-2015, Joshua Pitts
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its contributors
may be used to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
'''
from os.path import basename
from os import getcwd
from pebin import pebin
from elfbin import elfbin
from machobin import machobin
#start("PE", "Handle.exe", "192.168.1.34", 4444).patch()
#start("ELF", "Handle.exe", "192.168.1.34", "4444").patch()
#start("MACHO", "Handle.exe", "192.168.1.34", "4444").patch() | [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
7061,
6,
198,
7282,
9424,
22810,
357,
33,
8068,
8,
410,
18,
532,
376,
19385,
30339,
47,
11417,
198,
198,
7085,
5176,
284,
6047,
440,
6,
26538,
1377,
29038,
705,
265,
6,
12416,
75,
... | 3.4 | 655 |
from pytest import mark
@mark.parametrize('hostname', ['google.com', 'cbaccesscontrol.xdl.dk'])
@mark.parametrize('hostname', ['google.com', 'cbaccesscontrol.xdl.dk'])
| [
6738,
12972,
9288,
1330,
1317,
628,
198,
31,
4102,
13,
17143,
316,
380,
2736,
10786,
4774,
3672,
3256,
37250,
13297,
13,
785,
3256,
705,
21101,
15526,
13716,
13,
24954,
75,
13,
34388,
6,
12962,
628,
198,
31,
4102,
13,
17143,
316,
380,... | 2.6875 | 64 |
#!/usr/bin/env python
import sys
import matplotlib as mpl
if len(sys.argv) > 1:
mpl.use('Agg')
from pylab import *
from diData import loadEVS, persons, CN
from scipy import stats
from collections import defaultdict
EVS = loadEVS(['Small scale de novo in SSC'])
CNT = defaultdict(lambda : {pid:0 for pid,pd in persons.items() if pd.coll == 'SSC'})
for e in EVS:
parent = e.atts[CN('from parent')]
if not parent: continue
if e.location.startswith('chrX'): continue
tp = "sub" if e.vtype == "sub" else "indel"
for pid in e.pids:
CNT[tp,parent][pid] += 1
clf()
subplot(2,2,1)
ylim([0,50])
p('sub','dad')
ylabel('power adjusted number of\nphased dn substitutions')
title('Father')
subplot(2,2,2)
ylim([0,50])
p('sub','mom')
title('Mother')
subplot(2,2,3)
ylim([0,12])
p('indel','dad')
ylabel('power adjusted number of\nphased dn indels')
xlabel("father's age at birth")
subplot(2,2,4)
ylim([0,10])
p('indel','mom')
xlabel("mother's age at birth")
gcf().set_size_inches(6.5,6.5)
tight_layout()
if len(sys.argv) > 1:
gcf().savefig(sys.argv[1])
else:
show()
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
198,
11748,
25064,
198,
11748,
2603,
29487,
8019,
355,
285,
489,
198,
361,
18896,
7,
17597,
13,
853,
85,
8,
1875,
352,
25,
198,
220,
220,
220,
285,
489,
13,
1904,
10786,
46384,
11537... | 2.324211 | 475 |
"""
Django settings for config project.
Generated by 'django-admin startproject' using Django 3.0.6.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.0/ref/settings/
"""
from pathlib import Path
from django.contrib.messages import constants as messages
import sentry_sdk
from sentry_sdk.integrations.django import DjangoIntegration
# Dotenv support
# Django-environ support
import environ
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve(strict=True).parent.parent
env = environ.Env(
# set casting, default value
SECRET_KEY=(str, "this_is_just_a_temporary_secret_key"),
DEBUG=(bool, True),
ALLOWED_HOSTS=(list, ["127.0.0.1"]),
SENTRY_ENVIRONMENT=(str, "development"),
SENTRY_DSN=(str, ""),
EMAIL_HOST=(str, ""),
EMAIL_PORT=(str, ""),
EMAIL_HOST_USER=(str, ""),
EMAIL_HOST_PASSWORD=(str, ""),
EMAIL_USE_TLS=(bool, True),
DEFAULT_FROM_EMAIL=(str, ""),
REDIS_HOST=(str, "localhost"),
REDIS_PORT=(int, 6379),
REDIS_DB=(int, 0),
REDIS_DEFAULT_TIMEOUT=(int, 3600),
REDIS_PASSWORD=(str, ""),
ADMIN_URL=(str, "admin"),
USE_S3=(bool, False),
AWS_ACCESS_KEY_ID=(str, ""),
AWS_SECRET_ACCESS_KEY=(str, ""),
AWS_STORAGE_BUCKET_NAME=(str, ""),
AWS_S3_CUSTOM_DOMAIN=(str, ""),
DB_ENGINE=(str, "django.db.backends.sqlite3"),
DB_NAME=(str, BASE_DIR / "db.sqlite3"),
DB_USER=(str, ""),
DB_PASSWORD=(str, ""),
DB_HOST=(str, ""),
DB_PORT=(str, ""),
)
sentry_sdk.init(
dsn=env("SENTRY_DSN"),
integrations=[DjangoIntegration()],
traces_sample_rate=1.0,
environment=env("SENTRY_ENVIRONMENT"),
)
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = env("SECRET_KEY")
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = env("DEBUG")
ALLOWED_HOSTS = env("ALLOWED_HOSTS")
ADMIN_URL = env("ADMIN_URL")
# Application definition
INSTALLED_APPS = [
"django.contrib.admin",
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.sessions",
"django.contrib.messages",
"django.contrib.staticfiles",
"django.contrib.sites", # Required and added for django-allauth
"allauth", # Required and added for django-allauth
"allauth.account", # Required and added for django-allauth
"crispy_forms", # Required and added for django-crispy-forms
"django_rq", # Required and added for Django-RQ
"storages", # Required and added for Django-Storages
"checks.apps.ChecksConfig",
"website.apps.WebsiteConfig",
]
if DEBUG:
INSTALLED_APPS += [
"debug_toolbar", # Required and added for django-debug-toolbar
]
MIDDLEWARE = [
"django.middleware.security.SecurityMiddleware",
"django.contrib.sessions.middleware.SessionMiddleware",
"django.middleware.common.CommonMiddleware",
"django.middleware.csrf.CsrfViewMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
"django.contrib.messages.middleware.MessageMiddleware",
"django.middleware.clickjacking.XFrameOptionsMiddleware",
]
if DEBUG:
MIDDLEWARE += [
"debug_toolbar.middleware.DebugToolbarMiddleware", # noqa Required and added for django-debug-toolbar
]
ROOT_URLCONF = "config.urls"
TEMPLATES = [
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [BASE_DIR / "templates"],
"APP_DIRS": True,
"OPTIONS": {
"context_processors": [
"django.template.context_processors.debug",
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
],
},
},
]
WSGI_APPLICATION = "config.wsgi.application"
# Database
# https://docs.djangoproject.com/en/3.0/ref/settings/#databases
DATABASES = {
"default": {
"ENGINE": env("DB_ENGINE"),
"NAME": env("DB_NAME"),
"USER": env("DB_USER"),
"PASSWORD": env("DB_PASSWORD"),
"HOST": env("DB_HOST"),
"PORT": env("DB_PORT"),
}
}
# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
"NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator", # noqa
},
{
"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator",
},
{
"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator",
},
{
"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator",
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/
LANGUAGE_CODE = "en-us"
TIME_ZONE = "Pacific/Auckland"
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.0/howto/static-files/
USE_S3 = env("USE_S3")
if USE_S3:
STATICFILES_STORAGE = "storages.backends.s3boto3.S3Boto3Storage"
AWS_ACCESS_KEY_ID = env("AWS_ACCESS_KEY_ID")
AWS_SECRET_ACCESS_KEY = env("AWS_SECRET_ACCESS_KEY")
AWS_STORAGE_BUCKET_NAME = env("AWS_STORAGE_BUCKET_NAME")
AWS_S3_OBJECT_PARAMETERS = {"ACL": "public-read", "CacheControl": "max-age=86400"}
if env("AWS_S3_CUSTOM_DOMAIN"):
AWS_S3_CUSTOM_DOMAIN = env("AWS_S3_CUSTOM_DOMAIN")
else:
AWS_S3_CUSTOM_DOMAIN = f"s3.amazonaws.com/{AWS_STORAGE_BUCKET_NAME}"
AWS_LOCATION = ""
STATIC_URL = f"https://{AWS_S3_CUSTOM_DOMAIN}/{AWS_LOCATION}/"
else:
STATIC_URL = "/static/"
STATIC_ROOT = BASE_DIR / "staticfiles"
STATICFILES_DIRS = [
BASE_DIR / "static",
]
# django-allauth configuration
SITE_ID = 1
LOGIN_REDIRECT_URL = "/"
ACCOUNT_LOGOUT_REDIRECT_URL = "/"
AUTHENTICATION_BACKENDS = (
# Needed to login by username in Django admin, regardless of `allauth`
"django.contrib.auth.backends.ModelBackend",
# `allauth` specific authentication methods, such as login by e-mail
"allauth.account.auth_backends.AuthenticationBackend",
)
ACCOUNT_AUTHENTICATION_METHOD = "email"
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_EMAIL_VERIFICATION = "mandatory"
ACCOUNT_UNIQUE_EMAIL = True
ACCOUNT_USERNAME_REQUIRED = False
# Email configuration
EMAIL_BACKEND = "django.core.mail.backends.smtp.EmailBackend"
EMAIL_HOST = env("EMAIL_HOST")
EMAIL_PORT = env("EMAIL_PORT")
EMAIL_HOST_USER = env("EMAIL_HOST_USER")
EMAIL_HOST_PASSWORD = env("EMAIL_HOST_PASSWORD")
EMAIL_USE_TLS = env("EMAIL_USE_TLS")
DEFAULT_FROM_EMAIL = env("DEFAULT_FROM_EMAIL")
# django-debug-toolbar
INTERNAL_IPS = ["127.0.0.1"]
# django-crispy-forms configuration
CRISPY_TEMPLATE_PACK = "bootstrap4"
# The following constants let us use Bootstrap alerts with messages
MESSAGE_TAGS = {
messages.DEBUG: "alert-info",
messages.INFO: "alert-info",
messages.SUCCESS: "alert-success",
messages.WARNING: "alert-warning",
messages.ERROR: "alert-danger",
}
# Media
MEDIA_ROOT = "media"
MEDIA_URL = "/media/"
# Django-RQ Configuration
RQ_SHOW_ADMIN_LINK = True
RQ_QUEUES = {
"checks": {
"HOST": env("REDIS_HOST"),
"PORT": env("REDIS_PORT"),
"DB": env("REDIS_DB"),
"DEFAULT_TIMEOUT": env("REDIS_DEFAULT_TIMEOUT"),
"PASSWORD": env("REDIS_PASSWORD"),
},
}
# Logging
if DEBUG:
LOGGING = {
"version": 1,
"disable_existing_loggers": False,
"formatters": {
"rq_console": {
"format": "%(asctime)s %(message)s",
"datefmt": "%H:%M:%S",
},
"rich": {"datefmt": "[%X]"},
},
"handlers": {
"console": {
"level": "DEBUG",
"class": "rich.logging.RichHandler",
"formatter": "rich",
},
"rq_console": {
"level": "DEBUG",
"class": "rich.logging.RichHandler",
"formatter": "rich",
},
},
"loggers": {
"django": {"handlers": ["console"]},
"rq.worker": {"handlers": ["rq_console"], "level": "DEBUG"},
},
"root": {
"handlers": ["console"],
"level": "DEBUG",
},
}
| [
37811,
198,
35,
73,
14208,
6460,
329,
4566,
1628,
13,
198,
198,
8645,
515,
416,
705,
28241,
14208,
12,
28482,
923,
16302,
6,
1262,
37770,
513,
13,
15,
13,
21,
13,
198,
198,
1890,
517,
1321,
319,
428,
2393,
11,
766,
198,
5450,
1378... | 2.232752 | 3,841 |
import pytest
from assertpy import assert_that
import year2017.day01.reader as reader
import year2017.day01.solver as solver
@pytest.mark.parametrize('puzzle, expected',
[([1, 1, 2, 2], 3),
([1, 1, 1, 1], 4),
([1, 2, 3, 4], 0),
([9, 1, 2, 1, 2, 1, 2, 9], 9),
])
@pytest.mark.solution
@pytest.mark.parametrize('puzzle, expected',
[([1, 2, 1, 2], 6),
([1, 2, 2, 1], 0),
([1, 2, 3, 4, 2, 5], 4),
([1, 2, 3, 1, 2, 3], 12),
([1, 2, 1, 3, 1, 4, 1, 5], 4),
])
@pytest.mark.solution
| [
11748,
12972,
9288,
198,
6738,
6818,
9078,
1330,
6818,
62,
5562,
198,
198,
11748,
614,
5539,
13,
820,
486,
13,
46862,
355,
9173,
198,
11748,
614,
5539,
13,
820,
486,
13,
82,
14375,
355,
1540,
332,
628,
198,
31,
9078,
9288,
13,
4102,... | 1.52381 | 504 |
# -*- coding: utf-8 -*-
"""
Test Homeshick object
"""
import logging
import unittest
import os
import tempfile
from homeslick import context
from homeslick.homeshick import Homeshick
DEV_LOGGER = logging.getLogger(__name__)
class TestHomeshick(unittest.TestCase):
'''
Test Castle object
'''
TEST_NAME = 'temp'
TEST_GIT_URI = 'https://github.com/cscutcher/safe_home.git'
def test_symlink_twice(self):
'''
Test symlink twice doesn't cause conflict
'''
self.castle.init()
self.castle.symlink()
self.castle.symlink()
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
37811,
198,
14402,
34698,
71,
624,
2134,
198,
37811,
198,
11748,
18931,
198,
11748,
555,
715,
395,
198,
11748,
28686,
198,
11748,
20218,
7753,
198,
6738,
5682,
75,
624,
... | 2.346614 | 251 |
from rlgym.utils.reward_functions import RewardFunction
from rlgym.utils import math
from TouchArenaBoundariesTerminalCondition import CollidedTerminalCondition
from rlgym.utils.gamestates import GameState, PlayerData
import numpy as np
import timeit
import copy
| [
6738,
374,
75,
1360,
76,
13,
26791,
13,
260,
904,
62,
12543,
2733,
1330,
32307,
22203,
201,
198,
6738,
374,
75,
1360,
76,
13,
26791,
1330,
10688,
201,
198,
6738,
15957,
43199,
64,
49646,
3166,
44798,
282,
48362,
1330,
7778,
1384,
4479... | 3.277108 | 83 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Author: Dusan Klinec, ph4r05, 2018
from monero_glue.messages import (
Failure,
FailureType,
MoneroExportedKeyImage,
MoneroKeyImageExportInitAck,
MoneroKeyImageExportInitRequest,
MoneroKeyImageSyncFinalAck,
MoneroKeyImageSyncStepAck,
MoneroKeyImageSyncStepRequest,
)
from monero_glue.trezor import wrapper as twrap
from monero_glue.xmr import common, crypto, key_image, monero
from monero_glue.xmr.enc import chacha_poly
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
2,
6434,
25,
360,
385,
272,
509,
1370,
66,
11,
872,
19,
81,
2713,
11,
2864,
628,
198,
6738,
937,
3529,
62,
4743,
... | 2.566327 | 196 |
from typing import Dict, Tuple, List
from rial.ir.RIALVariable import RIALVariable
from rial.ir.modifier.AccessModifier import AccessModifier
| [
6738,
19720,
1330,
360,
713,
11,
309,
29291,
11,
7343,
198,
198,
6738,
374,
498,
13,
343,
13,
7112,
1847,
43015,
1330,
371,
12576,
43015,
198,
6738,
374,
498,
13,
343,
13,
4666,
7483,
13,
15457,
5841,
7483,
1330,
8798,
5841,
7483,
6... | 3.348837 | 43 |
import subprocess
import json
import settings
import os
| [
11748,
850,
14681,
198,
11748,
33918,
198,
11748,
6460,
198,
11748,
28686,
628
] | 4.384615 | 13 |
import torch
from torch import nn
import torch.nn.functional as F
import numpy as np
import torch_geometric.nn as gnn
from torch_geometric.data import Data, DataLoader, Batch
from torch_scatter import scatter
from torch_geometric.nn import GENConv, DeepGCNLayer, global_max_pool, global_add_pool, EdgeConv
from arena import Wrapper
import pickle, os
| [
11748,
28034,
198,
6738,
28034,
1330,
299,
77,
198,
11748,
28034,
13,
20471,
13,
45124,
355,
376,
198,
11748,
299,
32152,
355,
45941,
198,
198,
11748,
28034,
62,
469,
16996,
13,
20471,
355,
19967,
77,
198,
6738,
28034,
62,
469,
16996,
... | 3.320755 | 106 |
# Script by Thatrandomlurker/RhythmProtogen
from inc_noesis import *
import noesis
import rapi
import struct # for building the buffers
import os # check if file exists
int_Endian = 0
# Check that this is a supported file. won't immediately break if the model is a different format than expected, but will log a notice.
| [
2,
12327,
416,
1320,
25120,
75,
333,
6122,
14,
38576,
34853,
19703,
6644,
201,
198,
201,
198,
6738,
753,
62,
77,
3028,
271,
1330,
1635,
201,
198,
11748,
645,
9339,
201,
198,
11748,
4095,
72,
201,
198,
11748,
2878,
220,
1303,
329,
26... | 3.269231 | 104 |
from django import test
from django_cradmin.templatetags import cradmin_tags
| [
6738,
42625,
14208,
1330,
1332,
198,
198,
6738,
42625,
14208,
62,
6098,
28482,
13,
11498,
489,
265,
316,
3775,
1330,
1067,
28482,
62,
31499,
628
] | 3.16 | 25 |
import numpy
import sys
import png
# TODO: figure out argparse
# CELL SPEC: [[0, 0, 'G', sys.maxsize, sys.maxsize, sys.maxsize, sys.maxsize]]
# ^ ^ ^ ^ ^ ^ ^
# address | | | | | |
# generation | | | | |
# color | | | |
# east neighbor diff | | |
# north neighbor diff | |
# west neighbor diff |
# south neighbor diff
def qlao(code, tx, ty, direction) -> int:
"""
This is the circled plus operator from the paper. QLAO stands for quad location addition operator
:param code: address of cell
:param tx: constant mask for filtering x bits
:param ty: constant mask for filtering y bits
:param direction: direction vector in interleaved binary format
:return: the calculated neighbor address
"""
return (((code | ty) + (direction & tx)) & tx) | (((code | tx) + (direction & ty)) & ty)
| [
11748,
299,
32152,
198,
11748,
25064,
198,
11748,
279,
782,
198,
198,
2,
16926,
46,
25,
3785,
503,
1822,
29572,
198,
198,
2,
18671,
3069,
28196,
25,
16410,
15,
11,
657,
11,
705,
38,
3256,
25064,
13,
9806,
7857,
11,
25064,
13,
9806,
... | 1.828614 | 671 |
import sys
testcase = int(sys.stdin.readline().rstrip())
data = [ map(int, sys.stdin.readline().split()) for i in range(testcase)]
for array in data:
_arr = list(array)
_arr.sort()
print(_arr[-3]) | [
11748,
25064,
198,
198,
9288,
7442,
796,
493,
7,
17597,
13,
19282,
259,
13,
961,
1370,
22446,
81,
36311,
28955,
198,
198,
7890,
796,
685,
3975,
7,
600,
11,
25064,
13,
19282,
259,
13,
961,
1370,
22446,
35312,
28955,
329,
1312,
287,
2... | 2.482353 | 85 |
# Generated by Django 3.0 on 2019-12-25 08:17
from django.db import migrations, models
| [
2,
2980,
515,
416,
37770,
513,
13,
15,
319,
13130,
12,
1065,
12,
1495,
8487,
25,
1558,
198,
198,
6738,
42625,
14208,
13,
9945,
1330,
15720,
602,
11,
4981,
628
] | 2.966667 | 30 |
from django.shortcuts import render
from django.contrib.auth import authenticate,login
from django.http import HttpResponseRedirect
from django.urls import reverse
from .forms import SignUpForm
#이부분은 login과는 다르게 장고폼으로 구현함.
#장고폼을 이용해면 request를 통해 받아오는 작업을 따로 작성하지 않아도
#from에서 자동으로 처리해줌.
| [
6738,
42625,
14208,
13,
19509,
23779,
1330,
8543,
198,
6738,
42625,
14208,
13,
3642,
822,
13,
18439,
1330,
8323,
5344,
11,
38235,
198,
6738,
42625,
14208,
13,
4023,
1330,
367,
29281,
31077,
7738,
1060,
198,
6738,
42625,
14208,
13,
6371,
... | 1.310502 | 219 |
# Copyright 2015 PerfKitBenchmarker Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for mnist_benchmark."""
import os
import unittest
from perfkitbenchmarker import test_util
from perfkitbenchmarker.linux_benchmarks import mnist_benchmark
if __name__ == '__main__':
unittest.main()
| [
2,
15069,
1853,
2448,
69,
20827,
44199,
4102,
263,
46665,
13,
1439,
2489,
10395,
13,
198,
2,
198,
2,
49962,
739,
262,
24843,
13789,
11,
10628,
362,
13,
15,
357,
1169,
366,
34156,
15341,
198,
2,
345,
743,
407,
779,
428,
2393,
2845,
... | 3.593886 | 229 |
# contagem
n = int(input('n: '))
for i in range(0, n):
print(i)
# contagem do usuário
li = int(input('limite inferior: '))
lf = int(input('limite superior: '))
pa = int(input('passo: '))
for i in range(li, lf, pa):
print(i)
| [
2,
34335,
368,
198,
77,
796,
493,
7,
15414,
10786,
77,
25,
705,
4008,
198,
198,
1640,
1312,
287,
2837,
7,
15,
11,
299,
2599,
198,
220,
220,
220,
3601,
7,
72,
8,
198,
198,
2,
34335,
368,
466,
514,
84,
6557,
27250,
198,
4528,
79... | 2.259615 | 104 |
import os
import time
from st2common.runners.base_action import Action
from st2client.models.action_alias import ActionAliasMatch
from st2client.models.aliasexecution import ActionAliasExecution
from st2client.commands.action import (LIVEACTION_STATUS_REQUESTED,
LIVEACTION_STATUS_SCHEDULED,
LIVEACTION_STATUS_RUNNING,
LIVEACTION_STATUS_CANCELING)
from st2client.client import Client
| [
11748,
28686,
198,
11748,
640,
198,
198,
6738,
336,
17,
11321,
13,
36740,
13,
8692,
62,
2673,
1330,
7561,
198,
6738,
336,
17,
16366,
13,
27530,
13,
2673,
62,
26011,
1330,
7561,
40489,
23850,
198,
6738,
336,
17,
16366,
13,
27530,
13,
... | 2.140426 | 235 |
"""Provide access to selected git functions."""
import subprocess
GIT_PATH = '/usr/bin/git'
"""Where to find the "git" command?"""
def get_revision() -> str:
"""return the git revision of current directory as a human-readable string.
:raises OSError: Couldn't get revision from git.
"""
try:
job = subprocess.run([GIT_PATH, 'log', '-1', '--pretty=oneline'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE,
timeout=1)
except subprocess.TimeoutExpired as err:
raise OSError("Calling git took too long.") from err
except FileNotFoundError as err:
raise OSError("Git not found at '{}'.".format(GIT_PATH)) from err
try:
ret_string = job.stdout.decode().strip()
if not ret_string:
raise OSError("Git returned an empty string. STDERR is {}".format(job.stderr))
return ret_string
except AttributeError as err:
raise OSError("Git didn't return a bytestring, but {} instead.".format(job.stdout)) from err
| [
37811,
15946,
485,
1895,
284,
6163,
17606,
5499,
526,
15931,
198,
198,
11748,
850,
14681,
198,
198,
38,
2043,
62,
34219,
796,
31051,
14629,
14,
8800,
14,
18300,
6,
198,
37811,
8496,
284,
1064,
262,
366,
18300,
1,
3141,
1701,
15931,
19... | 2.386005 | 443 |
#!/usr/bin/env python
# -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2008,2009,2010,2011,2012,2013,2015,2016 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module for testing the add/del/show continent command."""
import unittest
if __name__ == "__main__":
import utils
utils.import_depends()
from brokertest import TestBrokerCommand
continents = {'af': ('Africa', 'ln'),
'as': ('Asia', 'hk'),
'au': ('Australia', 'hk'),
'eu': ('Europe', 'ln'),
'na': ('North America', 'ny'),
'sa': ('South America', 'ny')}
if __name__ == '__main__':
suite = unittest.TestLoader().loadTestsFromTestCase(TestContinent)
unittest.TextTestRunner(verbosity=2).run(suite)
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
2,
532,
9,
12,
269,
9078,
12,
521,
298,
12,
5715,
25,
604,
26,
33793,
12,
8658,
82,
12,
14171,
25,
18038,
532,
9,
12,
198,
2,
409,
25,
900,
4292,
8658,
2705,
8658,
11338,
28,
1... | 2.77453 | 479 |
import pytest
from pytest import param
import ibis
import ibis.expr.datatypes as dt
MYSQL_TYPES = [
("tinyint", dt.int8),
("int1", dt.int8),
("boolean", dt.int8),
("smallint", dt.int16),
("int2", dt.int16),
("mediumint", dt.int32),
("int3", dt.int32),
("int", dt.int32),
("int4", dt.int32),
("integer", dt.int32),
("bigint", dt.int64),
("decimal", dt.Decimal(10, 0)),
("decimal(5, 2)", dt.Decimal(5, 2)),
("dec", dt.Decimal(10, 0)),
("numeric", dt.Decimal(10, 0)),
("fixed", dt.Decimal(10, 0)),
("float", dt.float32),
("double", dt.float64),
("timestamp", dt.Timestamp("UTC")),
("date", dt.date),
("time", dt.time),
("datetime", dt.timestamp),
("year", dt.int16),
("char(32)", dt.string),
("char byte", dt.binary),
("varchar(42)", dt.string),
("mediumtext", dt.string),
("text", dt.string),
("binary(42)", dt.binary),
("varbinary(42)", dt.binary),
("bit(1)", dt.int8),
("bit(9)", dt.int16),
("bit(17)", dt.int32),
("bit(33)", dt.int64),
# mariadb doesn't have a distinct json type
("json", dt.string),
("enum('small', 'medium', 'large')", dt.string),
("inet6", dt.string),
("set('a', 'b', 'c', 'd')", dt.Set(dt.string)),
("mediumblob", dt.binary),
("blob", dt.binary),
("uuid", dt.string),
]
@pytest.mark.parametrize(
("mysql_type", "expected_type"),
[
param(mysql_type, ibis_type, id=mysql_type)
for mysql_type, ibis_type in MYSQL_TYPES
],
)
| [
11748,
12972,
9288,
198,
6738,
12972,
9288,
1330,
5772,
198,
198,
11748,
24283,
271,
198,
11748,
24283,
271,
13,
31937,
13,
19608,
265,
9497,
355,
288,
83,
198,
198,
44,
16309,
9711,
62,
9936,
47,
1546,
796,
685,
198,
220,
220,
220,
... | 2.03562 | 758 |
from service.models import *
from service.engine import *
import datetime
import random
import markdown
c = Course.objects.create(title="Quantum Mechanics", description="Understanding the foundations of modern physics.")
content1 = markdown.markdown("# Lesson 1 \n\n Let's learn using a **simple derivation** of the equation. \n\n [](http://www.youtube.com/watch?v=IsX5iUKNT2k)", safe_mode=True)
content2 = markdown.markdown("# Lesson 1 \n\n Let's learn through understanding the **concepts** of the equation. \n\n[](http://www.youtube.com/watch?v=aU_bd7fku90)", safe_mode=True)
test_content = markdown.markdown("Why don't we experience the wave nature of matter on our everyday lives? \n\n \n\n a) The particles are connected in a complex system which suppresses the effect. \n\n b) It is not yet clear to science. \n\n c) The wavelength is to small for our scales.", safe_mode=True)
t = Test.objects.create(course=c,content=test_content, correct_answer="c")
v1 = Variation.objects.create(course=c,description="Derivation")
l1 = Lesson.objects.create(variation=v1, index=0, content=content1)
v2 = Variation.objects.create(course=c,description="Conceptual")
l2 = Lesson.objects.create(variation=v2, index=0, content=content2)
variations = [v1, v2]
genders = ["male", "female"]
language_codes = ["en", "iw", "de"]
names = ["Lagrange", "Dirac", "Tesla", "Bohr", "Feynman",
"Planck", "Hopper", "Born", "Heisenberg", "Einstein",
"Zwicky", "Curie", "Euler", "Hamilton", "Cantor",
"Lie", "Newton", "Kepler", "Lorentz", "Halmholz"]
for i in range(20):
name = "SE_student" + i.__str__()
birthday = "199" + (i%10).__str__() + "-01-01"
gender = genders[i%2]
language_code = language_codes[i%3]
s = Student.objects.create(name=name, birthday=birthday, gender=gender, originLanguageCode=language_code)
variation = variations[(i//2) % 2]
ch = Choice.objects.create(variation=variation,
student=s,
startingTime="2016-01-01")
finished_course = True
if (random.randrange(100) < 25):
finished_course = False
r = Result.objects.create(test=t,
choice=ch,
score=30+random.randrange(30),
finished_course=finished_course) | [
6738,
2139,
13,
27530,
1330,
1635,
198,
6738,
2139,
13,
18392,
1330,
1635,
198,
11748,
4818,
8079,
198,
11748,
4738,
198,
11748,
1317,
2902,
198,
198,
66,
796,
20537,
13,
48205,
13,
17953,
7,
7839,
2625,
24915,
388,
47570,
1600,
6764,
... | 2.5359 | 961 |
"""V2E N-Caltech101 dataset maker.
Use V2E to simulate the entire dataset.
For Ideal range:
- threshold: 0.2-0.5
- sigma: 0.03-0.05
- cutoff_hz: 0
- leak_rate_hz: 0
- shot_noise_rate_hz: 0
For bright light range:
- threshold: 0.2-0.5
- sigma: 0.03-0.05
- cutoff_hz: 80-120
- leak_rate_hz: 0.1
- shot_noise_rate_hz: 0.1-5 Hz
For low light range:
- threshold: 0.2-0.5
- sigma: 0.03-0.05
- cutoff_hz: 20-60
- leak_rate_hz: 0.1
- shot_noise_rate_hz: 10-30 Hz
Author: Yuhuang Hu
Email : yuhuang.hu@ini.uzh.ch
"""
import argparse
import os
import glob
import subprocess
import random
import json
parser = argparse.ArgumentParser()
parser.add_argument("--data_root", type=str)
parser.add_argument("--output_root", type=str)
parser.add_argument("--dataset_config", type=str,
help="'ideal', 'bright', 'dark'")
args = parser.parse_args()
# set configs
if args.dataset_config == "ideal":
thre_low, thre_high = 0.05, 0.5
sigma_low, sigma_high = 0, 0
cutoff_hz_low, cutoff_hz_high = 0, 0
leak_rate_hz_low, leak_rate_hz_high = 0, 0
shot_noise_rate_hz_low, shot_noise_rate_hz_high = 0, 0
elif args.dataset_config == "bright":
thre_low, thre_high = 0.05, 0.5
sigma_low, sigma_high = 0.03, 0.05
cutoff_hz_low, cutoff_hz_high = 200, 200
leak_rate_hz_low, leak_rate_hz_high = 0.1, 0.5
shot_noise_rate_hz_low, shot_noise_rate_hz_high = 0, 0
elif args.dataset_config == "dark":
thre_low, thre_high = 0.05, 0.5
sigma_low, sigma_high = 0.03, 0.05
cutoff_hz_low, cutoff_hz_high = 10, 100
leak_rate_hz_low, leak_rate_hz_high = 0, 0
shot_noise_rate_hz_low, shot_noise_rate_hz_high = 1, 10
# get root folder list
valid_folders = sorted(
glob.glob(
os.path.join(args.data_root, "*", "image_*")))
valid_folders = [x for x in valid_folders if ".npz" not in x]
params_collector = {}
for folder in valid_folders:
out_filename = os.path.basename(folder)+".h5"
out_folder = os.path.dirname(folder)
out_folder = out_folder.replace(args.data_root, args.output_root)
if not os.path.exists(out_folder):
os.makedirs(out_folder)
folder = os.path.join(folder, "images")
# configure paramters
thres = random.uniform(thre_low, thre_high)
# sigma should be about 15%~25% range as low and high
# threshold higher than 0.2: 0.03-0.05
# threshold lower than 0.2: 15%~25%
# sigma = random.uniform(sigma_low, sigma_high)
sigma = random.uniform(
min(thres*0.15, sigma_low), min(thres*0.25, sigma_high)) \
if args.dataset_config != "ideal" else 0
leak_rate_hz = random.uniform(leak_rate_hz_low, leak_rate_hz_high)
shot_noise_rate_hz = random.uniform(
shot_noise_rate_hz_low, shot_noise_rate_hz_high)
if args.dataset_config == "dark":
# cutoff hz follows shot noise config
cutoff_hz = shot_noise_rate_hz*10
else:
cutoff_hz = random.uniform(cutoff_hz_low, cutoff_hz_high)
params_collector[os.path.join(out_folder, out_filename)] = {
"thres": thres,
"sigma": sigma,
"cutoff_hz": cutoff_hz,
"leak_rate_hz": leak_rate_hz,
"shot_noise_rate_hz": shot_noise_rate_hz}
# dump bias configs all the time
with open(os.path.join(args.output_root,
"dvs_params_settings.json"), "w") as f:
json.dump(params_collector, f, indent=4)
v2e_command = [
"v2e.py",
"-i", folder,
"-o", out_folder,
"--overwrite",
"--unique_output_folder", "false",
"--no_preview",
"--skip_video_output",
"--disable_slomo",
"--pos_thres", "{}".format(thres),
"--neg_thres", "{}".format(thres),
"--sigma_thres", "{}".format(sigma),
"--cutoff_hz", "{}".format(cutoff_hz),
"--leak_rate_hz", "{}".format(leak_rate_hz),
"--shot_noise_rate_hz", "{}".format(shot_noise_rate_hz),
"--input_frame_rate", "30",
"--input_slowmotion_factor", "17.866666708",
"--dvs_h5", out_filename,
"--dvs_aedat2", "None",
"--dvs_text", "None",
"--dvs_exposure", "duration", "0.001",
"--auto_timestamp_resolution", "false"]
subprocess.run(v2e_command)
| [
37811,
53,
17,
36,
399,
12,
9771,
13670,
8784,
27039,
16009,
13,
198,
198,
11041,
569,
17,
36,
284,
29308,
262,
2104,
27039,
13,
198,
198,
1890,
41765,
2837,
25,
198,
220,
220,
220,
532,
11387,
25,
657,
13,
17,
12,
15,
13,
20,
1... | 2.110783 | 2,031 |
"""
This is the Python HW 4
for PSTAT 160B
Prof Ichiba
TA: Mousavi
Section: W 1:00 - 1:50pm
"""
# Import libraries
from __future__ import division
import random
import math
import matplotlib.pyplot as plt
import numpy as np
# In this exercise we shall simulate the two-dimensional Poisson process
# (Poisson Point Process in the plane and its subsets such as a square and a disc).
Part_A()
Part_B()
Part_C()
Part_D() | [
37811,
198,
1212,
318,
262,
11361,
44884,
604,
198,
1640,
28220,
1404,
13454,
33,
198,
15404,
26364,
23718,
220,
198,
5603,
25,
42436,
15820,
198,
16375,
25,
370,
352,
25,
405,
532,
352,
25,
1120,
4426,
198,
37811,
198,
198,
2,
17267,... | 3.172932 | 133 |
from __future__ import print_function
from ortools.constraint_solver import routing_enums_pb2
from ortools.constraint_solver import pywrapcp
import numpy as np
from skdecide.builders.discrete_optimization.vrp.vrp_model import length, BasicCustomer, VrpProblem, \
VrpProblem2D, VrpSolution, Customer2D
from skdecide.builders.discrete_optimization.vrp.vrp_toolbox import compute_length_matrix, build_graph
from skdecide.builders.discrete_optimization.generic_tools.do_solver import SolverDO, ResultStorage
from skdecide.builders.discrete_optimization.generic_tools.do_problem import ParamsObjectiveFunction,\
build_aggreg_function_and_params_objective
from enum import Enum
def create_data_model():
"""Stores the data for the problem."""
data = {}
data['distance_matrix'] = [
[
0, 548, 776, 696, 582, 274, 502, 194, 308, 194, 536, 502, 388, 354,
468, 776, 662
],
[
548, 0, 684, 308, 194, 502, 730, 354, 696, 742, 1084, 594, 480, 674,
1016, 868, 1210
],
[
776, 684, 0, 992, 878, 502, 274, 810, 468, 742, 400, 1278, 1164,
1130, 788, 1552, 754
],
[
696, 308, 992, 0, 114, 650, 878, 502, 844, 890, 1232, 514, 628, 822,
1164, 560, 1358
],
[
582, 194, 878, 114, 0, 536, 764, 388, 730, 776, 1118, 400, 514, 708,
1050, 674, 1244
],
[
274, 502, 502, 650, 536, 0, 228, 308, 194, 240, 582, 776, 662, 628,
514, 1050, 708
],
[
502, 730, 274, 878, 764, 228, 0, 536, 194, 468, 354, 1004, 890, 856,
514, 1278, 480
],
[
194, 354, 810, 502, 388, 308, 536, 0, 342, 388, 730, 468, 354, 320,
662, 742, 856
],
[
308, 696, 468, 844, 730, 194, 194, 342, 0, 274, 388, 810, 696, 662,
320, 1084, 514
],
[
194, 742, 742, 890, 776, 240, 468, 388, 274, 0, 342, 536, 422, 388,
274, 810, 468
],
[
536, 1084, 400, 1232, 1118, 582, 354, 730, 388, 342, 0, 878, 764,
730, 388, 1152, 354
],
[
502, 594, 1278, 514, 400, 776, 1004, 468, 810, 536, 878, 0, 114,
308, 650, 274, 844
],
[
388, 480, 1164, 628, 514, 662, 890, 354, 696, 422, 764, 114, 0, 194,
536, 388, 730
],
[
354, 674, 1130, 822, 708, 628, 856, 320, 662, 388, 730, 308, 194, 0,
342, 422, 536
],
[
468, 1016, 788, 1164, 1050, 514, 514, 662, 320, 274, 388, 650, 536,
342, 0, 764, 194
],
[
776, 868, 1552, 560, 674, 1050, 1278, 742, 1084, 810, 1152, 274,
388, 422, 764, 0, 798
],
[
662, 1210, 754, 1358, 1244, 708, 480, 856, 514, 468, 354, 844, 730,
536, 194, 798, 0
],
]
data['demands'] = [0, 1, 1, 2, 4, 2, 4, 8, 8, 1, 2, 1, 2, 4, 4, 8, 8]
data['vehicle_capacities'] = [15, 15, 15, 15]
data['num_vehicles'] = 4
data['depot'] = 0
return data
first_solution_map = {FirstSolutionStrategy.SAVINGS: routing_enums_pb2.FirstSolutionStrategy.SAVINGS,
FirstSolutionStrategy.PATH_MOST_CONSTRAINED_ARC: routing_enums_pb2.FirstSolutionStrategy.SAVINGS}
metaheuristic_map = {LocalSearchMetaheuristic.GUIDED_LOCAL_SEARCH:
routing_enums_pb2.LocalSearchMetaheuristic.GUIDED_LOCAL_SEARCH,
LocalSearchMetaheuristic.SIMULATED_ANNEALING:
routing_enums_pb2.LocalSearchMetaheuristic.SIMULATED_ANNEALING}
| [
6738,
11593,
37443,
834,
1330,
3601,
62,
8818,
198,
6738,
393,
31391,
13,
1102,
2536,
2913,
62,
82,
14375,
1330,
28166,
62,
268,
5700,
62,
40842,
17,
198,
6738,
393,
31391,
13,
1102,
2536,
2913,
62,
82,
14375,
1330,
12972,
37150,
1315... | 1.818936 | 2,049 |
from azure.cognitiveservices.vision.face import FaceClient
from msrest.authentication import CognitiveServicesCredentials
from azure.cognitiveservices.vision.face.models import TrainingStatusType, Person
import os
import uuid
import glob
import time
import sys
import json
#################################################
# #
# Collect the Config #
# #
#################################################
with open('config.json') as config_file:
data = json.load(config_file)
KEY = data['key']
ENDPOINT = data['endpoint']
# Create an authenticated FaceClient.
face_client = FaceClient(ENDPOINT, CognitiveServicesCredentials(KEY))
# Used in the Person Group Operations, Snapshot Operations, and Delete Person Group examples.
# You can call list_person_groups to print a list of preexisting PersonGroups.
# SOURCE_PERSON_GROUP_ID should be all lowercase and alphanumeric. For example, 'mygroupname' (dashes are OK).
PERSON_GROUP_ID = 'petecodes-person-group'
# Used for the Snapshot and Delete Person Group examples.
TARGET_PERSON_GROUP_ID = str(uuid.uuid4()) # assign a random ID (or name it anything)
'''
Delete the Person Group
'''
# Delete the main person group.
try:
face_client.person_group.delete(person_group_id=PERSON_GROUP_ID)
print("Deleted the person group {} from the source location.".format(PERSON_GROUP_ID))
print()
except:
print()
'''
Create the PersonGroup
'''
# Create empty Person Group. Person Group ID must be lower case, alphanumeric, and/or with '-', '_'.
print('Person group:', PERSON_GROUP_ID)
face_client.person_group.create(person_group_id=PERSON_GROUP_ID, name=PERSON_GROUP_ID)
# Define woman friend
woman = face_client.person_group_person.create(PERSON_GROUP_ID, "Woman")
# Define man friend
man = face_client.person_group_person.create(PERSON_GROUP_ID, "Man")
# Define child friend
child = face_client.person_group_person.create(PERSON_GROUP_ID, "Child")
'''
Detect faces and register to correct person
'''
directory_name = "set1"
# Find all jpeg images of friends in working directory
woman_images = [file for file in glob.glob1(directory_name, '*.jpg') if file.startswith("woman")]
man_images = [file for file in glob.glob1(directory_name, '*.jpg') if file.startswith("man")]
child_images = [file for file in glob.glob1(directory_name, '*.jpg') if file.startswith("child")]
'''
Check to make sure we have some images
'''
if len(woman_images) == 0 and len(man_images) == 0 and len(child_images) == 0:
print("No images found...")
# Delete the main person group.
face_client.person_group.delete(person_group_id=PERSON_GROUP_ID)
print("Deleted the person group {} from the source location.".format(PERSON_GROUP_ID))
print()
exit()
# Add to a woman person
for image in woman_images:
print(image)
w = open(directory_name + "/" + image, 'r+b')
face_client.person_group_person.add_face_from_stream(PERSON_GROUP_ID, woman.person_id, w)
# Add to a man person
for image in man_images:
print(image)
m = open(directory_name + "/" + image, 'r+b')
face_client.person_group_person.add_face_from_stream(PERSON_GROUP_ID, man.person_id, m)
# Add to a child person
for image in child_images:
print(image)
ch = open(directory_name + "/" + image, 'r+b')
face_client.person_group_person.add_face_from_stream(PERSON_GROUP_ID, child.person_id, ch)
'''
Train PersonGroup
'''
print()
print('Training the person group...')
# Train the person group
face_client.person_group.train(PERSON_GROUP_ID)
while (True):
training_status = face_client.person_group.get_training_status(PERSON_GROUP_ID)
print("Training status: {}.".format(training_status.status))
print()
if (training_status.status is TrainingStatusType.succeeded):
break
elif (training_status.status is TrainingStatusType.failed):
sys.exit('Training the person group has failed.')
time.sleep(5)
'''
Identify a face against a defined PersonGroup
'''
# Group image for testing against
group_photo = 'test-image-person-group.jpg'
# Get test image
test_image_array = glob.glob1(directory_name, group_photo)
image = open(directory_name + "/" + test_image_array[0], 'r+b')
# Detect faces
face_ids = []
faces = face_client.face.detect_with_stream(image)
for face in faces:
face_ids.append(face.face_id)
# Identify faces
results = face_client.face.identify(face_ids, PERSON_GROUP_ID)
print('Identifying faces in {}'.format(os.path.basename(image.name)))
if not results:
print('No person identified in the person group for faces from {}.'.format(os.path.basename(image.name)))
for person in results:
print('Person for face ID {} is identified in {} with a confidence of {}.'.format(person.face_id, os.path.basename(image.name), person.candidates[0].confidence)) # Get topmost confidence score
# Base url for the Verify and Facelist/Large Facelist operations
IMAGE_BASE_URL = 'https://csdx.blob.core.windows.net/resources/Face/Images/'
# Create a list to hold the target photos of the same person
target_image_file_names = ['Family1-Dad1.jpg', 'Family1-Dad2.jpg']
# The source photos contain this person
source_image_file_name1 = 'Family1-Dad3.jpg'
source_image_file_name2 = 'Family1-Son1.jpg'
# Detect face(s) from source image 1, returns a list[DetectedFaces]
detected_faces1 = face_client.face.detect_with_url(IMAGE_BASE_URL + source_image_file_name1)
# Add the returned face's face ID
source_image1_id = detected_faces1[0].face_id
print('{} face(s) detected from image {}.'.format(len(detected_faces1), source_image_file_name1))
# Detect face(s) from source image 2, returns a list[DetectedFaces]
detected_faces2 = face_client.face.detect_with_url(IMAGE_BASE_URL + source_image_file_name2)
# Add the returned face's face ID
source_image2_id = detected_faces2[0].face_id
print('{} face(s) detected from image {}.'.format(len(detected_faces2), source_image_file_name2))
# List for the target face IDs (uuids)
detected_faces_ids = []
# Detect faces from target image url list, returns a list[DetectedFaces]
for image_file_name in target_image_file_names:
detected_faces = face_client.face.detect_with_url(IMAGE_BASE_URL + image_file_name)
# Add the returned face's face ID
detected_faces_ids.append(detected_faces[0].face_id)
print('{} face(s) detected from image {}.'.format(len(detected_faces), image_file_name))
# Verification example for faces of the same person. The higher the confidence, the more identical the faces in the images are.
# Since target faces are the same person, in this example, we can use the 1st ID in the detected_faces_ids list to compare.
verify_result_same = face_client.face.verify_face_to_face(source_image1_id, detected_faces_ids[0])
print('Faces from {} & {} are of the same person, with confidence: {}'
.format(source_image_file_name1, target_image_file_names[0], verify_result_same.confidence)
if verify_result_same.is_identical
else 'Faces from {} & {} are of a different person, with confidence: {}'
.format(source_image_file_name1, target_image_file_names[0], verify_result_same.confidence))
# Verification example for faces of different persons.
# Since target faces are same person, in this example, we can use the 1st ID in the detected_faces_ids list to compare.
verify_result_diff = face_client.face.verify_face_to_face(source_image2_id, detected_faces_ids[0])
print('Faces from {} & {} are of the same person, with confidence: {}'
.format(source_image_file_name2, target_image_file_names[0], verify_result_diff.confidence)
if verify_result_diff.is_identical
else 'Faces from {} & {} are of a different person, with confidence: {}'
.format(source_image_file_name2, target_image_file_names[0], verify_result_diff.confidence))
# Delete the main person group.
face_client.person_group.delete(person_group_id=PERSON_GROUP_ID)
print("Deleted the person group {} from the source location.".format(PERSON_GROUP_ID))
print() | [
6738,
35560,
495,
13,
66,
2360,
20288,
712,
1063,
13,
10178,
13,
2550,
1330,
15399,
11792,
198,
6738,
13845,
2118,
13,
41299,
3299,
1330,
38655,
31007,
34,
445,
14817,
198,
6738,
35560,
495,
13,
66,
2360,
20288,
712,
1063,
13,
10178,
... | 2.940789 | 2,736 |
import functools
import http
from typing import Set
from flask import abort
from flask_jwt_extended import get_jwt_claims
| [
11748,
1257,
310,
10141,
198,
11748,
2638,
198,
6738,
19720,
1330,
5345,
198,
198,
6738,
42903,
1330,
15614,
198,
6738,
42903,
62,
73,
46569,
62,
2302,
1631,
1330,
651,
62,
73,
46569,
62,
6604,
82,
628,
628,
628
] | 3.368421 | 38 |
import statistics
import os
from scipy.stats import skew, mode
"""
This script reads in a list of all words and generates a file for each word in
subdirectories of a target directory:
words
and then calculates all the statistics for each word and generates a file
storing the statistics calculated in the direcotry:
wordStats
This script will make the directory if needed.
"""
| [
11748,
7869,
198,
11748,
28686,
198,
6738,
629,
541,
88,
13,
34242,
1330,
43370,
11,
4235,
198,
198,
37811,
198,
1212,
4226,
9743,
287,
257,
1351,
286,
477,
2456,
290,
18616,
257,
2393,
329,
1123,
1573,
287,
198,
7266,
12942,
1749,
28... | 4.020833 | 96 |
'''
STS-{2012,2013,2014,2015,2016} (monolingual) and
STS-2017 (cross-lingual)
'''
import io
import numpy as np
# STS13 here does not contain the "SMT" subtask due to LICENSE issue
| [
7061,
6,
198,
2257,
50,
12,
90,
6999,
11,
6390,
11,
4967,
11,
4626,
11,
5304,
92,
357,
2144,
40949,
723,
8,
290,
198,
2257,
50,
12,
5539,
357,
19692,
12,
1359,
723,
8,
198,
7061,
6,
198,
11748,
33245,
198,
11748,
299,
32152,
355... | 2.581081 | 74 |
__author__ = 'sugar'
__all__ = ['Sensor', 'Controller'] | [
834,
9800,
834,
796,
705,
82,
35652,
6,
198,
198,
834,
439,
834,
796,
37250,
47864,
3256,
705,
22130,
20520
] | 2.8 | 20 |
import numpy as np
import scipy.sparse
from ..mode_basis import ModeBasis
from .deformable_mirror import DeformableMirror
class SegmentedDeformableMirror(DeformableMirror):
'''A segmented deformable mirror.
This deformable mirror class can simulate devices such as those
made by IrisAO and BMC. All segments are controlled in piston,
tip and tilt.
Parameters
----------
segments : ModeBasis
A mode basis with all segments.
'''
@property
def segments(self):
'''The segments of this deformable mirror in a ModeBasis.
'''
return self._segments
@segments.setter
def get_segment_actuators(self, segment_id):
'''Get the actuators for an individual segment of the DM.
Parameters
----------
segment_id : int
The index of the segment for which to get the actuators.
Returns
-------
piston : scalar
The piston of the segment in meters.
tip : scalar
The tip of the segment in radians.
tilt : scalar
The tilt of the segment in radians.
'''
piston = self.actuators[segment_id]
tip = self.actuators[segment_id + len(self._segments)]
tilt = self.actuators[segment_id + 2 * len(self._segments)]
return (piston, tip, tilt)
def set_segment_actuators(self, segment_id, piston, tip, tilt):
'''Set the actuators for an individual segment of the DM.
Parameters
----------
segment_id : int
The index of the segment for which to get the actuators.
piston : scalar
The piston of the segment in meters.
tip : scalar
The tip of the segment in radians.
tilt : scalar
The tilt of the segment in radians.
'''
self.actuators[segment_id] = piston
self.actuators[segment_id + len(self._segments)] = tip
self.actuators[segment_id + 2 * len(self._segments)] = tilt
| [
11748,
299,
32152,
355,
45941,
198,
11748,
629,
541,
88,
13,
82,
29572,
198,
198,
6738,
11485,
14171,
62,
12093,
271,
1330,
10363,
15522,
271,
198,
6738,
764,
2934,
687,
540,
62,
10793,
1472,
1330,
1024,
687,
540,
27453,
1472,
198,
19... | 2.847541 | 610 |
import torch
from util.utils import Utils
from torch.autograd import Variable
__author__ = "Dublin City University"
__copyright__ = "Copyright 2019, Dublin City University"
__credits__ = ["Gideon Maillette de Buy Wenniger"]
__license__ = "Dublin City University Software License (enclosed)"
class ImageInputTransformer:
# This method takes an image and creates a transformed image, shifting the i-th row
# with i pixels. This corresponds to the transformation used in the
# pixel recurrent neural networks paper (https://arxiv.org/pdf/1601.06759.pdf)
# This trick can be used to efficiently compute Multi-dimensional RNNs, while
# keeping the input the same for every layer of the network
#
@staticmethod
# Non-optimized method, that computes the skewed images one at a time, then
# concatenates them in a for loop
@staticmethod
@staticmethod
@staticmethod
@staticmethod
# Optimized method computes the complete set of skewed images all in one go
# using pytorch tensor indexing to select slices of rows from multiple images
# at one, doing the operation for all images in parallel
# Requirement: all images must be of the same size. This implementation seems
# break the gradient, although this is not sure. In either case it is also slower
# than the pytorch.cat based implementation
@staticmethod
@staticmethod
# Optimized method computes the complete set of skewed images all in one go
# using pytorch tensor indexing to select slices of rows from multiple images
# at one, doing the operation for all images in parallel
# Requirement: all images must be of the same size
@staticmethod
# Alternative implementation of "create_row_diagonal_offset_tensors_parallel"
# that uses split instead of tensor slicing to go over the tensor rows.
# It was hoped this implementation might be faster than the original, but there
# seems to be no difference.
@staticmethod
@staticmethod
# Create a binary mask indicating which entries in the skewed image are valid and which not
@staticmethod
@staticmethod
@staticmethod
@staticmethod
@staticmethod
@staticmethod
@staticmethod
"""
Alternative implementation of extract_unskewed_activations_from_activation_tensor,
which avoids slicing, instead concatenating all activation_as_tensor rows in the
width direction, splitting this long tensor using a special activation and padding
parts split list, and finally extracting the activation parts from the activation
and padding parts
"""
@staticmethod
# activation_columns is a list of activation columns
@staticmethod
# Method that demonstrates and explains the bug of adding a superfluous variable
# wrapping. What happens is that the additional wrapping makes
# the variable into a leaf variable, with a non-existent (empty) gradient function
# graph trace. This breaks the path used by back-propagation to
# update previous upstream graph nodes, with catastrophic effect on the learning
# results
# See: https://pytorch.org/docs/0.2.0/_modules/torch/autograd/variable.html :
# "
# Variable is a thin wrapper around a Tensor object, that also holds
# the gradient w.r.t. to it, and a reference to a function that created it.
# This reference allows retracing the whole chain of operations that
# created the data. If the Variable has been created by the user, its grad_fn
# will be ``None`` and we call such objects *leaf* Variables.
# "
# So explicitly created Variables have an emtpy grad_fn field, in other words,
# the gradient backwards path is lost, and hence updating predecessor variables
# is made impossible, causing learning to fail.
#
@staticmethod
if __name__ == "__main__":
main() | [
11748,
28034,
198,
6738,
7736,
13,
26791,
1330,
7273,
4487,
198,
6738,
28034,
13,
2306,
519,
6335,
1330,
35748,
198,
198,
834,
9800,
834,
796,
366,
37590,
2815,
2254,
2059,
1,
198,
834,
22163,
4766,
834,
796,
366,
15269,
13130,
11,
18... | 3.517148 | 1,108 |
import logging
from .reporter import Reporter
log = logging.getLogger(__name__)
| [
11748,
18931,
198,
6738,
764,
260,
26634,
1330,
25869,
198,
198,
6404,
796,
18931,
13,
1136,
11187,
1362,
7,
834,
3672,
834,
8,
628
] | 3.416667 | 24 |
"""
Given a collection of intervals, find the minimum number of intervals you need to remove to make the rest of the intervals non-overlapping.
Note:
You may assume the interval's end point is always bigger than its start point.
Intervals like [1,2] and [2,3] have borders "touching" but they don't overlap each other.
Example 1:
Input: [ [1,2], [2,3], [3,4], [1,3] ]
Output: 1
Explanation: [1,3] can be removed and the rest of intervals are non-overlapping.
Example 2:
Input: [ [1,2], [1,2], [1,2] ]
Output: 2
Explanation: You need to remove two [1,2] to make the rest of intervals non-overlapping.
Example 3:
Input: [ [1,2], [2,3] ]
Output: 0
Explanation: You don't need to remove any of the intervals since they're already non-overlapping.
"""
# Definition for an interval.
# class Interval(object):
# def __init__(self, s=0, e=0):
# self.start = s
# self.end = e
| [
37811,
198,
15056,
257,
4947,
286,
20016,
11,
1064,
262,
5288,
1271,
286,
20016,
345,
761,
284,
4781,
284,
787,
262,
1334,
286,
262,
20016,
1729,
12,
2502,
75,
5912,
13,
198,
198,
6425,
25,
198,
1639,
743,
7048,
262,
16654,
338,
886... | 2.814465 | 318 |
import pandas as pd
import config.Text
| [
11748,
19798,
292,
355,
279,
67,
198,
198,
11748,
4566,
13,
8206,
628
] | 3.153846 | 13 |
import weakref
import copy
from ..cache.buffer_cache import buffer_cache
from ... import get_dict_hash
import json
import asyncio
import logging
logger = logging.getLogger("seamless")
RECOMPUTE_OVER_REMOTE = int(100e6) # after this threshold, better to recompute than to download remotely
# TODO: have some dynamic component based on:
# - stored recomputation time from provenance server
# - internet connection speed
_deep_buffer_coro_count = 0
deep_buffer_coros = []
from ..cell import Cell
from ..transformer import Transformer
from ..structured_cell import Inchannel
from ..reactor import Reactor
from .expression import Expression
from ..protocol.deep_structure import deep_structure_to_checksums
from ..protocol.deserialize import deserialize
from ..protocol.get_buffer import (
get_buffer_remote, get_buffer_length_remote, CacheMissError
)
from ..cache.transformation_cache import syntactic_to_semantic
from ..protocol.expression import set_subpath_checksum, access_hash_pattern | [
11748,
4939,
5420,
198,
11748,
4866,
198,
6738,
11485,
23870,
13,
22252,
62,
23870,
1330,
11876,
62,
23870,
198,
6738,
2644,
1330,
651,
62,
11600,
62,
17831,
198,
198,
11748,
33918,
198,
11748,
30351,
952,
198,
198,
11748,
18931,
198,
6... | 2.912 | 375 |