content stringlengths 1 1.04M | input_ids listlengths 1 774k | ratio_char_token float64 0.38 22.9 | token_count int64 1 774k |
|---|---|---|---|
"""
This script tests the library powerdaq (minimal API from python).
output 0 should be plugged in input 0.
"""
from __future__ import division, print_function
import numpy as np
import time
from fluidlab.powerdaq import PowerDAQBoard
from fluidlab.timer import Timer
from fluidlab.createfigs import CreateFigs
SAVE_FIG = 0
import matplotlib.pyplot as plt
period = 2 # (s)
# initialize the board
board = PowerDAQBoard()
timer = Timer(period)
for i in xrange(5):
time.sleep(1)
board.dout.write(0)
timer.wait_tick()
board.dout.write(2)
| [
37811,
198,
1212,
4226,
5254,
262,
5888,
1176,
48539,
357,
1084,
4402,
7824,
422,
21015,
737,
198,
198,
22915,
657,
815,
307,
30601,
287,
5128,
657,
13,
198,
37811,
198,
198,
6738,
11593,
37443,
834,
1330,
7297,
11,
3601,
62,
8818,
19... | 2.917098 | 193 |
from django.conf.urls.defaults import patterns, include, url
from django.contrib.staticfiles.urls import staticfiles_urlpatterns

from version1.api import *
from tastypie.api import Api
from django.contrib import admin

# Auto-discover admin modules of all installed apps.
admin.autodiscover()

# REST API (tastypie), exposed under /api/v1/.
v1_api = Api(api_name='v1')
v1_api.register(CashWithdrawalResource())
v1_api.register(CashTransferResource())
v1_api.register(ServicesResource())
v1_api.register(ATMCardResource())

# URL routing: customer-facing ATM flows (/user/...), the Django admin,
# the REST API, a mock bank-services endpoint, and the back-office
# pages under /admin_user/.
urlpatterns = patterns('',
    url(r'^user/$', 'version1.views.main'),
    url(r'^user/card/$', 'version1.views.index'),
    url(r'^user/validatepin/$', 'version1.views.validatepin'),
    url(r'^user/validatepasscode/$', 'version1.views.validatepasscode'),
    url(r'^user/options/$', 'version1.views.options'),
    url(r'^user/history/$', 'version1.views.history'),
    url(r'^user/balanceenquiry/$', 'version1.views.balanceenquiry'),
    url(r'^user/cashwithdrawal/$', 'version1.views.cashwithdrawal'),
    url(r'^user/cashtransfer/$', 'version1.views.cashtransfer'),
    url(r'^user/pinchange/$', 'version1.views.pinchange'),
    url(r'^user/phonechange/$', 'version1.views.phonechange'),
    url(r'^user/fastcash/$', 'version1.views.fastcash'),
    url(r'^user/services/$', 'version1.views.services'),
    url(r'^user/exit/$', 'version1.views.exit'),
    url(r'^admin/', include(admin.site.urls)),
    url(r'^api/', include(v1_api.urls)),
    url(r'^BankServices/rest/biller/$', 'version1.views.services_mock_listBiller'),
    url(r'^admin_user/$', 'version1.views.admin_index'),
    url(r'^admin_user/logout/$', 'version1.views.admin_logout'),
    url(r'^admin_user/verify_user/$', 'version1.views.admin_verify_user'),
    url(r'^admin_user/main_page/$', 'version1.views.admin_main_page'),
    url(r'^admin_user/add_new_card/$', 'version1.views.admin_add_card'),
    url(r'^admin_user/add_atm_operation/$', 'version1.views.admin_add_card_operation'),
    url(r'^admin_user/ATM_status/$', 'version1.views.admin_atm_status'),
    url(r'^admin_user/update_refill/$', 'version1.views.admin_update_refill'),
    url(r'^admin_user/update_card_details/$', 'version1.views.admin_update_card_details'),
    url(r'^admin_user/update_card_details/validate_card/$', 'version1.views.admin_card_validation'),
    url(r'^admin_user/update_card_details/main_page/$', 'version1.views.admin_update_card_main_page'),
    url(r'^admin_user/update_card_details/block_card/$', 'version1.views.admin_block_card'),
    url(r'^admin_user/update_card_details/block_card_operation/$', 'version1.views.admin_block_card_operation'),
    url(r'^admin_user/update_card_details/activate_card/$', 'version1.views.admin_activate_card'),
    url(r'^admin_user/update_card_details/activate_card_operation/$', 'version1.views.admin_activate_card_operation'),
    url(r'^admin_user/update_card_details/reset_pincode/$', 'version1.views.admin_reset_pincode'),
    url(r'^admin_user/update_card_details/reset_pincode_operation/$', 'version1.views.admin_reset_pincode_operation'),
    url(r'^admin_user/update_card_details/reset_phone/$', 'version1.views.admin_reset_phone'),
    url(r'^admin_user/update_card_details/reset_phone_operation/$', 'version1.views.admin_reset_phone_operation'),
    url(r'^admin_user/update_card_details/view_history/$', 'version1.views.admin_view_history'),
    url(r'^admin_user/update_card_details/update_date/$', 'version1.views.admin_update_date'),
    url(r'^admin_user/update_card_details/update_date_operation/$', 'version1.views.admin_update_date_operation'),
)

# Serve collected static files (development convenience).
urlpatterns += staticfiles_urlpatterns()
| [
6738,
42625,
14208,
13,
10414,
13,
6371,
82,
13,
12286,
82,
1330,
7572,
11,
2291,
11,
19016,
198,
6738,
42625,
14208,
13,
3642,
822,
13,
12708,
16624,
13,
6371,
82,
1330,
9037,
16624,
62,
6371,
33279,
82,
198,
6738,
2196,
16,
13,
15... | 2.594714 | 1,362 |
#coding: utf-8
from django.db import models
# Create your models here. | [
2,
66,
7656,
25,
3384,
69,
12,
23,
198,
6738,
42625,
14208,
13,
9945,
1330,
4981,
198,
198,
2,
13610,
534,
4981,
994,
13
] | 2.958333 | 24 |
"""Quick running benchmarks for :mod:`esmf_regrid.esmf_regridder`."""
import numpy as np
from esmf_regrid.esmf_regridder import GridInfo
def _make_small_grid_args():
"""
Not importing the one in test_GridInfo - if that changes, these benchmarks
would 'invisibly' change too.
"""
small_x = 2
small_y = 3
small_grid_lon = np.array(range(small_x)) / (small_x + 1)
small_grid_lat = np.array(range(small_y)) * 2 / (small_y + 1)
small_grid_lon_bounds = np.array(range(small_x + 1)) / (small_x + 1)
small_grid_lat_bounds = np.array(range(small_y + 1)) * 2 / (small_y + 1)
return (
small_grid_lon,
small_grid_lat,
small_grid_lon_bounds,
small_grid_lat_bounds,
)
| [
37811,
21063,
2491,
31747,
329,
1058,
4666,
25,
63,
45798,
69,
62,
260,
25928,
13,
45798,
69,
62,
260,
25928,
1082,
63,
526,
15931,
198,
198,
11748,
299,
32152,
355,
45941,
198,
198,
6738,
1658,
76,
69,
62,
260,
25928,
13,
45798,
69... | 2.294118 | 323 |
from __future__ import with_statement
import redis
import time
from redis_netlock import dist_lock
client = redis.Redis(connection_pool=redis.BlockingConnectionPool(max_connections=15, host='localhost', port=6379))
with dist_lock('test', client):
print 'hello'
| [
6738,
11593,
37443,
834,
1330,
351,
62,
26090,
198,
11748,
2266,
271,
198,
11748,
640,
198,
6738,
2266,
271,
62,
3262,
5354,
1330,
1233,
62,
5354,
198,
198,
16366,
796,
2266,
271,
13,
7738,
271,
7,
38659,
62,
7742,
28,
445,
271,
13,... | 3.216867 | 83 |
import sys

# True when running under a Python 2 interpreter (major version below 3).
IS_PY_2 = sys.version_info[0] < 3
11748,
25064,
198,
198,
1797,
62,
47,
56,
62,
17,
796,
25064,
13,
9641,
62,
10951,
13,
22478,
1279,
513
] | 2.4 | 20 |
# Build factory for the Julia doctest builder: checks out Julia, builds it,
# runs the doc doctests, deploys docs, and (on protected branches) builds,
# signs and uploads the source tarballs.
julia_doctest_factory = util.BuildFactory()
julia_doctest_factory.useProgress = True
julia_doctest_factory.addSteps([
    # Fetch first (allowing failure if no existing clone is present)
    steps.ShellCommand(
        name="git fetch",
        command=["git", "fetch", "--tags", "--all", "--force"],
        flunkOnFailure=False
    ),
    # Clone julia
    steps.Git(
        name="Julia checkout",
        repourl=util.Property('repository', default='git://github.com/JuliaLang/julia.git'),
        mode='full',
        method='fresh',
        submodules=True,
        clobberOnFailure=True,
        progress=True,
        retryFetch=True,
        getDescription={'--tags': True},
    ),
    # Make Julia itself
    steps.ShellCommand(
        name="make release",
        command=["/bin/sh", "-c", util.Interpolate("%(prop:make_cmd)s -j%(prop:nthreads)s JULIA_PRECOMPILE=0 %(prop:flags)s %(prop:extra_make_flags)s release")],
        haltOnFailure = True,
        # Fail out if 60 minutes have gone by with nothing printed to stdout
        timeout=60*60,
        # Kill everything if the overall job has taken more than 2 hours
        maxTime=60*60*2,
        # Give the process 10 seconds to print out the current backtraces when being killed
        sigtermTime=10,
    ),
    # Run the documentation doctests
    steps.ShellCommand(
        name="make doctest",
        command=["/bin/sh", "-c", util.Interpolate("%(prop:make_cmd)s -C doc JULIA_PRECOMPILE=0 -j%(prop:nthreads)s %(prop:flags)s %(prop:extra_make_flags)s doctest=true")],
        haltOnFailure = True,
        # Fail out if 60 minutes have gone by with nothing printed to stdout
        timeout=60*60,
        # Kill everything if the overall job has taken more than 2 hours
        maxTime=60*60*2,
        # Give the process 10 seconds to print out the current backtraces when being killed
        sigtermTime=10,
    ),
    # Deploy the built documentation (protected branches only)
    steps.ShellCommand(
        name="make deploy",
        command=["/bin/sh", "-c", util.Interpolate("%(prop:make_cmd)s -C doc JULIA_PRECOMPILE=0 %(prop:flags)s %(prop:extra_make_flags)s deploy")],
        haltOnFailure=True,
        env={
            'DOCUMENTER_KEY': DOCUMENTER_KEY,
            'TRAVIS_PULL_REQUEST': 'false',
        },
        doStepIf=is_protected_pr,
    ),
    # Get JULIA_VERSION and JULIA_COMMIT from the build system
    steps.SetPropertyFromCommand(
        name="Get JULIA_VERSION",
        command=[util.Interpolate("%(prop:make_cmd)s"), "print-JULIA_VERSION"],
        extract_fn=lambda rc, stdout, stderr: {"JULIA_VERSION": stdout[stdout.find('=')+1:].strip()}
    ),
    steps.SetPropertyFromCommand(
        name="Get JULIA_COMMIT",
        command=[util.Interpolate("%(prop:make_cmd)s"), "print-JULIA_COMMIT"],
        extract_fn=lambda rc, stdout, stderr: {"JULIA_COMMIT": stdout[stdout.find('=')+1:].strip()}
    ),
    # We've already got Julia and the docs built; so let's build the source tarballs too
    steps.ShellCommand(
        name="clean out srccache",
        command=["/bin/sh", "-c", "rm -rf deps/srccache"],
    ),
    steps.ShellCommand(
        name="make light-source-dist",
        command=["/bin/sh", "-c", util.Interpolate("%(prop:make_cmd)s -j%(prop:nthreads)s JULIA_PRECOMPILE=0 USE_BINARYBUILDER=0 light-source-dist")],
        haltOnFailure = True,
        doStepIf=is_protected_pr,
    ),
    steps.FileUpload(
        name="Upload light source tarball",
        workersrc=util.Interpolate("julia-%(prop:JULIA_VERSION)s_%(prop:JULIA_COMMIT)s.tar.gz"),
        masterdest=util.Interpolate("/tmp/julia_package/julia-%(prop:JULIA_VERSION)s_%(prop:JULIA_COMMIT)s.tar.gz"),
        doStepIf=is_protected_pr,
        hideStepIf=lambda results, s: results==SKIPPED,
    ),
    steps.ShellCommand(
        name="clean out srccache",
        command=["/bin/sh", "-c", "rm -rf deps/srccache"],
    ),
    steps.ShellCommand(
        name="make full-source-dist (without BB)",
        command=["/bin/sh", "-c", util.Interpolate("%(prop:make_cmd)s -j%(prop:nthreads)s JULIA_PRECOMPILE=0 USE_BINARYBUILDER=0 full-source-dist")],
        haltOnFailure = True,
        doStepIf=is_protected_pr,
    ),
    steps.FileUpload(
        name="Upload full source tarball",
        workersrc=util.Interpolate("julia-%(prop:JULIA_VERSION)s_%(prop:JULIA_COMMIT)s-full.tar.gz"),
        masterdest=util.Interpolate("/tmp/julia_package/julia-%(prop:JULIA_VERSION)s_%(prop:JULIA_COMMIT)s-full.tar.gz"),
        doStepIf=is_protected_pr,
        hideStepIf=lambda results, s: results==SKIPPED,
    ),
    steps.ShellCommand(
        name="clean out srccache",
        command=["/bin/sh", "-c", "rm -rf deps/srccache"],
    ),
    steps.ShellCommand(
        name="make full-source-dist (with BB)",
        command=["/bin/sh", "-c", util.Interpolate("%(prop:make_cmd)s -j%(prop:nthreads)s JULIA_PRECOMPILE=0 USE_BINARYBUILDER=1 full-source-dist")],
        haltOnFailure = True,
        doStepIf=is_protected_pr,
    ),
    # NOTE: the worker-side tarball is named "-full.tar.gz"; it is renamed
    # to "-full+bb.tar.gz" on the master to distinguish the BB variant.
    steps.FileUpload(
        name="Upload full source+bb tarball",
        workersrc=util.Interpolate("julia-%(prop:JULIA_VERSION)s_%(prop:JULIA_COMMIT)s-full.tar.gz"),
        masterdest=util.Interpolate("/tmp/julia_package/julia-%(prop:JULIA_VERSION)s_%(prop:JULIA_COMMIT)s-full+bb.tar.gz"),
        doStepIf=is_protected_pr,
        hideStepIf=lambda results, s: results==SKIPPED,
    ),
    # Sign and upload on the master
    steps.MasterShellCommand(
        name="gpg sign light source tarball on master",
        command=["sh", "-c", util.Interpolate("/root/sign_tarball.sh /tmp/julia_package/julia-%(prop:JULIA_VERSION)s_%(prop:JULIA_COMMIT)s.tar.gz")],
        doStepIf=is_protected_pr,
        hideStepIf=lambda results, s: results==SKIPPED,
    ),
    steps.MasterShellCommand(
        name="gpg sign full source tarball on master",
        command=["sh", "-c", util.Interpolate("/root/sign_tarball.sh /tmp/julia_package/julia-%(prop:JULIA_VERSION)s_%(prop:JULIA_COMMIT)s-full.tar.gz")],
        doStepIf=is_protected_pr,
        hideStepIf=lambda results, s: results==SKIPPED,
    ),
    steps.MasterShellCommand(
        name="gpg sign full+bb source tarball on master",
        command=["sh", "-c", util.Interpolate("/root/sign_tarball.sh /tmp/julia_package/julia-%(prop:JULIA_VERSION)s_%(prop:JULIA_COMMIT)s-full+bb.tar.gz")],
        doStepIf=is_protected_pr,
        hideStepIf=lambda results, s: results==SKIPPED,
    ),
    steps.MasterShellCommand(
        name="Upload source tarballs to AWS",
        command=render_srcdist_upload_command,
        haltOnFailure=True,
        doStepIf=is_protected_pr,
        hideStepIf=lambda results, s: results==SKIPPED,
    ),
    # Best-effort cleanup of the staging directory on the master
    steps.MasterShellCommand(
        name="Cleanup Master",
        command=["sh", "-c", util.Interpolate("rm -vf /tmp/julia_package/julia-%(prop:JULIA_VERSION)s_%(prop:JULIA_COMMIT)s* ;")],
        flunkOnFailure=False,
        haltOnFailure=False,
    ),
])
# Trigger doctesting + source upload on changes to any non-skipped branch.
c['schedulers'].append(schedulers.AnyBranchScheduler(
    name="Julia Doctesting and source upload",
    change_filter=util.ChangeFilter(filter_fn=julia_branch_nonskip_filter),
    builderNames=["doctest_linux64"],
    treeStableTimer=1,
))
# Add workers for these jobs
# (workers come from the shared builder_mapping for the linux64 platform)
c['builders'].append(util.BuilderConfig(
    name="doctest_linux64",
    workernames=builder_mapping["linux64"],
    collapseRequests=False,
    tags=["Packaging"],
    factory=julia_doctest_factory,
))
# Add a scheduler for building release candidates/triggering builds manually
c['schedulers'].append(schedulers.ForceScheduler(
    name="doctest",
    label="Force doctest",
    builderNames=["doctest_linux64"],
    reason=util.FixedParameter(name="reason", default=""),
    codebases=[
        util.CodebaseParameter(
            "",
            name="",
            branch=util.FixedParameter(name="branch", default=""),
            repository=util.FixedParameter(name="repository", default=""),
            project=util.FixedParameter(name="project", default="Packaging"),
        )
    ],
    properties=[
        # Extra flags appended to every make invocation of the factory.
        util.StringParameter(
            name="extra_make_flags",
            label="Extra Make Flags",
            size=30,
            default="",
        ),
    ],
))
| [
73,
43640,
62,
4598,
310,
395,
62,
69,
9548,
796,
7736,
13,
15580,
22810,
3419,
198,
73,
43640,
62,
4598,
310,
395,
62,
69,
9548,
13,
1904,
32577,
796,
6407,
198,
73,
43640,
62,
4598,
310,
395,
62,
69,
9548,
13,
2860,
8600,
82,
... | 2.257008 | 3,603 |
import arcade
import time
from packets import GameStateUpdate, CardPlaced, CardBurned, CardPull, InfoUsed, NextTurn
from gui_elements import NameTab, TextButton, CardTab, CardTabList
from settings import *
| [
11748,
27210,
198,
11748,
640,
198,
6738,
24624,
1330,
3776,
9012,
10260,
11,
5172,
3646,
2286,
11,
5172,
29053,
276,
11,
5172,
42940,
11,
14151,
38052,
11,
7406,
17278,
198,
6738,
11774,
62,
68,
3639,
1330,
6530,
33349,
11,
8255,
21864... | 3.833333 | 54 |
#==============================================================================
# IMPORT NECESSARY MODULES
#==============================================================================
# C:\temp>abaqus viewer -noGUI (this requires a CAE license)
# C:\temp>abaqus python (this does not require a license)
import numpy as np
from odbAccess import openOdb
from abaqusConstants import *
#==============================================================================
# DEFINE FUNCTIONS
#==============================================================================
def nodalAveraged(odbInstance, Frame, StressType, timestep):
    """
    For a given solution step, timestep, and stress type
    this function will return 100% nodal averaged results
    sorted by node number. (Note that the element set 'TA_ELEM'
    is hardcoded.)

    Parameters
    ----------
    odbInstance : abaqus OdbInstance containing the element set 'TA_ELEM'
    Frame : sequence of abaqus frames for the solution step
    StressType : abaqus invariant to extract (e.g. MISES, PRESS)
    timestep : int, index of the frame to process

    Returns
    -------
    NodeLabels_unique : 1d array of unique node labels (sorted ascending)
    Values_Averaged : 2d array of nodal-averaged values, one row per node
    """
    # Element-nodal scalar field restricted to the hard-coded element set
    Field = Frame[timestep].fieldOutputs['S'].getSubset(region = odbInstance.elementSets['TA_ELEM']).getSubset(position = ELEMENT_NODAL).getScalarField(invariant = StressType)
    # Create vector of element nodes and stresses
    # (for some reason, abaqus breaks results into blocks of data;
    # need to join the various data blocks into single arrays)
    Values = Field.bulkDataBlocks[0].data
    NodeLabels = Field.bulkDataBlocks[0].nodeLabels
    for i in range(len(Field.bulkDataBlocks)-1):
        Values = np.vstack((Values, Field.bulkDataBlocks[i+1].data))
        NodeLabels = np.hstack((NodeLabels, Field.bulkDataBlocks[i+1].nodeLabels))
    # Nodes are shared across multiple elements. Get unique node labels.
    NodeLabels_unique, unq_idx = np.unique(NodeLabels, return_inverse=True)
    # Calculate nodal averaged stresses at timestep: sum all element
    # contributions per node (bincount with weights) and divide by the
    # number of contributing elements.
    # NOTE: `range` replaces the Python-2-only `xrange`, so the script
    # also runs under Python 3; on Python 2 the behavior is unchanged.
    Values_Averaged = np.zeros((NodeLabels_unique.size, Values.shape[1]))
    unq_counts = np.bincount(unq_idx)
    for i in range(0, Values.shape[1]):
        ValuesTemp = [item[i] for item in Values]
        unq_sum = np.bincount(unq_idx, weights=ValuesTemp)
        Values_Averaged[:, i] = unq_sum / unq_counts
    return NodeLabels_unique, Values_Averaged
#==============================================================================
# RUN THE PROGRAM
#==============================================================================
filename = 'Job-4e-SS-Pulse'
#
# LOAD ABAQUS SOLUTION DATA
#-------------------------------------------------------------------
odb = openOdb(filename+'.odb',readOnly=True)
# Get Instance (the last instance in the root assembly is used)
allInstances = (odb.rootAssembly.instances.keys())
odbInstance = odb.rootAssembly.instances[allInstances[-1]]
#
# PROCESS RESULTS
#-------------------------------------------------------------------
# Retrieve nodal averaged stresses at steady-state solution
timestep = 0
Frame = odb.steps['Step-3-Pulse'].frames
nodeNum, pressure = nodalAveraged(odbInstance,Frame,PRESS,timestep)
nodeNum, vonMises = nodalAveraged(odbInstance,Frame,MISES,timestep)
# Create a signed von Mises stress (sign taken from the negated pressure)
vonMisesSigned = np.sign(-1.*pressure)*vonMises
# Save static stress and also initialize dynamic stress vectors
stressStatic = vonMisesSigned.copy()
stressDynamicMin = vonMisesSigned.copy()
stressDynamicMax = vonMisesSigned.copy()
# Get nodal coordinates: one row per node -> [label, x, y, z]
nodeList = Frame[0].fieldOutputs['S'].values[0].instance.nodes
nodeCoord = np.zeros((len(nodeList),4))
for item in range(len(nodeList)):
    nodeCoord[item,0] = nodeList[item].label
    nodeCoord[item,1] = nodeList[item].coordinates[0]
    nodeCoord[item,2] = nodeList[item].coordinates[1]
    nodeCoord[item,3] = nodeList[item].coordinates[2]
# Find max and min stress values at each node during pulse response
Frame = odb.steps['Step-4-Response'].frames
for timestep in range(len(Frame)):
    nodeNum, pressure = nodalAveraged(odbInstance,Frame,PRESS,timestep)
    nodeNum, vonMises = nodalAveraged(odbInstance,Frame,MISES,timestep)
    vonMisesSigned = np.sign(-1.*pressure)*vonMises
    stressDynamicMax = np.maximum(stressDynamicMax, vonMisesSigned)
    stressDynamicMin = np.minimum(stressDynamicMin, vonMisesSigned)
# Repeat the envelope for the second response step
Frame = odb.steps['Step-6-Response'].frames
for timestep in range(len(Frame)):
    nodeNum, pressure = nodalAveraged(odbInstance,Frame,PRESS,timestep)
    nodeNum, vonMises = nodalAveraged(odbInstance,Frame,MISES,timestep)
    vonMisesSigned = np.sign(-1.*pressure)*vonMises
    stressDynamicMax = np.maximum(stressDynamicMax, vonMisesSigned)
    stressDynamicMin = np.minimum(stressDynamicMin, vonMisesSigned)
#
# SAVE DATA TO NUMPY COMPRESSED BINARY FILE FOR LATER USE
#-------------------------------------------------------------------
np.savez_compressed(filename,
                    nodeNum=nodeNum.flatten(),
                    vonMisesMax=stressDynamicMax.flatten(),
                    vonMisesMin=stressDynamicMin.flatten(),
                    vonMisesStatic=stressStatic.flatten(),
                    nodeCoord=nodeCoord)
#
# TERMINATE PROGRAM
#-------------------------------------------------------------------
#odb.save()
odb.close()
| [
2,
23926,
25609,
855,
198,
2,
30023,
9863,
41804,
7597,
13153,
19164,
6239,
1546,
198,
2,
23926,
25609,
855,
198,
2,
327,
7479,
29510,
29,
15498,
45260,
19091,
532,
3919,
40156,
220,
357,
5661,
4433,
257,
7257,
36,
5964,
8,
198,
2,
... | 2.943188 | 1,725 |
"""Module that holds success messages template"""
audit_messages = {
'created': 'created a new {} ',
'retrieved': 'retrieved {} successfully',
'updated': 'updated {} with id {} ',
'deleted': 'deleted favorite things with id {}'
}
| [
37811,
26796,
326,
6622,
1943,
6218,
11055,
37811,
198,
198,
3885,
270,
62,
37348,
1095,
796,
1391,
198,
220,
220,
220,
705,
25598,
10354,
705,
25598,
257,
649,
23884,
46083,
198,
220,
220,
220,
705,
1186,
28130,
10354,
705,
1186,
28130... | 3.166667 | 78 |
"""
ipy misc/gen_tar_lists.py -- --dataset YouCook2
"""
import copy
import json
import argparse
from typing import Dict, List, Tuple
from pathlib import Path
import tqdm
from beartype import beartype
from zsvision.zs_utils import load_json_config
from gen_readme import dataset_paths, model_specs2path
@beartype
if __name__ == "__main__":
main()
| [
37811,
198,
541,
88,
12747,
14,
5235,
62,
18870,
62,
20713,
13,
9078,
1377,
1377,
19608,
292,
316,
921,
28937,
17,
198,
37811,
198,
11748,
4866,
198,
11748,
33918,
198,
11748,
1822,
29572,
198,
6738,
19720,
1330,
360,
713,
11,
7343,
1... | 2.848 | 125 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun Jan 23 22:19:32 2022
@author: baecker
"""
import sys
sys.path.append('./napari_j/_tests/surrogate')
if __name__ == '__main__':
from surrogate.surrogate import surrogate
else:
from surrogate import surrogate
from unittest.mock import patch
from unittest.mock import Mock, MagicMock
import napari
import numpy as np
IJMock = Mock()
IJMock.getImage = getImage
HyperStackConverterMock = Mock()
@patch('napari.Viewer')
@surrogate('ij.measure.ResultsTable')
@surrogate('ij.IJ')
@patch('ij.IJ', IJMock)
@surrogate('ij.ImagePlus')
@surrogate('ij.WindowManager')
@surrogate('ij.plugin.HyperStackConverter')
@patch('ij.plugin.HyperStackConverter', HyperStackConverterMock)
@patch('napari.Viewer')
@surrogate('ij.measure.ResultsTable')
@surrogate('ij.IJ')
@patch('ij.IJ', IJMock)
@surrogate('ij.ImagePlus')
@surrogate('ij.WindowManager')
@surrogate('ij.plugin.HyperStackConverter')
@patch('ij.plugin.HyperStackConverter', HyperStackConverterMock)
@patch('napari.Viewer')
@surrogate('ij.measure.ResultsTable')
@surrogate('ij.IJ')
@patch('ij.IJ', IJMock)
@surrogate('ij.ImagePlus')
@surrogate('ij.WindowManager')
@surrogate('ij.plugin.HyperStackConverter')
@patch('ij.plugin.HyperStackConverter', HyperStackConverterMock)
@patch('napari.Viewer')
@surrogate('ij.measure.ResultsTable')
@surrogate('ij.IJ')
@patch('ij.IJ', IJMock)
@surrogate('ij.ImagePlus')
@surrogate('ij.WindowManager')
@surrogate('ij.plugin.HyperStackConverter')
@patch('ij.plugin.HyperStackConverter', HyperStackConverterMock)
@patch('napari.Viewer')
@surrogate('ij.measure.ResultsTable')
@surrogate('ij.IJ')
@patch('ij.IJ', IJMock)
@surrogate('ij.ImagePlus')
@surrogate('ij.WindowManager')
@surrogate('ij.plugin.HyperStackConverter')
@patch('ij.plugin.HyperStackConverter', HyperStackConverterMock)
@patch('napari.Viewer')
@surrogate('ij.measure.ResultsTable')
@surrogate('ij.IJ')
@patch('ij.IJ', IJMock)
@surrogate('ij.ImagePlus')
@surrogate('ij.WindowManager')
@surrogate('ij.plugin.HyperStackConverter')
@patch('ij.plugin.HyperStackConverter', HyperStackConverterMock)
if __name__ == '__main__':
test_constructor()
test_getActiveImageFromIJ()
test_getLabelsFromIJ()
test_getPixelsFromImageJ()
test_getMetadataFromImage()
test_toHyperstack()
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
18,
198,
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
37811,
198,
41972,
319,
3825,
2365,
2242,
2534,
25,
1129,
25,
2624,
33160,
198,
198,
31,
9800,
25,
275,
3609,
152... | 2.537037 | 918 |
from __future__ import annotations
from datetime import datetime
from uuid import uuid4
import pytest
from protean import (
BaseCommand,
BaseCommandHandler,
BaseEvent,
BaseEventHandler,
BaseEventSourcedAggregate,
apply,
handle,
)
from protean.fields import DateTime, Identifier, String, Text
from protean.fields.basic import Boolean
from protean.globals import current_domain
published_count = 0
@pytest.mark.eventstore
| [
6738,
11593,
37443,
834,
1330,
37647,
198,
198,
6738,
4818,
8079,
1330,
4818,
8079,
198,
6738,
334,
27112,
1330,
334,
27112,
19,
198,
198,
11748,
12972,
9288,
198,
198,
6738,
5915,
272,
1330,
357,
198,
220,
220,
220,
7308,
21575,
11,
... | 3.09396 | 149 |
# Copyright (c) 2015 The Johns Hopkins University/Applied Physics Laboratory
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import pytest
import testtools
import time
from kmip.core.attributes import CryptographicAlgorithm
from kmip.core.attributes import CryptographicLength
from kmip.core.attributes import Name
from kmip.core import enums
from kmip.core.enums import AttributeType
from kmip.core.enums import CryptographicAlgorithm as CryptoAlgorithmEnum
from kmip.core.enums import CryptographicUsageMask
from kmip.core.enums import KeyFormatType as KeyFormatTypeEnum
from kmip.core.enums import CertificateType
from kmip.core.enums import NameType
from kmip.core.enums import ObjectType
from kmip.core.enums import OpaqueDataType
from kmip.core.enums import SecretDataType
from kmip.core.enums import ResultStatus
from kmip.core.enums import ResultReason
from kmip.core.enums import QueryFunction
from kmip.core.factories.attributes import AttributeFactory
from kmip.core.factories.credentials import CredentialFactory
from kmip.core.factories.secrets import SecretFactory
from kmip.core.misc import KeyFormatType
from kmip.core.objects import Attribute
from kmip.core.objects import KeyBlock
from kmip.core.objects import KeyMaterial
from kmip.core.objects import KeyValue
from kmip.core.objects import TemplateAttribute
from kmip.core.secrets import SymmetricKey
from kmip.core.secrets import PrivateKey
from kmip.core.secrets import PublicKey
from kmip.core.secrets import Certificate
from kmip.core.secrets import SecretData
from kmip.core.secrets import OpaqueObject
@pytest.mark.usefixtures("client")
| [
2,
15069,
357,
66,
8,
1853,
383,
25824,
21183,
2059,
14,
4677,
18511,
23123,
18643,
198,
2,
1439,
6923,
33876,
13,
198,
2,
198,
2,
49962,
739,
262,
24843,
13789,
11,
10628,
362,
13,
15,
357,
1169,
366,
34156,
15341,
345,
743,
198,
... | 3.595 | 600 |
from fastapi_users import models
from fastapi_users.authentication.strategy.db import BaseAccessToken
| [
6738,
3049,
15042,
62,
18417,
1330,
4981,
198,
6738,
3049,
15042,
62,
18417,
13,
41299,
3299,
13,
2536,
4338,
13,
9945,
1330,
7308,
15457,
30642,
628,
628,
628
] | 3.821429 | 28 |
import unittest

from ..hashes import Hash
import random


# Run the test suite when this module is executed directly.
if __name__ == '__main__':
    unittest.main()
| [
11748,
555,
715,
395,
198,
6738,
11485,
71,
7465,
1330,
21059,
198,
11748,
4738,
628,
198,
198,
361,
11593,
3672,
834,
6624,
705,
834,
12417,
834,
10354,
198,
220,
220,
220,
555,
715,
395,
13,
12417,
3419,
198
] | 2.789474 | 38 |
#!/usr/bin/env python
# coding: utf-8
# In[1]:
import os
# In[2]:
os.chdir('//172.17.129.234/d/gdrive/ueda-note/Outlookuファイル/recovered')
# In[3]:
#os.getcwd()
# In[26]:
import glob
targets = glob.glob('*.csv')
# In[5]:
#import pandas
#
#
## In[6]:
#
#
#import pandas as pd
#
#
## In[7]:
#
#
#file='01-family.csv'
#dir=file.replace('.csv','')
##(file,dir)
#
#
## In[8]:
#
#
#data = pd.read_csv(file)
#data
#
#
## In[9]:
#
#
#col=data.columns
#(col[col.str.contains('User')],col[col.str.contains('Header')],col[col.str.contains('To')],col[col.str.contains('Subject')],col[col.str.contains('Date')],col[col.str.contains('Time')],col[col.str.contains('to')])
#
#
## In[10]:
#
#
## NaN check
#data[data['TransportMessageHeaders'].isnull() ]
#
#
## In[11]:
#
#
#pd.set_option("display.max_rows", 200)
#pd.set_option('max_colwidth',3000)
#noheader0=data.loc[13,:].T
#noheader0
#
#
## In[12]:
#
#
#pd.set_option('max_colwidth',1000)
#noheader0[noheader0.str.contains('travelance').fillna(False)]
#
#
## In[13]:
#
#
#data.loc[10:10,('OriginalSubject', 'ClientSubmitTime','DisplayTo', 'DeferredDeliveryTime','InReplyToId')]
#
#
## In[14]:
#
#
#pd.set_option('max_colwidth',1000)
#data.loc[10:10,('SentRepresentingEmailAddress')]
#
#
## In[15]:
#
#
#data['alt_header'] = 'Subject: ' + data['Subject'].fillna('') + '; Date: ' + data['ClientSubmitTime'].fillna('') + '; To: ' + data['DisplayTo'].fillna('')
#data.loc[:,'alt_header']
#
#
## In[16]:
#
#
#pd.reset_option("display.max_rows")
#pd.reset_option("display.max_colwidth")
#
#
## In[17]:
#
#
#data0=data.fillna({'TransportMessageHeaders': data['alt_header']})
#
#
## In[18]:
#
#
#data1 = data0.loc[:,('TransportMessageHeaders','UserEntryId')]
#data1
#
#
## In[19]:
#
#
#os.makedirs(dir, exist_ok=True)
#
#
## In[20]:
#
#
#header = data.loc[0,'TransportMessageHeaders'].split(';')
#header
#
#
## In[21]:
#
#
import re
#header1 = []
#cur = header[0]
#for l in header[1:]:
# if re.match(r" [a-zA-Z][-_a-zA-Z0-9]*:", l) :
# header1.append( cur )
# cur = l[1:]
# elif re.match(r" [ \t]", l) :
# header1.append( cur )
# cur = l[1:]
# else :
# cur += ";"+l
#header1.append( cur )
#header1
#
#
## In[22]:
#
#
#
#reformat_header(header)
#
#
## In[23]:
#
#
#body = data1.loc[0,'UserEntryId'].split(';')
#body
#
#
## In[24]:
#
#
#index=0
#
#with open(os.path.join(dir, ('%03d.eml'%index)), 'wt', encoding='iso2022-jp') as fout:
# print( "\n".join(reformat_header(data1.loc[index,'TransportMessageHeaders'].split(';'))), file=fout)
# print( '', file=fout)
# print( "\n".join(data1.loc[index,'UserEntryId'].split(';')), file=fout)
#
#
## In[25]:
#
#
#os.linesep="\n"
#for index in range(0, len(data1)):
# with open(os.path.join(dir, ('%03d.eml'%index)), 'wt',encoding='iso2022-jp',errors='replace') as fout:
# print( "\n".join(reformat_header(data1.loc[index,'TransportMessageHeaders'].split(';'))), file=fout)
# print( '', file=fout)
# print( "\n".join(data1.fillna({'UserEntryId':''}).loc[index,'UserEntryId'].split(';')), file=fout)
#
#
## # メモ
## - EntryIdの長さが短すぎる。contentではないようだ。
#
## In[ ]:
#
#
#
#
#
## In[ ]:
#
#
#
#
#
## In[ ]:
#
#
#
#
#
## In[ ]:
import pandas as pd
for f in targets:
csv2eml(f)
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
2,
19617,
25,
3384,
69,
12,
23,
198,
198,
2,
554,
58,
16,
5974,
628,
198,
11748,
28686,
628,
198,
2,
554,
58,
17,
5974,
628,
198,
418,
13,
354,
15908,
10786,
1003,
23628,
13,
155... | 2.048673 | 1,582 |
# Copyright 2016, FBPIC contributors
# Authors: Remi Lehe, Manuel Kirchen
# License: 3-Clause-BSD-LBNL
"""
This file is part of the Fourier-Bessel Particle-In-Cell code (FB-PIC)
It defines the optimized fields methods that use numba on a CPU
"""
from scipy.constants import c, epsilon_0, mu_0
c2 = c**2
import numba
from fbpic.utils.threading import njit_parallel, prange
@njit_parallel
def numba_filter_scalar( field, Nz, Nr, filter_array_z, filter_array_r ) :
    """
    Multiply the input field by the filter_array (modifies `field` in place)

    Parameters :
    ------------
    field : 2darray of complexs
        An array that represent the fields in spectral space

    filter_array_z, filter_array_r : 1darray of reals
        An array that damps the fields at high k, in z and r respectively

    Nz, Nr : ints
        Dimensions of the arrays
    """
    # Loop over the 2D grid (parallel in z, if threading is installed)
    for iz in prange(Nz):
        for ir in range(Nr):
            # Separable filter: total damping is the product of the
            # z and r filter coefficients
            field[iz,ir] = filter_array_z[iz]*filter_array_r[ir]*field[iz,ir]
@njit_parallel
def numba_filter_vector( fieldr, fieldt, fieldz, Nz, Nr,
                        filter_array_z, filter_array_r ):
    """
    Multiply the input field by the filter_array (modifies the three
    field components in place)

    Parameters :
    ------------
    fieldr, fieldt, fieldz : 2darrays of complexs
        Arrays that represent the field components in spectral space

    filter_array_z, filter_array_r : 1darray of reals
        An array that damps the fields at high k, in z and r respectively

    Nz, Nr : ints
        Dimensions of the arrays
    """
    # Loop over the 2D grid (parallel in z, if threading is installed)
    for iz in prange(Nz):
        for ir in range(Nr):
            # The same separable damping factor is applied to each of
            # the three vector components
            fieldr[iz,ir] = filter_array_z[iz]*filter_array_r[ir]*fieldr[iz,ir]
            fieldt[iz,ir] = filter_array_z[iz]*filter_array_r[ir]*fieldt[iz,ir]
            fieldz[iz,ir] = filter_array_z[iz]*filter_array_r[ir]*fieldz[iz,ir]
@njit_parallel
def numba_correct_currents_curlfree_standard( rho_prev, rho_next, Jp, Jm, Jz,
                            kz, kr, inv_k2, inv_dt, Nz, Nr ):
    """
    Correct the currents in spectral space, using the curl-free correction
    which is adapted to the standard psatd

    Modifies Jp, Jm, Jz in place so that the corrected currents satisfy
    the continuity equation with rho_prev/rho_next.
    """
    # Loop over the 2D grid (parallel in z, if threading is installed)
    for iz in prange(Nz):
        for ir in range(Nr):
            # Calculate the intermediate variable F
            # (proportional to the error in the continuity equation)
            F = - inv_k2[iz, ir] * (
                (rho_next[iz, ir] - rho_prev[iz, ir])*inv_dt \
                + 1.j*kz[iz, ir]*Jz[iz, ir] \
                + kr[iz, ir]*( Jp[iz, ir] - Jm[iz, ir] ) )

            # Correct the currents accordingly
            Jp[iz, ir] += 0.5 * kr[iz, ir] * F
            Jm[iz, ir] += -0.5 * kr[iz, ir] * F
            Jz[iz, ir] += -1.j * kz[iz, ir] * F

    return
@njit_parallel
def numba_correct_currents_crossdeposition_standard( rho_prev, rho_next,
        rho_next_z, rho_next_xy, Jp, Jm, Jz, kz, kr, inv_dt, Nz, Nr ):
    """
    Correct the currents in spectral space, using the cross-deposition
    algorithm adapted to the standard psatd.

    The currents (Jp, Jm, Jz) are modified in place. The transverse
    components absorb the Dxy part of the continuity error and the
    longitudinal component absorbs the Dz part.

    Parameters
    ----------
    rho_prev, rho_next: 2darrays of complexs
        Spectral charge density at the previous and next timestep
    rho_next_z, rho_next_xy: 2darrays of complexs
        Auxiliary cross-deposited charge densities
        (from the cross-deposition scheme; see caller for definitions)
    Jp, Jm, Jz: 2darrays of complexs
        Spectral current components; modified in place
    kz, kr: 2darrays of reals
        Longitudinal and radial wavevectors
    inv_dt: real
        Inverse of the timestep
    Nz, Nr: ints
        Dimensions of the arrays
    """
    # Loop over the 2D grid
    for iz in prange(Nz):
        # Loop through the radial points
        # (Note: a while loop is used here, because numba 0.34 does
        # not support nested prange and range loops)
        ir = 0
        while ir < Nr:
            # Calculate the intermediate variable Dz and Dxy
            # (Such that Dz + Dxy is the error in the continuity equation)
            Dz = 1.j*kz[iz, ir]*Jz[iz, ir] + 0.5 * inv_dt * \
                ( rho_next[iz, ir] - rho_next_xy[iz, ir] + \
                  rho_next_z[iz, ir] - rho_prev[iz, ir] )
            Dxy = kr[iz, ir]*( Jp[iz, ir] - Jm[iz, ir] ) + 0.5 * inv_dt * \
                ( rho_next[iz, ir] - rho_next_z[iz, ir] + \
                  rho_next_xy[iz, ir] - rho_prev[iz, ir] )
            # Correct the currents accordingly
            # (guard against division by zero at kr=0 / kz=0: those modes
            # are simply left uncorrected in the corresponding direction)
            if kr[iz, ir] != 0:
                inv_kr = 1./kr[iz, ir]
                Jp[iz, ir] += -0.5 * Dxy * inv_kr
                Jm[iz, ir] += 0.5 * Dxy * inv_kr
            if kz[iz, ir] != 0:
                inv_kz = 1./kz[iz, ir]
                Jz[iz, ir] += 1.j * Dz * inv_kz
            # Increment ir
            ir += 1
    return
@njit_parallel
def numba_push_eb_standard( Ep, Em, Ez, Bp, Bm, Bz, Jp, Jm, Jz,
                    rho_prev, rho_next,
                    rho_prev_coef, rho_next_coef, j_coef,
                    C, S_w, kr, kz, dt,
                    use_true_rho, Nz, Nr) :
    """
    Push the fields over one timestep, using the standard psatd algorithm
    See the documentation of SpectralGrid.push_eb_with

    All field arrays (Ep, Em, Ez, Bp, Bm, Bz) are updated in place.
    C and S_w are the precomputed psatd propagation coefficients,
    rho_prev_coef/rho_next_coef/j_coef the precomputed source coefficients
    (exact definitions live with the caller — see SpectralGrid).
    `c2`, `mu_0` and `epsilon_0` are module-level physical constants
    (c2 presumably c**2 — confirm at module top).

    Parameters
    ----------
    Ep, Em, Ez, Bp, Bm, Bz: 2darrays of complexs
        Spectral field components; modified in place
    Jp, Jm, Jz: 2darrays of complexs
        Spectral current components (read-only here)
    rho_prev, rho_next: 2darrays of complexs
        Spectral charge density at previous/next timestep
    rho_prev_coef, rho_next_coef, j_coef: 2darrays of complexs
        Precomputed psatd source coefficients
    C, S_w: 2darrays of reals
        Precomputed psatd propagation coefficients
    kr, kz: 2darrays of reals
        Radial and longitudinal wavevectors
    dt: real
        Timestep
    use_true_rho: bool
        Whether to use the deposited rho (True) or to reconstruct the
        charge term from div(E) and div(J) (False)
    Nz, Nr: ints
        Dimensions of the arrays
    """
    # Loop over the 2D grid (parallel in z, if threading is installed)
    for iz in prange(Nz):
        for ir in range(Nr):
            # Save the electric fields, since it is needed for the B push
            # (E is overwritten below before B is updated)
            Ep_old = Ep[iz, ir]
            Em_old = Em[iz, ir]
            Ez_old = Ez[iz, ir]
            # Calculate useful auxiliary arrays
            if use_true_rho:
                # Evaluation using the rho projected on the grid
                rho_diff = rho_next_coef[iz, ir] * rho_next[iz, ir] \
                    - rho_prev_coef[iz, ir] * rho_prev[iz, ir]
            else:
                # Evaluation using div(E) and div(J)
                divE = kr[iz, ir]*( Ep[iz, ir] - Em[iz, ir] ) \
                    + 1.j*kz[iz, ir]*Ez[iz, ir]
                divJ = kr[iz, ir]*( Jp[iz, ir] - Jm[iz, ir] ) \
                    + 1.j*kz[iz, ir]*Jz[iz, ir]
                rho_diff = (rho_next_coef[iz, ir] - rho_prev_coef[iz, ir]) \
                  * epsilon_0 * divE - rho_next_coef[iz, ir] * dt * divJ
            # Push the E field
            Ep[iz, ir] = C[iz, ir]*Ep[iz, ir] + 0.5*kr[iz, ir]*rho_diff \
                + c2*S_w[iz, ir]*( -1.j*0.5*kr[iz, ir]*Bz[iz, ir] \
                + kz[iz, ir]*Bp[iz, ir] - mu_0*Jp[iz, ir] )
            Em[iz, ir] = C[iz, ir]*Em[iz, ir] - 0.5*kr[iz, ir]*rho_diff \
                + c2*S_w[iz, ir]*( -1.j*0.5*kr[iz, ir]*Bz[iz, ir] \
                - kz[iz, ir]*Bm[iz, ir] - mu_0*Jm[iz, ir] )
            Ez[iz, ir] = C[iz, ir]*Ez[iz, ir] - 1.j*kz[iz, ir]*rho_diff \
                + c2*S_w[iz, ir]*( 1.j*kr[iz, ir]*Bp[iz, ir] \
                + 1.j*kr[iz, ir]*Bm[iz, ir] - mu_0*Jz[iz, ir] )
            # Push the B field (uses the *saved* E fields, not the new ones)
            Bp[iz, ir] = C[iz, ir]*Bp[iz, ir] \
                - S_w[iz, ir]*( -1.j*0.5*kr[iz, ir]*Ez_old \
                            + kz[iz, ir]*Ep_old ) \
                + j_coef[iz, ir]*( -1.j*0.5*kr[iz, ir]*Jz[iz, ir] \
                            + kz[iz, ir]*Jp[iz, ir] )
            Bm[iz, ir] = C[iz, ir]*Bm[iz, ir] \
                - S_w[iz, ir]*( -1.j*0.5*kr[iz, ir]*Ez_old \
                            - kz[iz, ir]*Em_old ) \
                + j_coef[iz, ir]*( -1.j*0.5*kr[iz, ir]*Jz[iz, ir] \
                            - kz[iz, ir]*Jm[iz, ir] )
            Bz[iz, ir] = C[iz, ir]*Bz[iz, ir] \
                - S_w[iz, ir]*( 1.j*kr[iz, ir]*Ep_old \
                            + 1.j*kr[iz, ir]*Em_old ) \
                + j_coef[iz, ir]*( 1.j*kr[iz, ir]*Jp[iz, ir] \
                            + 1.j*kr[iz, ir]*Jm[iz, ir] )
    return
@njit_parallel
def numba_push_eb_pml_standard( Ep_pml, Em_pml, Bp_pml, Bm_pml,
                        Ez, Bz, C, S_w, kr, kz, Nz, Nr):
    """
    Push the PML split fields over one timestep, using the standard psatd algorithm
    See the documentation of SpectralGrid.push_eb_with

    Only the split transverse components (Ep_pml, Em_pml, Bp_pml, Bm_pml)
    are updated in place; they couple exclusively to the longitudinal
    fields Ez and Bz, which are read-only here.

    Parameters
    ----------
    Ep_pml, Em_pml, Bp_pml, Bm_pml: 2darrays of complexs
        Split PML field components; modified in place
    Ez, Bz: 2darrays of complexs
        Longitudinal spectral fields (read-only)
    C, S_w: 2darrays of reals
        Precomputed psatd propagation coefficients
    kr, kz: 2darrays of reals
        Radial and longitudinal wavevectors
    Nz, Nr: ints
        Dimensions of the arrays
    """
    # Loop over the 2D grid
    for iz in prange(Nz):
        for ir in range(Nr):
            # Push the PML E field
            Ep_pml[iz, ir] = C[iz, ir]*Ep_pml[iz, ir] \
                + c2*S_w[iz, ir]*( -1.j*0.5*kr[iz, ir]*Bz[iz, ir] )
            Em_pml[iz, ir] = C[iz, ir]*Em_pml[iz, ir] \
                + c2*S_w[iz, ir]*( -1.j*0.5*kr[iz, ir]*Bz[iz, ir] )
            # Push the PML B field
            Bp_pml[iz, ir] = C[iz, ir]*Bp_pml[iz, ir] \
                - S_w[iz, ir]*( -1.j*0.5*kr[iz, ir]*Ez[iz, ir] )
            Bm_pml[iz, ir] = C[iz, ir]*Bm_pml[iz, ir] \
                - S_w[iz, ir]*( -1.j*0.5*kr[iz, ir]*Ez[iz, ir] )
    return
@njit_parallel
def numba_correct_currents_curlfree_comoving( rho_prev, rho_next, Jp, Jm, Jz,
                            kz, kr, inv_k2,
                            j_corr_coef, T_eb, T_cc,
                            inv_dt, Nz, Nr ) :
    """
    Correct the currents in spectral space, using the curl-free correction
    which is adapted to the galilean/comoving-currents assumption

    The currents (Jp, Jm, Jz) are modified in place. This mirrors the
    standard curl-free correction, with the charge term weighted by the
    precomputed galilean/comoving coefficients (j_corr_coef, T_eb, T_cc),
    whose exact definitions live with the caller.

    Parameters
    ----------
    rho_prev, rho_next: 2darrays of complexs
        Spectral charge density at the previous and next timestep
    Jp, Jm, Jz: 2darrays of complexs
        Spectral current components; modified in place
    kz, kr: 2darrays of reals
        Longitudinal and radial wavevectors
    inv_k2: 2darray of reals
        Inverse of the squared wavevector norm
    j_corr_coef, T_eb, T_cc: 2darrays of complexs
        Precomputed galilean/comoving coefficients
    inv_dt: real
        Inverse of the timestep (unused in the body — kept for a uniform
        signature with the standard variant; confirm with callers)
    Nz, Nr: ints
        Dimensions of the arrays
    """
    # Loop over the 2D grid (parallel in z, if threading is installed)
    for iz in prange(Nz):
        for ir in range(Nr):
            # Calculate the intermediate variable F
            # (continuity residual with comoving weighting, scaled by -1/k^2)
            F = - inv_k2[iz, ir] * ( T_cc[iz, ir]*j_corr_coef[iz, ir] \
                * (rho_next[iz, ir] - rho_prev[iz, ir]*T_eb[iz, ir]) \
                + 1.j*kz[iz, ir]*Jz[iz, ir] \
                + kr[iz, ir]*( Jp[iz, ir] - Jm[iz, ir] ) )
            # Correct the currents accordingly
            Jp[iz, ir] += 0.5 * kr[iz, ir] * F
            Jm[iz, ir] += -0.5 * kr[iz, ir] * F
            Jz[iz, ir] += -1.j * kz[iz, ir] * F
    return
@njit_parallel
def numba_correct_currents_crossdeposition_comoving(
    rho_prev, rho_next, rho_next_z, rho_next_xy, Jp, Jm, Jz,
    kz, kr, j_corr_coef, T_eb, T_cc, inv_dt, Nz, Nr ) :
    """
    Correct the currents in spectral space, using the cross-deposition
    algorithm adapted to the galilean/comoving-currents assumption.

    The currents (Jp, Jm, Jz) are modified in place. Same structure as
    the standard cross-deposition correction, with the charge terms
    weighted by the precomputed coefficients j_corr_coef, T_eb, T_cc
    (exact definitions live with the caller).

    Parameters
    ----------
    rho_prev, rho_next: 2darrays of complexs
        Spectral charge density at the previous and next timestep
    rho_next_z, rho_next_xy: 2darrays of complexs
        Auxiliary cross-deposited charge densities
    Jp, Jm, Jz: 2darrays of complexs
        Spectral current components; modified in place
    kz, kr: 2darrays of reals
        Longitudinal and radial wavevectors
    j_corr_coef, T_eb, T_cc: 2darrays of complexs
        Precomputed galilean/comoving coefficients
    inv_dt: real
        Inverse of the timestep (unused in the body — kept for a uniform
        signature with the standard variant; confirm with callers)
    Nz, Nr: ints
        Dimensions of the arrays
    """
    # Loop over the 2D grid
    for iz in prange(Nz):
        # Loop through the radial points
        # (Note: a while loop is used here, because numba 0.34 does
        # not support nested prange and range loops)
        ir = 0
        while ir < Nr:
            # Calculate the intermediate variable Dz and Dxy
            # (Such that Dz + Dxy is the error in the continuity equation)
            Dz = 1.j*kz[iz, ir]*Jz[iz, ir] \
                + 0.5 * T_cc[iz, ir]*j_corr_coef[iz, ir] * \
                ( rho_next[iz, ir] - T_eb[iz, ir] * rho_next_xy[iz, ir] \
                + rho_next_z[iz, ir] - T_eb[iz, ir] * rho_prev[iz, ir] )
            Dxy = kr[iz, ir]*( Jp[iz, ir] - Jm[iz, ir] ) \
                + 0.5 * T_cc[iz, ir]*j_corr_coef[iz, ir] * \
                ( rho_next[iz, ir] + T_eb[iz, ir] * rho_next_xy[iz, ir] \
                - rho_next_z[iz, ir] - T_eb[iz, ir] * rho_prev[iz, ir] )
            # Correct the currents accordingly
            # (modes with kr=0 / kz=0 are left uncorrected in that direction)
            if kr[iz, ir] != 0:
                inv_kr = 1./kr[iz, ir]
                Jp[iz, ir] += -0.5 * Dxy * inv_kr
                Jm[iz, ir] += 0.5 * Dxy * inv_kr
            if kz[iz, ir] != 0:
                inv_kz = 1./kz[iz, ir]
                Jz[iz, ir] += 1.j * Dz * inv_kz
            # Increment ir
            ir += 1
    return
@njit_parallel
def numba_push_eb_comoving( Ep, Em, Ez, Bp, Bm, Bz, Jp, Jm, Jz,
                    rho_prev, rho_next,
                    rho_prev_coef, rho_next_coef, j_coef,
                    C, S_w, T_eb, T_cc, T_rho,
                    kr, kz, dt, V, use_true_rho, Nz, Nr):
    """
    Push the fields over one timestep, using the psatd algorithm,
    with the assumptions of comoving currents
    (either with the galilean scheme or comoving scheme, depending on
    the values of the coefficients that are passed)
    See the documentation of SpectralGrid.push_eb_with

    All field arrays (Ep, Em, Ez, Bp, Bm, Bz) are updated in place.
    Compared to the standard push, the propagation terms are multiplied
    by T_eb, the current terms by T_cc, and extra j_coef*1.j*kz*V*J terms
    appear in the E push (V is the comoving/galilean velocity).
    Exact coefficient definitions live with the caller (SpectralGrid).

    Parameters
    ----------
    Ep, Em, Ez, Bp, Bm, Bz: 2darrays of complexs
        Spectral field components; modified in place
    Jp, Jm, Jz: 2darrays of complexs
        Spectral current components (read-only here)
    rho_prev, rho_next: 2darrays of complexs
        Spectral charge density at previous/next timestep
    rho_prev_coef, rho_next_coef, j_coef: 2darrays of complexs
        Precomputed psatd source coefficients
    C, S_w, T_eb, T_cc, T_rho: 2darrays
        Precomputed psatd/comoving propagation coefficients
    kr, kz: 2darrays of reals
        Radial and longitudinal wavevectors
    dt: real
        Timestep (unused directly in this body; sources are folded into
        the precomputed coefficients)
    V: real
        Comoving/galilean velocity
    use_true_rho: bool
        Whether to use the deposited rho (True) or to reconstruct the
        charge term from div(E) and div(J) (False)
    Nz, Nr: ints
        Dimensions of the arrays
    """
    # Loop over the grid (parallel in z, if threading is installed)
    for iz in prange(Nz):
        for ir in range(Nr):
            # Save the electric fields, since it is needed for the B push
            # (E is overwritten below before B is updated)
            Ep_old = Ep[iz, ir]
            Em_old = Em[iz, ir]
            Ez_old = Ez[iz, ir]
            # Calculate useful auxiliary arrays
            if use_true_rho:
                # Evaluation using the rho projected on the grid
                rho_diff = rho_next_coef[iz, ir] * rho_next[iz, ir] \
                    - rho_prev_coef[iz, ir] * rho_prev[iz, ir]
            else:
                # Evaluation using div(E) and div(J)
                divE = kr[iz, ir]*( Ep[iz, ir] - Em[iz, ir] ) \
                    + 1.j*kz[iz, ir]*Ez[iz, ir]
                divJ = kr[iz, ir]*( Jp[iz, ir] - Jm[iz, ir] ) \
                    + 1.j*kz[iz, ir]*Jz[iz, ir]
                rho_diff = ( T_eb[iz,ir] * rho_next_coef[iz, ir] \
                    - rho_prev_coef[iz, ir] ) \
                    * epsilon_0 * divE + T_rho[iz, ir] \
                    * rho_next_coef[iz, ir] * divJ
            # Push the E field
            Ep[iz, ir] = \
                T_eb[iz, ir]*C[iz, ir]*Ep[iz, ir] + 0.5*kr[iz, ir]*rho_diff \
                + j_coef[iz, ir]*1.j*kz[iz, ir]*V*Jp[iz, ir] \
                + c2*T_eb[iz, ir]*S_w[iz, ir]*( -1.j*0.5*kr[iz, ir]*Bz[iz, ir] \
                + kz[iz, ir]*Bp[iz, ir] - mu_0*T_cc[iz, ir]*Jp[iz, ir] )
            Em[iz, ir] = \
                T_eb[iz, ir]*C[iz, ir]*Em[iz, ir] - 0.5*kr[iz, ir]*rho_diff \
                + j_coef[iz, ir]*1.j*kz[iz, ir]*V*Jm[iz, ir] \
                + c2*T_eb[iz, ir]*S_w[iz, ir]*( -1.j*0.5*kr[iz, ir]*Bz[iz, ir] \
                - kz[iz, ir]*Bm[iz, ir] - mu_0*T_cc[iz, ir]*Jm[iz, ir] )
            Ez[iz, ir] = \
                T_eb[iz, ir]*C[iz, ir]*Ez[iz, ir] - 1.j*kz[iz, ir]*rho_diff \
                + j_coef[iz, ir]*1.j*kz[iz, ir]*V*Jz[iz, ir] \
                + c2*T_eb[iz, ir]*S_w[iz, ir]*( 1.j*kr[iz, ir]*Bp[iz, ir] \
                + 1.j*kr[iz, ir]*Bm[iz, ir] - mu_0*T_cc[iz, ir]*Jz[iz, ir] )
            # Push the B field (uses the *saved* E fields, not the new ones)
            Bp[iz, ir] = T_eb[iz, ir]*C[iz, ir]*Bp[iz, ir] \
                - T_eb[iz, ir]*S_w[iz, ir]*( -1.j*0.5*kr[iz, ir]*Ez_old \
                            + kz[iz, ir]*Ep_old ) \
                + j_coef[iz, ir]*( -1.j*0.5*kr[iz, ir]*Jz[iz, ir] \
                            + kz[iz, ir]*Jp[iz, ir] )
            Bm[iz, ir] = T_eb[iz, ir]*C[iz, ir]*Bm[iz, ir] \
                - T_eb[iz, ir]*S_w[iz, ir]*( -1.j*0.5*kr[iz, ir]*Ez_old \
                            - kz[iz, ir]*Em_old ) \
                + j_coef[iz, ir]*( -1.j*0.5*kr[iz, ir]*Jz[iz, ir] \
                            - kz[iz, ir]*Jm[iz, ir] )
            Bz[iz, ir] = T_eb[iz, ir]*C[iz, ir]*Bz[iz, ir] \
                - T_eb[iz, ir]*S_w[iz, ir]*( 1.j*kr[iz, ir]*Ep_old \
                            + 1.j*kr[iz, ir]*Em_old ) \
                + j_coef[iz, ir]*( 1.j*kr[iz, ir]*Jp[iz, ir] \
                            + 1.j*kr[iz, ir]*Jm[iz, ir] )
    return
@njit_parallel
def numba_push_eb_pml_comoving( Ep_pml, Em_pml, Bp_pml, Bm_pml,
                        Ez, Bz, C, S_w, T_eb, kr, kz, Nz, Nr):
    """
    Push the PML split fields over one timestep,
    using the galilean/comoving psatd algorithm
    See the documentation of SpectralGrid.push_eb_with

    Only the split transverse components (Ep_pml, Em_pml, Bp_pml, Bm_pml)
    are updated in place; they couple exclusively to the longitudinal
    fields Ez and Bz, which are read-only here. Compared to the standard
    PML push, every term carries the extra comoving factor T_eb.

    Parameters
    ----------
    Ep_pml, Em_pml, Bp_pml, Bm_pml: 2darrays of complexs
        Split PML field components; modified in place
    Ez, Bz: 2darrays of complexs
        Longitudinal spectral fields (read-only)
    C, S_w, T_eb: 2darrays
        Precomputed psatd/comoving propagation coefficients
    kr, kz: 2darrays of reals
        Radial and longitudinal wavevectors
    Nz, Nr: ints
        Dimensions of the arrays
    """
    # Loop over the 2D grid
    for iz in prange(Nz):
        for ir in range(Nr):
            # Push the E field
            Ep_pml[iz, ir] = T_eb[iz, ir]*C[iz, ir]*Ep_pml[iz, ir] \
                + c2*T_eb[iz, ir]*S_w[iz, ir]*(-1.j*0.5*kr[iz, ir]*Bz[iz, ir])
            Em_pml[iz, ir] = T_eb[iz, ir]*C[iz, ir]*Em_pml[iz, ir] \
                + c2*T_eb[iz, ir]*S_w[iz, ir]*(-1.j*0.5*kr[iz, ir]*Bz[iz, ir])
            # Push the B field
            Bp_pml[iz, ir] = T_eb[iz, ir]*C[iz, ir]*Bp_pml[iz, ir] \
                - T_eb[iz, ir]*S_w[iz, ir]*( -1.j*0.5*kr[iz, ir]*Ez[iz, ir] )
            Bm_pml[iz, ir] = T_eb[iz, ir]*C[iz, ir]*Bm_pml[iz, ir] \
                - T_eb[iz, ir]*S_w[iz, ir]*( -1.j*0.5*kr[iz, ir]*Ez[iz, ir] )
    return
@njit_parallel
def numba_correct_divE( Ez, Ep, Em, inv_k2, kz, kr, rho, Nz, Nr ):
    """
    Enforce Gauss's law in spectral space: modify the electric field
    in place so that div(E) - rho/epsilon_0 = 0.

    Parameters
    ----------
    Ez, Ep, Em: 2darrays of complexs
        Spectral electric field components; modified in place
    inv_k2: 2darray of reals
        Inverse of the squared wavevector norm
    kz, kr: 2darrays of reals
        Longitudinal and radial wavevectors
    rho: 2darray of complexs
        Spectral charge density
    Nz, Nr: ints
        Dimensions of the arrays
    """
    # Parallel loop over z, serial loop over r
    for i in prange(Nz):
        for j in range(Nr):
            # F is the Gauss-law residual scaled by -1/k^2
            F = - inv_k2[i, j] * (
                - rho[i, j] / epsilon_0 \
                + 1.j*kz[i, j]*Ez[i, j] + kr[i, j]*( Ep[i, j] - Em[i, j] ) )
            # Subtracting the spectral gradient of F removes the residual
            Ep[i, j] += 0.5*kr[i, j]*F
            Em[i, j] += -0.5*kr[i, j]*F
            Ez[i, j] += -1.j*kz[i, j]*F
# -----------------------------------------------------------------------
# Parallel reduction of the global arrays for threads into a single array
# -----------------------------------------------------------------------
@njit_parallel
def numba_erase_threading_buffer( global_array ):
    """
    Reset the per-thread deposition buffer `global_array` to 0.

    Parameter:
    ----------
    global_array: 4darray of complexs
        An array that contains the duplicated charge/current for each thread
        (axes: thread, azimuthal mode, z, r)
    """
    nthreads, Nm, Nz, Nr = global_array.shape
    # Each thread erases its own copy: parallel loop over the thread axis
    for tid in prange(nthreads):
        # Sweep every mode and every cell of this thread's buffer
        for mode in range(Nm):
            for i in range(Nz):
                for j in range(Nr):
                    global_array[tid, mode, i, j] = 0.
@njit_parallel
def sum_reduce_2d_array( global_array, reduced_array, m ):
    """
    Sum the array `global_array` along its first axis and
    add it into `reduced_array`, and fold the deposition guard cells of
    global_array into the regular cells of reduced_array.

    Parameters:
    -----------
    global_array: 4darray of complexs
        Field array of shape (nthreads, Nm, 2+Nz+2, 2+Nr+2)
        where the additional 2's in z and r correspond to deposition guard cells
        that were used during the threaded deposition kernel.
    reduced array: 2darray of complex
        Field array of shape (Nz, Nr); accumulated into in place
    m: int
        The azimuthal mode for which the reduction should be performed
    """
    # Extract size of each dimension
    Nz = reduced_array.shape[0]
    # Parallel loop over z
    for iz in prange(Nz):
        # Get index inside reduced_array
        # (offset by 2 to skip the low-z deposition guard cells)
        iz_global = iz + 2
        reduce_slice( reduced_array, iz, global_array, iz_global, m )
    # Handle deposition guard cells in z:
    # low-z guards (global slices 0, 1) fold into the last two cells,
    # high-z guards (global slices Nz+2, Nz+3) fold into the first two cells
    reduce_slice( reduced_array, Nz-2, global_array, 0, m )
    reduce_slice( reduced_array, Nz-1, global_array, 1, m )
    reduce_slice( reduced_array, 0, global_array, Nz+2, m )
    reduce_slice( reduced_array, 1, global_array, Nz+3, m )
@numba.njit
def reduce_slice( reduced_array, iz, global_array, iz_global, m ):
    """
    Sum the array `global_array` into `reduced_array` for one given slice in z

    Accumulates, over all threads, the z-slice `iz_global` of mode `m`
    into row `iz` of `reduced_array`, folding the 2 radial deposition
    guard cells at each end into the nearest regular cells.
    """
    # Number of per-thread copies to accumulate (the thread axis)
    Nreduce = global_array.shape[0]
    Nr = reduced_array.shape[1]
    # Loop over the reduction dimension (slow dimension)
    for it in range( Nreduce ):
        # First fold the low-radius deposition guard cells in
        # (note the swapped indices: the fold is mirrored across the axis)
        reduced_array[iz, 1] += global_array[it, m, iz_global, 0]
        reduced_array[iz, 0] += global_array[it, m, iz_global, 1]
        # Then loop over regular cells (offset by 2 in the global array)
        for ir in range( Nr ):
            reduced_array[iz, ir] += global_array[it, m, iz_global, ir+2]
        # Finally fold the high-radius guard cells in
        # (both outer guards are folded into the last radial cell)
        reduced_array[iz, Nr-1] += global_array[it, m, iz_global, Nr+2]
        reduced_array[iz, Nr-1] += global_array[it, m, iz_global, Nr+3]
| [
2,
15069,
1584,
11,
13186,
47,
2149,
20420,
198,
2,
46665,
25,
3982,
72,
1004,
258,
11,
25995,
509,
1980,
831,
198,
2,
13789,
25,
513,
12,
2601,
682,
12,
21800,
12,
43,
15766,
43,
198,
37811,
198,
1212,
2393,
318,
636,
286,
262,
... | 1.779766 | 10,784 |
import binascii
import os
from base64 import urlsafe_b64encode
from urllib.parse import urlencode
from django.conf import settings
from django.http import HttpResponseRedirect
from django.urls import reverse
from django.utils.encoding import force_str
from olympia.amo.utils import is_safe_url, use_fake_fxa
| [
11748,
9874,
292,
979,
72,
198,
11748,
28686,
198,
198,
6738,
2779,
2414,
1330,
2956,
7278,
8635,
62,
65,
2414,
268,
8189,
198,
6738,
2956,
297,
571,
13,
29572,
1330,
2956,
11925,
8189,
198,
198,
6738,
42625,
14208,
13,
10414,
1330,
6... | 3.088235 | 102 |
STRATEGY_NAME = "ultimate2"
IS_SUBMIT = False
EPOCH_NUM = 1
SUBMIT_NAME = "🍪CookEs-" + STRATEGY_NAME + "(Version " + str(EPOCH_NUM) + ") github@ToiletCommander" | [
18601,
6158,
31212,
62,
20608,
796,
366,
44818,
17,
1,
198,
1797,
62,
50,
10526,
36393,
796,
10352,
198,
8905,
46,
3398,
62,
41359,
796,
352,
198,
50,
10526,
36393,
62,
20608,
796,
366,
8582,
235,
103,
28937,
23041,
21215,
1343,
19269... | 2.318841 | 69 |
import numpy as np
# update m robots position: cur_position
# robot waiting at estimating running point
# robot running back to the center
# update position while pickers are picking
#update position of pickers while it is walking
# reset environment randomly
# execute one action
# render current state to picture
# update states
# check if there is idle robot in the center
| [
11748,
299,
32152,
355,
45941,
220,
198,
197,
197,
2,
4296,
285,
14193,
2292,
25,
1090,
62,
9150,
198,
197,
197,
2,
9379,
4953,
379,
39539,
2491,
966,
198,
197,
197,
2,
9379,
2491,
736,
284,
262,
3641,
198,
197,
197,
2,
4296,
2292... | 3.92 | 100 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2017-03-11 00:58
from __future__ import unicode_literals
from django.db import migrations, models
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
2,
2980,
515,
416,
37770,
352,
13,
940,
319,
2177,
12,
3070,
12,
1157,
3571,
25,
3365,
198,
6738,
11593,
37443,
834,
1330,
28000,
1098,
62,
17201,
874,
198,
198,
6738,... | 2.8 | 55 |
def strategy(history, memory):
    """
    Defect (0) only when the opponent defected on the previous turn,
    unless we ourselves defected two turns ago — in that case cooperate (1).

    history: 2d int array, row 0 = our moves, row 1 = opponent's moves.
    Returns (choice, memory) where memory is always None.
    """
    n_turns = history.shape[1]
    # Before any moves exist, treat the missing moves as cooperation (1)
    opp_last = history[1, -1] if n_turns >= 1 else 1
    own_before_last = history[0, -2] if n_turns >= 2 else 1
    if opp_last == 1 or own_before_last == 0:
        return 1, None
    return 0, None
| [
4299,
4811,
7,
23569,
11,
4088,
2599,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
1002,
6125,
825,
11197,
11,
3031,
351,
825,
3213,
13,
1635,
4944,
48481,
9,
356,
825,
11197,
262,
1210,
878,
13,
198,
220,
220,
220,
37227,
198,
... | 2.784173 | 139 |
'''
Copyright 2017, Fujitsu Network Communications, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
""" selenium verify operations library"""
from warrior.Framework.Utils.print_Utils import print_error, print_info, print_debug, print_exception
from selenium.webdriver.support import expected_conditions as EC
try:
from selenium import webdriver
except Exception as exception:
print_exception(exception)
class VerifyOperations(object):
    """Selenium browser verification helpers."""

    # Page attributes that may be queried through get_page_property
    PAGE_PROPERTIES = ("current_url", "name", "page_source", "title")

    def __init__(self, *args, **kwargs):
        """Verify operations constructor """

    def get_page_property(self, browser_instance, value_type=None):
        """Return one property of the current page.

        :param browser_instance: selenium webdriver instance
        :param value_type: one of "current_url", "name", "page_source",
            "title"; any other value prints an error
        :return: the requested property value, or False when value_type
            is None or unsupported
        """
        return_value = False
        if value_type is not None:
            if value_type not in self.PAGE_PROPERTIES:
                # Same message as before, built from the canonical tuple
                # instead of a list re-created on every call
                print_error("Only {0}, {1}, {2}, {3} are supported value types.".format(*self.PAGE_PROPERTIES))
            else:
                # Direct attribute dispatch replaces the old index loop
                return_value = getattr(browser_instance, value_type)
        return return_value

    def verify_alert_is_present(self, browser_instance, action="accept"):
        """Accept or dismiss the currently displayed alert.

        :param browser_instance: selenium webdriver instance
        :param action: "dismiss" to dismiss; anything else accepts
        :return: True if an alert was handled, False otherwise
        """
        status = False
        try:
            if action.lower().strip() == "dismiss":
                browser_instance.switch_to.alert.dismiss()
            else:
                browser_instance.switch_to.alert.accept()
            status = True
        except Exception:
            # Keep best-effort semantics: report, don't raise
            print_error("No alert present!")
        return status
| [
7061,
6,
198,
15269,
2177,
11,
32671,
19831,
7311,
14620,
11,
3457,
13,
198,
26656,
15385,
739,
262,
24843,
13789,
11,
10628,
362,
13,
15,
357,
1169,
366,
34156,
15341,
198,
5832,
743,
407,
779,
428,
2393,
2845,
287,
11846,
351,
262,
... | 2.376102 | 1,021 |
from oauth2_provider.contrib.rest_framework import authentication
from django.utils import timezone
| [
6738,
267,
18439,
17,
62,
15234,
1304,
13,
3642,
822,
13,
2118,
62,
30604,
1330,
18239,
198,
6738,
42625,
14208,
13,
26791,
1330,
640,
11340,
628
] | 3.884615 | 26 |
begin() | [
628,
198,
27471,
3419
] | 2.5 | 4 |
from django.core.urlresolvers import reverse
from django.contrib.auth import get_user_model
from django.contrib.auth.models import Permission
from django.core.paginator import Paginator
from go.base import utils
from go.base.tests.helpers import GoDjangoTestCase, DjangoVumiApiHelper
from go.vumitools.api import VumiApi, VumiUserApi
from go.vumitools.conversation.definition import ConversationDefinitionBase
from go.conversation.view_definition import ConversationViewDefinitionBase
| [
6738,
42625,
14208,
13,
7295,
13,
6371,
411,
349,
690,
1330,
9575,
198,
6738,
42625,
14208,
13,
3642,
822,
13,
18439,
1330,
651,
62,
7220,
62,
19849,
198,
6738,
42625,
14208,
13,
3642,
822,
13,
18439,
13,
27530,
1330,
2448,
3411,
198,... | 3.395833 | 144 |
#!/usr/bin/env python3
"""RSA"""
import math
import random
from tkinter import *
from tkinter import ttk
#SPECIFICATION
"""Public key
P and Q are prime number
N = P * Q
M = ( P - 1) * (Q -1)
C = prime number of M, GCD(C,M) = 1
Public key = (N,C)
"""
"""
Private key
We have need U
C * U + M * V = 1
U is ]2,M[
2 < U - K *M < M
PRIVATE KEY = (U,N)
"""
""" ENCRYPTION
1) Take the message
2) Convert all the character in ASCII
3) Calcul all ASCII code with (ASCII)^C mod(N)
4) Message is encrypted
"""
""" DECRYPTION
1) Take an encrypted message
2) Apply (code)^U mod(N) for all character
3) Converse ASCII in letter
4) Message is decrypted
"""
#KEY
# Generate a ramdom prime number
#Fermat's little theorem
#Implementation from http://www.daniweb.com/software-development/python/code/216880/check-if-a-number-is-a-prime-number-python
#Recursive implementation of gcd algorithme
#Implementation of introduction to IT security cours
#RSA
#INTERFACE
rsa=RSA()
root= Tk()
root.title("RSA Encrypt")
interface = Interface(root, rsa)
interface.mainloop()
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
18,
198,
198,
37811,
49,
4090,
37811,
198,
198,
11748,
10688,
198,
11748,
4738,
198,
6738,
256,
74,
3849,
1330,
1635,
198,
6738,
256,
74,
3849,
1330,
256,
30488,
198,
198,
2,
48451,
30643,
... | 2.470588 | 459 |
from flask import Blueprint
from flask import request
import sys, random, time
sys.path.append(sys.path[0] + '/Helpers')
from Motors_Helper import Motors_Helper
from Virtual_Rubiks_Cube_Helper import Virtual_Rubiks_Cube_Helper
| [
6738,
42903,
1330,
39932,
198,
6738,
42903,
1330,
2581,
198,
11748,
25064,
11,
4738,
11,
640,
198,
198,
17597,
13,
6978,
13,
33295,
7,
17597,
13,
6978,
58,
15,
60,
1343,
31051,
12621,
19276,
11537,
198,
6738,
19292,
62,
47429,
1330,
1... | 3.454545 | 66 |
# Generated by Django 3.2.4 on 2021-06-02 11:11
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
| [
2,
2980,
515,
416,
37770,
513,
13,
17,
13,
19,
319,
33448,
12,
3312,
12,
2999,
1367,
25,
1157,
198,
198,
6738,
42625,
14208,
13,
10414,
1330,
6460,
198,
6738,
42625,
14208,
13,
9945,
1330,
15720,
602,
11,
4981,
198,
11748,
42625,
14... | 3.04918 | 61 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from glob import glob
from os.path import basename
from os.path import splitext
from setuptools import setup
from setuptools import find_packages
setup(
name="",
use_scm_version=True,
license="",
description="",
author="",
url="",
packages=find_packages("src"),
package_dir={"": "src"},
py_modules=[splitext(basename(path))[0] for path in glob('src/*.py')],
include_package_data=True,
zip_safe=False,
install_requires=_requires_from_file('requirements.txt'),
setup_requires=["pytest-runner"],
tests_require=["pytest", "pytest-cov"]
)
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
6738,
15095,
1330,
15095,
198,
6738,
28686,
13,
6978,
1330,
1615,
12453,
198,
6738,
28686,
13,
6978,
1330,
4328,
578,
7... | 2.580645 | 248 |
import os
import random
import time
import json
from locust import HttpLocust, TaskSet, task
from task_sets.baseTaskSet import baseTaskSet
import task_funcs.keystone_v2_base as keystone_base
import task_funcs.heat_v1_base as heat_base
import task_funcs.heat_v1_utility as heat_util
import task_funcs.nova_v2_utility as nova_util
class HeatSuspendResume(baseTaskSet):
""" task set designed to do insane, random, and valid things
via the heat api
"""
tasks = {heat_base.suspend_stack: 3,
heat_base.resume_stack: 7,
heat_base.list_stack_detail:3,
heat_base.find_stack_events:2,
heat_base.list_stack_events:7,
heat_base.find_stack_resources:7,
heat_base.list_stack_resources:4,
heat_base.get_stack_template:3,
nova_util.refresh_auth_token:1
}
| [
11748,
28686,
198,
11748,
4738,
198,
11748,
640,
198,
11748,
33918,
198,
198,
6738,
1179,
436,
1330,
367,
29281,
33711,
436,
11,
15941,
7248,
11,
4876,
198,
198,
6738,
4876,
62,
28709,
13,
8692,
25714,
7248,
1330,
2779,
25714,
7248,
198... | 2.234848 | 396 |
import cv2
import os
import numpy as np
import pickle
from src.lib import detect_face
github_readme = "https://github.com/mlabarrere/webcam-recognition/blob/master/README.md"
dictionary_people = dict()
dir_name = "data"
subject_folder_names = os.listdir(dir_name)
assert subject_folder_names, "You forgot to place a training folder.\nPlease refer to {}".format(github_readme)
print("Preparing data...")
print('Training for {}'.format(', '.join(subject_folder_names)))
faces, labels = prepare_training_data()
print("Data prepared\n")
print("Training the model...")
face_recognizer = cv2.face.LBPHFaceRecognizer_create()
face_recognizer.train(faces, np.array(labels))
print("Training complete\n")
print("Saving model...")
face_recognizer.save('model/model.xml')
with open('model/dict.pickle', 'wb') as handle:
pickle.dump(dictionary_people, handle, protocol=pickle.HIGHEST_PROTOCOL)
print("Saving done")
| [
11748,
269,
85,
17,
198,
11748,
28686,
198,
11748,
299,
32152,
355,
45941,
198,
11748,
2298,
293,
198,
6738,
12351,
13,
8019,
1330,
4886,
62,
2550,
198,
198,
12567,
62,
961,
1326,
796,
366,
5450,
1378,
12567,
13,
785,
14,
4029,
397,
... | 2.954839 | 310 |
# coding: utf-8
# Copyright (c) Max-Planck-Institut für Eisenforschung GmbH - Computational Materials Design (CM) Department
# Distributed under the terms of "New BSD License", see the LICENSE file.
import os
import unittest
from pyiron.base.project.generic import Project
from pyiron.base.job.wrapper import job_wrapper_function
| [
2,
19617,
25,
3384,
69,
12,
23,
198,
2,
15069,
357,
66,
8,
5436,
12,
20854,
694,
12,
6310,
270,
315,
277,
25151,
22633,
69,
669,
354,
2150,
402,
2022,
39,
532,
22476,
864,
24310,
8495,
357,
24187,
8,
2732,
198,
2,
4307,
6169,
73... | 3.387755 | 98 |
from django.views.generic import TemplateView
| [
6738,
42625,
14208,
13,
33571,
13,
41357,
1330,
37350,
7680,
628,
628,
628,
198
] | 3.714286 | 14 |
from __future__ import absolute_import
from __future__ import unicode_literals
from dateutil.relativedelta import relativedelta
from custom.icds_reports.const import AGG_DAILY_FEEDING_TABLE
from custom.icds_reports.utils.aggregation_helpers import month_formatter
from custom.icds_reports.utils.aggregation_helpers.distributed.base import BaseICDSAggregationDistributedHelper
| [
6738,
11593,
37443,
834,
1330,
4112,
62,
11748,
198,
6738,
11593,
37443,
834,
1330,
28000,
1098,
62,
17201,
874,
198,
198,
6738,
3128,
22602,
13,
2411,
265,
1572,
12514,
1330,
48993,
1572,
12514,
198,
6738,
2183,
13,
291,
9310,
62,
4892... | 3.5 | 108 |
#!/usr/bin/env python
#-*- coding: utf-8 -*-
import mechanize
import sys
reload(sys)#python的str默认是ascii编码,和unicode编码冲突,这一部分可以载入utf8
sys.setdefaultencoding('utf8')
br = mechanize.Browser()
br.set_handle_equiv(True)
br.set_handle_redirect(True)
br.set_handle_referer(True)
br.set_handle_robots(False)
br.set_handle_gzip(False)
br.set_handle_refresh(mechanize._http.HTTPRefreshProcessor(), max_time=1)
br.addheaders = [('User-agent','Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_3) AppleWebKit/535.20 (KHTML, like Gecko) Chrome/19.0.1036.7 Safari/535.20')]
br.open('https://www.baidu.com')
for form in br.forms():
print(form)
br.select_form(name='f')
br.form['wd'] = 'Python 网络爬虫'
br.submit()
print(br.response().read()) | [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
2,
12,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
198,
11748,
3962,
1096,
198,
11748,
25064,
198,
260,
2220,
7,
17597,
8,
2,
29412,
21410,
2536,
165,
119,
246,
164,
106... | 2.065527 | 351 |
from django.shortcuts import render
from config.utils import get_active_event
| [
6738,
42625,
14208,
13,
19509,
23779,
1330,
8543,
198,
198,
6738,
4566,
13,
26791,
1330,
651,
62,
5275,
62,
15596,
628,
628,
628,
198
] | 3.541667 | 24 |
from selenium.webdriver.common.by import By
import time
from random import uniform | [
6738,
384,
11925,
1505,
13,
12384,
26230,
13,
11321,
13,
1525,
1330,
2750,
198,
198,
11748,
640,
198,
6738,
4738,
1330,
8187
] | 3.772727 | 22 |
import numpy as np
import math | [
11748,
299,
32152,
355,
45941,
198,
11748,
10688
] | 3.75 | 8 |
#!/usr/bin/env python
# coding: utf-8
"""
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
"""
Python Dubbo Library Client Server - Setup
Created
2015-4-10 by Joe - https://github.com/JoeCao
"""
import os
from setuptools import setup, find_packages
THISDIR = os.path.dirname(os.path.abspath(__file__))
os.chdir(THISDIR)
VERSION = open("version.txt").readline().strip()
HOMEPAGE = "https://github.com/ofpay/dubbo-client-py"
DOWNLOAD_BASEURL = "https://github.com/ofpay/dubbo-client-py/raw/master/dist/"
DOWNLOAD_URL = DOWNLOAD_BASEURL + "dubbo-client-%s-py2.7.egg" % VERSION
setup(
name="dubbo-client",
version=VERSION,
description=(
"Python Dubbo Client"
),
long_description=open("README.md").read(),
keywords=(
"Dubbo, JSON-RPC, JSON, RPC, Client,"
"HTTP-Client, Remote Procedure Call, JavaScript Object Notation, "
),
author="Joe Cao",
author_email="chinalibra@gmail.com",
url=HOMEPAGE,
download_url=DOWNLOAD_URL,
packages=find_packages(),
classifiers=[
# "Development Status :: 1 - Planning",
# "Development Status :: 2 - Pre-Alpha",
# "Development Status :: 3 - Alpha",
"Development Status :: 4 - Beta",
# "Development Status :: 5 - Production/Stable",
"Environment :: Web Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Communications",
"Topic :: System :: Networking",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: HTTP Servers",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
install_requires=["kazoo>=2.0", "python-jsonrpc>=0.7.3"],
)
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
2,
19617,
25,
3384,
69,
12,
23,
198,
37811,
198,
49962,
284,
262,
24843,
10442,
5693,
357,
1921,
37,
8,
739,
530,
393,
517,
198,
18920,
5964,
11704,
13,
220,
4091,
262,
28536,
2393,
... | 2.921875 | 896 |
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^(?P<slide_id>\w+)/$', views.slide),
url(r'^(?P<slug>\w+).dzi$', views.dzi),
url(r'^(?P<slug>\w+).dzi.json$', views.properties),
url(r'^(?P<slug>\w+)_files/(?P<level>\d+)/(?P<col>\d+)_(?P<row>\d+)\.(?P<slideformat>jpeg|png)$', views.dztile),
url(r'^(?P<slug>\w+)_map/(?P<level>\d+)/(?P<col>\d+)_(?P<row>\d+)\.(?P<slideformat>jpeg|png)$', views.gmtile),
]
| [
6738,
42625,
14208,
13,
10414,
13,
6371,
82,
1330,
19016,
198,
198,
6738,
764,
1330,
5009,
198,
198,
6371,
33279,
82,
796,
685,
198,
220,
220,
220,
19016,
7,
81,
6,
61,
7,
30,
47,
27,
6649,
485,
62,
312,
29,
59,
86,
10,
20679,
... | 1.765625 | 256 |
# pip3 install gym
# pip3 install neat-python
# for gym stuff:
# apt install xvfb ffmpeg xorg-dev libsdl2-dev swig cmake
# pip3 install gym[box2d]
import multiprocessing
import os
import pickle
import neat
import numpy as np
import bird
import pygame
clock = pygame.time.Clock()
runs_per_net = 2
# Use the NN network phenotype and the discrete actuator force function.
if __name__ == '__main__':
run() | [
2,
7347,
18,
2721,
11550,
198,
2,
7347,
18,
2721,
15049,
12,
29412,
198,
198,
2,
329,
11550,
3404,
25,
220,
198,
2,
15409,
2721,
2124,
85,
21855,
31246,
43913,
2124,
2398,
12,
7959,
9195,
21282,
75,
17,
12,
7959,
1509,
328,
12067,
... | 2.923611 | 144 |
#!/usr/bin/env python3
# Check if number is prime, return TRUE or FALSE
print(is_prime(5))
print(is_prime(12))
print(is_prime(15))
print(is_prime(21))
print(is_prime(32))
print(is_prime(51))
print(is_prime(101))
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
18,
198,
198,
2,
6822,
611,
1271,
318,
6994,
11,
1441,
26751,
393,
26563,
198,
198,
4798,
7,
271,
62,
35505,
7,
20,
4008,
198,
4798,
7,
271,
62,
35505,
7,
1065,
4008,
198,
4798,
7,
27... | 2.45977 | 87 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
6738,
11593,
37443,
834,
1330,
28000,
1098,
62,
17201,
874,
198,
198,
6738,
42625,
14208,
13,
9945,
1330,
15720,
602,
11,
4981,
628
] | 2.891892 | 37 |
# import dependencies
import os
import sqlite3

import pandas as pd

# Read the iris dataset into pandas.
# BUGFIX: the original URL pointed at the GitHub HTML page (".../blob/...");
# read_csv needs the raw file contents, so use raw.githubusercontent.com.
data_url = "https://raw.githubusercontent.com/matterholt/APP_CRUD-v1/master/iris.csv"
headers = ["sepal_length", "sepal_width", "petal_length", "petal_width", "species"]
# header=None: the file carries no header row; column names come from `headers`.
# (Dropped the previous converters={"zip": str}: there is no "zip" column, so
# it was a silent no-op.)
data_tabel = pd.read_csv(data_url, header=None, names=headers)

# Clear example.db if it exists, so each run starts from a fresh database.
if os.path.exists("example.db"):
    os.remove("example.db")

# Create a database.
conn = sqlite3.connect("example.db")

# Add the data to our database; every column is stored as VARCHAR(256).
# conn is intentionally left open -- the query section below reuses it.
data_tabel.to_sql(
    "data_table",
    conn,
    dtype={
        "sepal_length": "VARCHAR(256)",
        "sepal_width": "VARCHAR(256)",
        "petal_length": "VARCHAR(256)",
        "petal_width": "VARCHAR(256)",
        "species": "VARCHAR(256)",
    },
)
"""
************************
Defining queries
SELECT, DELETE INSERT AND UPDATE queries
************************
"""
conn.row_factory = sqlite3.Row
# Make convenience functions for running SQL queries
#
# Select Query
# Insert query
# Delete Query
# Update Query
| [
2,
1330,
20086,
198,
11748,
28686,
198,
11748,
44161,
578,
18,
198,
11748,
19798,
292,
355,
279,
67,
198,
198,
2,
1100,
262,
27039,
287,
284,
3425,
5643,
198,
7890,
62,
6371,
796,
366,
5450,
1378,
12567,
13,
785,
14,
47635,
3937,
83... | 2.644279 | 402 |
# Read a temperature in degrees Celsius and convert it to Fahrenheit.
temp_c = float(input('Informe a temperatura em ºC: '))

# F = C * 9/5 + 32 (kept in the same operation order as the original).
temp_f = temp_c * 9 / 5 + 32

print('O valor convertido de {}ºC em fahrenheit é {}ºF'.format(temp_c, temp_f))
| [
2,
50110,
50041,
23781,
1430,
64,
8358,
443,
544,
334,
2611,
4124,
2541,
64,
795,
7933,
385,
269,
32495,
304,
10385,
64,
198,
2,
1845,
64,
277,
993,
34032,
13,
198,
198,
5276,
82,
3754,
796,
12178,
7,
15414,
10786,
818,
687,
68,
2... | 2.598039 | 102 |
import time
import mlbgame
| [
11748,
640,
198,
11748,
285,
23160,
6057,
628,
628
] | 3.333333 | 9 |
""" Module for interacting with Grafana HTTP API
View https://docs.grafana.org/http_api/ for comprehensive
documentation
"""
from typing import Mapping, Optional, Any
from requests import Session, Response
from .generic import update
class BaseApi():
    """Base class for Grafana HTTP API wrappers.

    Concrete endpoint classes are expected to derive from this.
    """
| [
37811,
19937,
329,
24986,
351,
7037,
69,
2271,
14626,
7824,
198,
7680,
3740,
1378,
31628,
13,
70,
32188,
2271,
13,
2398,
14,
4023,
62,
15042,
14,
329,
9815,
198,
22897,
341,
198,
37811,
198,
6738,
19720,
1330,
337,
5912,
11,
32233,
11... | 3.619048 | 84 |
from pony import *

p = Print()

# NOTE(review): unused in this chunk -- confirm before removing.
filename = "-i hpgl/p2.hpgl"

num = 50
for step in range(1, num):
    # Fraction of the way through the run, in [0, 1).
    frac = float(step) / float(num)

    # Assemble the option tokens for this pass (same tokens, same order,
    # as the original space-joined string after .split()).
    tokens = ["-r", "-l"]                    # reset
    tokens += ["-sp", "8"]                   # select pen
    tokens.append("-b")                      # draw border
    tokens += ["-sr", str(frac), str(frac)]  # scale
    if step % 5:
        # Non-multiples of 5: dashed line type, dash length grows with frac.
        tokens += ["-lt", "6", str(frac * 5)]
        tokens += ["-fs", str(step % 5)]
        if step % 2 == 0:                    # original: (i % 2 == False)
            tokens += ["-t", str(frac), "-st", ".2", ".2"]
    else:
        tokens += ["-lt", "2", str(100)]
        tokens += ["-fs", str(8)]
        tokens += ["-t", str(frac), "-st", ".4", ".2"]
    tokens.append("-p")                      # send to printer

    p.parse(tokens)
    p.printer.instantiate()
    p.send()
| [
6738,
26902,
1330,
1635,
198,
198,
79,
796,
12578,
7499,
198,
198,
34345,
796,
27444,
72,
27673,
4743,
14,
79,
17,
13,
24831,
4743,
1,
198,
198,
22510,
796,
2026,
198,
198,
1640,
1312,
287,
2837,
357,
16,
11,
997,
2599,
198,
220,
... | 1.974093 | 386 |
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from typing import Union, cast
| [
37811,
198,
492,
2438,
9800,
3712,
23459,
88,
13704,
367,
2381,
12144,
1279,
912,
4669,
13704,
13,
71,
2381,
12144,
31,
14816,
13,
785,
29,
198,
37811,
198,
198,
6738,
19720,
1330,
4479,
11,
3350,
628
] | 2.916667 | 36 |
#
# PySNMP MIB module CISCO-WPAN-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/CISCO-WPAN-MIB
# Produced by pysmi-0.3.4 at Wed May 1 12:21:30 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
# NOTE: machine-generated by pysmi -- regenerate from the ASN.1 source
# rather than hand-editing this file.
# Symbol imports from prerequisite MIB modules (resolved by mibBuilder at load time).
ObjectIdentifier, OctetString, Integer = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "OctetString", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsUnion, ValueRangeConstraint, ValueSizeConstraint, ConstraintsIntersection, SingleValueConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsUnion", "ValueRangeConstraint", "ValueSizeConstraint", "ConstraintsIntersection", "SingleValueConstraint")
ciscoMgmt, = mibBuilder.importSymbols("CISCO-SMI", "ciscoMgmt")
ifName, ifIndex = mibBuilder.importSymbols("IF-MIB", "ifName", "ifIndex")
ModuleCompliance, ObjectGroup, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "ObjectGroup", "NotificationGroup")
NotificationType, ModuleIdentity, TimeTicks, iso, ObjectIdentity, MibIdentifier, Unsigned32, Bits, Counter64, Gauge32, IpAddress, MibScalar, MibTable, MibTableRow, MibTableColumn, Counter32, Integer32 = mibBuilder.importSymbols("SNMPv2-SMI", "NotificationType", "ModuleIdentity", "TimeTicks", "iso", "ObjectIdentity", "MibIdentifier", "Unsigned32", "Bits", "Counter64", "Gauge32", "IpAddress", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Counter32", "Integer32")
TextualConvention, TruthValue, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "TruthValue", "DisplayString")
# Module identity: enterprises.cisco(9).ciscoMgmt(9).ciscoWpanMIB(819).
ciscoWpanMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 9, 9, 819))
ciscoWpanMIB.setRevisions(('2013-11-19 00:00',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    if mibBuilder.loadTexts: ciscoWpanMIB.setRevisionsDescriptions(('Initial version of this MIB module.',))
if mibBuilder.loadTexts: ciscoWpanMIB.setLastUpdated('201311190000Z')
if mibBuilder.loadTexts: ciscoWpanMIB.setOrganization('Cisco Systems, Inc.')
if mibBuilder.loadTexts: ciscoWpanMIB.setContactInfo('Cisco Systems Customer Service Postal: 170 W Tasman Drive San Jose, CA 95134 USA Tel: +1 800 553-NETS E-mail: cs-cgr@cisco.com')
if mibBuilder.loadTexts: ciscoWpanMIB.setDescription("This MIB module defines management objects for configuration and monitoring of Wireless Personal Area Network (WPAN). Personal Area Network (PAN) is a network for interconnecting devices centered on an individual person's workspace. A Wireless Personal Area Network (WPAN) is a PAN in which the connections are wireless. WPAN is based on the standard IEEE 802.15.4. *** ABBREVIATIONS, ACRONYMS, AND SYMBOLS *** PAN - Personal Area Network RPL - IPv6 Routing Protocol for Low power and Lossy networks WPAN - Wireless Personal Area Network ")
# Top-level OID layout: notifications (.0), objects (.1), conformance (.2).
ciscoWpanMIBNotifs = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 819, 0))
ciscoWpanMIBObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 819, 1))
ciscoWpanMIBConform = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 819, 2))
ciscoWpanConfig = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 819, 1, 1))
# --- Per-interface WPAN configuration/status table, indexed by ifIndex. ---
cwpanInterfaceTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 819, 1, 1, 1), )
if mibBuilder.loadTexts: cwpanInterfaceTable.setStatus('current')
if mibBuilder.loadTexts: cwpanInterfaceTable.setDescription('This table contains a list of the WPAN interfaces on this device.')
cwpanInterfaceEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 819, 1, 1, 1, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: cwpanInterfaceEntry.setStatus('current')
if mibBuilder.loadTexts: cwpanInterfaceEntry.setDescription('An entry containing the management information for a particular WPAN interface. An entry is created when a WPAN interface has been added to ifTable. An entry is deleted when a WPAN interface has been removed from ifTable.')
cwpanIfServiceStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 819, 1, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("start", 1), ("stop", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwpanIfServiceStatus.setStatus('current')
if mibBuilder.loadTexts: cwpanIfServiceStatus.setDescription('This object indicates the status of WPAN service associated with this given interface. start (1) - WPAN service start stop (2) - WPAN service stop')
cwpanIfServiceStatusReason = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 819, 1, 1, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10))).clone(namedValues=NamedValues(("unknown", 1), ("powerDown", 2), ("powerUp", 3), ("moduleRemove", 4), ("moduleReload", 5), ("driverStop", 6), ("driverStart", 7), ("firmwareUpgrade", 8), ("firmwareReset", 9), ("watchDog", 10)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwpanIfServiceStatusReason.setStatus('current')
if mibBuilder.loadTexts: cwpanIfServiceStatusReason.setDescription('This object indicates the reason of the last state change of the WPAN service associated with this given interface. unknown (1) - unknown powerDown (2) - power down WPAN module powerUp (3) - power up WPAN module moduleRemove (4) - module removed moduleReload (5) - power cycle WPAN module driverStop (6) - driver stop driverStart (7) - driver start firmwareUpgrade (8) - firmware upgrade firmwareReset (9) - firmware reset watchDog (10) - watchdog triggered')
cwpanIfRplTableResetReason = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 819, 1, 1, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("unknown", 1), ("manuallyClear", 2), ("configChange", 3), ("interfaceDown", 4), ("timeout", 5), ("serviceStop", 6)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwpanIfRplTableResetReason.setStatus('current')
if mibBuilder.loadTexts: cwpanIfRplTableResetReason.setDescription('This object indicates the last reset reason of the WPAN RPL table associated with this given interface. unknown (1) - unknown manuallyClear (2) - RPL table was manually cleared configChange (3) - WPAN configuration changed interfaceDown (4) - WPAN interface was down timeout (5) - RPL table has not been updated for a long time serviceStop (6) - WPAN service has stopped')
cwpanIfRplTableNodes = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 819, 1, 1, 1, 1, 4), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwpanIfRplTableNodes.setStatus('current')
if mibBuilder.loadTexts: cwpanIfRplTableNodes.setDescription('This object indicates the total number of entries in the WPAN RPL table for this given interface.')
cwpanIfRplTableMajorThreshNodes = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 819, 1, 1, 1, 1, 5), Unsigned32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cwpanIfRplTableMajorThreshNodes.setStatus('current')
if mibBuilder.loadTexts: cwpanIfRplTableMajorThreshNodes.setDescription('This object specifies the major node threshold for the WPAN RPL table. When the value of the corresponding instance of cwpanIfRplTableNodes is greater than this value, a cwpanRisingIfRplTblMajorThreshNodesNotif notification will be generated. After a cwpanRisingIfRplTblMajorThreshNodesNotif is generated, another such notification will not be generated if the value of the corresponding instance of cwpanIfRplTableNodes stays above this value. When the value of the corresponding instance of cwpanIfRplTableNodes becomes less than or equal to this value after a cwpanRisingIfRplTblMajorThreshNodesNotif notification is generated, a cwpanFallingIfRplTblMajorThreshNodesNotif notification will be generated. After a cwpanFallingIfRplTblMajorThreshNodesNotif is generated, another such notification will not be generated if the value of the corresponding instance of cwpanIfRplTableNodes does not exceed this value.')
cwpanIfRplTableMinorThreshNodes = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 819, 1, 1, 1, 1, 6), Unsigned32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cwpanIfRplTableMinorThreshNodes.setStatus('current')
if mibBuilder.loadTexts: cwpanIfRplTableMinorThreshNodes.setDescription('This object specifies the threshold for the RPL table nodes. When the value of the corresponding instance of cwpanIfRplTableNodes is greater than this threshold, a cwpanRisingIfRplTblMinorThreshNodesNotif notification will be generated. After a cwpanRisingIfRplTblMinorThreshNodesNotif is generated, another such notification will not be generated if the value of the corresponding instance of cwpanIfRplTableNodes stays above this threshold. When the value of the corresponding instance of cwpanIfRplTableNodes becomes less than or equal to this threshold after a cwpanRisingIfRplTblMinorThreshNodesNotif notification is generated, a cwpanFallingIfRplTblMinorThreshNodesNotif notification will be generated. After a cwpanFallingIfRplTblMinorThreshNodesNotif is generated, another such notification will not be generated if the value of the corresponding instance of cwpanIfRplTableNodes does not exceed this threshold.')
# Master enable switch for all WPAN notifications defined below.
cwpanNotificationEnable = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 819, 1, 1, 2), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cwpanNotificationEnable.setStatus('current')
if mibBuilder.loadTexts: cwpanNotificationEnable.setDescription("This object specifies whether the system generates the following notifications: cwpanServiceStatusChangeNotif cwpanRplTableResetNotif cwpanRisingIfRplTblMinorThreshNodesNotif cwpanFallingIfRplTblMinorThreshNodesNotif cwpanRisingIfRplTblMajorThreshNodesNotif cwpanFallingIfRplTblMajorThreshNodesNotif A value of 'false' will prevent any of the above notifications from being generated by this system.")
# --- Notification definitions. ---
cwpanServiceStatusChangeNotif = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 819, 0, 1)).setObjects(("IF-MIB", "ifName"), ("CISCO-WPAN-MIB", "cwpanIfServiceStatusReason"))
if mibBuilder.loadTexts: cwpanServiceStatusChangeNotif.setStatus('current')
if mibBuilder.loadTexts: cwpanServiceStatusChangeNotif.setDescription('This notification is generated if the WPAN service status has been changed.')
cwpanRplTableResetNotif = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 819, 0, 2)).setObjects(("IF-MIB", "ifName"), ("CISCO-WPAN-MIB", "cwpanIfRplTableResetReason"))
if mibBuilder.loadTexts: cwpanRplTableResetNotif.setStatus('current')
if mibBuilder.loadTexts: cwpanRplTableResetNotif.setDescription('This notification is generated if a WPAN RPL table has been reset.')
cwpanRisingIfRplTblMinorThreshNodesNotif = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 819, 0, 3)).setObjects(("IF-MIB", "ifName"), ("CISCO-WPAN-MIB", "cwpanIfRplTableNodes"), ("CISCO-WPAN-MIB", "cwpanIfRplTableMinorThreshNodes"), ("CISCO-WPAN-MIB", "cwpanIfRplTableMajorThreshNodes"))
if mibBuilder.loadTexts: cwpanRisingIfRplTblMinorThreshNodesNotif.setStatus('current')
if mibBuilder.loadTexts: cwpanRisingIfRplTblMinorThreshNodesNotif.setDescription('This notification is generated if the number of nodes in the RPL table is greater than the threshold in cwpanIfRplTableMinorThreshNodes.')
cwpanFallingIfRplTblMinorThreshNodesNotif = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 819, 0, 4)).setObjects(("IF-MIB", "ifName"), ("CISCO-WPAN-MIB", "cwpanIfRplTableNodes"), ("CISCO-WPAN-MIB", "cwpanIfRplTableMinorThreshNodes"), ("CISCO-WPAN-MIB", "cwpanIfRplTableMajorThreshNodes"))
if mibBuilder.loadTexts: cwpanFallingIfRplTblMinorThreshNodesNotif.setStatus('current')
if mibBuilder.loadTexts: cwpanFallingIfRplTblMinorThreshNodesNotif.setDescription('This notification is generated if the number of nodes in a RPL table become less than or equal to the cwpanIfRplTableMinorThreshNodes value after a cwpanRisingIfRplTblMinorThreshNodesNotif notification is generated.')
cwpanRisingIfRplTblMajorThreshNodesNotif = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 819, 0, 5)).setObjects(("IF-MIB", "ifName"), ("CISCO-WPAN-MIB", "cwpanIfRplTableNodes"), ("CISCO-WPAN-MIB", "cwpanIfRplTableMajorThreshNodes"))
if mibBuilder.loadTexts: cwpanRisingIfRplTblMajorThreshNodesNotif.setStatus('current')
if mibBuilder.loadTexts: cwpanRisingIfRplTblMajorThreshNodesNotif.setDescription('This notification is generated if the number of nodes in a RPL table is greater than the value of cwpanIfRplTableMajorThreshNodes.')
cwpanFallingIfRplTblMajorThreshNodesNotif = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 819, 0, 6)).setObjects(("IF-MIB", "ifName"), ("CISCO-WPAN-MIB", "cwpanIfRplTableNodes"), ("CISCO-WPAN-MIB", "cwpanIfRplTableMajorThreshNodes"))
if mibBuilder.loadTexts: cwpanFallingIfRplTblMajorThreshNodesNotif.setStatus('current')
if mibBuilder.loadTexts: cwpanFallingIfRplTblMajorThreshNodesNotif.setDescription('This notification is generated when the number of nodes in a RPL table become less than cwpanIfRplTableMajorThreshNodes value after a cwpanRisingIfRplTblMajorThreshNodesNotif notification is generated.')
# --- Conformance: compliance statement and object/notification groups. ---
ciscoWpanMIBCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 819, 2, 1))
ciscoWpanMIBGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 819, 2, 2))
ciscoWpanMIBModuleCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 9, 9, 819, 2, 1, 1)).setObjects(("CISCO-WPAN-MIB", "cwpanInterfaceInfoGroup"), ("CISCO-WPAN-MIB", "cwpanNotificationControlGroup"), ("CISCO-WPAN-MIB", "cwpanNotificationGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    ciscoWpanMIBModuleCompliance = ciscoWpanMIBModuleCompliance.setStatus('current')
if mibBuilder.loadTexts: ciscoWpanMIBModuleCompliance.setDescription('The compliance statement for entities which implement the WPAN interface.')
cwpanInterfaceInfoGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 819, 2, 2, 1)).setObjects(("CISCO-WPAN-MIB", "cwpanIfServiceStatus"), ("CISCO-WPAN-MIB", "cwpanIfServiceStatusReason"), ("CISCO-WPAN-MIB", "cwpanIfRplTableResetReason"), ("CISCO-WPAN-MIB", "cwpanIfRplTableNodes"), ("CISCO-WPAN-MIB", "cwpanIfRplTableMajorThreshNodes"), ("CISCO-WPAN-MIB", "cwpanIfRplTableMinorThreshNodes"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    cwpanInterfaceInfoGroup = cwpanInterfaceInfoGroup.setStatus('current')
if mibBuilder.loadTexts: cwpanInterfaceInfoGroup.setDescription('A collection of objects providing the WPAN interface management information on the device.')
cwpanNotificationControlGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 819, 2, 2, 2)).setObjects(("CISCO-WPAN-MIB", "cwpanNotificationEnable"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    cwpanNotificationControlGroup = cwpanNotificationControlGroup.setStatus('current')
if mibBuilder.loadTexts: cwpanNotificationControlGroup.setDescription('A collection of objects providing control of WPAN interface related notifications.')
cwpanNotificationGroup = NotificationGroup((1, 3, 6, 1, 4, 1, 9, 9, 819, 2, 2, 3)).setObjects(("CISCO-WPAN-MIB", "cwpanServiceStatusChangeNotif"), ("CISCO-WPAN-MIB", "cwpanRplTableResetNotif"), ("CISCO-WPAN-MIB", "cwpanRisingIfRplTblMinorThreshNodesNotif"), ("CISCO-WPAN-MIB", "cwpanFallingIfRplTblMinorThreshNodesNotif"), ("CISCO-WPAN-MIB", "cwpanRisingIfRplTblMajorThreshNodesNotif"), ("CISCO-WPAN-MIB", "cwpanFallingIfRplTblMajorThreshNodesNotif"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    cwpanNotificationGroup = cwpanNotificationGroup.setStatus('current')
if mibBuilder.loadTexts: cwpanNotificationGroup.setDescription('A collection of notifications for WPAN service and WPAN RPL table.')
# Export symbols so dependent MIB modules can import them by name.
mibBuilder.exportSymbols("CISCO-WPAN-MIB", cwpanFallingIfRplTblMajorThreshNodesNotif=cwpanFallingIfRplTblMajorThreshNodesNotif, cwpanIfServiceStatus=cwpanIfServiceStatus, cwpanInterfaceTable=cwpanInterfaceTable, cwpanIfRplTableMinorThreshNodes=cwpanIfRplTableMinorThreshNodes, cwpanServiceStatusChangeNotif=cwpanServiceStatusChangeNotif, ciscoWpanMIBModuleCompliance=ciscoWpanMIBModuleCompliance, ciscoWpanMIBNotifs=ciscoWpanMIBNotifs, cwpanRisingIfRplTblMajorThreshNodesNotif=cwpanRisingIfRplTblMajorThreshNodesNotif, PYSNMP_MODULE_ID=ciscoWpanMIB, ciscoWpanMIBConform=ciscoWpanMIBConform, cwpanIfRplTableResetReason=cwpanIfRplTableResetReason, cwpanNotificationEnable=cwpanNotificationEnable, cwpanNotificationControlGroup=cwpanNotificationControlGroup, cwpanNotificationGroup=cwpanNotificationGroup, ciscoWpanMIB=ciscoWpanMIB, ciscoWpanConfig=ciscoWpanConfig, ciscoWpanMIBCompliances=ciscoWpanMIBCompliances, cwpanRisingIfRplTblMinorThreshNodesNotif=cwpanRisingIfRplTblMinorThreshNodesNotif, ciscoWpanMIBGroups=ciscoWpanMIBGroups, cwpanInterfaceInfoGroup=cwpanInterfaceInfoGroup, cwpanFallingIfRplTblMinorThreshNodesNotif=cwpanFallingIfRplTblMinorThreshNodesNotif, ciscoWpanMIBObjects=ciscoWpanMIBObjects, cwpanInterfaceEntry=cwpanInterfaceEntry, cwpanIfRplTableNodes=cwpanIfRplTableNodes, cwpanRplTableResetNotif=cwpanRplTableResetNotif, cwpanIfRplTableMajorThreshNodes=cwpanIfRplTableMajorThreshNodes, cwpanIfServiceStatusReason=cwpanIfServiceStatusReason)
| [
2,
198,
2,
9485,
15571,
7378,
337,
9865,
8265,
36159,
8220,
12,
25527,
1565,
12,
8895,
33,
357,
4023,
1378,
16184,
76,
489,
8937,
13,
785,
14,
79,
893,
11632,
8,
198,
2,
7054,
45,
13,
16,
2723,
2393,
1378,
14,
14490,
14,
67,
615... | 2.880211 | 5,877 |
"""
Exception raised when a body request can not be decode from JSON and/or it's content doesn't match the minimum
specified values by the app
"""
from rinzler.exceptions import RinzlerHttpException
__author__ = "Rinzler<github.com/feliphebueno>"
class InvalidInputException(RinzlerHttpException):
    """Raised when a request body cannot be decoded from JSON, or when its
    content does not carry the minimum values required by the app.
    """

    # Maps to HTTP 400 Bad Request.
    status_code = 400
    exception_name = "Bad Request"
| [
37811,
198,
16922,
4376,
618,
257,
1767,
2581,
460,
407,
307,
36899,
422,
19449,
290,
14,
273,
340,
338,
2695,
1595,
470,
2872,
262,
5288,
198,
23599,
3815,
416,
262,
598,
198,
37811,
198,
6738,
374,
259,
89,
1754,
13,
1069,
11755,
... | 3.225806 | 124 |
from torch.autograd import Function
from .backend import _backend
__all__ = ['ball_query']
def ball_query(centers_coords, points_coords, radius, num_neighbors):
    """Find up to ``num_neighbors`` points within ``radius`` of each center.

    :param centers_coords: coordinates of centers, FloatTensor[B, 3, M]
    :param points_coords: coordinates of points, FloatTensor[B, 3, N]
    :param radius: float, search radius of the ball query
    :param num_neighbors: int, maximum number of neighbors per center
    :return: neighbor indices, IntTensor[B, M, U]
    """
    # The native backend requires contiguous memory layouts.
    return _backend.ball_query(
        centers_coords.contiguous(),
        points_coords.contiguous(),
        radius,
        num_neighbors,
    )
| [
6738,
28034,
13,
2306,
519,
6335,
1330,
15553,
198,
198,
6738,
764,
1891,
437,
1330,
4808,
1891,
437,
198,
198,
834,
439,
834,
796,
37250,
1894,
62,
22766,
20520,
628,
198,
4299,
2613,
62,
22766,
7,
1087,
364,
62,
1073,
3669,
11,
21... | 2.513699 | 292 |
'''
Created on Aug 1, 2012
@author: Lukasz Kreczko
Email: Lukasz.Kreczko@cern.ch
'''
import FILES
import tools.ROOTFileReader as FileReader
from ROOT import gROOT
import tools.FileUtilities as FileUtils
# Output path pattern: filled with (analysisType, bjetbin).
fileTemplate = 'data/correctionFactors/correctionFactors_%s_%s_JSON.txt'

# Monte-Carlo samples to process (nominal ttbar generators plus
# matching/scale systematic variations).
samples = [
    'TTJet', 'POWHEG', 'PYTHIA6', 'MCatNLO',
    'TTJets-matchingdown', 'TTJets-matchingup',
    'TTJets-scaledown', 'TTJets-scaleup',
]

# MET bin edges (GeV).
metbins = ['0-25', '25-45', '45-70', '70-100', '100-inf']

# Nominal MET reconstruction flavours.
metTypes = ['patMETsPFlow', 'patType1CorrectedPFMet', 'patType1p2CorrectedPFMet']

# Systematic-shifted MET collections: every (correction level, uncertainty
# source, shift direction) combination, in the original enumeration order.
metsystematics_sources = [
    prefix + source + direction
    for prefix in ("patType1p2CorrectedPFMet", "patType1CorrectedPFMet", "patPFMet")
    for source in ("ElectronEn", "MuonEn", "TauEn", "JetRes", "JetEn", "UnclusteredEn")
    for direction in ("Up", "Down")
]

# The systematic variants are processed together with the nominal MET types.
metTypes += metsystematics_sources
if __name__ == "__main__":
from optparse import OptionParser
gROOT.SetBatch(True)
gROOT.ProcessLine('gErrorIgnoreLevel = 1001;')
parser = OptionParser()
parser.add_option("-b", "--bjetbin", dest="bjetbin", default='2m',
help="set b-jet multiplicity for analysis. Options: exclusive: 0-3, inclusive (N or more): 0m, 1m, 2m, 3m, 4m")
parser.add_option("-a", "--analysisType", dest="analysisType", default='EPlusJets',
help="set analysis type: EPlusJets or MuPlusJets")
parser.add_option("-t", "--test",
action="store_true", dest="test", default=False,
help="Run test")
translateOptions = {
'0':'0btag',
'1':'1btag',
'2':'2btags',
'3':'3btags',
'0m':'0orMoreBtag',
'1m':'1orMoreBtag',
'2m':'2orMoreBtags',
'3m':'3orMoreBtags',
'4m':'4orMoreBtags',
}
(options, args) = parser.parse_args()
bjetbin = translateOptions[options.bjetbin]
analysisType = options.analysisType
correctionFactors = {}
for sample in samples:
correctionFactors[sample] = {}
for metType in metTypes:
# variables = getMETVariables(analysisType, sample, metType, bjetbin)
variables = getMETVariablesFrom2DPlot(analysisType, sample, metType, bjetbin)
correctionFactors[sample][metType] = variables['correctionFactors']
saveToFile(correctionFactors, analysisType, bjetbin)
| [
7061,
6,
198,
41972,
319,
2447,
352,
11,
2321,
198,
198,
31,
9800,
25,
28102,
292,
89,
509,
8344,
89,
7204,
198,
198,
15333,
25,
28102,
292,
89,
13,
42,
8344,
89,
7204,
31,
30903,
13,
354,
198,
7061,
6,
198,
198,
11748,
34020,
1... | 1.955655 | 2,007 |
# Copyright 2014-2017 Luc Saffre
# License: BSD (see file COPYING for details)
"""See :doc:`/specs/clients`.
.. autosummary::
:toctree:
desktop
"""
from lino.api import ad, _
| [
2,
15069,
1946,
12,
5539,
7598,
311,
2001,
260,
198,
2,
13789,
25,
347,
10305,
357,
3826,
2393,
27975,
45761,
329,
3307,
8,
628,
198,
37811,
6214,
1058,
15390,
25,
63,
14,
4125,
6359,
14,
565,
2334,
44646,
198,
198,
492,
44619,
388,... | 2.439024 | 82 |
#
# PySNMP MIB module AP553-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/AP553-MIB
# Produced by pysmi-0.3.4 at Wed May 1 11:22:57 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, Integer, OctetString = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "Integer", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
SingleValueConstraint, ValueRangeConstraint, ValueSizeConstraint, ConstraintsIntersection, ConstraintsUnion = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ValueRangeConstraint", "ValueSizeConstraint", "ConstraintsIntersection", "ConstraintsUnion")
linkProbe, = mibBuilder.importSymbols("INNOVX-CORE-MIB", "linkProbe")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
Unsigned32, IpAddress, MibScalar, MibTable, MibTableRow, MibTableColumn, NotificationType, MibIdentifier, Gauge32, Counter32, ObjectIdentity, Integer32, TimeTicks, Counter64, ModuleIdentity, iso, Bits = mibBuilder.importSymbols("SNMPv2-SMI", "Unsigned32", "IpAddress", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "NotificationType", "MibIdentifier", "Gauge32", "Counter32", "ObjectIdentity", "Integer32", "TimeTicks", "Counter64", "ModuleIdentity", "iso", "Bits")
DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention")
# OID layout of the AP553 frame-relay probe MIB, rooted under the
# INNOVX-CORE-MIB linkProbe node (enterprises 1.3.6.1.4.1.498.22.1.2.*).
# Four top-level sub-trees: configuration, status, performance, traps.
probeConfig = MibIdentifier((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1))
probeStat = MibIdentifier((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2))
probePerform = MibIdentifier((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 3))
trapStatus = MibIdentifier((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 4))
# Configuration sub-nodes under probeConfig (...1.2.1.*): channel, LMI,
# probe-function, trap, PVC configuration and the MIB-version leaf.
chanConfig = MibIdentifier((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 1))
lmiConfig = MibIdentifier((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 2))
proFunConfig = MibIdentifier((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 3))
trapConfig = MibIdentifier((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 4))
pvcConfig = MibIdentifier((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 5))
probeVersion = MibIdentifier((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 6))
# Alarm status node under trapStatus (...1.2.4.1).
alarmStatus = MibIdentifier((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 4, 1))
# --- probeVersion leaf: MIB revision string ('X.YZT', up to 5 chars) ---
frProbeMIBversion = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 6, 1), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 5))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: frProbeMIBversion.setStatus('mandatory')
if mibBuilder.loadTexts: frProbeMIBversion.setDescription("Identifies the version of the MIB as 'X.YZT' where: X: Major Revision (1-9) Y: Minor Revision (0-9) Z: Typographical Revision (0-9) T: Test Revision (A-Z) ")
# --- chanConfig scalars (...1.2.1.1.*): channel-level LMI/IP settings ---
lmiOperation = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("off", 1), ("on", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: lmiOperation.setStatus('mandatory')
if mibBuilder.loadTexts: lmiOperation.setDescription(' LMI operation, on -- enabled, off -- disabled WEBFLAG Off On WEBEND ')
lmiDLCI = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1))).clone(namedValues=NamedValues(("dlci-0", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: lmiDLCI.setStatus('mandatory')
if mibBuilder.loadTexts: lmiDLCI.setDescription('The DLCI # used for LMI polls, the default value is dlci-0 WEBFLAG DLCI-0 WEBEND ')
ipDLCI = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(16, 991))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipDLCI.setStatus('mandatory')
if mibBuilder.loadTexts: ipDLCI.setDescription('Valid Inband IP DLCI ranges from 16 to 991')
ipEncapp = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("rawIP", 1), ("ietf-IP", 2), ("snap-IP", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipEncapp.setStatus('mandatory')
if mibBuilder.loadTexts: ipEncapp.setDescription(' Defines the IP DLCI Encapsulation method WEBFLAG RAW IETF SNAP WEBEND ')
# Aggregate channel rate is reported by the device, hence read-only.
chanAggregateRate = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(56000, 2048000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: chanAggregateRate.setStatus('mandatory')
if mibBuilder.loadTexts: chanAggregateRate.setDescription('Displays the current aggregate channel rate.')
# --- lmiConfig scalars (...1.2.1.2.*): LMI protocol parameters ---
# N391/N392/N393 counters and T391/T392 timers follow the standard
# frame-relay LMI parameter names (Annex A / Annex D).
lmiType = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 2, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("annexA", 1), ("annexD", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: lmiType.setStatus('mandatory')
if mibBuilder.loadTexts: lmiType.setDescription("Defines the network's LMI type. WEBFLAG Annex-A Annex-D WEBEND ")
lmiN391 = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 2, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: lmiN391.setStatus('mandatory')
if mibBuilder.loadTexts: lmiN391.setDescription(' N391 Full Enquiry Interval Counter ranges from 1 to 255, the default value is 6.')
lmiNet392 = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 2, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 10))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: lmiNet392.setStatus('mandatory')
if mibBuilder.loadTexts: lmiNet392.setDescription(' N392 Network Interface Down Threshold Counter ranges from 1 to 10, the default value is 3.')
lmiUser392 = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 2, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 10))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: lmiUser392.setStatus('mandatory')
if mibBuilder.loadTexts: lmiUser392.setDescription(' N392 User Interface Down Threshold Counter ranges from 1 to 10, the default value is 3.')
lmiNet393 = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 2, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 10))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: lmiNet393.setStatus('mandatory')
if mibBuilder.loadTexts: lmiNet393.setDescription(' N393 Network Interface Down Window Counter ranges from 1 to 10, the default value is 4.')
lmiUser393 = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 2, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 10))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: lmiUser393.setStatus('mandatory')
if mibBuilder.loadTexts: lmiUser393.setDescription(' N393 User Interface Down Window Counter ranges from 1 to 10, the default value is 4.')
t391Timer = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 2, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(5, 30))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: t391Timer.setStatus('mandatory')
if mibBuilder.loadTexts: t391Timer.setDescription(' T391 Status Poll Interval Timer ranges from 5 to 30, the default value is 10, which would always less than to T392 Timer.')
t392Timer = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 2, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(5, 30))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: t392Timer.setStatus('mandatory')
if mibBuilder.loadTexts: t392Timer.setDescription(' T392 Polling Verification Interval Timer ranges from 5 to 30, the default value is 15 should always be greater than T391 Timer.')
maxInfoLength = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 2, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(400, 4000))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: maxInfoLength.setStatus('mandatory')
if mibBuilder.loadTexts: maxInfoLength.setDescription('Maximum supported frame information length ranges from 400 to 4000, the default value is 4000.')
# --- proFunConfig scalars (...1.2.1.3.*): probe function/role settings
# and manual/automatic PVC table maintenance ---
unitType = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 3, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("master", 1), ("remote", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: unitType.setStatus('mandatory')
if mibBuilder.loadTexts: unitType.setDescription('Defines the ap553 as a master or remote unit in the network. This option is used to define the probe(s) installed on the local management network. As a master, PVC hiding is disabled, which allows discovery of remotely managed probes. WEBFLAG Master Remote WEBEND ')
probeMode = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 3, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("generator", 1), ("responder", 2), ("both", 3), ("disabled", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: probeMode.setStatus('mandatory')
if mibBuilder.loadTexts: probeMode.setDescription(' Defines the probe mode as a generator, responder, or both or disabled. WEBFLAG Generator Responder Generator & Responder Disabled WEBEND ')
# The following are device-reported values, hence read-only.
pollPeriod = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 3, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 60))).setMaxAccess("readonly")
if mibBuilder.loadTexts: pollPeriod.setStatus('mandatory')
if mibBuilder.loadTexts: pollPeriod.setDescription('Defines the frequency of sending the polling frame by the link probe, from 1 to 60 seconds. pollPeriod applies to all PVCs that are probed.')
globalTC = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 3, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 100))).setMaxAccess("readonly")
if mibBuilder.loadTexts: globalTC.setStatus('mandatory')
if mibBuilder.loadTexts: globalTC.setDescription('This parameter defines the committed rate measurement time interval (in seconds) which can be calculated as Bc/CIR, currently we only allow this value to be 1 sec.')
pvcCount = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 3, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 127))).setMaxAccess("readonly")
if mibBuilder.loadTexts: pvcCount.setStatus('mandatory')
if mibBuilder.loadTexts: pvcCount.setDescription('PVC count defines the total number of PVCs which are being probed. The value is between 1 to 127. ')
probeTokenSize = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 3, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(19, 4096))).setMaxAccess("readonly")
if mibBuilder.loadTexts: probeTokenSize.setStatus('mandatory')
if mibBuilder.loadTexts: probeTokenSize.setDescription('Defines the probe token size.')
# pvcAdd/pvcDelete are action objects (write-only): writing a DLCI adds
# it to / removes it from the PVC table.
pvcAdd = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 3, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(16, 991))).setMaxAccess("writeonly")
if mibBuilder.loadTexts: pvcAdd.setStatus('mandatory')
if mibBuilder.loadTexts: pvcAdd.setDescription('This object allows for manual addition of a PVC to the existing PVC table, the added PVC must not exist before it is added.')
pvcDelete = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 3, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(16, 991))).setMaxAccess("writeonly")
if mibBuilder.loadTexts: pvcDelete.setStatus('mandatory')
if mibBuilder.loadTexts: pvcDelete.setDescription('This object allows for manual deletion of a PVC from the existing PVC table, the deleted PVC must exist before it was deleted.')
pvcDiscovery = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 3, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("initiate", 1), ("inProgress", 2), ("complete", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: pvcDiscovery.setStatus('mandatory')
if mibBuilder.loadTexts: pvcDiscovery.setDescription('This object automatically adds PVCs as seen by the LMI protocol. It controls the probe ability to read all pvcs as reported by the LMI process and update the probe pvc data table as controlled by the pvcCirEirTable. Values of 2 and 3 cannot be set and have no affect.')
pvcOperStateChangeTrapSeverity = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 4, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("inhibit", 1), ("critical", 2), ("major", 3), ("minor", 4), ("warning", 5), ("info", 6)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: pvcOperStateChangeTrapSeverity.setStatus('mandatory')
if mibBuilder.loadTexts: pvcOperStateChangeTrapSeverity.setDescription("Controls the reporting of and defines the severity of the pvc state change trap which may be sent when a PVC's operational status is changed via the pvcOperation object. WEBFLAG Inhibit Critical Major Minor Warning Info WEBEND ")
realTimeTrapSeverity = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 4, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("inhibit", 1), ("critical", 2), ("major", 3), ("minor", 4), ("warning", 5), ("info", 6)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: realTimeTrapSeverity.setStatus('mandatory')
if mibBuilder.loadTexts: realTimeTrapSeverity.setDescription('Controls the reporting of and defines the severity of ALL real time alarm conditions: Channel Load To DTE, Channel Load TO DCE, PVC Load To DCE, PVC Load To DTE, and PVC Round Trip Delay. WEBFLAG Inhibit Critical Major Minor Warning Info WEBEND ')
realTimeChanLoadToDCEThresh = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 4, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 100))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: realTimeChanLoadToDCEThresh.setStatus('mandatory')
if mibBuilder.loadTexts: realTimeChanLoadToDCEThresh.setDescription('Defines the maximum allowed real-time channel load calculated every second, expressed in percentage. If the actual load is more than this threshold, a realTimeTrapSeverity exception trap may be sent as specified by the realTimeTrapSeverity object value.')
realTimeChanLoadToDCEThreshVar = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 4, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 100))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: realTimeChanLoadToDCEThreshVar.setStatus('mandatory')
if mibBuilder.loadTexts: realTimeChanLoadToDCEThreshVar.setDescription('Defines the variation range for real-time channel load, expressed in percentage.. If within the current TC interval, the variation of the actual utilization is more than that range compared with the previous trap value, another realTimeTrapSeverity exception trap may be sent as specified by the realTimeTrapSeverity object value.')
avgChanLoadToDCETrapSeverity = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 4, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("inhibit", 1), ("critical", 2), ("major", 3), ("minor", 4), ("warning", 5), ("info", 6)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: avgChanLoadToDCETrapSeverity.setStatus('mandatory')
if mibBuilder.loadTexts: avgChanLoadToDCETrapSeverity.setDescription('Controls the reporting of and defines severity of the 15 minute average load exception trap. WEBFLAG Inhibit Critical Major Minor Warning Info WEBEND ')
avgChanLoadToDCEThresh = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 4, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 100))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: avgChanLoadToDCEThresh.setStatus('mandatory')
if mibBuilder.loadTexts: avgChanLoadToDCEThresh.setDescription('Defines the maximum allowed frame relay channel utilization in a 15 minute period, expressed in percentage. If the actual average utilization is more than this threshold, an avgChanLoadToDCETrap trap may be sent as specified by the avgChanLoadToDCETrapSeverity object value.')
realTimeChanLoadToDTEThresh = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 4, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 100))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: realTimeChanLoadToDTEThresh.setStatus('mandatory')
if mibBuilder.loadTexts: realTimeChanLoadToDTEThresh.setDescription('Defines the maximum allowed real-time channel load calculated every second, expressed in percentage. If the actual load is more than this threshold, a realTimeTrapSeverity exception trap may be sent as specified by the realTimeTrapSeverity object value.')
realTimeChanLoadToDTEThreshVar = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 4, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 100))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: realTimeChanLoadToDTEThreshVar.setStatus('mandatory')
if mibBuilder.loadTexts: realTimeChanLoadToDTEThreshVar.setDescription('Defines the variation range for real-time channel load, expressed in percentage.. If within the current TC interval, the variation of the actual utilization is more than that range compared with the previous trap value, another realTimeTrapSeverity exception trap may be sent as specified by the realTimeTrapSeverity object value.')
avgChanLoadToDTETrapSeverity = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 4, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("inhibit", 1), ("critical", 2), ("major", 3), ("minor", 4), ("warning", 5), ("info", 6)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: avgChanLoadToDTETrapSeverity.setStatus('mandatory')
if mibBuilder.loadTexts: avgChanLoadToDTETrapSeverity.setDescription('Controls the reporting of and defines severity of the 15 minute average load exception trap. WEBFLAG Inhibit Critical Major Minor Warning Info WEBEND ')
avgChanLoadToDTEThresh = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 4, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 100))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: avgChanLoadToDTEThresh.setStatus('mandatory')
if mibBuilder.loadTexts: avgChanLoadToDTEThresh.setDescription('Defines the maximum allowed frame relay channel utilization in a 15 minute period, expressed in percentage. If the actual average utilization is more than this threshold, an avgChanLoadToDTETrap trap may be sent as specified by the avgChanLoadToDTETrapSeverity object value.')
realTimePvcLoadToDCEThresh = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 4, 11), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2048))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: realTimePvcLoadToDCEThresh.setStatus('mandatory')
if mibBuilder.loadTexts: realTimePvcLoadToDCEThresh.setDescription('Defines the maximum allowed real-time PVC load calculated every second, expressed in percentage. If the actual load is more than this threshold, a realTimeTrapSeverity exception trap may be sent as specified by the realTimeTrapSeverity object value.')
realTimePvcLoadToDCEThreshVar = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 4, 12), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 100))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: realTimePvcLoadToDCEThreshVar.setStatus('mandatory')
if mibBuilder.loadTexts: realTimePvcLoadToDCEThreshVar.setDescription('Defines the variation range for real-time PVC load, expressed in percentage.. If within the current TC interval, the variation of the actual utilization is more than that range compared with the previous trap value, another realTimeTrapSeverity exception trap may be sent as specified by the realTimeTrapSeverity object value.')
avgPvcLoadToDCETrapSeverity = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 4, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("inhibit", 1), ("critical", 2), ("major", 3), ("minor", 4), ("warning", 5), ("info", 6)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: avgPvcLoadToDCETrapSeverity.setStatus('mandatory')
if mibBuilder.loadTexts: avgPvcLoadToDCETrapSeverity.setDescription('Controls the reporting of and defines severity of the 15 minute average load exception trap. WEBFLAG Inhibit Critical Major Minor Warning Info WEBEND ')
avgPvcLoadToDCEThresh = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 4, 14), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2048))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: avgPvcLoadToDCEThresh.setStatus('mandatory')
if mibBuilder.loadTexts: avgPvcLoadToDCEThresh.setDescription('Defines the maximum allowed PVC load in a 15 minute period, expressed in percentage. If the actual average PVC loading is more than this threshold, an avgPvcLoadToDCETrap trap may be sent as specified by the avgPvcLoadToDCETrapSeverity object value.')
realTimePvcLoadToDTEThresh = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 4, 15), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2048))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: realTimePvcLoadToDTEThresh.setStatus('mandatory')
if mibBuilder.loadTexts: realTimePvcLoadToDTEThresh.setDescription('Defines the maximum allowed real-time PVC load calculated every second, expressed in percentage. If the actual load is more than this threshold, a realTimeTrapSeverity exception trap may be sent as specified by the realTimeTrapSeverity object value.')
realTimePvcLoadToDTEThreshVar = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 4, 16), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 100))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: realTimePvcLoadToDTEThreshVar.setStatus('mandatory')
if mibBuilder.loadTexts: realTimePvcLoadToDTEThreshVar.setDescription('Defines the variation range for real-time PVC load, expressed in percentage. If within the current TC interval, the variation of the actual utilization is more than that range compared with the previous trap value, another realTimeTrapSeverity exception trap may be sent as specified by the realTimeTrapSeverity object value.')
avgPvcLoadToDTETrapSeverity = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 4, 17), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("inhibit", 1), ("critical", 2), ("major", 3), ("minor", 4), ("warning", 5), ("info", 6)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: avgPvcLoadToDTETrapSeverity.setStatus('mandatory')
if mibBuilder.loadTexts: avgPvcLoadToDTETrapSeverity.setDescription('Controls the reporting of and defines severity of the 15 minute average load exception trap. WEBFLAG Inhibit Critical Major Minor Warning Info WEBEND ')
avgPvcLoadToDTEThresh = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 4, 18), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2048))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: avgPvcLoadToDTEThresh.setStatus('mandatory')
if mibBuilder.loadTexts: avgPvcLoadToDTEThresh.setDescription('Defines the maximum allowed PVC load in a 15 minute period, expressed in percentage. If the actual average utilization is more than this threshold, an avgPvcLoadToDTETrap trap may be sent as specified by the avgPvcLoadToDTETrapSeverity object value.')
realTimePvcRoundTripDelayThresh = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 4, 19), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2048))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: realTimePvcRoundTripDelayThresh.setStatus('mandatory')
if mibBuilder.loadTexts: realTimePvcRoundTripDelayThresh.setDescription('Defines the maximum allowed real-time PVC round trip delay, calculated every second, expressed in percentage. If the actual load is more than this threshold, a realTimeTrapSeverity exception trap may be sent as specified by the realTimeTrapSeverity object value.')
realTimePvcRoundTripDelayThreshVar = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 4, 20), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 100))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: realTimePvcRoundTripDelayThreshVar.setStatus('mandatory')
if mibBuilder.loadTexts: realTimePvcRoundTripDelayThreshVar.setDescription('Defines the variation range for real-time PVC round trip delay, expressed in percentage.. If within the current TC interval, the variation of the actual round trip delay is more than that range compared with the previous trap value, another realTimeTrapSeverity exception trap may be sent as specified by the realTimeTrapSeverity object value.')
avgPvcRoundTripDelayTrapSeverity = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 4, 21), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("inhibit", 1), ("critical", 2), ("major", 3), ("minor", 4), ("warning", 5), ("info", 6)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: avgPvcRoundTripDelayTrapSeverity.setStatus('mandatory')
if mibBuilder.loadTexts: avgPvcRoundTripDelayTrapSeverity.setDescription('Controls the reporting of and defines severity of the 15 minute average round trip delay exception trap. WEBFLAG Inhibit Critical Major Minor Warning Info WEBEND ')
avgPvcRoundTripDelayThresh = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 4, 22), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2048))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: avgPvcRoundTripDelayThresh.setStatus('mandatory')
if mibBuilder.loadTexts: avgPvcRoundTripDelayThresh.setDescription('Defines the maximum allowed PVC round trip delay in a 15 minute period, expressed in percentage. If the actual average PVC round trip delay is more than this threshold, an avgPvcRoundTripDelayTrap trap may be sent as specified by the avgPvcRoundTripDelayTrapSeverity object value.')
avgPvcNotAvailToDCETrapSeverity = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 4, 23), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("inhibit", 1), ("critical", 2), ("major", 3), ("minor", 4), ("warning", 5), ("info", 6)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: avgPvcNotAvailToDCETrapSeverity.setStatus('mandatory')
if mibBuilder.loadTexts: avgPvcNotAvailToDCETrapSeverity.setDescription('Controls the reporting of and defines severity of the avgPvcNotAvailToDCE exception trap. This condition may be reported when the avgPvcNotAvailToDCEThresh object value is exceeded within a 15 minute period. WEBFLAG Inhibit Critical Major Minor Warning Info WEBEND ')
avgPvcNotAvailToDCEThresh = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 4, 24), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 100))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: avgPvcNotAvailToDCEThresh.setStatus('mandatory')
if mibBuilder.loadTexts: avgPvcNotAvailToDCEThresh.setDescription('Defines the maximum allowed average unavailability in a 15 minute period, expressed in percentage. If the actual value is more than this threshold, an avgPvcNotAvailToDCE trap may be sent as specified by the avgPvcNotAvailToDCETrapSeverity object value.')
avgPvcNotAvailToDTETrapSeverity = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 4, 25), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("inhibit", 1), ("critical", 2), ("major", 3), ("minor", 4), ("warning", 5), ("info", 6)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: avgPvcNotAvailToDTETrapSeverity.setStatus('mandatory')
if mibBuilder.loadTexts: avgPvcNotAvailToDTETrapSeverity.setDescription('Controls the reporting of and defines severity of the avgPvcNotAvailToDTE exception trap. This condition may be reported when the avgPvcNotAvailToDTEThresh object value is exceeded within a 15 minute period. WEBFLAG Inhibit Critical Major Minor Warning Info WEBEND ')
avgPvcNotAvailToDTEThresh = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 4, 26), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 100))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: avgPvcNotAvailToDTEThresh.setStatus('mandatory')
if mibBuilder.loadTexts: avgPvcNotAvailToDTEThresh.setDescription('Defines the maximum allowed average unavailability in a 15 minute period, expressed in percentage. If the actual value is more than this threshold, an avgPvcNotAvailToDTE trap may be sent as specified by the avgPvcNotAvailToDTETrapSeverity object value.')
avgPvcFecnFramesTrapSeverity = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 4, 27), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("inhibit", 1), ("critical", 2), ("major", 3), ("minor", 4), ("warning", 5), ("info", 6)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: avgPvcFecnFramesTrapSeverity.setStatus('mandatory')
if mibBuilder.loadTexts: avgPvcFecnFramesTrapSeverity.setDescription('Controls the reporting of and defines severity of the avgPvcFecnFrames exception trap. This condition may be reported when the avgPvcFecnFramesThresh object value is exceeded within a 15 minute period. WEBFLAG Inhibit Critical Major Minor Warning Info WEBEND ')
avgPvcFecnFramesThresh = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 4, 28), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2048))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: avgPvcFecnFramesThresh.setStatus('mandatory')
if mibBuilder.loadTexts: avgPvcFecnFramesThresh.setDescription('Defines the maximum allowed number of FECN frames which may be received in a 15 minute period.')
avgPvcBecnFramesTrapSeverity = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 4, 29), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("inhibit", 1), ("critical", 2), ("major", 3), ("minor", 4), ("warning", 5), ("info", 6)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: avgPvcBecnFramesTrapSeverity.setStatus('mandatory')
if mibBuilder.loadTexts: avgPvcBecnFramesTrapSeverity.setDescription('Controls the reporting of and defines severity of the avgPvcBecnFrames exception trap. This condition may be reported when the avgPvcFecnFramesThresh object value is exceeded within a 15 minute period. WEBFLAG Inhibit Critical Major Minor Warning Info WEBEND ')
# --- Exception-trap thresholds and severities (generated by pysmi from AP553-MIB) ---
# Pattern for every object below: the MibScalar definition, then setStatus and
# setDescription calls guarded by mibBuilder.loadTexts (texts are only loaded
# when the builder is configured to keep DESCRIPTION clauses).
# Severity scalars share one enum: inhibit(1)..info(6); "inhibit" disables the trap.
avgPvcBecnFramesThresh = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 4, 30), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2048))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: avgPvcBecnFramesThresh.setStatus('mandatory')
if mibBuilder.loadTexts: avgPvcBecnFramesThresh.setDescription('Defines the maximum allowed number of BECN frames which may be received in a 15 minute period.')
avgPvcCIRExceedToDTETrapSeverity = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 4, 31), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("inhibit", 1), ("critical", 2), ("major", 3), ("minor", 4), ("warning", 5), ("info", 6)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: avgPvcCIRExceedToDTETrapSeverity.setStatus('mandatory')
if mibBuilder.loadTexts: avgPvcCIRExceedToDTETrapSeverity.setDescription('Controls the reporting of and defines severity of the avgPvcCIRExceedToDTE exception trap. This trap may be reported if this condition occurs over a 15 minute period. WEBFLAG Inhibit Critical Major Minor Warning Info WEBEND ')
avgPvcEIRExceedToDTETrapSeverity = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 4, 32), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("inhibit", 1), ("critical", 2), ("major", 3), ("minor", 4), ("warning", 5), ("info", 6)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: avgPvcEIRExceedToDTETrapSeverity.setStatus('mandatory')
if mibBuilder.loadTexts: avgPvcEIRExceedToDTETrapSeverity.setDescription('Controls the reporting of and defines severity of the avgPvcEIRExceedToDTE exception trap. This trap may be reported if this condition occurs over a 15 minute period. WEBFLAG Inhibit Critical Major Minor Warning Info WEBEND ')
avgPvcLossFrameTxTrapSeverity = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 4, 33), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("inhibit", 1), ("critical", 2), ("major", 3), ("minor", 4), ("warning", 5), ("info", 6)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: avgPvcLossFrameTxTrapSeverity.setStatus('mandatory')
if mibBuilder.loadTexts: avgPvcLossFrameTxTrapSeverity.setDescription('Controls the reporting of and defines severity of the avgPvcLossFrameTx exception trap. This trap may be reported if this condition occurs over a 15 minute period. WEBFLAG Inhibit Critical Major Minor Warning Info WEBEND ')
avgPvcLossFrameRxTrapSeverity = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 4, 34), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("inhibit", 1), ("critical", 2), ("major", 3), ("minor", 4), ("warning", 5), ("info", 6)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: avgPvcLossFrameRxTrapSeverity.setStatus('mandatory')
# NOTE(review): the DESCRIPTION below refers to "avgPvcLossFrameTx" although this
# object is the Rx-side severity — apparent copy/paste slip carried over from the
# source MIB. Left byte-identical because the string mirrors the vendor MIB text.
if mibBuilder.loadTexts: avgPvcLossFrameRxTrapSeverity.setDescription('Controls the reporting of and defines severity of the avgPvcLossFrameTx exception trap. This trap may be reported if this condition occurs over a 15 minute period. WEBFLAG Inhibit Critical Major Minor Warning Info WEBEND ')
networkLmiInterfaceDownTrapSeverity = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 4, 35), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("inhibit", 1), ("critical", 2), ("major", 3), ("minor", 4), ("warning", 5), ("info", 6)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: networkLmiInterfaceDownTrapSeverity.setStatus('mandatory')
if mibBuilder.loadTexts: networkLmiInterfaceDownTrapSeverity.setDescription('Controls the reporting of and defines severity of the networkLmiInterfaceDown exception trap. WEBFLAG Inhibit Critical Major Minor Warning Info WEBEND ')
userLmiInterfaceDownTrapSeverity = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 4, 36), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("inhibit", 1), ("critical", 2), ("major", 3), ("minor", 4), ("warning", 5), ("info", 6)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: userLmiInterfaceDownTrapSeverity.setStatus('mandatory')
if mibBuilder.loadTexts: userLmiInterfaceDownTrapSeverity.setDescription('Controls the reporting of and defines severity of the userLmiInterfaceDown exception trap. WEBFLAG Inhibit Critical Major Minor Warning Info WEBEND ')
# --- pvcCirEirTable: per-PVC CIR/EIR configuration, indexed by DLCI (16..991) ---
# Read-write columns configure committed/excess rates per direction; pvcOperation
# toggles probing, pvcState reports probe-observed status, pvcIdentifier is a label.
pvcCirEirTable = MibTable((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 5, 1), )
if mibBuilder.loadTexts: pvcCirEirTable.setStatus('mandatory')
if mibBuilder.loadTexts: pvcCirEirTable.setDescription('PVC table, which lists PVCs for which the statistic data are collected.')
pvcCirEirEntry = MibTableRow((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 5, 1, 1), ).setIndexNames((0, "AP553-MIB", "pvcCirEirTableIndex"))
if mibBuilder.loadTexts: pvcCirEirEntry.setStatus('mandatory')
if mibBuilder.loadTexts: pvcCirEirEntry.setDescription('Identifies an entry (row) in PVC CIR/EIR table.')
# Index column: the DLCI number of the PVC (valid frame-relay DLCI range 16..991).
pvcCirEirTableIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 5, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(16, 991))).setMaxAccess("readonly")
if mibBuilder.loadTexts: pvcCirEirTableIndex.setStatus('mandatory')
if mibBuilder.loadTexts: pvcCirEirTableIndex.setDescription('Identifies an entry (row) in PVC table.')
# Committed Information Rate in bps, per direction (toDTE / toDCE), 1..2048000.
pvcCirToDTE = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 5, 1, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2048000))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: pvcCirToDTE.setStatus('mandatory')
if mibBuilder.loadTexts: pvcCirToDTE.setDescription('Defines the committed information rate, in bps, under normal conditions in the direction of to DTE.')
pvcCirToDCE = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 5, 1, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2048000))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: pvcCirToDCE.setStatus('mandatory')
if mibBuilder.loadTexts: pvcCirToDCE.setDescription('Defines the committed information rate, in bps, under normal conditions in the direction of to DCE.')
# Excess Information Rate in bps, per direction; 0 allowed (no excess burst).
pvcEirToDTE = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 5, 1, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2048000))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: pvcEirToDTE.setStatus('mandatory')
if mibBuilder.loadTexts: pvcEirToDTE.setDescription('Defines the maximum transfer information rate, in bps, which the network is accepting in the direction of to DTE.')
pvcEirToDCE = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 5, 1, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2048000))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: pvcEirToDCE.setStatus('mandatory')
if mibBuilder.loadTexts: pvcEirToDCE.setDescription('Defines the maximum transfer information rate, in bps, which the network is accepting in the direction of to DCE.')
# Probing control: pvcWaitForProbe is the transition state; pvcProbed is read-only
# in practice (per the DESCRIPTION) even though MAX-ACCESS is read-write.
pvcOperation = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 5, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("pvcNotProbed", 1), ("pvcWaitForProbe", 2), ("pvcProbed", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: pvcOperation.setStatus('mandatory')
if mibBuilder.loadTexts: pvcOperation.setDescription('Defines the monitored state of a PVC. pvcNotProbed - specifies that the PVC will not be probed, round trip delay and frame loss measurements will not be tabulated. pvcWaitForProbe specifies that this PVC is in a transition state, and will be probed at the start of the next 15 minute poll cycle for statistic data collection. pvcProbed is a read only value that indicates this PVC is used for both statistic data and probing. WEBFLAG Not Probed Probe Pending Probed WEBEND ')
pvcState = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 5, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("unavailable", 1), ("inactive", 2), ("active", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: pvcState.setStatus('mandatory')
if mibBuilder.loadTexts: pvcState.setDescription("Reflects the state of the PVC as seen by the probe. `unavailable' indicates that the PVC is not included in the DLCI list. `inactive indicates that the PVC is no longer responding. `active' indicates that the PVC is in operational state. ")
# Free-text annotation, up to 32 characters.
pvcIdentifier = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 1, 5, 1, 1, 8), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: pvcIdentifier.setStatus('mandatory')
if mibBuilder.loadTexts: pvcIdentifier.setDescription('Comment field for annotation.')
# --- chStCurrent: channel-level counters for the CURRENT 15-minute interval ---
# All counters are read-only Counter32 scalars under OID ...22.1.2.2.1; the
# historical (completed) intervals live in chStIntervalTable below.
chStCurrent = MibIdentifier((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 1))
chCurrDteFrames = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: chCurrDteFrames.setStatus('mandatory')
if mibBuilder.loadTexts: chCurrDteFrames.setDescription(' Identifies total frames received from DTE device within current 15 minutes interval. ')
chCurrDceFrames = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: chCurrDceFrames.setStatus('mandatory')
if mibBuilder.loadTexts: chCurrDceFrames.setDescription(' Identifies total frames received from DCE device within current 15 minutes interval. ')
chCurrDteOctets = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: chCurrDteOctets.setStatus('mandatory')
if mibBuilder.loadTexts: chCurrDteOctets.setDescription(' Identifies total octets received from DTE device within current 15 minutes interval. ')
chCurrDceOctets = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: chCurrDceOctets.setStatus('mandatory')
# NOTE(review): the DESCRIPTION below says "from DTE device" although this object
# is chCurrDceOctets — apparent copy/paste slip in the source MIB; string kept
# byte-identical to the compiled MIB text.
if mibBuilder.loadTexts: chCurrDceOctets.setDescription(' Identifies total octets received from DTE device within current 15 minutes interval. ')
# LMI keepalive traffic: enquiries/responses in each direction.
chCurrLmiTxEnq = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: chCurrLmiTxEnq.setStatus('mandatory')
if mibBuilder.loadTexts: chCurrLmiTxEnq.setDescription(' Identifies the total number of LMI enquiries sent to DCE within current 15 minutes interval. ')
chCurrLmiTxResp = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: chCurrLmiTxResp.setStatus('mandatory')
if mibBuilder.loadTexts: chCurrLmiTxResp.setDescription(' Identifies the total number of LMI responses sent to DTE within current 15 minutes interval. ')
chCurrLmiRxEnq = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: chCurrLmiRxEnq.setStatus('mandatory')
if mibBuilder.loadTexts: chCurrLmiRxEnq.setDescription(' Identifies the total number of LMI enquiries from the DTE within current 15 minutes interval. ')
chCurrLmiRxResp = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: chCurrLmiRxResp.setStatus('mandatory')
if mibBuilder.loadTexts: chCurrLmiRxResp.setDescription(' Identifies the total number of LMI status responses received from the DCE within current 15 minutes interval. ')
# NOTE(review): the DTE and DCE LMI-timeout descriptions are identical in the
# source MIB; only the object names distinguish the two counters.
chCurrDTELmiTimeout = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: chCurrDTELmiTimeout.setStatus('mandatory')
if mibBuilder.loadTexts: chCurrDTELmiTimeout.setDescription(' Identifies the total number of lmi timeouts received within current 15 minutes interval. ')
chCurrDCELmiTimeout = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 1, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: chCurrDCELmiTimeout.setStatus('mandatory')
if mibBuilder.loadTexts: chCurrDCELmiTimeout.setDescription(' Identifies the total number of lmi timeouts received within current 15 minutes interval. ')
# Congestion-notification frame counts (FECN/BECN bits set).
chCurrFecnFrames = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 1, 11), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: chCurrFecnFrames.setStatus('mandatory')
if mibBuilder.loadTexts: chCurrFecnFrames.setDescription(' Identifies total number of frames with FECN set received within current 15 minutes interval. ')
chCurrBecnFrames = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 1, 12), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: chCurrBecnFrames.setStatus('mandatory')
if mibBuilder.loadTexts: chCurrBecnFrames.setDescription(' Identifies total number of frames with BECN set received within current 15 minutes interval. ')
# In-band management (IP over DLCI) and probe poll/response traffic.
chCurrTxIp = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 1, 13), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: chCurrTxIp.setStatus('mandatory')
if mibBuilder.loadTexts: chCurrTxIp.setDescription(' Identifies total number of IP packets sent on inband DLCI within current 15 minutes interval. ')
chCurrRxIp = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 1, 14), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: chCurrRxIp.setStatus('mandatory')
if mibBuilder.loadTexts: chCurrRxIp.setDescription(' Identifies total number of IP packets received on inband DLCI within current 15 minutes interval. ')
chCurrTxPoll = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 1, 15), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: chCurrTxPoll.setStatus('mandatory')
if mibBuilder.loadTexts: chCurrTxPoll.setDescription(' Identifies total number of probe polls sent to DCE within current 15 minutes interval. ')
chCurrTxResp = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 1, 16), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: chCurrTxResp.setStatus('mandatory')
if mibBuilder.loadTexts: chCurrTxResp.setDescription(' Identifies total number of probe responses sent to DCE within current 15 minutes interval. ')
chCurrRxPoll = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 1, 17), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: chCurrRxPoll.setStatus('mandatory')
if mibBuilder.loadTexts: chCurrRxPoll.setDescription(' Identifies total number of probe polls received from DCE within current 15 minutes interval. ')
chCurrRxResp = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 1, 18), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: chCurrRxResp.setStatus('mandatory')
if mibBuilder.loadTexts: chCurrRxResp.setDescription(' Identifies total number of probe responses received from DCE within current 15 minutes interval. ')
# Percentage (1..100) of the current 15-minute interval that has elapsed.
intervalComplete = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 1, 19), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 100))).setMaxAccess("readonly")
if mibBuilder.loadTexts: intervalComplete.setStatus('mandatory')
if mibBuilder.loadTexts: intervalComplete.setDescription(' This object identifies the the percent completeion of the interval. ')
# --- chStIntervalTable: channel counters for COMPLETED 15-minute intervals ---
# Up to 96 rows (96 x 15 min = 24 hours of history), indexed by chIntvIndex.
# Columns mirror the chCurr* scalars above, one row per completed interval.
chStIntervalTable = MibTable((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 2), )
if mibBuilder.loadTexts: chStIntervalTable.setStatus('mandatory')
if mibBuilder.loadTexts: chStIntervalTable.setDescription(' probe statistic table, row 1 to row 96, which defines the time interval for every 15 minutes. ')
chStIntervalEntry = MibTableRow((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 2, 1), ).setIndexNames((0, "AP553-MIB", "chIntvIndex"))
if mibBuilder.loadTexts: chStIntervalEntry.setStatus('mandatory')
if mibBuilder.loadTexts: chStIntervalEntry.setDescription(' Identifies an entry (row) in probe statistic table.')
chIntvIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 96))).setMaxAccess("readonly")
if mibBuilder.loadTexts: chIntvIndex.setStatus('mandatory')
if mibBuilder.loadTexts: chIntvIndex.setDescription(' Identifies an entry (row) in channel statistic table, 1-96 time intervals. ')
chIntvDteFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 2, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: chIntvDteFrames.setStatus('mandatory')
if mibBuilder.loadTexts: chIntvDteFrames.setDescription(' Identifies total frames received from DTE device within a 15 minutes interval. ')
chIntvDceFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 2, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: chIntvDceFrames.setStatus('mandatory')
if mibBuilder.loadTexts: chIntvDceFrames.setDescription(' Identifies total frames received from DCE device within a 15 minutes interval. ')
chIntvDteOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 2, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: chIntvDteOctets.setStatus('mandatory')
if mibBuilder.loadTexts: chIntvDteOctets.setDescription(' Identifies total octets received from DTE device within a 15 minutes interval. ')
chIntvDceOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 2, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: chIntvDceOctets.setStatus('mandatory')
# NOTE(review): DESCRIPTION below says "from DTE device" for chIntvDceOctets —
# same copy/paste slip as chCurrDceOctets; string kept byte-identical.
if mibBuilder.loadTexts: chIntvDceOctets.setDescription(' Identifies total octets received from DTE device within a 15 minutes interval. ')
chIntvLmiTxEnq = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 2, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: chIntvLmiTxEnq.setStatus('mandatory')
if mibBuilder.loadTexts: chIntvLmiTxEnq.setDescription(' Identifies the total number of LMI enquiries sent to DCE within a 15 minutes interval. ')
chIntvLmiTxResp = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 2, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: chIntvLmiTxResp.setStatus('mandatory')
if mibBuilder.loadTexts: chIntvLmiTxResp.setDescription(' Identifies the total number of LMI responses send to DTE within a 15 minutes interval. ')
chIntvLmiRxEnq = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 2, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: chIntvLmiRxEnq.setStatus('mandatory')
if mibBuilder.loadTexts: chIntvLmiRxEnq.setDescription(' Identifies the total number of LMI enquiries received within a 15 minutes interval. ')
chIntvLmiRxResp = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 2, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: chIntvLmiRxResp.setStatus('mandatory')
if mibBuilder.loadTexts: chIntvLmiRxResp.setDescription(' Identifies the total number of LMI status responses received from the DCE within a 15 minutes interval. ')
chIntvDTELmiTimeout = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 2, 1, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: chIntvDTELmiTimeout.setStatus('mandatory')
if mibBuilder.loadTexts: chIntvDTELmiTimeout.setDescription(' Identifies the total number of LMI timeouts received within a 15 minutes interval. ')
chIntvDCELmiTimeout = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 2, 1, 11), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: chIntvDCELmiTimeout.setStatus('mandatory')
if mibBuilder.loadTexts: chIntvDCELmiTimeout.setDescription(' Identifies the total number of LMI timeouts received within a 15 minutes interval. ')
chIntvFecnFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 2, 1, 12), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: chIntvFecnFrames.setStatus('mandatory')
if mibBuilder.loadTexts: chIntvFecnFrames.setDescription(' Identifies total number of frames with FECN set received within a 15 minutes interval. ')
chIntvBecnFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 2, 1, 13), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: chIntvBecnFrames.setStatus('mandatory')
if mibBuilder.loadTexts: chIntvBecnFrames.setDescription(' Identifies total number of frames with BECN set received within a 15 minutes interval. ')
chIntvTxIp = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 2, 1, 14), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: chIntvTxIp.setStatus('mandatory')
if mibBuilder.loadTexts: chIntvTxIp.setDescription(' Identifies total number of IP packets sent on inband DLCI within a 15 minutes interval. ')
chIntvRxIp = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 2, 1, 15), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: chIntvRxIp.setStatus('mandatory')
if mibBuilder.loadTexts: chIntvRxIp.setDescription(' Identifies total number of IP packets received on inband DLCI within a 15 minutes interval. ')
chIntvTxPoll = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 2, 1, 16), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: chIntvTxPoll.setStatus('mandatory')
if mibBuilder.loadTexts: chIntvTxPoll.setDescription(' Identifies total number of probe polls sent to DCE within a 15 minutes interval. ')
chIntvTxResp = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 2, 1, 17), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: chIntvTxResp.setStatus('mandatory')
if mibBuilder.loadTexts: chIntvTxResp.setDescription(' Identifies total number of probe response sent to DCE within a 15 minutes interval. ')
chIntvRxPoll = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 2, 1, 18), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: chIntvRxPoll.setStatus('mandatory')
if mibBuilder.loadTexts: chIntvRxPoll.setDescription(' Identifies total number of probe polls received from DCE within a 15 minutes interval. ')
chIntvRxResp = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 2, 1, 19), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: chIntvRxResp.setStatus('mandatory')
if mibBuilder.loadTexts: chIntvRxResp.setDescription(' Identifies total number of probe responses received from DCE within a 15 minutes interval. ')
# --- pvcStCurrentTable: per-PVC counters for the CURRENT 15-minute interval ---
# Indexed by DLCI (pvcCurrDlciIndex, 16..991); up to 20 PVC rows per the
# DESCRIPTION text. Parallels the chCurr* channel scalars but per PVC.
pvcStCurrentTable = MibTable((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 3), )
if mibBuilder.loadTexts: pvcStCurrentTable.setStatus('mandatory')
if mibBuilder.loadTexts: pvcStCurrentTable.setDescription(' PVC statistic table.')
pvcStCurrentEntry = MibTableRow((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 3, 1), ).setIndexNames((0, "AP553-MIB", "pvcCurrDlciIndex"))
if mibBuilder.loadTexts: pvcStCurrentEntry.setStatus('mandatory')
if mibBuilder.loadTexts: pvcStCurrentEntry.setDescription(' Identifies an entry (row) in PVC statistic table')
pvcCurrDlciIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 3, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(16, 991))).setMaxAccess("readonly")
if mibBuilder.loadTexts: pvcCurrDlciIndex.setStatus('mandatory')
if mibBuilder.loadTexts: pvcCurrDlciIndex.setDescription(' Index in PVC statistics table, which identifies the PVC number, up to 20 PVCs can exist. ')
pvcCurrDteFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 3, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: pvcCurrDteFrames.setStatus('mandatory')
if mibBuilder.loadTexts: pvcCurrDteFrames.setDescription(' Identifies total frames received from DTE device for a specific PVC within current 15 minutes interval. ')
pvcCurrDceFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 3, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: pvcCurrDceFrames.setStatus('mandatory')
if mibBuilder.loadTexts: pvcCurrDceFrames.setDescription(' Identifies total frames received from DCE device for a specific PVC within current 15 minutes interval. ')
pvcCurrDteOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 3, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: pvcCurrDteOctets.setStatus('mandatory')
if mibBuilder.loadTexts: pvcCurrDteOctets.setDescription(' Identifies total octets received from DTE device for a specific PVC within current 15 minutes interval. ')
pvcCurrDceOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 3, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: pvcCurrDceOctets.setStatus('mandatory')
# NOTE(review): DESCRIPTION below says "from DTE device" for pvcCurrDceOctets —
# same copy/paste slip as chCurrDceOctets; string kept byte-identical.
if mibBuilder.loadTexts: pvcCurrDceOctets.setDescription(' Identifies total octets received from DTE device for a specific PVC within current 15 minutes interval. ')
# Frames carrying the Discard Eligibility (DE) bit, per direction.
pvcCurrDteFramesWithDE = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 3, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: pvcCurrDteFramesWithDE.setStatus('mandatory')
if mibBuilder.loadTexts: pvcCurrDteFramesWithDE.setDescription(' Identifies total frames with DE bit set received from DTE device for a specific PVC within current 15 minutes interval. ')
pvcCurrDceFramesWithDE = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 3, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: pvcCurrDceFramesWithDE.setStatus('mandatory')
if mibBuilder.loadTexts: pvcCurrDceFramesWithDE.setDescription(' Identifies total frames with DE bit set received from DCE device for a specific PVC within current 15 minutes interval. ')
pvcCurrFecnFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 3, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: pvcCurrFecnFrames.setStatus('mandatory')
if mibBuilder.loadTexts: pvcCurrFecnFrames.setDescription(' Identifies total number of frames for a specific PVC with FECN set received within current 15 minutes interval. ')
pvcCurrBecnFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 3, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: pvcCurrBecnFrames.setStatus('mandatory')
if mibBuilder.loadTexts: pvcCurrBecnFrames.setDescription(' Identifies total number of frames for a specific PVC with BECN set received within current 15 minutes interval. ')
pvcCurrTxPoll = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 3, 1, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: pvcCurrTxPoll.setStatus('mandatory')
if mibBuilder.loadTexts: pvcCurrTxPoll.setDescription(' Identifies total number of probe polls sent to DCE within current 15 minutes interval. ')
pvcCurrTxResp = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 3, 1, 11), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: pvcCurrTxResp.setStatus('mandatory')
if mibBuilder.loadTexts: pvcCurrTxResp.setDescription(' Identifies total number of probe responses sent to DCE within current 15 minutes interval. ')
pvcCurrRxPoll = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 3, 1, 12), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: pvcCurrRxPoll.setStatus('mandatory')
if mibBuilder.loadTexts: pvcCurrRxPoll.setDescription(' Identifies total number of probe polls received from DCE on a specific PVC within current 15 minutes interval. ')
pvcCurrRxResp = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 3, 1, 13), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: pvcCurrRxResp.setStatus('mandatory')
if mibBuilder.loadTexts: pvcCurrRxResp.setDescription(' Identifies total number of probe responses received from DCE on a specific PVC within current 15 minutes interval. ')
# Loopback detection result from the probe generator: loopback(1)/noLoopback(2).
pvcCurrLoopback = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 3, 1, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("loopback", 1), ("noLoopback", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: pvcCurrLoopback.setStatus('mandatory')
if mibBuilder.loadTexts: pvcCurrLoopback.setDescription(" Identifies whether the specific PVC has been found to be in a loop back state by link probe's generator process. ")
# --- pvcStIntervalTable: per-PVC counters for COMPLETED 15-minute intervals ---
# Two-level index: DLCI (16..991) then interval number (1 = most recent,
# 96 = oldest). Columns mirror pvcStCurrentTable, one row per PVC per interval.
pvcStIntervalTable = MibTable((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 4), )
if mibBuilder.loadTexts: pvcStIntervalTable.setStatus('mandatory')
if mibBuilder.loadTexts: pvcStIntervalTable.setDescription(' PVC statistic interval table: ')
pvcStIntervalEntry = MibTableRow((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 4, 1), ).setIndexNames((0, "AP553-MIB", "pvcIntvDlciIndex"), (0, "AP553-MIB", "pvcIntvIndex"))
if mibBuilder.loadTexts: pvcStIntervalEntry.setStatus('mandatory')
if mibBuilder.loadTexts: pvcStIntervalEntry.setDescription(' Identifies an entry (row) in pvc statistic table')
pvcIntvDlciIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 4, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(16, 991))).setMaxAccess("readonly")
if mibBuilder.loadTexts: pvcIntvDlciIndex.setStatus('mandatory')
if mibBuilder.loadTexts: pvcIntvDlciIndex.setDescription(' This index value which uniquely identifies the PVC number to which this entry is applicable. ')
pvcIntvIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 4, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 96))).setMaxAccess("readonly")
if mibBuilder.loadTexts: pvcIntvIndex.setStatus('mandatory')
if mibBuilder.loadTexts: pvcIntvIndex.setDescription(' A number between 1 and 96, where 1 is the most recently completed 15 minutes interval and 96 is the least recently completed 15 minutes interval. ')
pvcIntvDteFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 4, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: pvcIntvDteFrames.setStatus('mandatory')
if mibBuilder.loadTexts: pvcIntvDteFrames.setDescription(' Identifies total frames received from DTE device for a specific PVC within a 15 minutes interval. ')
pvcIntvDceFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 4, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: pvcIntvDceFrames.setStatus('mandatory')
if mibBuilder.loadTexts: pvcIntvDceFrames.setDescription(' Identifies total frames received from DCE device for a specific PVC within a 15 minutes interval. ')
pvcIntvDteOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 4, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: pvcIntvDteOctets.setStatus('mandatory')
if mibBuilder.loadTexts: pvcIntvDteOctets.setDescription(' Identifies total octets received from DTE device for a specific PVC within a 15 minutes interval. ')
pvcIntvDceOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 4, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: pvcIntvDceOctets.setStatus('mandatory')
if mibBuilder.loadTexts: pvcIntvDceOctets.setDescription(' Identifies total octets received from DCE device for a specific PVC within a 15 minutes interval. ')
pvcIntvDteFramesWithDE = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 4, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: pvcIntvDteFramesWithDE.setStatus('mandatory')
if mibBuilder.loadTexts: pvcIntvDteFramesWithDE.setDescription(' Identifies the frames with DE bit set received from DTE device for a specific PVC within a 15 minutes interval. ')
pvcIntvDceFramesWithDE = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 4, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: pvcIntvDceFramesWithDE.setStatus('mandatory')
if mibBuilder.loadTexts: pvcIntvDceFramesWithDE.setDescription(' Identifies the frames with DE bit set received from DCE device for a specific PVC within a 15 minutes interval. ')
pvcIntvFecnFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 4, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: pvcIntvFecnFrames.setStatus('mandatory')
if mibBuilder.loadTexts: pvcIntvFecnFrames.setDescription(' Identifies total number of frames for a specific PVC with FECN set received within a 15 minutes interval. ')
pvcIntvBecnFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 4, 1, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: pvcIntvBecnFrames.setStatus('mandatory')
if mibBuilder.loadTexts: pvcIntvBecnFrames.setDescription(' Identifies total number of frames for a specific PVC with BECN set received within a 15 minutes interval. ')
pvcIntvTxPoll = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 4, 1, 11), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: pvcIntvTxPoll.setStatus('mandatory')
if mibBuilder.loadTexts: pvcIntvTxPoll.setDescription(' Identifies total number of probe polls sent to DCE within a 15 minutes interval. ')
pvcIntvTxResp = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 4, 1, 12), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: pvcIntvTxResp.setStatus('mandatory')
if mibBuilder.loadTexts: pvcIntvTxResp.setDescription(' Identifies total number of probe responses sent to DCE within a 15 minutes interval. ')
pvcIntvRxPoll = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 4, 1, 13), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: pvcIntvRxPoll.setStatus('mandatory')
if mibBuilder.loadTexts: pvcIntvRxPoll.setDescription(' Identifies total number of probe polls received from DCE on a specific PVC within a 15 minutes interval. ')
pvcIntvRxResp = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 4, 1, 14), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: pvcIntvRxResp.setStatus('mandatory')
if mibBuilder.loadTexts: pvcIntvRxResp.setDescription(' Identifies total number of probe responses received from DCE on a specific PVC within a 15 minutes interval. ')
# Loopback detection result per interval: loopback(1)/noLoopback(2).
pvcIntvLoopback = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 2, 4, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("loopback", 1), ("noLoopback", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: pvcIntvLoopback.setStatus('mandatory')
if mibBuilder.loadTexts: pvcIntvLoopback.setDescription(" Identifies whether the specific PVC has been found to be in a loop back state by link probe's generator process. ")
chanPerfCurr = MibIdentifier((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 3, 1))
chPerfCurrUnavailToDte = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 3, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: chPerfCurrUnavailToDte.setStatus('mandatory')
if mibBuilder.loadTexts: chPerfCurrUnavailToDte.setDescription(' This object identifies the channel unavailability due to remote DTE fault within the latest 15 minutes. This statistic datum is calculated by the channel unavailable time over the full time interval.')
chPerfCurrUnavailToDce = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 3, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: chPerfCurrUnavailToDce.setStatus('mandatory')
if mibBuilder.loadTexts: chPerfCurrUnavailToDce.setDescription(' This object identifies the channel unavailability due to local DTE fault within the latest 15 minutes. This statistic datum is calculated by the channel unavailable time over the full time interval.')
chPerfCurrTxLoad = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 3, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: chPerfCurrTxLoad.setStatus('mandatory')
if mibBuilder.loadTexts: chPerfCurrTxLoad.setDescription(' channel average user tx load within the latest 15 minutes, this statistic datum is calculated by user data octets sent by the channel within the 15 minutes over the total channel available load ( which depends on physical interface speed). ')
chPerfCurrRxLoad = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 3, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: chPerfCurrRxLoad.setStatus('mandatory')
if mibBuilder.loadTexts: chPerfCurrRxLoad.setDescription(' Channel average user rx load within the latest 15 minutes, this statistic datum is calculated by user data octets received by the channel within the 15 minutes over the total channel available load ( which depends on physical interface speed). ')
chPerfCurrTotalTxLoad = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 3, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: chPerfCurrTotalTxLoad.setStatus('mandatory')
if mibBuilder.loadTexts: chPerfCurrTotalTxLoad.setDescription(' channel total tx load within the latest 15 minutes, this statistic datum is calculated by the total data octets sent by the channel within the 15 minutes over the channel available load ( which depends on physical interface speed). ')
chPerfCurrTotalRxLoad = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 3, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: chPerfCurrTotalRxLoad.setStatus('mandatory')
if mibBuilder.loadTexts: chPerfCurrTotalRxLoad.setDescription('channel total rx load within the latest 15 minutes, this statistic datum is calculated by total data octets received by the channel within the 15 minutes over the channel total available load ( which depends on physical interface speed). ')
chPerfCurrLoadToDceRealTime = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 3, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: chPerfCurrLoadToDceRealTime.setStatus('mandatory')
if mibBuilder.loadTexts: chPerfCurrLoadToDceRealTime.setDescription(' This object displays the channel load tx within current TC ( 1 sec.), this variable is calculated by total octets transmitted over the capacity of this channel. ')
chPerfCurrLoadToDteRealTime = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 3, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: chPerfCurrLoadToDteRealTime.setStatus('mandatory')
if mibBuilder.loadTexts: chPerfCurrLoadToDteRealTime.setDescription(' This object displays the channel load rx within current TC ( 1 sec.), this variable is calculated by total octets received over the capacity of this channel. ')
chanPerfIntvTable = MibTable((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 3, 2), )
if mibBuilder.loadTexts: chanPerfIntvTable.setStatus('mandatory')
if mibBuilder.loadTexts: chanPerfIntvTable.setDescription(' Probe channel performance table, which displays the performance statistic data based on the probe channel within previous 24 hours.')
chanPerfIntvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 3, 2, 1), ).setIndexNames((0, "AP553-MIB", "chPerfIntvIndex"))
if mibBuilder.loadTexts: chanPerfIntvEntry.setStatus('mandatory')
if mibBuilder.loadTexts: chanPerfIntvEntry.setDescription(' Identifies an entry (row) in probe channel performance table.')
chPerfIntvIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 3, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 96))).setMaxAccess("readonly")
if mibBuilder.loadTexts: chPerfIntvIndex.setStatus('mandatory')
if mibBuilder.loadTexts: chPerfIntvIndex.setDescription(' Identifies an entry (row) in historical PVC performance table, total 96 time intervals, where 1 is the most recently completed 15 minutes interval and 96 is the least recently completed 15 minutes interval (assuming that all 96 intervals are valid). ')
chPerfIntvUnavailToDte = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 3, 2, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: chPerfIntvUnavailToDte.setStatus('mandatory')
if mibBuilder.loadTexts: chPerfIntvUnavailToDte.setDescription(' This object identifies the channel unavailability due to remote DTE fault within a 15 minutes interval. This statistic datum is calculated by the channel unavailable time over the full time interval.')
chPerfIntvUnavailToDce = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 3, 2, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: chPerfIntvUnavailToDce.setStatus('mandatory')
if mibBuilder.loadTexts: chPerfIntvUnavailToDce.setDescription(' This object identifies the channel unavailability due to local DTE fault within a 15 minutes interval. This statistic datum is calculated by the channel unavailable time over the full time interval.')
chPerfIntvTxLoad = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 3, 2, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: chPerfIntvTxLoad.setStatus('mandatory')
if mibBuilder.loadTexts: chPerfIntvTxLoad.setDescription(' channel average user tx load within a 15 minutes interval,this statistic datum is calculated by user data octets sent by the channel within the 15 minutes over the total channel available load ( which depends on physical interface speed). ')
chPerfIntvRxLoad = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 3, 2, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: chPerfIntvRxLoad.setStatus('mandatory')
if mibBuilder.loadTexts: chPerfIntvRxLoad.setDescription(' channel average user rx load within a 15 minutes interval, this statistic datum is calculated by user data octets received by the channel within the 15 minutes over the total channel available load( which depends on physical interface speed). ')
chPerfIntvTotalTxLoad = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 3, 2, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: chPerfIntvTotalTxLoad.setStatus('mandatory')
if mibBuilder.loadTexts: chPerfIntvTotalTxLoad.setDescription(' channel total tx load within a 15 minutes interval, this statistic datum is calculated by the total data octets sent by the channel within the 15 minutes over the total channel available load ( which depends on physical interface speed). ')
chPerfIntvTotalRxLoad = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 3, 2, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: chPerfIntvTotalRxLoad.setStatus('mandatory')
if mibBuilder.loadTexts: chPerfIntvTotalRxLoad.setDescription(' Channel total rx load within a 15 minutes interval, this statistic datum is calculated by the total data octets received by the channel within the 15 minutes over the total channel available load ( which depends on physical interface speed). ')
pvcPerfCurrTable = MibTable((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 3, 3), )
if mibBuilder.loadTexts: pvcPerfCurrTable.setStatus('mandatory')
if mibBuilder.loadTexts: pvcPerfCurrTable.setDescription(' PVC performance table, which displays the performance statistic data based on each PVC.')
pvcPerfCurrEntry = MibTableRow((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 3, 3, 1), ).setIndexNames((0, "AP553-MIB", "pvcPerfCurrDlciIndex"))
if mibBuilder.loadTexts: pvcPerfCurrEntry.setStatus('mandatory')
if mibBuilder.loadTexts: pvcPerfCurrEntry.setDescription(' Identifies an entry (row) in pvc statistic table')
pvcPerfCurrDlciIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 3, 3, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(16, 991))).setMaxAccess("readonly")
if mibBuilder.loadTexts: pvcPerfCurrDlciIndex.setStatus('mandatory')
if mibBuilder.loadTexts: pvcPerfCurrDlciIndex.setDescription(' This index value which uniquely identifies the PVC number to which this entry is applicable. ')
pvcPerfCurrRtd = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 3, 3, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: pvcPerfCurrRtd.setStatus('mandatory')
if mibBuilder.loadTexts: pvcPerfCurrRtd.setDescription(' Probe token average round trip time within the latest 15 minutes for a specific PVC. ')
pvcPerfCurrUnavailToDte = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 3, 3, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: pvcPerfCurrUnavailToDte.setStatus('mandatory')
if mibBuilder.loadTexts: pvcPerfCurrUnavailToDte.setDescription(' This object identifies the PVC average unavailability within the latest 15 minutes. This statistic datum is calculated by a PVC unavailable time due to the network or remote DTE fault over the full time interval.')
pvcPerfCurrUnavailToDce = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 3, 3, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: pvcPerfCurrUnavailToDce.setStatus('mandatory')
if mibBuilder.loadTexts: pvcPerfCurrUnavailToDce.setDescription(' This object identifies the PVC average unavailability within the latest 15 minutes. This statistic datum is calculated by a PVC unavailable time due to the local DTE fault over the full time interval.')
pvcPerfCurrTxLoad = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 3, 3, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: pvcPerfCurrTxLoad.setStatus('mandatory')
if mibBuilder.loadTexts: pvcPerfCurrTxLoad.setDescription(' This object identifies the PVC average user tx load within the latest 15 minutes. This statistic datum is calculated by user data octets sent by a specific PVC within the 15 minutes over the PVC total available load. If the CIR is correctly defined for this PVC, PVC available load will depend on CIR, otherwise, it will get from interface speed. ')
pvcPerfCurrRxLoad = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 3, 3, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: pvcPerfCurrRxLoad.setStatus('mandatory')
if mibBuilder.loadTexts: pvcPerfCurrRxLoad.setDescription(' This object identifies the PVC average user rx load within the latest 15 minutes. This statistic datum is calculated by user data octets received by a specific PVC within the 15 minutes over the PVC total available load. If the CIR is correctly defined for this PVC, PVC available load will depend on CIR, otherwise, it will get from interface speed. ')
pvcPerfCurrTotalTxLoad = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 3, 3, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: pvcPerfCurrTotalTxLoad.setStatus('mandatory')
if mibBuilder.loadTexts: pvcPerfCurrTotalTxLoad.setDescription(' This object identifies the PVC total tx load within the latest 15 minutes. This statistic datum is calculated by total data octets sent by a specific PVC within the 15 minutes over the PVC total available load. If the CIR is correctly defined for this PVC, PVC available load will depend on CIR, otherwise, it will get from interface speed. ')
pvcPerfCurrTotalRxLoad = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 3, 3, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: pvcPerfCurrTotalRxLoad.setStatus('mandatory')
if mibBuilder.loadTexts: pvcPerfCurrTotalRxLoad.setDescription(' This object identifies the PVC total rx load within the latest 15 minutes. This statistic datum is calculated by total data octets received by a specific PVC within the 15 minutes over the PVC total available load. If the CIR is correctly defined for this PVC, PVC available load will depend on CIR, otherwise, it will get from interface speed. ')
pvcPerfCurrCirToNetExceed = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 3, 3, 1, 9), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: pvcPerfCurrCirToNetExceed.setStatus('mandatory')
if mibBuilder.loadTexts: pvcPerfCurrCirToNetExceed.setDescription(' This object identifies the number of exceeding CIR times that the network detected within the current interval. The measurement is done for every Tc, and the direction is to the network.')
pvcPerfCurrEirToNetExceed = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 3, 3, 1, 10), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: pvcPerfCurrEirToNetExceed.setStatus('mandatory')
if mibBuilder.loadTexts: pvcPerfCurrEirToNetExceed.setDescription(' This object identifies the number of exceeding EIR times that the network detected within the current interval. The measurement is done for every Tc, and the direction is to the network.')
pvcPerfCurrTxFrameLoss = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 3, 3, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(255))).clone(namedValues=NamedValues(("notAvail", 255)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: pvcPerfCurrTxFrameLoss.setStatus('mandatory')
if mibBuilder.loadTexts: pvcPerfCurrTxFrameLoss.setDescription(' This object identifies the percentage of tx loss of frames within the total tx frames for this PVC.')
pvcPerfCurrRxFrameLoss = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 3, 3, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(255))).clone(namedValues=NamedValues(("notAvail", 255)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: pvcPerfCurrRxFrameLoss.setStatus('mandatory')
if mibBuilder.loadTexts: pvcPerfCurrRxFrameLoss.setDescription(' This object identifies the percentage of rx loss of frames within the total rx frames for this PVC.')
pvcPerfIntvTable = MibTable((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 3, 4), )
if mibBuilder.loadTexts: pvcPerfIntvTable.setStatus('mandatory')
if mibBuilder.loadTexts: pvcPerfIntvTable.setDescription(' PVC performance interval table, which displays the performance statistic data based on each PVC in previous 24 hours.')
pvcPerfIntvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 3, 4, 1), ).setIndexNames((0, "AP553-MIB", "pvcPerfIntvDlciIndex"), (0, "AP553-MIB", "pvcPerfIntvIndex"))
if mibBuilder.loadTexts: pvcPerfIntvEntry.setStatus('mandatory')
if mibBuilder.loadTexts: pvcPerfIntvEntry.setDescription(' Identifies an entry (row) in pvc statistic table')
pvcPerfIntvDlciIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 3, 4, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(16, 991))).setMaxAccess("readonly")
if mibBuilder.loadTexts: pvcPerfIntvDlciIndex.setStatus('mandatory')
if mibBuilder.loadTexts: pvcPerfIntvDlciIndex.setDescription(' This index value which uniquely identifies the PVC number to which this entry is applicable. ')
pvcPerfIntvIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 3, 4, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 96))).setMaxAccess("readonly")
if mibBuilder.loadTexts: pvcPerfIntvIndex.setStatus('mandatory')
if mibBuilder.loadTexts: pvcPerfIntvIndex.setDescription(' Identifies an entry (row) in historical PVC performance table,total 96 time intervals, where 1 is the most recently completed 15 minutes interval and 96 is the least recently completed 15 minutes interval (assuming that all 96 intervals are valid). ')
pvcPerfIntvRtd = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 3, 4, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: pvcPerfIntvRtd.setStatus('mandatory')
if mibBuilder.loadTexts: pvcPerfIntvRtd.setDescription(' Probe token average round trip time within a 15 minutes interval for a specific PVC. ')
pvcPerfIntvUnavailToDte = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 3, 4, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: pvcPerfIntvUnavailToDte.setStatus('mandatory')
if mibBuilder.loadTexts: pvcPerfIntvUnavailToDte.setDescription(' This object identifies the PVC average unavailability within a 15 minutes. This statistic datum is calculated by a PVC unavailable time due to the network or remote DTE fault over the full time interval.')
pvcPerfIntvUnavailToDce = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 3, 4, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: pvcPerfIntvUnavailToDce.setStatus('mandatory')
if mibBuilder.loadTexts: pvcPerfIntvUnavailToDce.setDescription(' This object identifies the PVC average unavailability within a 15 minutes. This statistic datum is calculated by a PVC unavailable time due to the local DTE fault over the full time interval.')
pvcPerfIntvTxLoad = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 3, 4, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: pvcPerfIntvTxLoad.setStatus('mandatory')
if mibBuilder.loadTexts: pvcPerfIntvTxLoad.setDescription(' This object identifies the PVC average user tx load within a 15 minutes interval. This statistic datum is calculated by user data octets sent by a specific PVC within the 15 minutes over the PVC total available load. If the CIR is correctly defined for this PVC, PVC available load will depend on CIR, otherwise, it will get from interface speed. ')
pvcPerfIntvRxLoad = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 3, 4, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: pvcPerfIntvRxLoad.setStatus('mandatory')
if mibBuilder.loadTexts: pvcPerfIntvRxLoad.setDescription(' This object identifies the PVC average user rx load within a 15 minutes interval. This statistic datum is calculated by user data octets received by a specific PVC within the 15 minutes over the PVC total available load. If the CIR is correctly defined for this PVC, PVC available load will depend on CIR, otherwise, it will get from interface speed. ')
pvcPerfIntvTotalTxLoad = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 3, 4, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: pvcPerfIntvTotalTxLoad.setStatus('mandatory')
if mibBuilder.loadTexts: pvcPerfIntvTotalTxLoad.setDescription(' This object identifies the PVC total tx load within a 15 minutes interval. This statistic datum is calculated by total data octets sent by a specific PVC within the 15 minutes over the PVC total available load. If the CIR is correctly defined for this PVC, PVC available load will depend on CIR, otherwise, it will get from interface speed. ')
pvcPerfIntvTotalRxLoad = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 3, 4, 1, 9), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: pvcPerfIntvTotalRxLoad.setStatus('mandatory')
if mibBuilder.loadTexts: pvcPerfIntvTotalRxLoad.setDescription(' This object identifies the PVC total rx load within a 15 minutes interval. This statistic datum is calculated by total data octets received by a specific PVC within the 15 minutes over the PVC total available load. If the CIR is correctly defined for this PVC, PVC available load will depend on CIR, otherwise, it will get from interface speed. ')
pvcPerfIntvCirToNetExceed = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 3, 4, 1, 10), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: pvcPerfIntvCirToNetExceed.setStatus('mandatory')
if mibBuilder.loadTexts: pvcPerfIntvCirToNetExceed.setDescription(' This object identifies the number of exceeding CIR times that the network detected within an interval. The measurement is done for every Tc, and the direction is to the network.')
pvcPerfIntvEirToNetExceed = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 3, 4, 1, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: pvcPerfIntvEirToNetExceed.setStatus('mandatory')
if mibBuilder.loadTexts: pvcPerfIntvEirToNetExceed.setDescription(' This object identifies the number of exceeding EIR times that the network detected within an interval. The measurement is done for every Tc, and the direction is to the network.')
pvcPerfIntvTxFrameLoss = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 3, 4, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(255))).clone(namedValues=NamedValues(("notAvail", 255)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: pvcPerfIntvTxFrameLoss.setStatus('mandatory')
if mibBuilder.loadTexts: pvcPerfIntvTxFrameLoss.setDescription(' This object identifies the percentage of tx loss of frames within the total tx frames for this PVC in this interval.')
pvcPerfIntvRxFrameLoss = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 3, 4, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(255))).clone(namedValues=NamedValues(("notAvail", 255)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: pvcPerfIntvRxFrameLoss.setStatus('mandatory')
if mibBuilder.loadTexts: pvcPerfIntvRxFrameLoss.setDescription(' This object identifies the percentage of rx loss of frames within the total rx frames for this PVC in this interval.')
pvcPerfRealTimeTable = MibTable((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 3, 5), )
if mibBuilder.loadTexts: pvcPerfRealTimeTable.setStatus('mandatory')
if mibBuilder.loadTexts: pvcPerfRealTimeTable.setDescription(' PVC performance table, which displays the performance statistic data based on each PVC.')
pvcPerfRealTimeEntry = MibTableRow((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 3, 5, 1), ).setIndexNames((0, "AP553-MIB", "pvcPerfRealTimeDlciIndex"))
if mibBuilder.loadTexts: pvcPerfRealTimeEntry.setStatus('mandatory')
if mibBuilder.loadTexts: pvcPerfRealTimeEntry.setDescription(' Identifies an entry (row) in pvc statistic table')
pvcPerfRealTimeDlciIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 3, 5, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(16, 991))).setMaxAccess("readonly")
if mibBuilder.loadTexts: pvcPerfRealTimeDlciIndex.setStatus('mandatory')
if mibBuilder.loadTexts: pvcPerfRealTimeDlciIndex.setDescription(' This index value which uniquely identifies the PVC number to which this entry is applicable. ')
pvcPerfRealTimeRtd = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 3, 5, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: pvcPerfRealTimeRtd.setStatus('mandatory')
if mibBuilder.loadTexts: pvcPerfRealTimeRtd.setDescription(' Probe token real time round trip delay in msec. for a PVC, it is calculated every Probe Poll interval (1 to 60 sec.). ')
pvcPerfRealTimeLoadToDCE = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 3, 5, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: pvcPerfRealTimeLoadToDCE.setStatus('mandatory')
if mibBuilder.loadTexts: pvcPerfRealTimeLoadToDCE.setDescription(' This object displays the PVC load to DCE within current TC ( 1 sec.), this variable is calculated by total octets transmitted to DTE over the CIR of this PVC.')
pvcPerfRealTimeLoadToDTE = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 3, 5, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: pvcPerfRealTimeLoadToDTE.setStatus('mandatory')
if mibBuilder.loadTexts: pvcPerfRealTimeLoadToDTE.setDescription(' This object displays the PVC load to DTE within current TC ( 1 sec.), this variable is calculated by total octets transmitted to DCE over the CIR of this PVC.')
alarmCurrentStatusBitsMap = MibScalar((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 4, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4095))).setMaxAccess("readonly")
if mibBuilder.loadTexts: alarmCurrentStatusBitsMap.setStatus('mandatory')
if mibBuilder.loadTexts: alarmCurrentStatusBitsMap.setDescription('This parameter is a bitmap for the 6 enterprise specific traps. A bit value of one means the trap is active. A bit value of zero means the trap is clear. 1st bit = avgChanLoadToDTE_trap; 2nd bit = realTimeChanLoadToDTE_trap; 3rd bit = avgChanLoadToDCE_trap; 4th bit = realTimeChanLoadToDCE_trap; 5th bit = networkLmiInterfaceDown_trap; 6th bit = userLmiInterfaceDown_trap; ')
alarmCurrentStatusTable = MibTable((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 4, 1, 2), )
if mibBuilder.loadTexts: alarmCurrentStatusTable.setStatus('mandatory')
if mibBuilder.loadTexts: alarmCurrentStatusTable.setDescription(' PVC table which lists PVCs and alarm status. ')
alarmStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 4, 1, 2, 1), ).setIndexNames((0, "AP553-MIB", "alarmStatusTableIndex"))
if mibBuilder.loadTexts: alarmStatusEntry.setStatus('mandatory')
if mibBuilder.loadTexts: alarmStatusEntry.setDescription(' Identifies an entry (row) in PVC alarm status table.')
alarmStatusTableIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 4, 1, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(16, 991))).setMaxAccess("readonly")
if mibBuilder.loadTexts: alarmStatusTableIndex.setStatus('mandatory')
if mibBuilder.loadTexts: alarmStatusTableIndex.setDescription(' Identifies an entry (row) in the alarm status table. ')
currentStatusBitsMap = MibTableColumn((1, 3, 6, 1, 4, 1, 498, 22, 1, 2, 4, 1, 2, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4095))).setMaxAccess("readonly")
if mibBuilder.loadTexts: currentStatusBitsMap.setStatus('mandatory')
if mibBuilder.loadTexts: currentStatusBitsMap.setDescription('This parameter is a bitmap for the 14 enterprise specific traps. A bit value of one means the trap is active. A bit value of zero means the trap is clear. 1st bit = avgPvcNotAvailToDCE_trap; 2nd bit = avgPvcNotAvailToDTE_trap; 3rd bit = avgPvcRoundTripDelay_trap; 4th bit = realTimePvcRoundTripDelay_trap; 5th bit = avgPvcBecnFrames_trap; 6th bit = avgPvcFecnFrames_trap; 7th bit = avgPvcLoadToDTE_trap; 8th bit = realTimePvcLoadToDTE_trap; 9th bit = avgPvcLoadToDCE_trap; 10th bit = realTimePvcLoadToDCE_trap; 11th bit = avgPvcCIRExceedToDTE_trap; 12th bit = avgPvcEIRExceedToDTE_trap; 13th bit = avgPvcLossFrameTx_trap; 14th bit = avgPvcLossFrameRx_trap; ')
mibBuilder.exportSymbols("AP553-MIB", pollPeriod=pollPeriod, realTimeChanLoadToDTEThreshVar=realTimeChanLoadToDTEThreshVar, chCurrTxResp=chCurrTxResp, pvcIntvDteOctets=pvcIntvDteOctets, alarmCurrentStatusBitsMap=alarmCurrentStatusBitsMap, lmiDLCI=lmiDLCI, pvcPerfIntvTxLoad=pvcPerfIntvTxLoad, avgPvcNotAvailToDCEThresh=avgPvcNotAvailToDCEThresh, frProbeMIBversion=frProbeMIBversion, pvcCurrLoopback=pvcCurrLoopback, chIntvLmiRxResp=chIntvLmiRxResp, chPerfIntvRxLoad=chPerfIntvRxLoad, pvcPerfIntvEntry=pvcPerfIntvEntry, pvcPerfRealTimeDlciIndex=pvcPerfRealTimeDlciIndex, chCurrRxPoll=chCurrRxPoll, chPerfIntvIndex=chPerfIntvIndex, realTimePvcLoadToDTEThreshVar=realTimePvcLoadToDTEThreshVar, avgPvcFecnFramesThresh=avgPvcFecnFramesThresh, pvcPerfIntvRxLoad=pvcPerfIntvRxLoad, avgChanLoadToDTETrapSeverity=avgChanLoadToDTETrapSeverity, pvcCurrFecnFrames=pvcCurrFecnFrames, pvcIntvFecnFrames=pvcIntvFecnFrames, chanPerfCurr=chanPerfCurr, avgChanLoadToDTEThresh=avgChanLoadToDTEThresh, pvcPerfRealTimeLoadToDTE=pvcPerfRealTimeLoadToDTE, pvcPerfCurrUnavailToDte=pvcPerfCurrUnavailToDte, proFunConfig=proFunConfig, realTimeTrapSeverity=realTimeTrapSeverity, chPerfCurrTxLoad=chPerfCurrTxLoad, t391Timer=t391Timer, chanPerfIntvEntry=chanPerfIntvEntry, pvcDiscovery=pvcDiscovery, pvcCirToDTE=pvcCirToDTE, pvcCirEirTable=pvcCirEirTable, chanAggregateRate=chanAggregateRate, chIntvDCELmiTimeout=chIntvDCELmiTimeout, pvcState=pvcState, chCurrDceOctets=chCurrDceOctets, pvcPerfRealTimeRtd=pvcPerfRealTimeRtd, chPerfCurrTotalTxLoad=chPerfCurrTotalTxLoad, maxInfoLength=maxInfoLength, pvcStIntervalEntry=pvcStIntervalEntry, chPerfCurrUnavailToDte=chPerfCurrUnavailToDte, pvcIntvDteFramesWithDE=pvcIntvDteFramesWithDE, pvcIntvTxPoll=pvcIntvTxPoll, unitType=unitType, avgPvcRoundTripDelayThresh=avgPvcRoundTripDelayThresh, pvcPerfIntvTotalTxLoad=pvcPerfIntvTotalTxLoad, avgPvcBecnFramesTrapSeverity=avgPvcBecnFramesTrapSeverity, probePerform=probePerform, currentStatusBitsMap=currentStatusBitsMap, 
chIntvDteOctets=chIntvDteOctets, chIntvDceFrames=chIntvDceFrames, pvcAdd=pvcAdd, pvcPerfIntvTxFrameLoss=pvcPerfIntvTxFrameLoss, probeVersion=probeVersion, avgPvcLoadToDTETrapSeverity=avgPvcLoadToDTETrapSeverity, pvcPerfIntvEirToNetExceed=pvcPerfIntvEirToNetExceed, trapConfig=trapConfig, pvcPerfIntvUnavailToDce=pvcPerfIntvUnavailToDce, avgChanLoadToDCETrapSeverity=avgChanLoadToDCETrapSeverity, chanConfig=chanConfig, chPerfCurrTotalRxLoad=chPerfCurrTotalRxLoad, chIntvRxPoll=chIntvRxPoll, pvcCurrTxResp=pvcCurrTxResp, pvcCirEirTableIndex=pvcCirEirTableIndex, pvcIdentifier=pvcIdentifier, lmiN391=lmiN391, chCurrDceFrames=chCurrDceFrames, chIntvTxPoll=chIntvTxPoll, pvcIntvDteFrames=pvcIntvDteFrames, realTimeChanLoadToDCEThresh=realTimeChanLoadToDCEThresh, chCurrRxIp=chCurrRxIp, pvcIntvRxPoll=pvcIntvRxPoll, pvcPerfCurrEirToNetExceed=pvcPerfCurrEirToNetExceed, chCurrLmiTxEnq=chCurrLmiTxEnq, avgPvcBecnFramesThresh=avgPvcBecnFramesThresh, chCurrLmiRxResp=chCurrLmiRxResp, chStIntervalEntry=chStIntervalEntry, pvcCurrDceOctets=pvcCurrDceOctets, chIntvLmiTxResp=chIntvLmiTxResp, avgPvcLoadToDCEThresh=avgPvcLoadToDCEThresh, pvcStCurrentTable=pvcStCurrentTable, lmiUser393=lmiUser393, realTimePvcRoundTripDelayThreshVar=realTimePvcRoundTripDelayThreshVar, chIntvLmiTxEnq=chIntvLmiTxEnq, chPerfIntvTxLoad=chPerfIntvTxLoad, realTimeChanLoadToDCEThreshVar=realTimeChanLoadToDCEThreshVar, pvcPerfCurrEntry=pvcPerfCurrEntry, pvcPerfCurrCirToNetExceed=pvcPerfCurrCirToNetExceed, pvcCurrTxPoll=pvcCurrTxPoll, chCurrDteFrames=chCurrDteFrames, chanPerfIntvTable=chanPerfIntvTable, avgPvcNotAvailToDTEThresh=avgPvcNotAvailToDTEThresh, networkLmiInterfaceDownTrapSeverity=networkLmiInterfaceDownTrapSeverity, pvcCurrBecnFrames=pvcCurrBecnFrames, lmiConfig=lmiConfig, pvcPerfCurrRxLoad=pvcPerfCurrRxLoad, pvcPerfIntvRtd=pvcPerfIntvRtd, realTimeChanLoadToDTEThresh=realTimeChanLoadToDTEThresh, pvcPerfCurrTable=pvcPerfCurrTable, pvcCurrDceFramesWithDE=pvcCurrDceFramesWithDE, pvcIntvDlciIndex=pvcIntvDlciIndex, 
lmiUser392=lmiUser392, pvcPerfIntvDlciIndex=pvcPerfIntvDlciIndex, t392Timer=t392Timer, alarmStatusTableIndex=alarmStatusTableIndex, avgPvcCIRExceedToDTETrapSeverity=avgPvcCIRExceedToDTETrapSeverity, chPerfIntvTotalTxLoad=chPerfIntvTotalTxLoad, realTimePvcRoundTripDelayThresh=realTimePvcRoundTripDelayThresh, chIntvDteFrames=chIntvDteFrames, pvcCurrDceFrames=pvcCurrDceFrames, pvcStIntervalTable=pvcStIntervalTable, alarmCurrentStatusTable=alarmCurrentStatusTable, pvcIntvTxResp=pvcIntvTxResp, probeConfig=probeConfig, avgPvcNotAvailToDTETrapSeverity=avgPvcNotAvailToDTETrapSeverity, userLmiInterfaceDownTrapSeverity=userLmiInterfaceDownTrapSeverity, pvcDelete=pvcDelete, pvcIntvDceFramesWithDE=pvcIntvDceFramesWithDE, pvcIntvLoopback=pvcIntvLoopback, realTimePvcLoadToDCEThreshVar=realTimePvcLoadToDCEThreshVar, avgPvcRoundTripDelayTrapSeverity=avgPvcRoundTripDelayTrapSeverity, pvcPerfCurrUnavailToDce=pvcPerfCurrUnavailToDce, globalTC=globalTC, pvcOperation=pvcOperation, ipDLCI=ipDLCI, chCurrRxResp=chCurrRxResp, chCurrLmiRxEnq=chCurrLmiRxEnq, chIntvDceOctets=chIntvDceOctets, avgPvcLossFrameTxTrapSeverity=avgPvcLossFrameTxTrapSeverity, pvcPerfCurrTxLoad=pvcPerfCurrTxLoad, ipEncapp=ipEncapp, chPerfCurrLoadToDteRealTime=chPerfCurrLoadToDteRealTime, lmiNet392=lmiNet392, pvcCount=pvcCount, chPerfIntvUnavailToDce=chPerfIntvUnavailToDce, chPerfCurrRxLoad=chPerfCurrRxLoad, pvcPerfRealTimeLoadToDCE=pvcPerfRealTimeLoadToDCE, lmiOperation=lmiOperation, chCurrLmiTxResp=chCurrLmiTxResp, pvcPerfCurrRtd=pvcPerfCurrRtd, pvcConfig=pvcConfig, avgPvcFecnFramesTrapSeverity=avgPvcFecnFramesTrapSeverity, chCurrTxPoll=chCurrTxPoll, chStIntervalTable=chStIntervalTable, avgPvcNotAvailToDCETrapSeverity=avgPvcNotAvailToDCETrapSeverity, pvcCurrDteOctets=pvcCurrDteOctets, pvcEirToDTE=pvcEirToDTE, pvcPerfCurrTxFrameLoss=pvcPerfCurrTxFrameLoss, chCurrBecnFrames=chCurrBecnFrames, pvcPerfCurrRxFrameLoss=pvcPerfCurrRxFrameLoss, avgPvcEIRExceedToDTETrapSeverity=avgPvcEIRExceedToDTETrapSeverity, 
pvcCurrDteFrames=pvcCurrDteFrames, pvcStCurrentEntry=pvcStCurrentEntry, trapStatus=trapStatus, pvcIntvDceFrames=pvcIntvDceFrames, pvcIntvIndex=pvcIntvIndex, pvcPerfCurrTotalTxLoad=pvcPerfCurrTotalTxLoad, chPerfIntvUnavailToDte=chPerfIntvUnavailToDte, probeMode=probeMode, pvcEirToDCE=pvcEirToDCE, chIntvTxIp=chIntvTxIp, avgChanLoadToDCEThresh=avgChanLoadToDCEThresh, pvcPerfRealTimeTable=pvcPerfRealTimeTable, pvcPerfIntvUnavailToDte=pvcPerfIntvUnavailToDte, chIntvRxResp=chIntvRxResp, chPerfCurrUnavailToDce=chPerfCurrUnavailToDce, chIntvDTELmiTimeout=chIntvDTELmiTimeout, lmiNet393=lmiNet393, probeStat=probeStat, chCurrFecnFrames=chCurrFecnFrames, pvcPerfIntvTable=pvcPerfIntvTable, chIntvBecnFrames=chIntvBecnFrames, chIntvLmiRxEnq=chIntvLmiRxEnq, chPerfIntvTotalRxLoad=chPerfIntvTotalRxLoad, pvcCurrDteFramesWithDE=pvcCurrDteFramesWithDE, chIntvFecnFrames=chIntvFecnFrames, realTimePvcLoadToDTEThresh=realTimePvcLoadToDTEThresh, pvcPerfIntvRxFrameLoss=pvcPerfIntvRxFrameLoss, pvcPerfIntvCirToNetExceed=pvcPerfIntvCirToNetExceed, lmiType=lmiType, intervalComplete=intervalComplete, chIntvTxResp=chIntvTxResp, pvcCurrRxResp=pvcCurrRxResp, pvcCirToDCE=pvcCirToDCE, pvcIntvDceOctets=pvcIntvDceOctets, chIntvIndex=chIntvIndex, chPerfCurrLoadToDceRealTime=chPerfCurrLoadToDceRealTime, pvcPerfCurrTotalRxLoad=pvcPerfCurrTotalRxLoad, pvcPerfIntvTotalRxLoad=pvcPerfIntvTotalRxLoad, alarmStatus=alarmStatus, probeTokenSize=probeTokenSize, avgPvcLoadToDTEThresh=avgPvcLoadToDTEThresh, chCurrDCELmiTimeout=chCurrDCELmiTimeout, pvcPerfCurrDlciIndex=pvcPerfCurrDlciIndex, alarmStatusEntry=alarmStatusEntry, avgPvcLoadToDCETrapSeverity=avgPvcLoadToDCETrapSeverity, pvcOperStateChangeTrapSeverity=pvcOperStateChangeTrapSeverity, chCurrTxIp=chCurrTxIp, pvcPerfIntvIndex=pvcPerfIntvIndex, pvcIntvBecnFrames=pvcIntvBecnFrames, realTimePvcLoadToDCEThresh=realTimePvcLoadToDCEThresh, chStCurrent=chStCurrent, chCurrDteOctets=chCurrDteOctets, pvcCurrRxPoll=pvcCurrRxPoll, pvcCurrDlciIndex=pvcCurrDlciIndex, 
pvcPerfRealTimeEntry=pvcPerfRealTimeEntry, chIntvRxIp=chIntvRxIp, avgPvcLossFrameRxTrapSeverity=avgPvcLossFrameRxTrapSeverity, chCurrDTELmiTimeout=chCurrDTELmiTimeout, pvcCirEirEntry=pvcCirEirEntry, pvcIntvRxResp=pvcIntvRxResp)
| [
2,
198,
2,
9485,
15571,
7378,
337,
9865,
8265,
3486,
48096,
12,
8895,
33,
357,
4023,
1378,
16184,
76,
489,
8937,
13,
785,
14,
79,
893,
11632,
8,
198,
2,
7054,
45,
13,
16,
2723,
2393,
1378,
14,
14490,
14,
67,
615,
47562,
19,
14,
... | 2.868472 | 34,312 |
# Usage: python3 main.py --mode dev --model bilstm \
# --load tmp/lstm_epochs=4 --num_examples 100
from collections import defaultdict
import time
import datetime
import biLstm_with_chars # word + char model biLSTM model
import biLstm_char_only # char only biLSTM model
import biLstm
#from biLstm_with_chars import BiLSTM
# from CNN import CNN
# from RNN_with_char import RNN
import argparse
from random import shuffle
import sys
import string
from nltk.corpus import stopwords as SW
#import hunspell_checker
#from hunspell_checker import HunspellChecker
import attacks
from tqdm import tqdm
import pickle
# NOTE: dynet_config.set() must run BEFORE `import dynet` below — DyNet reads
# its configuration (here the RNG seed) only at import time.
import dynet_config
dynet_config.set(random_seed=42)
import dynet as dy
import numpy as np
np.random.seed(42)
import random
random.seed(42)
# Make the scRNN defense and the spell-checker packages importable; the scRNN
# `corrector` module is imported as a top-level name from defenses/scRNN/.
sys.path.insert(0, 'defenses/scRNN/')
sys.path.append('spell_checkers/')
from spell_checkers.atd_checker import ATDChecker
from corrector import ScRNNChecker
import sys, os
# Also expose the repo root and its parent so sibling packages resolve.
# NOTE(review): these paths assume the script is run from its own directory
# tree — confirm against the project layout.
sys.path.append(os.path.dirname(os.path.abspath(__file__)) + "/../..")
sys.path.append(os.path.dirname(os.path.abspath(__file__)) + "/..")
from spell_checkers.corrector_elmosclstm import CorrectorElmoSCLstm
# Relative data folder consumed by the ELMo/scRNN corrector.
ELMOSCRNN_DATA_FOLDER_PATH = "../../../data"
# personal logging lib
import log
log.DEBUG = True
# English stopwords plus ASCII punctuation, used to filter candidate tokens.
stopwords = set(SW.words("english")) | set(string.punctuation)
# Functions to read in the corpus
# Vocabulary maps: each defaultdict mints a fresh contiguous id (its current
# length) the first time an unseen key is looked up.
w2i = defaultdict(lambda: len(w2i))   # word -> id
t2i = defaultdict(lambda: len(t2i))   # tag -> id
s2i = defaultdict(lambda: len(s2i))   # presumably sentence/suffix -> id; verify against usage
c2i = defaultdict(lambda: len(c2i))   # character -> id
# Reserve id 0 for the unknown word / unknown character.
UNK = w2i["<unk>"]
CHAR_UNK = c2i["<unk>"]
NUM_EXAMPLES = 100
# Sets of words/characters actually seen in training data (frozen vocab).
vocab_set = set()
char_vocab_set = set()
def read_valid_lines(filename):
"""reads files (ignores the neutral reviews)
Arguments:
filename -- data file
Returns:
lines, tags: list of reviews, and their tags
"""
print("starting to read %s" %(filename))
lines, tags = [], []
with open(filename, 'r') as f:
for line in f:
tag, words = line.lower().strip().split(" ||| ")
if tag == '0' or tag == '1': tag = '0'
if tag == '3' or tag == '4': tag = '1'
if tag == '2': continue
tags.append(tag)
lines.append(words)
return lines, tags
def read_dataset(filename, drop=False, swap=False, key=False, add=False, all=False):
"""creates a dataset from reading reviews; uses word and tag dicts
Arguments:
filename -- input file
"""
lines, tags = read_valid_lines(filename)
ans = []
for line, tag in zip(lines, tags):
words = [x for x in line.split(" ")]
word_idxs = [w2i[x] for x in line.split(" ")]
char_idxs = []
for word in words: char_idxs.append([c2i[i] for i in word])
tag = t2i[tag]
ans.append((word_idxs, char_idxs, tag))
if (drop or swap or key or add or all) and random.random() < char_drop_prob:
if drop:
line = drop_a_char(line)
elif swap:
line = swap_a_char(line)
elif key:
line = key_a_char(line)
elif add:
line = add_a_char(line)
elif all:
perturbation_fns = [drop_a_char, swap_a_char, add_a_char, swap_a_char]
perturbation_fn = np.random.choice(perturbation_fns, 1)[0]
line = perturbation_fn(line)
words = [x for x in line.split(" ")]
word_idxs = [w2i[x] for x in line.split(" ")]
char_idxs = []
for word in words: char_idxs.append([c2i[i] for i in word])
ans.append((word_idxs, char_idxs, tag))
return ans
def normalize(x):
""" normalizes the scores in x, works only for 1D """
e_x = np.exp(x - np.max(x))
return e_x / e_x.sum()
def create_vocabulary(filename):
"""given a file, creates the vocab set from its words
Arguments:
filename -- input file
"""
global vocab_set
lines, _ = read_valid_lines(filename)
for line in lines:
for word in line.split(" "):
vocab_set.add(word)
for char in word:
char_vocab_set.add(char)
return
# make argparse
parser = argparse.ArgumentParser(
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('--load', dest='input_file', type=str, default="",
help = 'load already trained model')
parser.add_argument('--save', dest='output_file', type=str, default="",
help = 'save existing model')
parser.add_argument('--model', dest='model_type', type=str, default="lstm",
help = 'architecture of the model: lstm or rnn or cnn')
parser.add_argument('--mode', dest='mode', type=str, default="dev",
help = 'training or dev?')
parser.add_argument('--attack', dest='type_of_attack', type=str, default=None,
help='type of attack you want, swap/drop/add/key/all')
parser.add_argument('--small', dest='small', action='store_true')
parser.add_argument('--debug', dest='debug', action='store_true')
parser.add_argument('--backoff', dest='backoff_mode', type=str, default="",
help = 'neutral or pass-through')
parser.add_argument('--defense', dest='is_spell_check', action='store_true')
parser.add_argument('--sc-neutral', dest='unk_output', action='store_true')
parser.add_argument('--sc-background', dest='sc_background', action='store_true')
parser.add_argument('--analyse', dest='analyse', action='store_true')
parser.set_defaults(is_spell_check=False)
parser.add_argument('--include-ends', dest='include_ends', action='store_true')
parser.set_defaults(include_ends=False)
# data augmentation flags
parser.add_argument('--da-drop', dest='da_drop', action='store_true')
parser.add_argument('--da-key', dest='da_key', action='store_true')
parser.add_argument('--da-add', dest='da_add', action='store_true')
parser.add_argument('--da-swap', dest='da_swap', action='store_true')
parser.add_argument('--da-all', dest='da_all', action='store_true')
parser.add_argument('--da-drop-prob', dest='da_drop_prob', type=float, default=0.5)
parser.add_argument('--num-attacks', dest='num_attacks', type=int, default=0)
parser.add_argument('--dynet-seed', dest='dynet-seed', type=int, default=42)
# adversarial training flags
parser.add_argument('--adv-drop', dest='adv_drop', action='store_true')
parser.add_argument('--adv-swap', dest='adv_swap', action='store_true')
parser.add_argument('--adv-key', dest='adv_key', action='store_true')
parser.add_argument('--adv-add', dest='adv_add', action='store_true')
parser.add_argument('--adv-all', dest='adv_all', action='store_true')
parser.add_argument('--adv-prob', dest='adv_prob', type=float, default=0.1)
# model names for spell check models
parser.add_argument('--sc-model-path', dest='sc_model_path', type=str, default=None,
help = 'the model path for ScRNN model')
parser.add_argument('--sc-model-path-bg', dest='sc_model_path_bg', type=str, default=None,
help = 'the model path for ScRNN background model')
parser.add_argument('--sc-elmo', dest='sc_elmo', action='store_true')
parser.add_argument('--sc-elmo-bg', dest='sc_elmo_bg', action='store_true')
parser.add_argument('--sc-atd', dest='sc_atd', action='store_true')
parser.add_argument('--sc-elmoscrnn', dest='sc_elmoscrnn', action='store_true')
parser.add_argument('--sc-vocab-size', dest='sc_vocab_size', type=int, default=9999)
parser.add_argument('--sc-vocab-size-bg', dest='sc_vocab_size_bg', type=int, default=78470)
parser.add_argument('--task-name', dest='task_name', type=str, default="")
params = vars(parser.parse_args())
# logging details
log.DEBUG = params['debug']
model_type = params['model_type']
input_file = params['input_file']
mode = params['mode']
type_of_attack = params['type_of_attack']
char_drop_prob = params['da_drop_prob']
NUM_ATTACKS = params['num_attacks']
SC_MODEL_PATH = params['sc_model_path']
SC_MODEL_PATH_BG = params['sc_model_path_bg']
if params['sc_atd']:
checker = ATDChecker()
elif params['sc_elmoscrnn']:
print("###########")
print("using new spell corrector")
print(f"using backoff={params['backoff_mode']}")
print("###########")
checker = CorrectorElmoSCLstm(DATA_FOLDER_PATH=ELMOSCRNN_DATA_FOLDER_PATH, backoff=params['backoff_mode'])
elif SC_MODEL_PATH_BG is None or params['sc_background']:
# only foreground spell correct model...
checker = ScRNNChecker(model_name=SC_MODEL_PATH, use_background=False,
unk_output=params['unk_output'], use_elmo=params['sc_elmo'],
task_name=params['task_name'], vocab_size=params['sc_vocab_size'],
vocab_size_bg=params['sc_vocab_size_bg'])
else:
checker = ScRNNChecker(model_name=SC_MODEL_PATH, model_name_bg=SC_MODEL_PATH_BG,
use_background=True, unk_output=params['unk_output'],
use_elmo=params['sc_elmo'], use_elmo_bg=params['sc_elmo_bg'],
task_name=params['task_name'], vocab_size=params['sc_vocab_size'],
vocab_size_bg=params['sc_vocab_size_bg'])
model = None
train = read_dataset("data/classes/train.txt")
# modify the dicts so that they return unk for unseen words/chars
w2i = defaultdict(lambda: UNK, w2i)
c2i = defaultdict(lambda: CHAR_UNK, c2i)
dev = read_dataset("data/classes/dev.txt")
test = read_dataset("data/classes/test.txt")
main()
| [
2,
29566,
25,
21015,
18,
1388,
13,
9078,
1377,
14171,
1614,
1377,
19849,
275,
11750,
76,
3467,
198,
2,
1377,
2220,
45218,
14,
75,
301,
76,
62,
538,
5374,
82,
28,
19,
1377,
22510,
62,
1069,
12629,
1802,
198,
6738,
17268,
1330,
4277,
... | 2.467853 | 3,764 |
from django.db import migrations, models, reset_queries
import django.db.models.deletion
def code_remap_document_version_pages_ocr_content(apps, schema_editor):
"""
The document_page_id field is pointing to the document file page.
This migration will remap that to point it for the corresponding
document version page that is connected to the original document file
page.
"""
cursor_primary = schema_editor.connection.create_cursor(name='merged_content_page_id')
cursor_secondary = schema_editor.connection.cursor()
query = '''
SELECT
{ocr_documentpageocrcontent}.{content},
{documents_documentversionpage}.{id}
FROM {ocr_documentpageocrcontent}
LEFT OUTER JOIN
{documents_documentversionpage} ON (
{documents_documentversionpage}.{object_id} = {ocr_documentpageocrcontent}.{document_page_id}
)
'''.format(
content=schema_editor.connection.ops.quote_name('content'),
document_page_id=schema_editor.connection.ops.quote_name('document_page_id'),
documents_documentversionpage=schema_editor.connection.ops.quote_name('documents_documentversionpage'),
id=schema_editor.connection.ops.quote_name('id'),
object_id=schema_editor.connection.ops.quote_name('object_id'),
ocr_documentpageocrcontent=schema_editor.connection.ops.quote_name('ocr_documentpageocrcontent')
)
cursor_primary.execute(query)
insert_query = '''
INSERT INTO {ocr_documentversionpageocrcontent} (
content,document_version_page_id
) VALUES {{}};
'''.format(
ocr_documentversionpageocrcontent=schema_editor.connection.ops.quote_name('ocr_documentversionpageocrcontent')
)
FETCH_SIZE = 10000
while True:
rows = cursor_primary.fetchmany(FETCH_SIZE)
if not rows:
break
insert_query_final = insert_query.format(
('(%s,%s),' * len(rows))[:-1]
)
tuples = []
for row in rows:
tuples += row
cursor_secondary.execute(insert_query_final, tuples)
reset_queries()
| [
6738,
42625,
14208,
13,
9945,
1330,
15720,
602,
11,
4981,
11,
13259,
62,
421,
10640,
198,
11748,
42625,
14208,
13,
9945,
13,
27530,
13,
2934,
1616,
295,
628,
198,
4299,
2438,
62,
2787,
499,
62,
22897,
62,
9641,
62,
31126,
62,
1696,
... | 2.501155 | 866 |
# Encoding: utf-8
""" Behavior module for loading data from RT linux behavior controller.
"""
from __future__ import print_function
import copy
import matplotlib.pyplot as plt
import logging
import numpy as np
import os
import sys
from prep.Embedding import getExampleVol
from prep.HeaderReader import HeaderReader
from prep.IO import loadmat
# Setup logging
logger = logging.getLogger(__name__)
logger.handlers = []
logger.setLevel(logging.DEBUG)
ch = logging.StreamHandler(sys.stdout)
ch.setFormatter(logging.Formatter("%(name)s @ %(asctime)s - [%(levelname)s] %(module)s::%(funcName)s: %(message)s"))
ch.setLevel(logging.INFO)
logger.addHandler(ch)
def loadBehavior(sc, session, filename='Behavior'):
""" Loads behavior data from RT linux mat file and the timestamps from
the imaging files
:param sc: SparkContext
:param session: current session object
:param filename: file name
:return behavDict: Behavior dictionary
"""
behavDict = dict()
behavDict['filename'] = filename
behavDict['path'] = session.path
behavDict['nPlanes'] = session.nPlanes
if hasattr(session, 'start') and session.start is not None:
behavDict['start'] = session.start
else:
behavDict['start'] = 1
# from RT linux
fullPath = os.path.join(behavDict['path'], behavDict['filename'])
BehavData = loadmat(fullPath)
behavDict['rawData'] = BehavData
Saved = BehavData['saved']
behavDict['Delay'] = Saved['TimesSection_Delay_period']
if hasattr(session, 'stop') and session.stop is not None:
behavDict['stop'] = session.stop
else:
behavDict['stop'] = len(behavDict['rawData']['saved_history']['TimesSection_title'])
try:
behavDict['CorrectR'] = Saved['yes_no_multi_pole_akobj_correct_R_history']
behavDict['CorrectL'] = Saved['yes_no_multi_pole_akobj_correct_L_history']
behavDict['IncorrectR'] = Saved['yes_no_multi_pole_akobj_incorrect_R_history']
behavDict['IncorrectL'] = Saved['yes_no_multi_pole_akobj_incorrect_L_history']
behavDict['EarlyLick'] = Saved['yes_no_multi_pole_akobj_early_history']
except KeyError:
behavDict['CorrectR'] = Saved['yes_no_multi_pole_switchobj_correct_R_history']
behavDict['CorrectL'] = Saved['yes_no_multi_pole_switchobj_correct_L_history']
behavDict['IncorrectR'] = Saved['yes_no_multi_pole_switchobj_incorrect_R_history']
behavDict['IncorrectL'] = Saved['yes_no_multi_pole_switchobj_incorrect_L_history']
behavDict['EarlyLick'] = Saved['yes_no_multi_pole_switchobj_early_history']
# verify consistency with image timestamps
History = BehavData['saved_history']
Events = History['RewardsSection_LastTrialEvents']
Events = Events[1:] # disregard first trial!!!
behaviorTime = np.zeros(len(Events))
behavDict['behaviorTime'] = behaviorTime
# get the transition to state 40 from RT linux as the trigger time
i = 0
for event in Events:
FirstArray = np.array([np.array(xi) for xi in event])
Index40 = np.where(FirstArray[:, 0] == 40)[0][1]
behaviorTime[i] = FirstArray[Index40, 2]
i += 1
behaviorTime = np.diff(behaviorTime)
# get the timestamps from tiff header
hrObj = HeaderReader(sc)
stamps = hrObj.read(os.path.join(behavDict['path'], ''), behavDict['nPlanes'])
behavDict['stamps'] = stamps
behavDict['nRecords'] = len(stamps)
# check missing frames
stamps_frames = np.asarray(stamps)
sz = stamps_frames.shape
stamps_frames = np.reshape(stamps_frames, (sz[0] * sz[1], sz[2]))
behavDict['stamps_frames'] = stamps_frames
if np.sum(np.diff(stamps_frames[:, 0]) - 1) > 0:
logger.info('loadBehavior:: missing frames')
else:
logger.info('loadBehavior:: no missing frames')
# get the time where the timestamp of the trial changed
ImageTimes = np.diff(stamps_frames[np.where(np.diff(stamps_frames[:, 3]) > 0), 3])
ImageTimes = np.squeeze(ImageTimes.T)
behavDict['imageTime'] = ImageTimes
# plot times
plt.plot(behaviorTime, label='Behavior')
plt.plot(ImageTimes, '*', label='Imaging')
plt.legend()
plt.xlabel('Trial #')
plt.ylabel('Trial duration (s)')
# Num of Correct trials:
CR = np.array(behavDict['CorrectR'], dtype=np.bool)
CL = np.array(behavDict['CorrectL'], dtype=np.bool)
EL = np.array(behavDict['EarlyLick'], dtype=np.bool)
numCR = sum(np.logical_and(CR, np.logical_not(EL)))
numCL = sum(np.logical_and(CL, np.logical_not(EL)))
total = sum(np.logical_not(EL))
behavDict['numCR'] = numCR
behavDict['numCL'] = numCL
behavDict['numER'] = sum(np.logical_and(behavDict['IncorrectR'], np.logical_not(behavDict['EarlyLick'])))
behavDict['numEL'] = sum(np.logical_and(behavDict['IncorrectL'], np.logical_not(behavDict['EarlyLick'])))
behavDict['total'] = total
CR = Saved['AnalysisSection_PercentCorrect_R']
CL = Saved['AnalysisSection_PercentCorrect_L']
behavDict['CR'] = CR
behavDict['CL'] = CL
plt.title('CR: %d %f%%, CL: %d %f%%, ER:, %d, EL:, %d' % (numCR, CR, numCL, CL, behavDict['numER'],
behavDict['numEL']))
# get two level index
getIndices(behavDict)
return behavDict
def getIndices(behavDict):
""" prepares the index for aggregation of the data according to behavior
:param behavDict: Behavior dictionary after loadBehavior runs
"""
nRecords = behavDict['nRecords']
StartTrial = np.asarray(np.where(np.diff(behavDict['stamps_frames'][:, 3]) > 0))
StartTrial = np.concatenate((np.asarray([[0]]), StartTrial), axis=1)
StartTrial = StartTrial[0]
StartTrial = StartTrial / behavDict['nPlanes']
Index = np.zeros(nRecords, dtype=np.int)
for i in StartTrial:
Index[int(i)::] = Index[int(i)::] + 1
if behavDict['stop'] is None:
behavDict['stop'] = np.max(Index)
TimeIndex = np.zeros(nRecords, dtype=np.int) # frame number from start of current trial
TypeIndex = np.zeros(nRecords, dtype=np.int) # trial type: 1 CR, 2 CL, 3 ER, 4 EL, 0 Early lick
TimeIndex[0] = 1
Time = 1
MaxBehave = len(behavDict['CorrectR'])
where = Index >= MaxBehave
if where.sum() > 0:
logger.info('Clipped timepoints: %d, at: %s out of %d' %
(where.sum(), np.where(Index >= MaxBehave)[0], nRecords - 1))
Index[Index >= MaxBehave] = MaxBehave - 1
for i in range(1, nRecords):
if behavDict['CorrectR'][Index[i]] and not (behavDict['EarlyLick'][Index[i]]):
Type = 1
elif behavDict['CorrectL'][Index[i]] and not (behavDict['EarlyLick'][Index[i]]):
Type = 2
elif behavDict['IncorrectR'][Index[i]] and not (behavDict['EarlyLick'][Index[i]]):
Type = 3
elif behavDict['IncorrectL'][Index[i]] and not (behavDict['EarlyLick'][Index[i]]):
Type = 4
else:
Type = 0
TypeIndex[i] = Type
if Index[i] != Index[i - 1]:
Time = 1
else:
Time += 1
TimeIndex[i] = Time
TypeIndex[0] = TypeIndex[1]
newIndex = np.array([TypeIndex, TimeIndex]).T
behavDict['Index'] = Index
behavDict['newIndexAll'] = newIndex
valid = (np.where((Index >= behavDict['start']) & (Index <= behavDict['stop'])))[0]
behavDict['newIndex'] = newIndex[valid, :]
def aggregateByType(behavDict, data):
""" aggregates data from an images object according to a two level index
:param behavDict: Behavior dictionary
:param data: Images object
:return: a 5D numpy array (index1, index2, x, y, z)
"""
twoLevelIndex = behavDict['newIndex']
data.cache()
data.count()
si = data.shape[1:]
maxValues = np.max(twoLevelIndex, axis=0)
aggregated = np.zeros((maxValues[0] + 1, maxValues[1], si[0], si[1], si[2]), dtype='float32')
aggregated[:] = np.NAN
logger.info('aggregateByType:: ' + str(si[2]) + ' planes: ')
# for each plane
for k in range(0, si[2]):
plane = data[:, :, :, k:k + 1].toarray()
# index level 1
for i in range(0, maxValues[0] + 1):
# index level 2
for j in range(0, maxValues[1]):
idx = np.intersect1d((twoLevelIndex[:, 0] == i).nonzero()[0],
(twoLevelIndex[:, 1] == (j + 1)).nonzero()[0])
aggregated[i, j, :, :, k] = np.nanmean(plane[idx, :, :], axis=0).astype('float32')
logger.info(k)
sys.stdout.flush()
return aggregated
def runBehavior(sc, session, regData, TForm1):
"""
:param sc: Spark Context
:param session: SpineSession object
:param regData:
:param TForm1:
:return:
"""
behavDict = loadBehavior(sc, session)
aggregated = aggregateByType(behavDict, regData)
correctR = getExampleVol(sc, aggregated[1, :140, :, :, :], TForm1, project=True)
correctL = getExampleVol(sc, aggregated[2, :140, :, :, :], TForm1, project=True)
session.writeTiff(correctR.transpose((1, 2, 0)), 'correctR')
session.writeTiff(correctL.transpose((1, 2, 0)), 'correctL')
diffs = np.diff(np.dstack(behavDict['stamps'])[0, 1, :])
session.volRateNew = np.nanmedian(1. / diffs[diffs > 0.001])
getEvents(session.volRateNew, behavDict)
return behavDict
| [
2,
14711,
7656,
25,
3384,
69,
12,
23,
201,
198,
37811,
20181,
8265,
329,
11046,
1366,
422,
11923,
32639,
4069,
10444,
13,
201,
198,
201,
198,
37811,
201,
198,
201,
198,
6738,
11593,
37443,
834,
1330,
3601,
62,
8818,
201,
198,
201,
1... | 2.274598 | 4,228 |
from setuptools import setup
with open('README.md', encoding='utf-8') as f:
README = f.read()
setup(
name='pyminiCLI',
version='0.2.1',
packages=['minicli'],
license='MIT',
author='HDIctus',
tests_require=['pytest'],
author_email='h.t.dictus@gmail.com',
description='quick and easy command-line interfaces',
long_description=README,
long_description_content_type="text/markdown",
url="https://github.com/HDictus/pyminiCLI"
)
| [
6738,
900,
37623,
10141,
1330,
9058,
198,
198,
4480,
1280,
10786,
15675,
11682,
13,
9132,
3256,
21004,
11639,
40477,
12,
23,
11537,
355,
277,
25,
198,
220,
220,
220,
20832,
11682,
796,
277,
13,
961,
3419,
198,
198,
40406,
7,
198,
220,... | 2.473958 | 192 |
# Copyright 2014-2018 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Routines for doing various type conversions. These usually embody some
higher-level semantics than are present in standard Python types (e.g.
``boolean`` will convert the string ``"false"`` to ``False``, where as
non-empty strings are usually considered to be ``True``).
A lot of these are intended to specify type conversions declaratively in place
like ``Parameter``'s ``kind`` argument. These are basically "hacks" around the
fact that Python is not the best language to use for configuration.
"""
import os
import re
import numbers
import shlex
import sys
from bisect import insort
if sys.version_info[0] == 3:
from urllib.parse import quote, unquote # pylint: disable=no-name-in-module, import-error
from past.builtins import basestring # pylint: disable=redefined-builtin
long = int # pylint: disable=redefined-builtin
else:
from urllib import quote, unquote # pylint: disable=no-name-in-module
# pylint: disable=wrong-import-position
from collections import defaultdict
from collections.abc import MutableMapping
from functools import total_ordering
from future.utils import with_metaclass
from devlib.utils.types import identifier, boolean, integer, numeric, caseless_string
from wa.utils.misc import (isiterable, list_to_ranges, list_to_mask,
mask_to_list, ranges_to_list)
def list_of_strs(value):
"""
Value must be iterable. All elements will be converted to strings.
"""
if not isiterable(value):
raise ValueError(value)
return list(map(str, value))
list_of_strings = list_of_strs
def list_of_ints(value):
"""
Value must be iterable. All elements will be converted to ``int``\ s.
"""
if not isiterable(value):
raise ValueError(value)
return list(map(int, value))
list_of_integers = list_of_ints
def list_of_numbers(value):
"""
Value must be iterable. All elements will be converted to numbers (either ``ints`` or
``float``\ s depending on the elements).
"""
if not isiterable(value):
raise ValueError(value)
return list(map(numeric, value))
def list_of_bools(value, interpret_strings=True):
"""
Value must be iterable. All elements will be converted to ``bool``\ s.
.. note:: By default, ``boolean()`` conversion function will be used, which
means that strings like ``"0"`` or ``"false"`` will be
interpreted as ``False``. If this is undesirable, set
``interpret_strings`` to ``False``.
"""
if not isiterable(value):
raise ValueError(value)
if interpret_strings:
return list(map(boolean, value))
else:
return list(map(bool, value))
def list_of(type_):
"""Generates a "list of" callable for the specified type. The callable
attempts to convert all elements in the passed value to the specified
``type_``, raising ``ValueError`` on error."""
return type('list_of_{}s'.format(type_.__name__),
(list, ), {
"__init__": __init__,
"__setitem__": __setitem__,
"append": append,
"extend": extend,
"to_pod": _to_pod,
"from_pod": classmethod(from_pod),
})
def list_or_string(value):
"""
Converts the value into a list of strings. If the value is not iterable,
a one-element list with stringified value will be returned.
"""
if isinstance(value, str):
return [value]
else:
try:
return list(value)
except ValueError:
return [str(value)]
def list_or_caseless_string(value):
"""
Converts the value into a list of ``caseless_string``'s. If the value is
not iterable a one-element list with stringified value will be returned.
"""
if isinstance(value, str):
return [caseless_string(value)]
else:
try:
return list(map(caseless_string, value))
except ValueError:
return [caseless_string(value)]
def list_or(type_):
"""
Generator for "list or" types. These take either a single value or a list
values and return a list of the specified ``type_`` performing the
conversion on the value (if a single value is specified) or each of the
elements of the specified list.
"""
list_type = list_of(type_)
return list_or_type
list_or_integer = list_or(integer)
list_or_number = list_or(numeric)
list_or_bool = list_or(boolean)
regex_type = type(re.compile(''))
none_type = type(None)
def regex(value):
"""
Regular expression. If value is a string, it will be complied with no
flags. If you want to specify flags, value must be precompiled.
"""
if isinstance(value, regex_type):
return value
else:
return re.compile(value)
def version_tuple(v):
"""
Converts a version string into a tuple of strings that can be used for
natural comparison allowing delimeters of "-" and ".".
"""
v = v.replace('-', '.')
return tuple(map(str, (v.split("."))))
def module_name_set(l): # noqa: E741
"""
Converts a list of target modules into a set of module names, disregarding
any configuration that may be present.
"""
modules = set()
for m in l:
if m and isinstance(m, dict):
modules.update(m.keys())
else:
modules.add(m)
return modules
__counters = defaultdict(int)
def counter(name=None):
"""
An auto incrementing value (kind of like an AUTO INCREMENT field in SQL).
Optionally, the name of the counter to be used is specified (each counter
increments separately).
Counts start at 1, not 0.
"""
__counters[name] += 1
value = __counters[name]
return value
class arguments(list):
"""
Represents command line arguments to be passed to a program.
"""
class toggle_set(set):
"""
A set that contains items to enable or disable something.
A prefix of ``~`` is used to denote disabling something, for example
the list ['apples', '~oranges', 'cherries'] enables both ``apples``
and ``cherries`` but disables ``oranges``.
"""
@staticmethod
@staticmethod
def values(self):
"""
returns a list of enabled items.
"""
return {item for item in self if not item.startswith('~')}
def conflicts_with(self, other):
"""
Checks if any items in ``other`` conflict with items already in this list.
Args:
other (list): The list to be checked against
Returns:
A list of items in ``other`` that conflict with items in this list
"""
conflicts = []
for item in other:
if item.startswith('~') and item[1:] in self:
conflicts.append(item)
if not item.startswith('~') and ('~' + item) in self:
conflicts.append(item)
return conflicts
class obj_dict(MutableMapping):
"""
An object that behaves like a dict but each dict entry can also be accessed
as an attribute.
:param not_in_dict: A list of keys that can only be accessed as attributes
"""
@staticmethod
# pylint: disable=super-init-not-called
@total_ordering
class level(object):
"""
A level has a name and behaves like a string when printed, however it also
has a numeric value which is used in ordering comparisons.
"""
@staticmethod
def enum(args, start=0, step=1):
"""
Creates a class with attributes named by the first argument.
Each attribute is a ``level`` so they behave is integers in comparisons.
The value of the first attribute is specified by the second argument
(``0`` if not specified).
::
MyEnum = enum(['A', 'B', 'C'])
is roughly equivalent of::
class MyEnum(object):
A = 0
B = 1
C = 2
however it also implement some specialized behaviors for comparisons and
instantiation.
"""
reserved = ['values', 'levels', 'names']
levels = []
n = start
for v in args:
id_v = identifier(v)
if id_v in reserved:
message = 'Invalid enum level name "{}"; must not be in {}'
raise ValueError(message.format(v, reserved))
name = caseless_string(id_v)
lv = level(v, n)
setattr(Enum, name, lv)
levels.append(lv)
n += step
setattr(Enum, 'levels', levels)
setattr(Enum, 'values', [lvl.value for lvl in levels])
setattr(Enum, 'names', [lvl.name for lvl in levels])
return Enum
class ParameterDict(dict):
"""
A dict-like object that automatically encodes various types into a url safe string,
and enforces a single type for the contents in a list.
Each value is first prefixed with 2 letters to preserve type when encoding to a string.
The format used is "value_type, value_dimension" e.g a 'list of floats' would become 'fl'.
"""
# Function to determine the appropriate prefix based on the parameters type
@staticmethod
# Function to add prefix and urlencode a provided parameter.
@staticmethod
# Function to decode a string and return a value of the original parameter type.
# pylint: disable=too-many-return-statements
@staticmethod
class cpu_mask(object):
"""
A class to allow for a consistent way of representing a cpus mask with
methods to provide conversions between the various required forms. The
mask can be specified directly as a mask, as a list of cpus indexes or a
sysfs-style string.
"""
@staticmethod
def __bool__(self):
"""Allow for use in comparisons to check if a mask has been set"""
return bool(self._mask)
__nonzero__ = __bool__
__str__ = __repr__
def list(self):
"""Returns a list of the indexes of bits that are set in the mask."""
return list(reversed(mask_to_list(self._mask)))
def mask(self, prefix=True):
"""Returns a hex representation of the mask with an optional prefix"""
if prefix:
return hex(self._mask)
else:
return hex(self._mask)[2:]
def ranges(self):
""""Returns a sysfs-style ranges string"""
return list_to_ranges(self.list())
| [
2,
220,
220,
220,
15069,
1946,
12,
7908,
20359,
15302,
198,
2,
198,
2,
49962,
739,
262,
24843,
13789,
11,
10628,
362,
13,
15,
357,
1169,
366,
34156,
15341,
198,
2,
345,
743,
407,
779,
428,
2393,
2845,
287,
11846,
351,
262,
13789,
... | 2.728383 | 4,013 |
import http
from functools import wraps
from flask import json, jsonify
from flask import render_template, flash, redirect, request
from ewi.api import app
from ewi.forms import LoginForm
from common.request import jsoned, send_request
@app.route("/")
@app.route("/index")
@app.route("/login", methods = ["GET", "POST"])
@app.route("/user/<name>")
| [
11748,
2638,
198,
6738,
1257,
310,
10141,
1330,
27521,
198,
198,
6738,
42903,
1330,
33918,
11,
33918,
1958,
198,
6738,
42903,
1330,
8543,
62,
28243,
11,
7644,
11,
18941,
11,
2581,
198,
198,
6738,
304,
37686,
13,
15042,
1330,
598,
198,
... | 3.131579 | 114 |
# Copyright (C) 2014 Sony Mobile Communications AB.
# All rights, including trade secret rights, reserved.
| [
2,
15069,
357,
34,
8,
1946,
10184,
12173,
14620,
9564,
13,
198,
2,
1439,
2489,
11,
1390,
3292,
3200,
2489,
11,
10395,
13,
198
] | 4.458333 | 24 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""Module of execption classes."""
class PygitaException(Exception):
"""Base class for exceptions."""
pass
class ServerConnectionError(PygitaException):
"""Unable to connect with server."""
pass
class BadRequestError(PygitaException):
"""Bad Request: The request was unacceptable
due to wrong parameter(s)."""
pass
class UnauthorisedError(PygitaException):
"""Unauthorized: Invalid access_token used."""
pass
class RequestFailedError(PygitaException):
"""API request failed from server side."""
pass
class NotFoundError(PygitaException):
"""Not Found: The chapter/verse number you are
looking for could not be found."""
pass
class ServerError(PygitaException):
"""Server Error: Something went wrong on server end."""
pass
class AuthorizationError(PygitaException):
"""Unable to authorise using App credentials,
Please cross check your CLIENT_ID and CLIENT_SECRET."""
pass
| [
2,
48443,
14629,
14,
8800,
14,
29412,
198,
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
198,
37811,
26796,
286,
2452,
1159,
6097,
526,
15931,
628,
198,
4871,
9485,
70,
5350,
16922,
7,
16922,
2599,
198,
220,
220,
... | 3.181818 | 319 |
import argparse
import json
import os
import pathlib
import re
import shutil
import subprocess
import unicodedata
from datetime import datetime
from pathlib import Path
from ruamel.yaml import YAML
yaml = YAML()
# Manifest keys a hub image's YAML may declare, and the required subset.
allowed = {'name', 'description', 'author', 'url', 'documentation', 'version', 'vendor', 'license', 'avatar',
           'platform', 'update', 'keywords'}
required = {'name', 'description'}
# Semantic-version matcher (optional comparison prefix, then
# major.minor.patch with optional prerelease/build metadata).
sver_regex = r'^(=|>=|<=|=>|=<|>|<|!=|~|~>|\^)?(?P<major>0|[1-9]\d*)\.(?P<minor>0|[1-9]\d*)' \
             r'\.(?P<patch>0|[1-9]\d*)(?:-(?P<prerelease>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)' \
             r'(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?P<buildmetadata>[0-9a-zA-Z-]+' \
             r'(?:\.[0-9a-zA-Z-]+)*))?$'
# Image names: 3-21 chars, starting with a letter/underscore/$.
name_regex = r'^[a-zA-Z_$][a-zA-Z_\s\-$0-9]{2,20}$'
# All paths are resolved relative to this script's checkout location.
cur_dir = pathlib.Path(__file__).parent.absolute()
root_dir = pathlib.Path(__file__).parents[1].absolute()
jinasrc_dir = os.path.join(root_dir, 'src', 'jina')
image_tag_regex = r'^hub.[a-zA-Z_$][a-zA-Z_\s\-\.$0-9]*$'
label_prefix = 'ai.jina.hub.'
docker_registry = 'jinaai/'
# current date and time
# Files whose change triggers a rebuild: the builder itself and its YAML.
builder_files = list(Path(root_dir).glob('builder/app.py')) + \
                list(Path(root_dir).glob('builder/*.yml'))
build_hist_path = os.path.join(root_dir, 'status', 'build-history.json')
readme_path = os.path.join(root_dir, 'status', 'README.md')
hubbadge_path = os.path.join(root_dir, 'status', 'hub-stat.svg')
# Per-image sources: YAML manifests, Dockerfiles and Python code under hub/.
hub_files = list(Path(root_dir).glob('hub/**/*.y*ml')) + \
            list(Path(root_dir).glob('hub/**/*Dockerfile')) + \
            list(Path(root_dir).glob('hub/**/*.py'))
# Short git SHA of this checkout; NOTE: runs `git` at import time.
builder_revision = subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD']).strip().decode()
build_badge_regex = r'<!-- START_BUILD_BADGE -->(.*)<!-- END_BUILD_BADGE -->'
build_badge_prefix = r'<!-- START_BUILD_BADGE --><!-- END_BUILD_BADGE -->'
# Entry point. `get_parser`, `get_update_targets`, `build_target` and
# `build_multi_targets` are defined elsewhere in this file.
if __name__ == '__main__':
    a = get_parser().parse_args()
    if a.check_only:
        t = get_update_targets()[0]
        if t:
            # something to update -> exit code 0
            exit(0)
        else:
            # nothing to update exit with 1
            exit(1)
    if a.target:
        build_target(a)
    else:
        build_multi_targets(a)
| [
11748,
1822,
29572,
198,
11748,
33918,
198,
11748,
28686,
198,
11748,
3108,
8019,
198,
11748,
302,
198,
11748,
4423,
346,
198,
11748,
850,
14681,
198,
11748,
28000,
9043,
1045,
198,
6738,
4818,
8079,
1330,
4818,
8079,
198,
6738,
3108,
801... | 1.98279 | 1,104 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import unicode_literals
from __future__ import absolute_import
from mock import patch, Mock
from alluratest.tools import assert_equal
from tg import config
from allura.lib import helpers as h
from allura.tests import TestController
| [
2,
220,
220,
220,
220,
220,
220,
49962,
284,
262,
24843,
10442,
5693,
357,
1921,
37,
8,
739,
530,
198,
2,
220,
220,
220,
220,
220,
220,
393,
517,
18920,
5964,
11704,
13,
220,
4091,
262,
28536,
2393,
198,
2,
220,
220,
220,
220,
2... | 3.284457 | 341 |
#=======================================================================
# Copyright (c) 2020, Tahmid Khan
# All rights reserved.
#
# Licensed under the BSD 3-Clause license found in the LICENSE file
#=======================================================================
from flask import Flask
from flask_wtf.csrf import CsrfProtect
from flask_sqlalchemy import SQLAlchemy
# Initialize and configure Flask app
app = Flask(__name__)

# Dynamically choose config based on the FLASK_ENV environment variable
# (exposed by Flask as app.config["ENV"]); production is the fallback.
_CONFIG_BY_ENV = {
    "development": 'config.DevelopmentConfig',
    "testing": 'config.TestingConfig',
}
app.config.from_object(
    _CONFIG_BY_ENV.get(app.config["ENV"], 'config.ProductionConfig'))

# Trim excess whitespace when rendering with jinja2
app.jinja_env.trim_blocks = True
app.jinja_env.lstrip_blocks = True

# Serve files from the user directory as static content
app.add_url_rule('/user/default/<path:filename>', endpoint='user',
                 view_func=app.send_static_file)

# Create database instance connected to webapp
db = SQLAlchemy(app)

# Secure web app forms against CSRF
csrf = CsrfProtect(app)

# Imported last so the views module can import `app` without a circular crash
from app import views
2,
23926,
1421,
18604,
198,
2,
220,
15069,
357,
66,
8,
12131,
11,
26293,
13602,
11356,
198,
2,
220,
1439,
2489,
10395,
13,
198,
2,
198,
2,
220,
49962,
739,
262,
347,
10305,
513,
12,
2601,
682,
5964,
1043,
287,
262,
38559,
24290,
2... | 3.411243 | 338 |
#import logging
import os
import time
# Service settings for the GV document digitization worker.
DEBUG = False
API_URL_PREFIX = "/anuvaad-etl/document-processor/gv-document-digitization"
HOST = '0.0.0.0'
PORT = 5001
# Directory used for uploaded/downloaded documents (relative to the CWD).
BASE_DIR = 'upload'
#BASE_DIR = '/home/naresh/anuvaad/anuvaad-etl/anuvaad-extractor/document-processor/gv-document-digitization/upload'
download_folder = 'upload'
ENABLE_CORS = False
# kafka
#
# Kafka topics and bootstrap server are overridable via environment
# variables; the *_default values apply when a variable is unset.
input_topic_default = 'anuvaad-dp-tools-gv-document-digitization-input-v10'
input_topic_identifier = 'KAFKA_ANUVAAD_DP_TOOLS_GV_DOCUMENT_DIGITIZATION_INPUT'
input_topic = os.environ.get(input_topic_identifier, input_topic_default)
output_topic_default = 'anuvaad-dp-tools-gv-document-digitization-output-v10'
output_topic_identifier = 'KAFKA_ANUVAAD_DP_TOOLS_GV_DOCUMENT_DIGITIZATION_OUTPUT'
output_topic = os.environ.get(output_topic_identifier, output_topic_default)
kf_local_server = 'localhost:9092'
kafka_ip_host = 'KAFKA_BOOTSTRAP_SERVER_HOST'
bootstrap_server = os.environ.get(kafka_ip_host, kf_local_server)
TASK_STAT = 'GV-DOCUMENT-DIGITIZATION'
CONSUMER_GROUP_default = 'anuvaad-etl-gv-document-digitization-consumer-group-v10'
CONSUMER_GROUP_identifier = 'ANUVAAD_ETL_GV_DOCUMENT_DIGITIZATION_CONSUMER_GROUP_V10'
CONSUMER_GROUP = os.environ.get(CONSUMER_GROUP_identifier,CONSUMER_GROUP_default)
# API_URL_PREFIX = "/anuvaad-etl/document-processor/ocr/google-vision"
# HOST = '0.0.0.0'
# PORT = 5001
# BASE_DIR = 'upload'
# ENABLE_CORS = False
# # kafka
# input_topic_default = 'anuvaad-dp-tools-ocr-tesseract-input-v1'
# input_topic_identifier = 'KAFKA_ANUVAAD_DP_TOOLS_OCR_TESSERACT_INPUT'
# input_topic = os.environ.get(input_topic_identifier, input_topic_default)
# output_topic_default = 'anuvaad-dp-tools-ocr-tesseract-output-v1'
# output_topic_identifier = 'KAFKA_ANUVAAD_DP_TOOLS_OCR_TESSERACT_OUTPUT'
# output_topic = os.environ.get(output_topic_identifier, output_topic_default)
# kf_local_server = 'localhost:9092'
# kafka_ip_host = 'KAFKA_BOOTSTRAP_SERVER_HOST'
# bootstrap_server = os.environ.get(kafka_ip_host, kf_local_server)
# TASK_STAT = 'GV-DOCUMENT-DIGITIZATION'
# CONSUMER_GROUP_default = 'anuvaad-etl-tess-consumer-group'
# CONSUMER_GROUP_identifier = 'ANUVAAD_ETL_TESS_CONSUMER_GROUP_V1'
# CONSUMER_GROUP = os.environ.get(CONSUMER_GROUP_identifier,CONSUMER_GROUP_default)
# input_topic_default = 'anuvaad-dp-tools-ocr-google-vision-input-v1'
# input_topic_identifier = 'KAFKA_ANUVAAD_DP_TOOLS_OCR_GOOGLE_VISION_INPUT'
# input_topic = os.environ.get(input_topic_identifier, input_topic_default)
# output_topic_default = 'anuvaad-dp-tools-ocr-google-vision-output-v1'
# output_topic_identifier = 'KAFKA_ANUVAAD_DP_TOOLS_OCR_GOOGLE_VISION_OUTPUT'
# output_topic = os.environ.get(output_topic_identifier, output_topic_default)
# kf_local_server = 'localhost:9092'
# kafka_ip_host = 'KAFKA_BOOTSTRAP_SERVER_HOST'
# bootstrap_server = os.environ.get(kafka_ip_host, kf_local_server)
# TASK_STAT = 'GOOGLE-VISION-OCR'
# CONSUMER_GROUP_default = 'anuvaad-etl-gvocr-consumer-group'
# CONSUMER_GROUP_identifier = 'ANUVAAD_ETL_GVOCR_CONSUMER_GROUP_V1'
# CONSUMER_GROUP = os.environ.get(CONSUMER_GROUP_identifier,CONSUMER_GROUP_default)
#
#
# logging.basicConfig(
# filename=os.getenv("SERVICE_LOG", "server.log"),
# level=logging.DEBUG,
# format="%(levelname)s: %(asctime)s \
# pid:%(process)s module:%(module)s %(message)s",
# datefmt="%d/%m/%y %H:%M:%S",
# )
# Resolution used during extraction (presumably DPI -- confirm).
# NOTE: the name is a typo for EXTRACTION_RESOLUTION but is kept as-is
# because other modules may import it under this spelling.
EXRACTION_RESOLUTION = 300
#SAVE_URL = "https://auth.anuvaad.org/anuvaad/ocr-content-handler/v0/ocr/save-document"
# OCR content-handler save endpoint; overridable via the OCR_CH_URL variable.
SAVE_VAR = "OCR_CH_URL"
SAVE_DEFAULT = "http://gateway_anuvaad-ocr-content-handler:5001//anuvaad/ocr-content-handler/v0/ocr/save-document"
SAVE_URL = os.environ.get(SAVE_VAR,SAVE_DEFAULT)
print(SAVE_URL)
#SAVE_URL = "http://172.30.0.232:5009//anuvaad/ocr-content-handler/v0/ocr/save-document"
SAVE_NO_PAGE = 1
CLEAN_BACKGROUND = False
##########################################################################
#Alignment
ALIGN = True
ALIGN_MODE= 'FAST'
###########################################################################
| [
2,
11748,
18931,
198,
11748,
28686,
198,
11748,
640,
198,
198,
30531,
796,
10352,
198,
17614,
62,
21886,
62,
47,
31688,
10426,
796,
12813,
42357,
6862,
324,
12,
316,
75,
14,
22897,
12,
41341,
14,
70,
85,
12,
22897,
12,
27003,
1634,
... | 2.236926 | 1,874 |
import seaborn as sns
import matplotlib.pyplot as plt
from matplotlib.ticker import FormatStrFormatter
import sys
import pandas
import re
from cycler import cycler
if __name__ == '__main__':
    # Load each TSV given on the command line and smooth it with a 2-point
    # rolling mean; results are keyed by filename for the plotting pass.
    dfs = {}
    files = sys.argv[1:]
    for file in files:
        df = pandas.read_csv(file, sep='\t').set_index(['file'])
        print(file)
        print(df)
        # Label every fifth tick. NOTE: this leaks out of the loop, so all
        # subplots below use the labels of the *last* file processed
        # (pre-existing behaviour, kept as-is).
        every_fifth = ['' if int(i) % 5 else i for i in df.T.index.tolist()]
        # BUG FIX: pandas.rolling_mean() was removed from pandas; use the
        # equivalent DataFrame.rolling(window).mean() API instead.
        dfs[file] = df.T.rolling(2).mean()
    sns.set_style("darkgrid")
    fig, ax = plt.subplots(1, len(files))
    plt.rc('axes', prop_cycle=(cycler('linestyle', ['--', '-', ':'])))
    f = plt.figure(figsize=(6, 6))
    # One stacked subplot per input file.
    for (i, file) in enumerate(files):
        ax = f.add_subplot(len(files), 1, i+1)
        ax.set_xticklabels(every_fifth)
        #ax.yaxis.set_major_formatter(FormatStrFormatter('%.1e'))
        plt.title(file)
        plt.plot(dfs[file])
        if i == 0:
            plt.legend(['Algol', 'Fortran', 'Lisp'])
    plt.subplots_adjust(hspace=0.7)
    plt.savefig('/tmp/lines.svg')
11748,
384,
397,
1211,
355,
3013,
82,
198,
11748,
2603,
29487,
8019,
13,
9078,
29487,
355,
458,
83,
198,
198,
6738,
2603,
29487,
8019,
13,
83,
15799,
1330,
18980,
13290,
8479,
1436,
198,
198,
11748,
25064,
198,
11748,
19798,
292,
198,
... | 2.057751 | 658 |
import requests
import datetime as dt
import time
import base64
from Crypto.Cipher import AES
from Crypto.Random import get_random_bytes
from Crypto.Hash import SHA3_384
from Crypto.Util.Padding import pad
import validators
import random
import re
import string
from collections import Counter
from loguru import logger
from packaging import version as pyver
from distutils.version import LooseVersion
from django.db import models
from django.conf import settings
from core.models import CoreSettings, TZ_CHOICES
from logs.models import BaseAuditModel
import automation
import autotasks
import clients
logger.configure(**settings.LOG_CONFIG)
# (stored value, human-readable label) pairs for the agent recovery mode.
RECOVERY_CHOICES = [("salt", "Salt"), ("mesh", "Mesh"), ("command", "Command")]
| [
11748,
7007,
198,
11748,
4818,
8079,
355,
288,
83,
198,
11748,
640,
198,
11748,
2779,
2414,
198,
6738,
36579,
13,
34,
10803,
1330,
34329,
198,
6738,
36579,
13,
29531,
1330,
651,
62,
25120,
62,
33661,
198,
6738,
36579,
13,
26257,
1330,
... | 3.458333 | 216 |
import time
from gyminput import check_input
from gymdisplay import set_display, set_display_individual
from gymbuzz import beep
# EMOM style
# Tabata style
| [
11748,
640,
198,
198,
6738,
21486,
1084,
1996,
1330,
2198,
62,
15414,
198,
6738,
11550,
13812,
1330,
900,
62,
13812,
11,
900,
62,
13812,
62,
43129,
198,
6738,
21486,
2022,
4715,
1330,
307,
538,
198,
198,
2,
17228,
2662,
3918,
198,
198... | 3.404255 | 47 |
# Copyright 2016 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
import os
import subprocess
from textwrap import dedent
from colors import blue
from pants.backend.python.tasks.gather_sources import GatherSources
from pants.backend.python.tasks.python_binary_create import PythonBinaryCreate
from pants.backend.python.tasks.select_interpreter import SelectInterpreter
from pants.base.run_info import RunInfo
from pants.build_graph.register import build_file_aliases as register_core
from pants_test.backend.python.tasks.python_task_test_base import PythonTaskTestBase
| [
2,
15069,
1584,
41689,
1628,
20420,
357,
3826,
27342,
9865,
3843,
20673,
13,
9132,
737,
198,
2,
49962,
739,
262,
24843,
13789,
11,
10628,
362,
13,
15,
357,
3826,
38559,
24290,
737,
198,
198,
11748,
28686,
198,
11748,
850,
14681,
198,
... | 3.58427 | 178 |
import requests,json,warnings
from distutils.version import LooseVersion
# This is used to suppress the warnings raised when sorting *LooseVersion*.
# You can remove it if you want.
warnings.filterwarnings('ignore')
| [
11748,
7007,
11,
17752,
11,
40539,
654,
198,
6738,
1233,
26791,
13,
9641,
1330,
6706,
577,
14815,
198,
2,
770,
779,
284,
8856,
29407,
1635,
27654,
577,
14815,
13,
220,
198,
2,
921,
460,
4781,
340,
611,
345,
765,
13,
198,
40539,
654,... | 3.627451 | 51 |
import random as r
get_friendship_meter('sam', 'purnima') | [
11748,
4738,
355,
374,
198,
220,
220,
220,
220,
198,
1136,
62,
6726,
6720,
62,
27231,
10786,
37687,
3256,
705,
79,
700,
8083,
11537
] | 2.583333 | 24 |
import load
from utils import dna_io
from nose.tools import eq_, ok_
from nose import SkipTest
import nose, functools
import os
import numpy as np
### Basset DNase data fixtures
# HDF5 dataset of one-hot encoded sequences, shape (4, 1, 600) per example.
path = os.path.expanduser("~/projects/SeqDemote/data/DNase/encode_roadmap_all.h5")
chunk_size = 4096
batch_size = 128
batch_train_shape = (batch_size,4,1,600)
basset_train_size = 3201397
basset_valid_size = 70000
basset_output_size = 164
basset_num_chunks_train = basset_train_size // chunk_size
basset_num_chunks_valid = basset_valid_size // chunk_size
basset_full_train_shape = (basset_train_size, 4, 1, 600)
basset_chunk_train_shape = (chunk_size,4,1,600)
basset_chunk_out_shape = (chunk_size,basset_output_size)
### Hematopoetic DNase data fixtures
heme_path = os.path.expanduser("~/projects/SeqDemote/data/DNase/hematopoetic_data.h5")
heme_train_size = 240943
heme_valid_size = 51638
heme_output_size = 6
heme_num_chunks_train = heme_train_size // chunk_size
heme_num_chunks_valid = heme_valid_size // chunk_size
heme_full_train_shape = (heme_train_size, 4, 1, 600)
heme_chunk_train_shape = (chunk_size,4,1,600)
heme_chunk_out_shape = (chunk_size,heme_output_size)
### Kmer fixtures
# Homopolymer / random sequences and their expected k-mer alphabet indices;
# a k-merized length-L sequence has L - k + 1 positions (598 for k=3, 597 for k=4).
aaa_3mer_string = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
aaa_3mer_position = 0
ttt_3mer_string = 'TTTTTTTTTTTTTTTTTTTTTTTTTTTTTT'
ttt_3mer_position = 63
aaa_4mer_string = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
ttt_4mer_string = 'TTTTTTTTTTTTTTTTTTTTTTTTTTTTTTTT'
rando_3mer_string = 'ATGGGGTAGAGAATGGGGTAGAGACCAGGT'
alphabet_size_3 = 64
alphabet_size_4 = 256
threemer_chunk_shape = (chunk_size,alphabet_size_3, 1, 598)
fourmer_chunk_shape = (chunk_size, alphabet_size_4, 1, 597)
### Structure, shape based tests for Basset data
def test_build_data_loader():
    """Construct a StandardDataLoader with defaults and load the training set."""
    loader = load.StandardDataLoader()
    loader.load_train()
    eq_(loader.train_in.shape, basset_full_train_shape)
def test_build_data_loader_kwargs():
    """Construct a StandardDataLoader with an explicit data path."""
    loader = load.StandardDataLoader(data_path=path)
    loader.load_train()
    eq_(loader.train_in.shape, basset_full_train_shape)
def test_dnase_data_shape():
    """Every training chunk must carry the expected input/output shapes."""
    loader = load.StandardDataLoader(data_path=path)
    loader.load_train()
    batch_gen = loader.create_batch_gen()
    for chunk_idx, (x_chunk, y_chunk) in zip(range(basset_num_chunks_train), batch_gen):
        print("Chunk: ", str(chunk_idx + 1), " of ", basset_num_chunks_train)
        eq_(x_chunk.shape, basset_chunk_train_shape)
        eq_(y_chunk.shape, basset_chunk_out_shape)
def test_exhaust_data():
    """Consuming every chunk must not yield more points than the dataset holds."""
    loader = load.StandardDataLoader(data_path=path)
    loader.load_train()
    seen_pts = 0
    for _, (x_chunk, y_chunk) in zip(range(basset_num_chunks_train), loader.create_batch_gen()):
        seen_pts += x_chunk.shape[0]
    print("Saw ", str(seen_pts), " points total")
    ok_(seen_pts <= basset_train_size)
### Structure, shape based tests for Heme data
def test_build_heme_data_loader():
    """Construct a HematopoeticDataLoader with defaults and load the training set."""
    loader = load.HematopoeticDataLoader()
    loader.load_train()
    eq_(loader.train_in.shape, heme_full_train_shape)
def test_build_heme_data_loader_kwargs():
    """Construct a HematopoeticDataLoader with an explicit data path."""
    loader = load.HematopoeticDataLoader(data_path=heme_path)
    loader.load_train()
    eq_(loader.train_in.shape, heme_full_train_shape)
def test_build_heme_data_loader_pvsf():
    """Peaks-versus-flanks mode must produce a single label column."""
    loader = load.HematopoeticDataLoader(data_path=heme_path, peaks_vs_flanks=True)
    loader.load_train()
    eq_(loader.train_out.shape, (heme_train_size, 1))
def test_heme_data_shape():
    """Every hematopoetic training chunk must carry the expected shapes."""
    loader = load.HematopoeticDataLoader(data_path=heme_path)
    loader.load_train()
    batch_gen = loader.create_batch_gen()
    for chunk_idx, (x_chunk, y_chunk) in zip(range(heme_num_chunks_train), batch_gen):
        print("Chunk: ", str(chunk_idx + 1), " of ", heme_num_chunks_train)
        eq_(x_chunk.shape, heme_chunk_train_shape)
        eq_(y_chunk.shape, heme_chunk_out_shape)
### Semantic tests: are all examples properly one-hot encoded?
def test_training_batch_encoding_sum():
    """One-hot sanity: each training chunk holds exactly seq_length * chunk_size ones."""
    loader = load.StandardDataLoader(data_path=path)
    loader.load_train()
    # one 1-bit per base position, 600 positions per sequence
    expected_chunk_sum = 600 * chunk_size
    for chunk_idx, (x_chunk, y_chunk) in zip(range(basset_num_chunks_train), loader.create_batch_gen()):
        ones_per_chunk = np.sum(x_chunk)
        print("Chunk ", str(chunk_idx + 1), " has ", ones_per_chunk, " bits turned on")
        eq_(ones_per_chunk, expected_chunk_sum)
def test_validation_batch_encoding_sum():
    """ If I sum all elements of a chunk of validation data, do I get the number of expected ones? """
    data_loader = load.StandardDataLoader(data_path=path)
    data_loader.load_validation()
    # FIX: this previously iterated range(basset_num_chunks_train); the
    # validation generator only has basset_num_chunks_valid chunks, matching
    # the sibling test_any_always_negatives_validation below.
    num_chunks = range(basset_num_chunks_valid)
    # one 1-bit per base position, 600 positions per sequence
    expected_chunk_sum = 600 * chunk_size
    for e, (x_chunk, y_chunk) in zip(num_chunks, data_loader.create_valid_gen()):
        ones_per_chunk = np.sum(x_chunk)
        print("Chunk ", str(e + 1), " has ", ones_per_chunk, " bits turned on")
        eq_(ones_per_chunk, expected_chunk_sum)
def test_any_always_negatives_training():
    """Training labels must mix all-negative and at-least-one-positive examples."""
    loader = load.StandardDataLoader(data_path=path)
    loader.load_train()
    total_peaks_on = 0
    total_peaks_off = 0
    for _, (x_chunk, y_chunk) in zip(range(basset_num_chunks_train), loader.create_batch_gen()):
        for label_vector in y_chunk:
            if np.sum(label_vector) > 0:
                total_peaks_on += 1
            else:
                total_peaks_off += 1
    peak_frac = total_peaks_off / total_peaks_on
    ok_(0.0 < peak_frac < 1.0)
def test_any_always_negatives_validation():
    """Validation labels must mix all-negative and at-least-one-positive examples."""
    loader = load.StandardDataLoader(data_path=path)
    loader.load_validation()
    total_peaks_on = 0
    total_peaks_off = 0
    for _, (x_chunk, y_chunk) in zip(range(basset_num_chunks_valid), loader.create_valid_gen()):
        for label_vector in y_chunk:
            if np.sum(label_vector) > 0:
                total_peaks_on += 1
            else:
                total_peaks_off += 1
    peak_frac = total_peaks_off / total_peaks_on
    ok_(0.0 < peak_frac < 1.0)
def test_enumerate_malformed_validation_data():
    """ How many mal-encoded validation data do we have? """
    # NOTE: this unconditional SkipTest disables the whole check; everything
    # below it is currently dead code, kept for future re-enablement.
    raise SkipTest
    data_loader = load.StandardDataLoader(data_path=path)
    data_loader.load_validation()
    # NOTE(review): uses the *train* chunk count with the validation
    # generator; zip() stops at the shorter side, but basset_num_chunks_valid
    # would be the consistent choice -- confirm if re-enabled.
    num_chunks = range(basset_num_chunks_train)
    expected_chunk_sum = 600 * chunk_size
    mistakes = []
    ### each chunk of should have |seq_length| * |chunk_size| number of 1s
    for e, (x_chunk, y_chunk) in zip(num_chunks,data_loader.create_valid_gen()):
        for item, element in enumerate(x_chunk):
            # each base position must be one-hot: its 4 channels sum to 1
            for i, position in enumerate(np.transpose(element)):
                ones_per_position = np.sum(position)
                if ones_per_position != 1.0:
                    mistakes.append("Error in position {0} of element {1} for chunk {2}".format(i, item, e))
                    #eq_(ones_per_position, 1.0)
    print("found ", len(mistakes), " mistakes total.")
    for m in mistakes:
        print(m)
### Kmerizing generators / data-loader tests
def test_threemer_data_loader_shape():
    """3-merized chunks must use the 64-letter alphabet and 598 positions."""
    loader = load.KmerDataLoader(data_path=path, kmer_length=3)
    loader.load_train()
    batch_gen = loader.create_batch_gen()
    for chunk_idx, (x_chunk, y_chunk) in zip(range(basset_num_chunks_train), batch_gen):
        print("Chunk: ", str(chunk_idx + 1), " of ", basset_num_chunks_train)
        eq_(x_chunk.shape, threemer_chunk_shape)
        eq_(y_chunk.shape, basset_chunk_out_shape)
def test_fourmer_data_loader_shape():
    """4-merized chunks must use the 256-letter alphabet and 597 positions."""
    loader = load.KmerDataLoader(data_path=path, kmer_length=4)
    loader.load_train()
    batch_gen = loader.create_batch_gen()
    for chunk_idx, (x_chunk, y_chunk) in zip(range(basset_num_chunks_train), batch_gen):
        print("Chunk: ", str(chunk_idx + 1), " of ", basset_num_chunks_train)
        eq_(x_chunk.shape, fourmer_chunk_shape)
        eq_(y_chunk.shape, basset_chunk_out_shape)
def test_exhaust_threemer_data_loader():
    """The k-merized generator must not yield more points than the dataset holds."""
    loader = load.KmerDataLoader(data_path=path, kmer_length=3)
    loader.load_train()
    seen_pts = 0
    for _, (x_chunk, y_chunk) in zip(range(basset_num_chunks_train), loader.create_batch_gen()):
        seen_pts += x_chunk.shape[0]
    print("Saw ", str(seen_pts), " points total")
    ok_(seen_pts <= basset_train_size)
### Kmer mismatch generator tests
| [
11748,
3440,
198,
6738,
3384,
4487,
1330,
288,
2616,
62,
952,
198,
198,
6738,
9686,
13,
31391,
1330,
37430,
62,
11,
12876,
62,
198,
6738,
9686,
1330,
32214,
14402,
198,
11748,
9686,
11,
1257,
310,
10141,
198,
11748,
28686,
198,
11748,
... | 2.338789 | 4,277 |
from django.contrib import admin
from . import models
# Register your models here.
admin.site.register(models.Group)  # expose Group in the Django admin site
| [
6738,
42625,
14208,
13,
3642,
822,
1330,
13169,
201,
198,
6738,
764,
1330,
4981,
201,
198,
201,
198,
2,
17296,
534,
4981,
994,
13,
201,
198,
201,
198,
28482,
13,
15654,
13,
30238,
7,
27530,
13,
13247,
8,
201,
198
] | 3.125 | 40 |
import sys
import spotipy
import spotipy.util as util
import time
import keys
| [
11748,
25064,
198,
11748,
4136,
541,
88,
198,
11748,
4136,
541,
88,
13,
22602,
355,
7736,
198,
11748,
640,
198,
11748,
8251,
628,
628
] | 3.375 | 24 |
import sublime
import os
import shutil
import tempfile
from .. import DeferrableTestCase
class TempDirectoryTestCase(DeferrableTestCase):
"""Create a temp directory and open it."""
_temp_dir = None
    @classmethod
    def setUpClass(cls):
        """Create a temp directory for testing.

        Runs as a deferrable generator: each yielded callable is a condition
        the test runner polls until it becomes true.
        """
        cls._temp_dir = tempfile.mkdtemp()
        nwindows = len(sublime.windows())
        original_window_id = sublime.active_window().id()
        sublime.run_command("new_window")
        # wait until the new window exists and has focus
        yield lambda: len(sublime.windows()) > nwindows
        yield lambda: sublime.active_window().id() != original_window_id
        cls.window = sublime.active_window()
        project_data = dict(folders=[dict(follow_symlinks=True, path=cls._temp_dir)])
        cls.window.set_project_data(project_data)
        # NOTE(review): `condition` is not defined anywhere in the visible
        # code -- likely a leftover; confirm against the full file.
        yield condition
@classmethod
| [
11748,
41674,
198,
11748,
28686,
198,
11748,
4423,
346,
198,
11748,
20218,
7753,
198,
6738,
11485,
1330,
2896,
8056,
540,
14402,
20448,
628,
198,
4871,
24189,
43055,
14402,
20448,
7,
7469,
8056,
540,
14402,
20448,
2599,
198,
220,
220,
220... | 2.657233 | 318 |
import textwrap
import requests
| [
11748,
2420,
37150,
198,
11748,
7007,
628,
198
] | 4.25 | 8 |
# AUTOGENERATED! DO NOT EDIT! File to edit: nbs/11_performance.ipynb (unless otherwise specified).
__all__ = ['TBPerformace', 'actual_vs_predict', 'ReceiverOperatingCharacteristic', 'PrecisionRecall']
# Cell
import tabint
from .utils import *
from .dataset import *
from .visual import *
from .learner import *
from sklearn import metrics
from sklearn.metrics import *
# Cell
# Cell
# Cell
# Cell | [
2,
47044,
7730,
1677,
1137,
11617,
0,
8410,
5626,
48483,
0,
9220,
284,
4370,
25,
299,
1443,
14,
1157,
62,
26585,
13,
541,
2047,
65,
357,
25252,
4306,
7368,
737,
198,
198,
834,
439,
834,
796,
37250,
22737,
5990,
687,
558,
3256,
705,
... | 3.092308 | 130 |
from unittest import TestCase
from games.tic_tac_toe import TicTacToeGameSpec
from techniques.min_max import min_max_alpha_beta, min_max
| [
6738,
555,
715,
395,
1330,
6208,
20448,
198,
198,
6738,
1830,
13,
13370,
62,
83,
330,
62,
44579,
1330,
309,
291,
51,
330,
2514,
68,
8777,
22882,
198,
6738,
7605,
13,
1084,
62,
9806,
1330,
949,
62,
9806,
62,
26591,
62,
31361,
11,
9... | 2.957447 | 47 |
import struct
import socket
import sys

# Probe whether a remote SMBv3 server negotiates LZNT1 compression
# (the capability associated with CVE-2020-0796 / "SMBGhost").
if len(sys.argv) != 2:
    print("Usage: %s IP" % (sys.argv[0]))
    # sys.exit() instead of the site-builtin exit(); same exit status (0)
    sys.exit()

# Hex-encoded SMB2 NEGOTIATE request, up to the negotiate contexts.
smb_payload = "000000b2fe534d4240000100000000000000210010000000000000000000000000000000fffe00000000000000000000000000000000000000000000000000000000000024000500010000007f000000aa9952d87063ea118a76005056b886b0700000000200000002021002000302031103000001002600000000000100200001006c6110bcde71a04e50810ffac0769c32c4c011cf86e26deb2ba923cd79cbbf7c0000"

# Adding compression negotiation context
smb_payload += "0300" + \
               "0a00" + \
               "00000000" + \
               "0100" + \
               "0000" + \
               "00000000" + \
               "0100"  # Compression type

# Named constants instead of the magic numbers socket.socket(2, 1).
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((sys.argv[1], 445))
s.send(bytes.fromhex(smb_payload))
buff_res = s.recv(4096)

# Bytes 72:74 of the response are printed as the negotiated SMB version.
smb_version = struct.unpack("<H", buff_res[72:74])[0]
print("SMB Version: " + hex(smb_version))

# The response echoes the compression context when LZNT1 was accepted.
if buff_res.endswith(b"\x00"*4 + b"\x00"*2 + b"\x01\x00"):
    print("SMBv3: Compression (LZNT1) supported.")
s.close()
11748,
2878,
198,
11748,
17802,
198,
11748,
25064,
198,
198,
361,
18896,
7,
17597,
13,
853,
85,
8,
14512,
362,
25,
198,
197,
4798,
7203,
28350,
25,
4064,
82,
6101,
1,
4064,
357,
17597,
13,
853,
85,
58,
15,
60,
4008,
198,
197,
3702... | 2.364303 | 409 |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'VisualizationWindow.ui'
#
# Created by: PyQt5 UI code generator 5.15.0
#
# WARNING: Any manual changes made to this file will be lost when pyuic5 is
# run again. Do not edit this file unless you know what you are doing.
from PyQt5 import QtCore, QtGui, QtWidgets
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
198,
2,
5178,
7822,
7560,
422,
3555,
334,
72,
2393,
705,
36259,
1634,
27703,
13,
9019,
6,
198,
2,
198,
2,
15622,
416,
25,
9485,
48,
83,
20,
12454,
2438,
17301,
642,
... | 3.117117 | 111 |
import os
from app import create_app
from config.production import ProductionConfig
# Entry point: build the Flask app with the production configuration and run
# it with the settings declared under app.config['RUN_SETTING'].
if __name__ == '__main__':
    app = create_app(ProductionConfig)
    # warn (but do not abort) when no SECRET_KEY is provided
    if 'SECRET_KEY' not in os.environ:
        print('[WARN] SECRET KEY is not set in the environment variable !!')
    app.run(**app.config['RUN_SETTING'])
| [
11748,
28686,
198,
198,
6738,
598,
1330,
2251,
62,
1324,
198,
6738,
4566,
13,
25493,
1330,
19174,
16934,
628,
198,
361,
11593,
3672,
834,
6624,
705,
834,
12417,
834,
10354,
198,
220,
220,
220,
598,
796,
2251,
62,
1324,
7,
35027,
16934... | 2.915888 | 107 |
# =============================================================================
# IMPORTS
# =============================================================================
import torch
import espaloma as esp
# =============================================================================
# MODULE CLASSES
# =============================================================================
class JanossyPooling(torch.nn.Module):
    """Janossy pooling (arXiv:1811.01900) to average node representation
    for higher-order nodes (bonds, angles, torsions).
    """
    def forward(self, g):
        """Forward pass.
        Parameters
        ----------
        g : dgl.DGLHeteroGraph,
            input graph.
        """
        import dgl
        # copy
        # Broadcast each atom's (n1) representation "h" into slot features
        # h0..h{k-1} on every k-atom node type in self.levels.
        # NOTE(review): self.levels, self.pool, sequential_* and f_out_*
        # attributes are presumably set by an __init__ not shown here.
        g.multi_update_all(
            {
                "n1_as_%s_in_n%s"
                % (relationship_idx, big_idx): (
                    dgl.function.copy_src("h", "m%s" % relationship_idx),
                    dgl.function.mean(
                        "m%s" % relationship_idx, "h%s" % relationship_idx
                    ),
                )
                for big_idx in self.levels
                for relationship_idx in range(big_idx)
            },
            cross_reducer="sum",
        )
        # pool
        # Janossy pooling: apply the permutation-sensitive network to the
        # forward and to the reversed atom ordering, combine with self.pool,
        # then map each output feature through its readout head.
        for big_idx in self.levels:
            if g.number_of_nodes("n%s" % big_idx) == 0:
                continue
            g.apply_nodes(
                func=lambda nodes: {
                    feature: getattr(
                        self, "f_out_%s_to_%s" % (big_idx, feature)
                    )(
                        self.pool(
                            getattr(self, "sequential_%s" % big_idx)(
                                None,
                                torch.cat(
                                    [
                                        nodes.data["h%s" % relationship_idx]
                                        for relationship_idx in range(big_idx)
                                    ],
                                    dim=1,
                                ),
                            ),
                            getattr(self, "sequential_%s" % big_idx)(
                                None,
                                torch.cat(
                                    [
                                        # reversed ordering: big_idx-1 .. 0
                                        nodes.data["h%s" % relationship_idx]
                                        for relationship_idx in range(
                                            big_idx - 1, -1, -1
                                        )
                                    ],
                                    dim=1,
                                ),
                            ),
                        ),
                    )
                    for feature in self.out_features[big_idx].keys()
                },
                ntype="n%s" % big_idx,
            )
        if 1 not in self.out_features:
            return g
        # atom level
        # Atoms need no pooling; map "h" straight through the readout heads.
        g.apply_nodes(
            func=lambda nodes: {
                feature: getattr(self, "f_out_1_to_%s" % feature)(
                    self.sequential_1(g=None, x=nodes.data["h"])
                )
                for feature in self.out_features[1].keys()
            },
            ntype="n1",
        )
        return g
class JanossyPoolingImproper(torch.nn.Module):
    """Janossy pooling (arXiv:1811.01900) to average node representation
    for improper torsions.
    """
    def forward(self, g):
        """Forward pass.
        Parameters
        ----------
        g : dgl.DGLHeteroGraph,
            input graph.
        """
        import dgl
        # copy
        # Broadcast atom (n1) representations into the 4 member slots
        # (h0..h3) of every improper node type listed in self.levels.
        g.multi_update_all(
            {
                "n1_as_%s_in_%s"
                % (relationship_idx, big_idx): (
                    dgl.function.copy_src("h", "m%s" % relationship_idx),
                    dgl.function.mean(
                        "m%s" % relationship_idx, "h%s" % relationship_idx
                    ),
                )
                for big_idx in self.levels
                for relationship_idx in range(4)
            },
            cross_reducer="sum",
        )
        if g.number_of_nodes("n4_improper") == 0:
            return g
        # pool
        # sum over three cyclic permutations of "h0", "h2", "h3", assuming "h1" is the central atom in the improper
        # following the smirnoff trefoil convention [(0, 1, 2, 3), (2, 1, 3, 0), (3, 1, 0, 2)]
        # https://github.com/openff.toolkit/openff.toolkit/blob/166c9864de3455244bd80b2c24656bd7dda3ae2d/openff.toolkit/typing/engines/smirnoff/parameters.py#L3326-L3360
        ## Set different permutations based on which definition of impropers
        ## are being used
        permuts = [(0, 1, 2, 3), (2, 1, 3, 0), (3, 1, 0, 2)]
        # concatenate the slot features in the order given by permutation p
        stack_permuts = lambda nodes, p: \
            torch.cat([nodes.data[f'h{i}'] for i in p], dim=1)
        for big_idx in self.levels:
            inner_net = getattr(self, f'sequential_{big_idx}')
            # sum the inner network's output over all permutations, then
            # map through the per-feature readout heads
            g.apply_nodes(func=lambda nodes: {
                feature: getattr(self, f'f_out_{big_idx}_to_{feature}')(
                    torch.sum(
                        torch.stack(
                            [inner_net(g=None, x=stack_permuts(nodes, p)) \
                                for p in permuts], dim=0
                        ), dim=0
                    )
                ) for feature in self.out_features.keys()
            }, ntype=big_idx)
        return g
class JanossyPoolingWithSmirnoffImproper(torch.nn.Module):
    """Janossy pooling (arXiv:1811.01900) to average node representation
    for improper torsions.
    """
    def forward(self, g):
        """Forward pass.
        Parameters
        ----------
        g : dgl.DGLHeteroGraph,
            input graph.
        """
        import dgl
        # copy
        # Broadcast atom (n1) representations into the 4 member slots
        # (h0..h3) of every improper node type listed in self.levels.
        g.multi_update_all(
            {
                "n1_as_%s_in_%s"
                % (relationship_idx, big_idx): (
                    dgl.function.copy_src("h", "m%s" % relationship_idx),
                    dgl.function.mean(
                        "m%s" % relationship_idx, "h%s" % relationship_idx
                    ),
                )
                for big_idx in self.levels
                for relationship_idx in range(4)
            },
            cross_reducer="sum",
        )
        if g.number_of_nodes("n4_improper") == 0:
            return g
        # pool
        # sum over three cyclic permutations of "h0", "h2", "h3", assuming "h1" is the central atom in the improper
        # following the smirnoff trefoil convention [(0, 1, 2, 3), (2, 1, 3, 0), (3, 1, 0, 2)]
        # https://github.com/openff.toolkit/openff.toolkit/blob/166c9864de3455244bd80b2c24656bd7dda3ae2d/openff.toolkit/typing/engines/smirnoff/parameters.py#L3326-L3360
        ## Set different permutations based on which definition of impropers
        ## are being used
        # NOTE(review): the comment above was copied from the trefoil
        # variant; the permutations below instead keep atom 0 fixed while
        # cycling atoms 1-3 -- confirm this matches the intended convention.
        permuts = [(0, 1, 2, 3), (0, 2, 3, 1), (0, 3, 1, 2)]
        # concatenate the slot features in the order given by permutation p
        stack_permuts = lambda nodes, p: \
            torch.cat([nodes.data[f'h{i}'] for i in p], dim=1)
        for big_idx in self.levels:
            inner_net = getattr(self, f'sequential_{big_idx}')
            # sum the inner network's output over all permutations, then
            # map through the per-feature readout heads
            g.apply_nodes(func=lambda nodes: {
                feature: getattr(self, f'f_out_{big_idx}_to_{feature}')(
                    torch.sum(
                        torch.stack(
                            [inner_net(g=None, x=stack_permuts(nodes, p)) \
                                for p in permuts], dim=0
                        ), dim=0
                    )
                ) for feature in self.out_features.keys()
            }, ntype=big_idx)
        return g
class JanossyPoolingNonbonded(torch.nn.Module):
"""Janossy pooling (arXiv:1811.01900) to average node representation
for improper torsions.
"""
def forward(self, g):
"""Forward pass.
Parameters
----------
g : dgl.DGLHeteroGraph,
input graph.
"""
# copy
g.multi_update_all(
{
"n1_as_%s_in_%s"
% (relationship_idx, big_idx): (
dgl.function.copy_src("h", "m%s" % relationship_idx),
dgl.function.mean(
"m%s" % relationship_idx, "h%s" % relationship_idx
),
)
for big_idx in self.levels
for relationship_idx in range(2)
},
cross_reducer="sum",
)
for big_idx in self.levels:
g.apply_nodes(
func=lambda nodes: {
feature: getattr(
self, "f_out_%s_to_%s" % (big_idx, feature)
)(
torch.sum(
torch.stack(
[
getattr(self, "sequential_%s" % big_idx)(
g=None,
x=torch.cat(
[
nodes.data["h0"],
nodes.data["h1"],
],
dim=1,
),
),
getattr(self, "sequential_%s" % big_idx)(
g=None,
x=torch.cat(
[
nodes.data["h1"],
nodes.data["h0"],
],
dim=1,
),
),
],
dim=0,
),
dim=0,
)
)
for feature in self.out_features.keys()
},
ntype=big_idx,
)
return g
| [
2,
38093,
25609,
198,
2,
30023,
33002,
198,
2,
38093,
25609,
198,
11748,
28034,
198,
198,
11748,
15024,
282,
6086,
355,
15024,
628,
198,
2,
38093,
25609,
198,
2,
33893,
42715,
1546,
198,
2,
38093,
25609,
198,
4871,
2365,
793,
88,
2720... | 1.589037 | 6,458 |
import pytest
from pytest import raises
from vyper import compiler
from vyper.exceptions import TypeMismatch
fail_list = [
"""
@public
def foo():
y: int128 = min(7, 0x1234567890123456789012345678901234567890)
"""
]
@pytest.mark.parametrize("bad_code", fail_list)
| [
11748,
12972,
9288,
198,
6738,
12972,
9288,
1330,
12073,
198,
198,
6738,
410,
88,
525,
1330,
17050,
198,
6738,
410,
88,
525,
13,
1069,
11755,
1330,
5994,
44,
1042,
963,
198,
198,
32165,
62,
4868,
796,
685,
198,
220,
220,
220,
37227,
... | 2.469027 | 113 |
"""
Super-resolution of CelebA using Generative Adversarial Networks.
The dataset can be downloaded from: https://www.dropbox.com/sh/8oqt9vytwxb3s4r/AADIKlz8PR9zr6Y20qbkunrba/Img/img_align_celeba.zip?dl=0
(if not available there see if options are listed at http://mmlab.ie.cuhk.edu.hk/projects/CelebA.html)
Instrustion on running the script:
1. Download the dataset from the provided link
2. Save the folder 'img_align_celeba' to '../../data/'
4. Run the sript using command 'python3 srgan.py'
"""
# %%
import argparse
import os
import numpy as np
import math
import itertools
import sys
import torchvision.transforms as transforms
from torchvision.utils import save_image, make_grid
from torch.utils.data import DataLoader
from torch.autograd import Variable
from models import *
from datasets import *
import torch.nn as nn
import torch.nn.functional as F
import torch
from tools.tools import tdict, Timer, append, AverageMeter
from utils import *
from aggregation import aggregate_grad, distribute_all
import pdb
# %%
os.makedirs("images", exist_ok=True)
os.makedirs("saved_models", exist_ok=True)
parser = argparse.ArgumentParser()
parser.add_argument("--epoch", type=int, default=0, help="epoch to start training from")
parser.add_argument("--n_epochs", type=int, default=200, help="number of epochs of training")
parser.add_argument("--dataset_name", type=str, default="img_align_celeba", help="name of the dataset")
parser.add_argument("--batch_size", type=int, default=4, help="size of the batches")
parser.add_argument("--batch_m", type=int, default=1, help="batch multiplier. iterate over n batches and then apply gradients")
parser.add_argument("--lr", type=float, default=0.0002, help="adam: learning rate")
parser.add_argument("--b1", type=float, default=0.5, help="adam: decay of first order momentum of gradient")
parser.add_argument("--b2", type=float, default=0.999, help="adam: decay of first order momentum of gradient")
parser.add_argument("--decay_epoch", type=int, default=100, help="epoch from which to start lr decay")
parser.add_argument("--n_cpu", type=int, default=8, help="number of cpu threads to use during batch generation")
parser.add_argument("--hr_height", type=int, default=256, help="high res. image height")
parser.add_argument("--hr_width", type=int, default=256, help="high res. image width")
parser.add_argument("--channels", type=int, default=3, help="number of image channels")
parser.add_argument("--sample_interval", type=int, default=100, help="interval between saving image samples")
parser.add_argument("--checkpoint_interval", type=int, default=-1, help="interval between model checkpoints")
parser.add_argument("--checkpoint_name", type=str, default='default', help="name of checkpoint")
opt = parser.parse_args()
print(opt)
# %%
cuda = torch.cuda.is_available()
n_cuda = torch.cuda.device_count()
hr_shape = (opt.hr_height, opt.hr_width)
print('n_cuda: %s'%n_cuda)
# Initialize generator and discriminator
generator_list = []
discriminator_list = []
feature_extractor_list = []
optimizer_G_list = []
optimizer_D_list = []
for i in range(n_cuda):
generator = GeneratorResNet().cuda(i)
discriminator = Discriminator(input_shape=(opt.channels, *hr_shape)).cuda(i)
feature_extractor = FeatureExtractor().cuda(i)
# Set feature extractor to inference mode
feature_extractor.eval()
# Optimizers
optimizer_G = torch.optim.Adam(generator.parameters(), lr=opt.lr, betas=(opt.b1, opt.b2))
optimizer_D = torch.optim.Adam(discriminator.parameters(), lr=opt.lr, betas=(opt.b1, opt.b2))
generator_list.append(generator)
discriminator_list.append(discriminator)
feature_extractor_list.append(feature_extractor)
optimizer_G_list.append(optimizer_G)
optimizer_D_list.append(optimizer_D)
optimizer_G = optimizer_G_list[0]
optimizer_D = optimizer_D_list[0]
print('number of parameters (generator): %s'%sum(p.numel() for p in generator_list[0].parameters()))
print('number of parameters (discriminator): %s'%sum(p.numel() for p in discriminator_list[0].parameters()))
for generator, discriminator, feature_extractor in zip(generator_list, discriminator_list, feature_extractor_list):
generator_device = next(generator.parameters()).device
discriminator_device = next(discriminator.parameters()).device
feature_extractor_device = next(feature_extractor.parameters()).device
print('models on device: generator(%s), discriminator(%s), feature_extractor(%s)'%(generator_device, discriminator_device, feature_extractor_device))
# Losses
criterion_GAN = torch.nn.MSELoss()
criterion_content = torch.nn.L1Loss()
if cuda:
criterion_GAN = criterion_GAN.cuda()
criterion_content = criterion_content.cuda()
Tensor = torch.cuda.FloatTensor if cuda else torch.Tensor
dataloader = DataLoader(
ImageDataset("../../data/%s" % opt.dataset_name, hr_shape=hr_shape),
batch_size=opt.batch_size,
shuffle=True,
num_workers=opt.n_cpu,
pin_memory=True
)
global_timer = Timer()
epoch_timer = Timer()
iter_timer = Timer()
iter_time_meter = AverageMeter()
# ----------
# Training
# ----------
global_timer.start()
for epoch in range(opt.epoch, opt.n_epochs):
epoch_timer.start()
imgs_list = []
for i, imgs in enumerate(dataloader):
if i % n_cuda == 0:
iter_timer.start()
imgs_list.append(imgs)
if len(imgs_list) < n_cuda:
continue
print('zero_grad_G')
optimizer_G.zero_grad()
print('zero_grad_D')
optimizer_D.zero_grad()
for imgs, generator, discriminator, feature_extractor in zip(imgs_list, generator_list, discriminator_list, feature_extractor_list):
device = next(generator.parameters()).device
with torch.cuda.device(device):
# ------------------
# Train Generators
# ------------------
# Configure model input
imgs_lr = imgs["lr"].cuda()
imgs_hr = imgs["hr"].cuda()
# Adversarial ground truths
valid = torch.ones((imgs_lr.size(0), *discriminator.output_shape), device=device)
fake = torch.zeros((imgs_lr.size(0), *discriminator.output_shape), device=device)
# Generate a high resolution image from low resolution input
gen_hr = generator(imgs_lr)
# Adversarial loss
loss_GAN = criterion_GAN(discriminator(gen_hr), valid)
# Content loss
gen_features = feature_extractor(gen_hr)
real_features = feature_extractor(imgs_hr)
loss_content = criterion_content(gen_features, real_features.detach())
# Total loss
loss_G = loss_content + 1e-3 * loss_GAN
loss_G = loss_G
loss_G.backward()
# ---------------------
# Train Discriminator
# ---------------------
# Loss of real and fake images
loss_real = criterion_GAN(discriminator(imgs_hr), valid)
loss_fake = criterion_GAN(discriminator(gen_hr.detach()), fake)
# Total loss
loss_D = (loss_real + loss_fake) / 2
loss_D = loss_D
loss_D.backward()
aggregate_grad(generator_list[1:], generator_list[0])
print('step_G')
optimizer_G.step()
distribute_all(generator_list[0], generator_list[1:])
aggregate_grad(discriminator_list[1:], discriminator_list[0])
print('step_D')
optimizer_D.step()
distribute_all(discriminator_list[0], discriminator_list[1:])
imgs_list = []
# --------------
# Log Progress
# --------------
print(
"[Epoch %d/%d] [Batch %d/%d] [D loss: %f] [G loss: %f]"
% (epoch, opt.n_epochs, i, len(dataloader), loss_D.item()*n_cuda, loss_G.item()*n_cuda)
)
iter_time_meter.update(iter_timer.stop())
print('time for iteration: %s (%s)'%(iter_time_meter.val, iter_time_meter.avg))
batches_done = epoch * len(dataloader) + i+1
if batches_done % opt.sample_interval == 0:
# Save image grid with upsampled inputs and SRGAN outputs
imgs_lr = nn.functional.interpolate(imgs_lr, scale_factor=4)
imgs_hr_raw = imgs['hr_raw'].to(device)
print('[psnr] (imgs_lr):%s, (gen_hr):%s'%(psnr(minmaxscaler(imgs_lr), imgs_hr_raw, max_val=1).mean(), psnr(minmaxscaler(gen_hr), imgs_hr_raw, max_val=1).mean()))
gen_hr = make_grid(gen_hr, nrow=1, normalize=True)
imgs_lr = make_grid(imgs_lr, nrow=1, normalize=True)
img_grid = torch.cat((imgs_lr, gen_hr), -1)
save_image(img_grid, "images/%d.png" % batches_done, normalize=False)
elapsed_time = epoch_timer.stop()
print('Elapsed_time: %s'%elapsed_time)
if opt.checkpoint_interval != -1 and epoch % opt.checkpoint_interval == 0:
# Save model checkpoints
torch.save(generator.state_dict(), "saved_models/generator_%d.pth" % epoch)
torch.save(discriminator.state_dict(), "saved_models/discriminator_%d.pth" % epoch)
elapsed_time = global_timer.stop()
print(str(elapsed_time))
append(str(elapsed_time), 'elapsed_time.txt')
torch.save(generator.state_dict(), "saved_models/generator_%s.pth" % opt.checkpoint_name)
torch.save(discriminator.state_dict(), "saved_models/discriminator_%s.pth" % opt.checkpoint_name)
2 **((1/4)*np.log2(6))
| [
37811,
198,
12442,
12,
29268,
286,
35688,
32,
1262,
2980,
876,
1215,
690,
36098,
27862,
13,
198,
464,
27039,
460,
307,
15680,
422,
25,
3740,
1378,
2503,
13,
14781,
3524,
13,
785,
14,
1477,
14,
23,
78,
39568,
24,
7670,
4246,
30894,
1... | 2.447483 | 3,913 |
import click
from . import alice_images_api as api
@click.group()
@click.option('--skill-id', prompt='Skill id', required=True, help='Alice skill id')
@click.option('--oauth-token', prompt='OAuth token', required=True, help='Account OAuth token')
@click.pass_context
@cli.command('upload')
@click.argument('image_path_or_url')
@click.pass_context
@cli.command('list')
@click.pass_context
@cli.command('status')
@click.pass_context
@cli.command('delete')
@click.option('--image-id', required=True, help='Image ID to delete.')
@click.pass_context
if __name__ == '__main__':
cli()
| [
11748,
3904,
198,
198,
6738,
764,
1330,
435,
501,
62,
17566,
62,
15042,
355,
40391,
628,
198,
31,
12976,
13,
8094,
3419,
198,
31,
12976,
13,
18076,
10786,
438,
42401,
12,
312,
3256,
6152,
11639,
35040,
4686,
3256,
2672,
28,
17821,
11,... | 2.902439 | 205 |
from FreeTAKServer.model.FTSModel.fts_protocol_object import FTSProtocolObject
from FreeTAKServer.model.FTSModel.ChecklistColumns import ChecklistColumns
from FreeTAKServer.model.FTSModel.ChecklistDetails import ChecklistDetails
from FreeTAKServer.model.FTSModel.ChecklistTasks import ChecklistTasks | [
6738,
3232,
5603,
42,
10697,
13,
19849,
13,
37,
4694,
17633,
13,
35594,
62,
11235,
4668,
62,
15252,
1330,
376,
4694,
19703,
4668,
10267,
198,
6738,
3232,
5603,
42,
10697,
13,
19849,
13,
37,
4694,
17633,
13,
9787,
4868,
39470,
82,
1330... | 3.397727 | 88 |
#! /usr/bin/env python
# Picon Plus Fireled Test
# Flashes Fireleds attached to Fireled Output
# Press Ctrl-C to stop
from __future__ import print_function
from piconplus import PiconPlus
from time import sleep
pp = PiconPlus(0x24)
rev = pp.getRevision()
print ('Board:', rev[1], " Revision:", rev[0])
for i in range(7):
pp.setPixel(i, 255, 0, 0)
sleep(0.5)
try:
while True:
pp.setAllPixels(255, 0, 0)
sleep(0.5)
pp.setAllPixels(0, 255, 0)
sleep(0.5)
pp.setAllPixels(0,0,255)
sleep(0.5)
pp.setAllPixels(255,255,255)
sleep(0.5)
pp.setAllPixels(0,0,0)
sleep(0.5)
except KeyboardInterrupt:
print()
finally:
pp.cleanup()
| [
2,
0,
1220,
14629,
14,
8800,
14,
24330,
21015,
198,
2,
350,
4749,
8227,
3764,
992,
6208,
198,
2,
1610,
7465,
3764,
992,
82,
7223,
284,
3764,
992,
25235,
198,
2,
4332,
19212,
12,
34,
284,
2245,
198,
198,
6738,
11593,
37443,
834,
13... | 2.068571 | 350 |
while True:
try:
x = int(input("Please enter a number: "))
break
except ValueError:
print("Oops! That was no valid number. Try again...")
try:
arquivo = open("arquivo.txt", "r")
except OSError:
# será executado caso haja uma exceção
print("arquivo inexistente")
else:
# será executado se tudo ocorrer bem no try
print("arquivo manipulado e fechado com sucesso")
arquivo.close()
finally:
# será sempre executado, independentemente de erro
print("Tentativa de abrir arquivo")
| [
4514,
6407,
25,
198,
220,
220,
220,
1949,
25,
198,
220,
220,
220,
220,
220,
220,
220,
2124,
796,
493,
7,
15414,
7203,
5492,
3802,
257,
1271,
25,
366,
4008,
198,
220,
220,
220,
220,
220,
220,
220,
2270,
198,
220,
220,
220,
2845,
... | 2.39823 | 226 |
# Description: The re.findall Function
import re
"""
Note
1. The syntax: re.finditer(pattern, string, flags=0)
2. This function returns an iterator yielding MatchObject instances over all non-overlapping matches for the RE pattern
in the string. The string is scanned left-to-right, and matches are returned in the order found.
3. Empty matches are included in the result unless they touch the beginning of another match.
"""
text = 'a@example.com, blah blah b@example.com more blah blah again c@example.com' # A text containing many email Ids
emailPattern = r'([\w\.-]+@[\w\.-]+)' # A patern to get a list of all the email strings
for match in re.finditer(emailPattern, text, re.IGNORECASE):
print "Position %s: %s" % (match.start(), match.group(1))
| [
2,
12489,
25,
383,
302,
13,
19796,
439,
15553,
198,
198,
11748,
302,
198,
198,
37811,
198,
6425,
198,
16,
13,
383,
15582,
25,
302,
13,
19796,
2676,
7,
33279,
11,
4731,
11,
9701,
28,
15,
8,
198,
17,
13,
770,
2163,
5860,
281,
4131... | 3.261603 | 237 |
#!/usr/bin/python
#-*- encoding:utf-8 -*-
#title:workyi_Talent system SQL injection
#author: xx00
#ref: http://www.wooyun.org/bugs/wooyun-2010-065810
if __name__ == '__main__':
from dummy import *
audit(assign('kxmail', 'http://mail.cdzk.org:8888/')[1]) | [
2,
48443,
14629,
14,
8800,
14,
29412,
201,
198,
2,
12,
9,
12,
21004,
25,
40477,
12,
23,
532,
9,
12,
201,
198,
2,
7839,
25,
1818,
48111,
62,
31466,
298,
1080,
16363,
16954,
201,
198,
2,
9800,
25,
31383,
405,
201,
198,
2,
5420,
... | 2.138462 | 130 |
import logging
from emarkdown.System.Tool import File
from emarkdown.Controller.ConverterController import ConverterController
from emarkdown.Controller.ExtractController import ExtractController
from emarkdown.System.Mode import Mode
from emarkdown.www.dark import Dark_HTML
| [
11748,
18931,
198,
198,
6738,
795,
668,
2902,
13,
11964,
13,
25391,
1330,
9220,
198,
6738,
795,
668,
2902,
13,
22130,
13,
3103,
332,
353,
22130,
1330,
35602,
353,
22130,
198,
6738,
795,
668,
2902,
13,
22130,
13,
11627,
974,
22130,
133... | 4.028986 | 69 |
#!/usr/bin/env python3
"""Crete mafft combination commands from directory with alignments."""
import os, re, sys, argparse, itertools, glob
sys.path.append(os.path.dirname(os.path.abspath(__name__)))
def read_parse_doc_file(prosite_path):
'''
Reads the doc file from PROSITE and creates a dictionary of keys PDCs and values a list of associated PS families.
'''
docfile = prosite_path+"/prosite.doc"
pdc_to_ps={}
with open (docfile, 'r', encoding="latin-1") as in_file:
data = in_file.read().replace('\n', ' ')
indexes = re.split('{END} | {BEGIN}',data)
del indexes[1::2]
indexes = list(filter(None, indexes))
for fam in indexes:
elements_in_fam = list(filter(None, re.split('{|}',fam)))
elements_in_fam = list(filter(lambda element: element.strip(), elements_in_fam))
elements_in_fam = list(filter(lambda element: not re.compile(r'//').search(element), elements_in_fam))
for index, element in enumerate(elements_in_fam):
elements_in_fam[index] = re.sub(r';.*', '', elements_in_fam[index])
pdc_to_ps[elements_in_fam.pop(0)] = elements_in_fam
return pdc_to_ps
def parse_indeli_folder(comm_args):
'''
Creates a dictionary of sequences from a folder with sequences, for mafft merging.
Makes sure alignments with the same number of sequences are grouped together. (ConSurf becnhmarking files)
'''
alnfiles_list = os.listdir(comm_args.indeli_path)
pdc_to_ps={}
for alnfile in alnfiles_list:
if alnfile.split('.')[0] not in pdc_to_ps:
pdc_to_ps[alnfile.split('.')[0]]=[]
pdc_to_ps[alnfile.split('.')[0]].append(comm_args.indeli_path+alnfile)
return(pdc_to_ps)
def parse_indeli_folder_mine(comm_args):
'''
Creates a dictionary of sequences from a folder with sequences, for mafft merging.
Makes sure alignments with the same number of sequences are grouped together. (ConSurf becnhmarking files)
'''
alnfiles_list = os.listdir(comm_args.indeli_path)
pdc_to_ps={}
pdc_to_ps[1] = alnfiles_list
return(pdc_to_ps)
def parse_rprot_folder(comm_args):
'''
Combines rProtein alignments and INDELI results into data structure for mafft merging module.
'''
indeli_list = glob.glob(comm_args.indeli_path+"*.fas")
rprot_list = glob.glob(comm_args.rprot_path+"*.fas")
pdc_to_ps = {}
for pair in itertools.product(indeli_list, rprot_list):
pdc_to_ps[pair] = [pair[0],pair[1]]
return pdc_to_ps
def build_nested(segs, text, container):
'''
Recursive constructor of a hash of hashes meant to organize the X -> H -> T -> F -> file_path.
Container gets augmented to hold that structure.
In cases of overlapping and similar Family levels (e.g 1.1.1.2 and 1.1.1.2-1 - two alignments of one family)
it uses a tuple to store the file paths
'''
head = segs[0]
tail = segs[1:]
if not tail:
if head not in container:
container[head] = text
else:
container[head] = (container[head], text)
else:
if head not in container:
container[head] = {}
build_nested(tail, text, container[head])
def parse_ecod_folder(comm_args):
'''
Produces combinations of family alignments for specified ECOD level.
'''
aln_list = glob.glob(comm_args.ecod_path+"*.fas")
fnamelist_to_alnpath = {}
for alignment in aln_list:
#parses through the aln filename to grab only the architecture levels in a fname list
fname = re.split('\.|-', alignment.split("/")[int(len(alignment.split("/"))-1)])[:-1]
build_nested(fname, alignment, fnamelist_to_alnpath)
for xlevel in sorted(fnamelist_to_alnpath):
print('X level: ',xlevel)
for hlevel in fnamelist_to_alnpath[xlevel]:
print('\tH level ',hlevel)
for tlevel in fnamelist_to_alnpath[xlevel][hlevel]:
print ('\t\tT level ',tlevel, fnamelist_to_alnpath[xlevel][hlevel][tlevel])
def merger_commands(sequence_groups,output_path):
'''
Executes mafft merging with a provided groups of sequences.
Writes out the resulting mergers to a specified directory.
'''
for group, seq_list in sequence_groups.items():
print(group)
for fam_comb in itertools.combinations(seq_list, 2):
#This one is unique
alns_for_merging = "concat_"+re.sub(r'.msa|.fas|.*/','',fam_comb[0])+"_"+re.sub(r'.*/','',fam_comb[1])
os.system("cat "+fam_comb[0]+" "+fam_comb[1]+" > "+alns_for_merging)
os.system("ruby /usr/local/bin/makemergetable.rb "+fam_comb[0]+" "+fam_comb[1]+" > subMSAtable")
os.system("mafft --merge subMSAtable "+alns_for_merging+" > "+output_path+re.sub(r'concat_','',alns_for_merging))
os.system("rm "+alns_for_merging)
return True
def prosite_merge(list_with_bad, prst_aln_loc, out_dir):
'''Specify whether its good or bad in the out_dir variable.'''
family_combinations = list(set(list(itertools.combinations(list_with_bad, 2))))
for fam_comb in family_combinations:
alns_for_merging = f"concat_{fam_comb[0]}_{fam_comb[1]}.fas"
file1 = f"{prst_aln_loc}/{fam_comb[0]}.msa"
file2 = f"{prst_aln_loc}/{fam_comb[1]}.msa"
os.system(f"cat {file1} {file2} > {out_dir}/{alns_for_merging}")
os.system(f"ruby /usr/local/bin/makemergetable.rb {file1} {file2} > subMSAtable")
os.system(f"mafft --quiet --merge subMSAtable {out_dir}/{alns_for_merging} > {out_dir}/{fam_comb[0]}_{fam_comb[1]}.fas")
os.system(f"rm {out_dir}/{alns_for_merging}")
os.system("rm subMSAtable")
return True
def main(commandline_arguments):
'''Main entry point'''
comm_args = create_and_parse_argument_options(commandline_arguments)
if comm_args.prosite_path:
if not comm_args.prosite_choice:
raise IOError("Choose what type of merge to do with -prst_choice")
pdc_to_ps = read_parse_prst_dat(comm_args.prosite_path)
prosite_bad_list = list()
prosite_good_list = list()
for pdc, ps_list in pdc_to_ps.items():
if len(ps_list) == 1:
prosite_bad_list.append(ps_list[0])
elif len(ps_list) > 1:
prosite_good_list.append(ps_list)
if comm_args.prosite_choice == 'bad' or comm_args.prosite_choice == 'both':
bad_filtered_ids = prosite_cleanup(prosite_bad_list, comm_args.prosite_path)
prosite_merge(bad_filtered_ids,
f"{comm_args.prosite_path}/prosite_alignments_handle",
f"{comm_args.output_directory}/BAD")
if comm_args.prosite_choice == 'good' or comm_args.prosite_choice == 'both':
good_filtered_ids = list()
for unfiltered_good in prosite_good_list:
temp_filtered = list()
temp_filtered = prosite_cleanup(unfiltered_good, comm_args.prosite_path)
if len(temp_filtered) > 1:
good_filtered_ids.append(temp_filtered)
for filtered_good in good_filtered_ids:
prosite_merge(filtered_good,
f"{comm_args.prosite_path}/prosite_alignments_handle",
f"{comm_args.output_directory}/GOOD")
elif comm_args.rprot_path and comm_args.indeli_path:
pdc_to_ps = parse_rprot_folder(comm_args)
merger_commands(pdc_to_ps, comm_args.output_directory)
elif comm_args.indeli_path:
pdc_to_ps = parse_indeli_folder(comm_args)
merger_commands(pdc_to_ps, comm_args.output_directory)
elif comm_args.ecod_path:
parse_ecod_folder(comm_args)
if __name__ == '__main__':
sys.exit(main(sys.argv[1:])) | [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
18,
198,
37811,
34,
8374,
285,
2001,
83,
6087,
9729,
422,
8619,
351,
10548,
902,
526,
15931,
198,
11748,
28686,
11,
302,
11,
25064,
11,
1822,
29572,
11,
340,
861,
10141,
11,
15095,
198,
1... | 2.428323 | 2,881 |
#!/usr/bin/env python
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Trim the image and creates a PDF with the same size.
import argparse
import Image
from reportlab.pdfgen.canvas import Canvas
from PIL import Image, ImageChops, ImageFilter
PROGNAME = 'TrimImagetoPDF'
VERSION = '0.20171225'
def export_pdf(imgname, autotm, default_dpi, outfile, tformat, tleft, ttop, twidth, theight):
"""Trim the image and creates a PDF with the same size."""
if outfile == '':
outfile = '%s.pdf' % (imgname)
outtrim = '%s-trim.' % (outfile)
outtrim = outtrim + tformat
pdf = Canvas(outfile, pageCompression=1)
dpi = default_dpi
im = Image.open(imgname)
w, h = im.size
width = round(w * 72.0 / dpi, 3)
height = round(h * 72.0 / dpi, 3)
pdf.setPageSize((width, height))
if ((tleft < 0) or (ttop < 0) or (twidth < 0) or (theight < 0)):
if autotm:
trimbox = autocrop(im, 255)
else:
if im.mode == 'RGB':
trimbox = trim(im, (255, 255, 255))
else:
trimbox = trim(im, 255)
else:
trimbox = (tleft, ttop, (tleft + twidth), (ttop + theight))
if trimbox:
print trimbox
x1, y1, x2, y2 = trimbox
wt = round((x2 - x1) * 72.0 / dpi, 3)
ht = round((y2 - y1) * 72.0 / dpi, 3)
x = round(x1 * 72.0 / dpi, 3)
y = height - round(y2 * 72.0 / dpi, 3)
trimim = im.crop(trimbox)
trimim.save(outtrim)
pdf.drawImage(outtrim, x, y, width=wt, height=ht)
else:
# found no content
raise ValueError('cannot trim; image was empty')
pdf.showPage()
pdf.save()
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Trim the image and creates a PDF with the same size')
parser.add_argument(
'-a',
'--auto',
action='store_true',
default=False,
help='autotrim, bw mode, default False')
parser.add_argument(
'-d',
'--dpi',
metavar='dpi',
type=int,
default=300,
help='dpi for pdf output, default is 300')
parser.add_argument(
'-o',
'--outfile',
metavar='outfile',
type=str,
default='',
help='output pdf name, default imgname.pdf')
parser.add_argument(
'-f',
'--format',
metavar='format',
type=str,
default='png',
help='format trim image, default png')
parser.add_argument(
'-l',
'--left',
metavar='left',
type=int,
default=-1,
help='use left, complit -ltwz, defaul -1')
parser.add_argument(
'-t',
'--top',
metavar='top',
type=int,
default=-1,
help='use top, complit -ltwz, defaul -1')
parser.add_argument(
'-w',
'--width',
metavar='width',
type=int,
default=-1,
help='use width, complit -ltwz, defaul -1')
parser.add_argument(
'-z',
'--height',
metavar='height',
type=int,
default=-1,
help='use height, complit -ltwz, defaul -1')
parser.add_argument(
'-v',
'--version',
action='version',
version=print_version())
parser.add_argument('imgname', help='image file name')
args = parser.parse_args()
export_pdf(args.imgname, args.auto, args.dpi, args.outfile, args.format, args.left, args.top, args.width, args.height)
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
2,
198,
2,
49962,
739,
262,
24843,
13789,
11,
10628,
362,
13,
15,
357,
1169,
366,
34156,
15341,
198,
2,
345,
743,
407,
779,
428,
2393,
2845,
287,
11846,
351,
262,
13789,
13,
198,
2... | 2.156989 | 1,860 |
pessoa = dict()
numero_pessoas = abaixo_peso = normal = sobrepeso = obesidade = obesidade_grave = 0
while True:
pessoa['nome'] = str(input('Digite o nome:')).strip().upper()
numero_pessoas += 1
pessoa['peso'] = float(input('Digite o peso:'))
pessoa['altura'] = float(input('Digite a altura'))
pessoa['imc'] = pessoa['peso']/(pessoa['altura'] * 2)
if pessoa['imc'] < 18.5:
pessoa['situacao'] = 'Abaixo do peso'
abaixo_peso += 1
elif 18.5 <= pessoa['imc'] <= 24.9:
pessoa['situacao'] = 'Normal'
normal += 1
elif 25 <= pessoa['imc'] <= 29.9:
pessoa['situacao'] = 'Sobrepeso'
sobrepeso += 1
elif 30.0 <= pessoa['imc'] <= 39.9:
pessoa['situacao'] = 'Obesidade'
obesidade += 1
elif pessoa['imc'] >= 40:
pessoa['situacao'] = 'Obesidade grave'
obesidade_grave += 1
for c, v in pessoa.items():
print(f'{c} => {v}')
resp = str(input('Continuar sim ou não [S/N]')).strip().upper()[0]
if resp == 'N':
break
print(f'Foram avaliadas {numero_pessoas} pessoas\n {abaixo_peso} estão abaixo do peso\n {normal} estão normais')
print(f'{sobrepeso} estão com sobrepeso\n {obesidade} estão obesas\n {obesidade_grave} estão com obesidade grave') | [
79,
408,
12162,
796,
8633,
3419,
198,
22510,
3529,
62,
79,
408,
78,
292,
796,
450,
64,
844,
78,
62,
12272,
78,
796,
3487,
796,
523,
4679,
12272,
78,
796,
909,
274,
312,
671,
796,
909,
274,
312,
671,
62,
41711,
796,
657,
198,
451... | 1.975117 | 643 |
import httpx
import json
import asyncio
| [
11748,
2638,
87,
198,
11748,
33918,
198,
11748,
30351,
952,
198
] | 3.636364 | 11 |
# ---
# jupyter:
# jupytext:
# text_representation:
# extension: .py
# format_name: light
# format_version: '1.5'
# jupytext_version: 1.11.4
# kernelspec:
# display_name: Python 3
# language: python
# name: python3
# ---
# <img align="right" src="images/tf.png" width="128"/>
# <img align="right" src="images/ninologo.png" width="128"/>
# <img align="right" src="images/dans.png" width="128"/>
#
# ---
#
# To get started: consult [start](start.ipynb)
#
# ---
#
# # Search Introduction
#
# *Search* in Text-Fabric is a template based way of looking for structural patterns in your dataset.
#
# It is inspired by the idea of
# [topographic query](http://books.google.nl/books?id=9ggOBRz1dO4C).
#
# Within Text-Fabric we have the unique possibility to combine the ease of formulating search templates for
# complicated patterns with the power of programmatically processing the results.
#
# This notebook will show you how to get up and running.
#
# ## Alternative for hand-coding
#
# Search is a powerful feature for a wide range of purposes.
#
# Quite a bit of the implementation work has been dedicated to optimize performance.
# Yet I do not pretend to have found optimal strategies for all
# possible search templates.
# Some search tasks may turn out to be somewhat costly or even very costly.
#
# That being said, I think search might turn out helpful in many cases,
# especially by reducing the amount of hand-coding needed to work with special subsets of your data.
#
# ## Easy command
#
# Search is as simple as saying (just an example)
#
# ```python
# results = A.search(template)
# A.show(results)
# ```
#
# See all ins and outs in the
# [search template docs](https://annotation.github.io/text-fabric/tf/about/searchusage.html).
# %load_ext autoreload
# %autoreload 2
# Load the Old Assyrian corpus app from a local clone; hoist=globals()
# injects the corpus API objects (F, L, T, ...) into this notebook's namespace.
from tf.app import use
A = use("oldassyrian:clone", checkout="clone", hoist=globals())
# Alternative: fetch the corpus data online instead of from a local clone.
# A = use('oldassyrian', hoist=globals())
# # Basic search command
#
# We start with the most simple form of issuing a query.
# Let's look for the numerals with a repeat greater than 3.
# We also want to show the words in which they occur.
#
# All work involved in searching takes place under the hood.
query = """
word
sign type=numeral repeat>3
"""
results = A.search(query)
A.table(results, end=10)
# We can show them in unicode representation as well:
A.table(results, end=10, fmt="text-orig-unicode")
# The hyperlinks take us all to the CDLI archival page of the document (tablet) in question.
#
# Note that we can choose start and/or end points in the results list.
A.table(results, start=500, end=503, fmt="text-orig-rich")
# We can show the results more fully with `show()`.
#
# That gives us pretty displays of tablet lines with the results highlighted.
A.show(results, end=3)
# # Condense results
#
# There are two fundamentally different ways of presenting the results: condensed and uncondensed.
#
# In **uncondensed** view, all results are listed individually.
# You can keep track of which parts belong to which results.
# The display can become unwieldy.
#
# This is the default view, because it is the straightest, most logical, answer to your query.
#
# In **condensed** view all nodes of all results are grouped in containers first (e.g. lines), and then presented
# container by container.
# You lose the information about which parts belong to which result.
#
# As an example of the difference, we look for all numerals.
query = """
% we choose a tablet with several numerals
document pnumber=P357880
sign type=numeral
"""
# Note that you can have comments in a search template. Comment lines start with a `%`.
results = A.search(query)
A.table(results, end=10)
# Let's expand the results display:
A.show(results, end=2, skipCols="1")
# As you see, the results are listed per result tuple, even if they occur all in the same verse.
# This way you can keep track of what exactly belongs to each result.
#
# Now in condensed mode:
A.show(results, condensed=True, withNodes=True)
# The last line has two results, and both results are highlighted in the same line display.
#
# We can modify the container in which we see our results.
#
# By default, it is `line`, but we can make it `face` as well:
A.show(results, end=2, condensed=True, condenseType="face")
# We now see the displays of two faces, one with two numerals in it and one with three.
# # Custom highlighting
#
# Let us make a new search where we look for two different things in the same line.
#
# We can apply different highlight colors to different parts of the result.
# The signs in the pair are member 0 and 1 of the result tuples.
# The members that we do not map, will not be highlighted.
# The members that we map to the empty string will be highlighted with the default color.
#
# **NB:** Choose your colors from the
# [CSS specification](https://developer.mozilla.org/en-US/docs/Web/CSS/color_value).
query = """
line
sign missing=1
sign question=1
sign damage=1
"""
results = A.search(query)
A.table(results, end=10, baseTypes="sign")
A.table(
results,
end=10,
baseTypes="sign",
colorMap={0: "", 2: "cyan", 3: "magenta", 4: "lightsalmon"},
)
A.show(results, end=5, colorMap={0: "", 2: "cyan", 3: "magenta", 4: "lightsalmon"})
# Color mapping works best for uncondensed results. If you condense results, some nodes may occupy
# different positions in different results. It is unpredictable which color will be used
# for such nodes:
A.show(
results,
condensed=True,
end=5,
colorMap={0: "", 2: "cyan", 3: "magenta", 4: "lightsalmon"},
)
# You can specify to what container you want to condense. By default, everything is condensed to lines.
#
# Let's change that to faces.
# Note that the `end` parameter counts the number of faces now.
A.show(
results,
end=2,
condensed=True,
condenseType="face",
colorMap={0: "", 2: "cyan", 3: "magenta", 4: "lightsalmon"},
)
# # Constraining order
# You can stipulate an order on the things in your template.
# You only have to put a relational operator between them.
# Say we want only results where the damage follows the missing.
query = """
line
sign question=1
sign missing=1
< sign damage=1
"""
results = A.search(query)
A.table(results, end=10, baseTypes="sign")
# We can also require the things to be adjacent.
query = """
line
sign question=1
sign missing=1
<: sign damage=1
"""
results = A.search(query)
A.table(results, end=10, baseTypes="sign")
A.show(
results,
end=10,
baseTypes="sign",
colorMap={0: "", 2: "cyan", 3: "magenta", 4: "lightsalmon"},
)
# Finally, we make the three things fully adjacent in fixed order:
query = """
line
sign question=1
<: sign missing=1
<: sign damage=1
"""
results = A.search(query)
A.table(results, end=10, baseTypes="sign")
A.show(results, end=5, colorMap={0: "", 2: "cyan", 3: "magenta", 4: "lightsalmon"})
# # Custom feature display
#
# We would like to see the original atf and the flags for signs.
# The way to do that, is to perform a `A.prettySetup(features)` first.
#
# We concentrate on one specific result.
# Ask pretty displays to show the atf and flags features on signs as well.
A.displaySetup(extraFeatures="atf flags")
A.show(
    results,
    start=4,
    end=4,
    baseTypes="sign",
    colorMap={0: "", 2: "cyan", 3: "magenta", 4: "lightsalmon"},
)
# The features without meaningful values have been left out. We can also change that by passing a set of values
# we think are not meaningful. The default set is
#
# ```python
# {None, 'NA', 'none', 'unknown'}
# ```
# With an empty noneValues set, every feature value is displayed, even None.
A.displaySetup(noneValues=set())
A.show(
    results,
    start=4,
    end=4,
    baseTypes="sign",
    colorMap={0: "", 2: "cyan", 3: "magenta", 4: "lightsalmon"},
)
# This makes clear that it is convenient to keep `None` in the `noneValues`:
A.displaySetup(noneValues={None})
A.show(
    results,
    start=4,
    end=4,
    baseTypes="sign",
    colorMap={0: "", 2: "cyan", 3: "magenta", 4: "lightsalmon"},
)
# We can even choose to suppress other values, e.g. the value 1.
#
# That will remove all the features such as `question`, `missing`.
A.displaySetup(noneValues={None, "NA", "unknown", 1})
A.show(
    results,
    start=4,
    end=4,
    baseTypes="sign",
    colorMap={0: "", 2: "cyan", 3: "magenta", 4: "lightsalmon"},
)
# In the rest of the notebook we stick to our normal setup, so we reset the extra features.
A.displayReset()
A.show(
    results,
    start=4,
    end=4,
    baseTypes="sign",
    colorMap={0: "", 2: "cyan", 3: "magenta", 4: "lightsalmon"},
)
# # Features from queries
#
# In earlier displays we saw the *types* of signs, because the query mentioned it.
#
# Suppose we want to display the type also here, then we can modify the query by mentioning the feature `type`.
#
# But we do not want to impose extra limitations, so we say `type*`, meaning: no conditions on type whatsoever.
query = """
line
sign question=1 type*
<: sign missing=1
<: sign damage=1
"""
results = A.search(query)
A.show(
results, start=4, end=4, colorMap={0: "", 2: "cyan", 3: "magenta", 4: "lightsalmon"}
)
# We do not see the features, because they are `sign` features, and our display stops at the `word` level.
# But we can improve on that:
A.show(
results,
start=4,
end=4,
baseTypes="sign",
colorMap={0: "", 2: "cyan", 3: "magenta", 4: "lightsalmon"},
)
# # Show your own tuples
#
# So far we have `show()`n the results of searches.
# But you can also construct your own tuples and show them.
#
# Whereas you can use search to get a pretty good approximation of what you want, most of the time
# you do not arrive precisely at your destination.
#
# Here is an example where we use search to come close, and then work our way to produce the end result.
#
# ## More missing than damaged
#
# We look for lines that have more missing signs than damaged signs.
#
# In our search templates we cannot formulate that a feature has different values on two nodes in the template.
# We could spell out all possible combinations of values and make a search template for each of them,
# but that is needlessly complex.
#
# Let's first use search to find all clauses containing missing and damaged signs.
query = """
line
sign missing
sign damage
"""
results = A.search(query)
# Now the hand coding begins. We are going to extract the tuples we want.
# Group the raw results per line: lines[lineNode] is a pair
# (set of missing signs, set of damaged signs), deduplicated.
lines = {}
for (ln, miss, dmg) in results:
    missSet, dmgSet = lines.setdefault(ln, (set(), set()))
    missSet.add(miss)
    dmgSet.add(dmg)
print(f"{len(lines)} lines")
# Now we have all lines with both missing and damaged signs, without duplicates.
#
# For each line we have a set with its missing signs and one with its damaged signs.
#
# We filter in order to retain the lines with more missing than damaged signs.
# We put all missing signs in one big set and all damaged signs in one big set.
# +
# Retain the lines with strictly more missing than damaged signs, and
# collect the union of all missing signs and of all damaged signs.
answer = []
missing = set()
damage = set()
for (ln, (missSet, dmgSet)) in lines.items():
    missing.update(missSet)
    damage.update(dmgSet)
    if len(missSet) > len(dmgSet):
        answer.append((ln, *missSet, *dmgSet))
len(answer)
# -
answer[0]
# We are going to make a dictionary of highlights: one color for the missing signs and one for the damaged.
# One highlight color per category: missing signs vs damaged signs.
colorM = "lightsalmon"
colorD = "mediumaquamarine"
highlights = {s: colorM for s in missing}
highlights.update((s, colorD) for s in damage)
# And now we can show them:
A.table(answer, start=1, end=10, highlights=highlights)
# As you see, you have total control.
# ---
#
# All chapters:
#
# * **[start](start.ipynb)** become an expert in creating pretty displays of your text structures
# * **[display](display.ipynb)** become an expert in creating pretty displays of your text structures
# * **search** turbo charge your hand-coding with search templates
# * **[exportExcel](exportExcel.ipynb)** make tailor-made spreadsheets out of your results
# * **[share](share.ipynb)** draw in other people's data and let them use yours
# * **[similarLines](similarLines.ipynb)** spot the similarities between lines
#
# ---
#
# See the [cookbook](cookbook) for recipes for small, concrete tasks.
#
# CC-BY Dirk Roorda
| [
2,
11420,
198,
2,
474,
929,
88,
353,
25,
198,
2,
220,
220,
474,
929,
88,
5239,
25,
198,
2,
220,
220,
220,
220,
2420,
62,
15603,
341,
25,
198,
2,
220,
220,
220,
220,
220,
220,
7552,
25,
764,
9078,
198,
2,
220,
220,
220,
220,
... | 3.078612 | 3,918 |