content stringlengths 1 1.04M | input_ids listlengths 1 774k | ratio_char_token float64 0.38 22.9 | token_count int64 1 774k |
|---|---|---|---|
# -*- coding: utf-8 -*-
import math
from ncc import LOGGER
from . import NccLRScheduler, register_lr_scheduler
@register_lr_scheduler('cosine')
class CosineSchedule(NccLRScheduler):
"""Assign LR based on a cyclical schedule that follows the cosine function.
See https://arxiv.org/pdf/1608.03983.pdf for details.
We also support a warmup phase where we linearly increase the learning rate
from some initial learning rate (``--warmup-init-lr``) until the configured
max learning rate (``--max-lr``).
During warmup::
lrs = torch.linspace(args.warmup_init_lr, args.lr, args.warmup_updates)
lr = lrs[update_num]
After warmup::
lr = lr_min + 0.5*(lr_max - lr_min)*(1 + cos(t_curr / t_i))
where ``t_curr`` is current percentage of updates within the current period
range and ``t_i`` is the current period range, which is scaled by ``t_mul``
after every iteration.
"""
def step(self, epoch, val_loss=None):
"""Update the learning rate at the end of the given epoch."""
super().step(epoch, val_loss)
# we don't change the learning rate at epoch boundaries
return self.optimizer.get_lr()
def step_update(self, num_updates):
"""Update the learning rate after each update."""
if num_updates < self.warmup_updates:
self.lr = self.warmup_init_lr + num_updates * self.lr_step
else:
curr_updates = num_updates - self.warmup_updates
if self.t_mult != 1:
i = math.floor(math.log(1 - curr_updates / self.period * (1 - self.t_mult), self.t_mult))
t_i = self.t_mult ** i * self.period
t_curr = curr_updates - (1 - self.t_mult ** i) / (1 - self.t_mult) * self.period
else:
i = math.floor(curr_updates / self.period)
t_i = self.period
t_curr = curr_updates - (self.period * i)
lr_shrink = self.lr_shrink ** i
min_lr = self.min_lr * lr_shrink
max_lr = self.max_lr * lr_shrink
self.lr = min_lr + 0.5 * (max_lr - min_lr) * (1 + math.cos(math.pi * t_curr / t_i))
self.optimizer.set_lr(self.lr)
return self.lr
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
198,
11748,
10688,
198,
198,
6738,
299,
535,
1330,
41605,
30373,
198,
6738,
764,
1330,
399,
535,
43,
6998,
1740,
18173,
11,
7881,
62,
14050,
62,
1416,
704,
18173,
628,
... | 2.236473 | 998 |
from __future__ import absolute_import
import itertools
from collections import defaultdict
from django.conf import settings
from sentry.api.serializers import Serializer, register, serialize
from sentry.auth.utils import is_active_superuser
from sentry.models import (
OrganizationAccessRequest, OrganizationMemberTeam, Project, ProjectStatus,
Team
)
@register(Team)
| [
6738,
11593,
37443,
834,
1330,
4112,
62,
11748,
198,
198,
11748,
340,
861,
10141,
198,
198,
6738,
17268,
1330,
4277,
11600,
198,
6738,
42625,
14208,
13,
10414,
1330,
6460,
198,
198,
6738,
1908,
563,
13,
15042,
13,
46911,
11341,
1330,
23... | 3.745098 | 102 |
from PySide6.QtWidgets import *
from PySide6.QtCore import *
from PySide6.QtGui import *
import os
| [
6738,
9485,
24819,
21,
13,
48,
83,
54,
312,
11407,
1330,
1635,
198,
6738,
9485,
24819,
21,
13,
48,
83,
14055,
1330,
1635,
198,
6738,
9485,
24819,
21,
13,
48,
83,
8205,
72,
1330,
1635,
198,
11748,
28686,
198
] | 2.538462 | 39 |
from cadl import librispeech
from cadl.utils import exists
from numpy.testing import run_module_suite
if __name__ == "__main__":
run_module_suite()
| [
6738,
20603,
75,
1330,
9195,
2442,
431,
3055,
198,
6738,
20603,
75,
13,
26791,
1330,
7160,
198,
6738,
299,
32152,
13,
33407,
1330,
1057,
62,
21412,
62,
2385,
578,
628,
198,
361,
11593,
3672,
834,
6624,
366,
834,
12417,
834,
1298,
198,... | 2.90566 | 53 |
from avmess.server_utils import ROOMS
from avmess.models.room_model import RoomModel
| [
6738,
1196,
37348,
13,
15388,
62,
26791,
1330,
15107,
2662,
50,
198,
6738,
1196,
37348,
13,
27530,
13,
3823,
62,
19849,
1330,
10096,
17633,
198
] | 3.4 | 25 |
from django.template import Context, loader, RequestContext
from django.http import HttpResponse, HttpResponseRedirect
from aquaticore.fish.models import *
from django.shortcuts import render_to_response, get_object_or_404
from datetime import datetime
from math import *
from flickrapi import FlickrAPI
from django.core.cache import cache
from django.forms import ModelForm
import datetime
from django.core.exceptions import ObjectDoesNotExist
# Order
# Family
# Genus | [
6738,
42625,
14208,
13,
28243,
1330,
30532,
11,
40213,
11,
19390,
21947,
198,
6738,
42625,
14208,
13,
4023,
1330,
367,
29281,
31077,
11,
367,
29281,
31077,
7738,
1060,
198,
6738,
37115,
382,
13,
11084,
13,
27530,
1330,
1635,
198,
6738,
... | 3.598485 | 132 |
"""
Copyright (c) 2017, Jairus Martin.
Distributed under the terms of the MIT License.
The full license is in the file LICENSE, distributed with this software.
Created on July 6, 2017
@author: jrm
"""
from atom.api import Typed, set_default
from enamlnative.widgets.image_view import ProxyImageView
from .android_view import AndroidView, View
from .bridge import (
JavaBridgeObject, JavaMethod, JavaCallback, JavaStaticMethod
)
class AndroidImageView(AndroidView, ProxyImageView):
""" An Android implementation of an Enaml ProxyImageView.
"""
#: A reference to the widget created by the proxy.
widget = Typed(ImageView)
#: A Glide request manager for loading images from urls or files
manager = Typed(RequestManager)
# -------------------------------------------------------------------------
# Initialization API
# -------------------------------------------------------------------------
def create_widget(self):
""" Create the underlying widget.
"""
self.widget = ImageView(self.get_context())
# -------------------------------------------------------------------------
# OnDrawableLoaded API
# -------------------------------------------------------------------------
# -------------------------------------------------------------------------
# ProxyImageView API
# -------------------------------------------------------------------------
| [
37811,
198,
15269,
357,
66,
8,
2177,
11,
449,
958,
385,
5780,
13,
198,
198,
20344,
6169,
739,
262,
2846,
286,
262,
17168,
13789,
13,
198,
198,
464,
1336,
5964,
318,
287,
262,
2393,
38559,
24290,
11,
9387,
351,
428,
3788,
13,
198,
... | 4.165714 | 350 |
import unittest
sys.path.insert(0, str(Path(__file__).parent.parent))
from lib.io import read_json
if __name__ == '__main__':
unittest.main()
| [
11748,
555,
715,
395,
198,
17597,
13,
6978,
13,
28463,
7,
15,
11,
965,
7,
15235,
7,
834,
7753,
834,
737,
8000,
13,
8000,
4008,
198,
198,
6738,
9195,
13,
952,
1330,
1100,
62,
17752,
198,
198,
361,
11593,
3672,
834,
6624,
705,
834,
... | 2.551724 | 58 |
#!python
# coding=utf-8
import pytest
import responses
from erddap_metrics.lib.erddap_metrics import ErddapMetrics
@pytest.fixture
@pytest.fixture
| [
2,
0,
29412,
198,
2,
19617,
28,
40477,
12,
23,
198,
198,
11748,
12972,
9288,
198,
11748,
9109,
198,
198,
6738,
1931,
1860,
499,
62,
4164,
10466,
13,
8019,
13,
263,
1860,
499,
62,
4164,
10466,
1330,
5256,
1860,
499,
9171,
10466,
628,... | 2.59322 | 59 |
import json
import logging
import sys
from click.testing import CliRunner
import rasterio
from rasterio.rio import info
from rasterio.rio.main import main_group
logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
| [
11748,
33918,
198,
11748,
18931,
198,
11748,
25064,
198,
198,
6738,
3904,
13,
33407,
1330,
1012,
72,
49493,
198,
198,
11748,
374,
1603,
952,
198,
6738,
374,
1603,
952,
13,
27250,
1330,
7508,
198,
6738,
374,
1603,
952,
13,
27250,
13,
1... | 2.934066 | 91 |
from scrapy.crawler import CrawlerProcess
from scrapy.settings import Settings
from smartlab import settings
from smartlab.spiders.smartlab_dividend import SmartlabDividendSpider
if __name__ == '__main__':
crawl_settings = Settings()
crawl_settings.setmodule(settings)
crawl_proc = CrawlerProcess(settings=crawl_settings)
crawl_proc.crawl(SmartlabDividendSpider)
crawl_proc.start()
| [
198,
6738,
15881,
88,
13,
66,
39464,
1330,
20177,
1754,
18709,
198,
6738,
15881,
88,
13,
33692,
1330,
16163,
198,
198,
6738,
4451,
23912,
1330,
6460,
198,
6738,
4451,
23912,
13,
2777,
4157,
13,
27004,
23912,
62,
67,
1699,
437,
1330,
1... | 3.115385 | 130 |
# -*- coding: utf-8 -*-
from .IpetEvaluationEditorApp import IpetEvaluationEditorApp
from .IpetPbHistoryWindow import IpetPbHistoryWindow
from .QIPETApplication import QIPETApplication
__all__ = ["ExperimentManagement",
]
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
6738,
764,
40,
6449,
36,
2100,
2288,
17171,
4677,
1330,
314,
6449,
36,
2100,
2288,
17171,
4677,
198,
6738,
764,
40,
6449,
47,
65,
18122,
27703,
1330,
314,
6449,
47,
65... | 3.083333 | 72 |
import subprocess, sqlite3
#valgrind -q --tool=massif --massif-out-file=mem_profile.massif --depth=1 ./mem_profilee 2 8 S H
##ms_print massif.out.`cat mem_profile_pid.txt`
##ms_print mem_profile.massif
#grep mem_heap_B mem_profile.massif |cut -c 12- |sort -n | tail -n 1
con = sqlite3.connect("mem_profile.sqlite")
con.cursor().execute("create table if not exists A (METHOD TEXT, BASIS TEXT, D INT, M INT, BYTES INT, PERFORMED TEXT, VERSION TEXT)")
con.commit()
version=1
no_commit=0
if no_commit:
print("warning: not saving results")
#test(0,0,True,1,no_commit)
for use_lyndon in [True]:#,False]:
for use_compiled in [True,False]:
for d in [3]:
for m in range(10,11):
test(d,m, use_compiled, use_lyndon, no_commit)
| [
11748,
850,
14681,
11,
44161,
578,
18,
198,
198,
2,
2100,
2164,
521,
532,
80,
1377,
25981,
28,
22208,
361,
1377,
22208,
361,
12,
448,
12,
7753,
28,
11883,
62,
13317,
13,
22208,
361,
1377,
18053,
28,
16,
24457,
11883,
62,
13317,
68,
... | 2.3125 | 336 |
import datetime
from datetime import timedelta
import re
from pprint import pprint
from tulip_api import TulipApi
| [
11748,
4818,
8079,
198,
6738,
4818,
8079,
1330,
28805,
12514,
198,
11748,
302,
198,
6738,
279,
4798,
1330,
279,
4798,
198,
6738,
48373,
541,
62,
15042,
1330,
30941,
541,
32,
14415,
628
] | 3.59375 | 32 |
from data import load_data_gse
import pandas as pd
def load_data_gse68465(verbose=-1, read_as_ndarray=False):
"""
This method loads the data set of the project GSE68465 available at
https://www.ncbi.nlm.nih.gov/geo/query/acc.cgi?acc=GSE68465. This project
reports a large, training/testing, multi-site, blinded validation study to
characterize the performance of several prognostic models based on
gene expression for 442 lung adenocarcinomas.
:param verbose: (int) print logging messages if greater than 0 (default: -1)
:param read_as_ndarray: (bool) reads data as pandas data frame if false and
as numpy ndarray if True (default: False)
:return:
- clinical (pd.DataFrame): contains a set of clinical markers associated to lung patients,
- genes (pd.DataFrame): contains gene expression levels associated to lung patients,
- outcome (pd.DataFrame): contains one variable grouping patients in high (0) and low (1) risk
"""
clinical, genes, outcome = load_data_gse('GSE68465', processing_gse68465, verbose, read_as_ndarray)
return clinical, genes, outcome
| [
6738,
1366,
1330,
3440,
62,
7890,
62,
70,
325,
198,
198,
11748,
19798,
292,
355,
279,
67,
628,
198,
198,
4299,
3440,
62,
7890,
62,
70,
325,
41580,
2996,
7,
19011,
577,
10779,
16,
11,
1100,
62,
292,
62,
358,
18747,
28,
25101,
2599,... | 3.084011 | 369 |
# -*- coding: utf-8 -*-
# ---------------------------------------------------------------------
# ZTE.ZXDSL98xx.get_chassis_id
# ---------------------------------------------------------------------
# Copyright (C) 2007-2018 The NOC Project
# See LICENSE for details
# ---------------------------------------------------------------------
"""
"""
from noc.core.script.base import BaseScript
from noc.sa.interfaces.igetchassisid import IGetChassisID
import re
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
2,
16529,
30934,
198,
2,
1168,
9328,
13,
40692,
5258,
43,
4089,
5324,
13,
1136,
62,
354,
20297,
62,
312,
198,
2,
16529,
30934,
198,
2,
15069,
357,
34,
8,
4343,
12,
... | 4.466019 | 103 |
# Copyright (c) 2017-2021 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
# SPDX-License-Identifier: Apache-2.0
from unittest import TestCase
from dazl.client.commands import CommandBuilder, CommandDefaults, CommandPayload, create
from dazl.damlast.lookup import parse_type_con_name
from dazl.ledger import CreateCommand, ExerciseCommand
from dazl.prim import ContractId, Party
import pytest
SOME_TEMPLATE_NAME = parse_type_con_name("Sample:Untyped")
SOME_PARTY = Party("SomeParty")
SOME_CONTRACT_ID = ContractId(SOME_TEMPLATE_NAME, "#0:0")
DEFAULTS = CommandDefaults(
default_party=SOME_PARTY,
default_ledger_id="some_ledger",
default_workflow_id="some_workflow",
default_application_id="some_app",
default_command_id="some_commands",
)
class TestCommandBuilderTest(TestCase):
"""
Tests for the various ways that helper objects are converted to :class:`CommandPayload`.
"""
| [
2,
15069,
357,
66,
8,
2177,
12,
1238,
2481,
10231,
31433,
357,
10462,
13947,
8,
402,
2022,
39,
290,
14,
273,
663,
29116,
13,
1439,
2489,
10395,
13,
198,
2,
30628,
55,
12,
34156,
12,
33234,
7483,
25,
24843,
12,
17,
13,
15,
628,
1... | 2.925466 | 322 |
import logging
import numpy as np
import scipy.stats
| [
11748,
18931,
198,
11748,
299,
32152,
355,
45941,
198,
11748,
629,
541,
88,
13,
34242,
628,
198
] | 3.235294 | 17 |
import os
import sys
import json
import re
from werkzeug.utils import secure_filename
from json_doc_reader import JSONDocReader
sys.path.insert(0, '../search/web_app')
from corpus_settings import CorpusSettings
from response_processors import SentenceViewer
class JSON2HTML:
"""
Contains methods for translating annotated JSON files into
HTML files, provided that the corpus settings allow full-text view.
"""
SETTINGS_DIR = '../conf'
def finalize_html_sentence(self, sent):
"""
Add span tags etc. to a sentence in HTML and clean it.
"""
# sent = sent.replace('<span class="newline"></span>', '<br>')
sent = re.sub('^[\n ]*<br> *', '', sent, flags=re.DOTALL)
sent = re.sub('\n\n+', '\n', sent, flags=re.DOTALL)
sent = re.sub(' +', ' ', sent)
return sent
def finalize_html_paragraph(self, sentByTier, colClass, paraNum):
"""
Make one HTML paragraph with parallel sentences.
"""
remainingCol = max(2, 12 - colClass * len(sentByTier))
paragraph = '<div class="d-none d-sm-block col-md-' + str(remainingCol // 2) + '"></div>'
paragraph += '<div class="paragraph_num">'
if paraNum % 10 == 0:
paragraph += '<div>' + str(paraNum) + '</div>'
paragraph += '</div>\n'
for iTier in range(len(sentByTier)):
sent = sentByTier[iTier]
sent = re.sub('(?<=class="word)(.*)',
lambda m: m.group(1).replace('<span class="newline"></span>', '<br>'),
sent,
flags=re.DOTALL)
sent = '<div class="col-sm-' + str(colClass) \
+ '"><span class="sent_lang sent_lang_lang' + str(iTier) \
+ '" id="res1_lang' + str(iTier) + '">' \
+ sent + '</span></div>\n'
paragraph += sent
return paragraph
def process_file(self, fnameIn, fnameOut):
"""
Read one JSON file (fnameIn). Generate an HTML representation for it
and store it in fnameOut.
"""
htmlByTier = [[]]
nTier = 0
paraIDsByTier = [set()]
for s, bLast in self.iterSent.get_sentences(fnameIn):
if 'lang' in s:
langID = s['lang']
else:
langID = 0
s['lang'] = langID
curParaIDs = []
if 'para_alignment' in s:
for para in s['para_alignment']:
if 'para_id' in para:
curParaIDs.append(para['para_id'])
s['doc_id'] = '0'
s = {
'_source': s
}
self.lastSentNum += 1
lang = self.settings.languages[langID]
sentProcessed = self.sentView.process_sentence(s,
numSent=self.lastSentNum,
lang=lang,
langView='lang' + str(nTier))
if len(sentProcessed['languages']['lang' + str(nTier)]['text']) > 0:
curSentData = {
'html': sentProcessed['languages']['lang' + str(nTier)]['text'] + ' \n',
'para_ids': curParaIDs
}
htmlByTier[nTier].append(curSentData)
paraIDsByTier[nTier] |= set(curSentData['para_ids'])
if bLast or ('last' in s['_source'] and s['_source']['last']):
nTier += 1
htmlByTier.append([])
paraIDsByTier.append(set())
# Remove empty tiers
for iTier in range(len(htmlByTier) - 1, -1, -1):
if (len(htmlByTier[iTier]) <= 0
or all(len(sent['html'].strip()) <= 0
for sent in htmlByTier[iTier])):
del htmlByTier[iTier]
del paraIDsByTier[iTier]
nTiers = len(htmlByTier)
colClass = 8
if nTiers > 1:
colClass = max(2, 10 // nTiers)
curPointers = [0] * nTiers
usedParaIDsByTier = [set() for _ in range(nTiers)]
dataFinal = {
'rows': [],
'meta': self.iterSent.get_metadata(fnameIn)
}
fname = ''
if 'fulltext_id' in dataFinal['meta']:
fname = secure_filename(dataFinal['meta']['fulltext_id'])
if len(fname) <= 0:
return
while curPointers[0] < len(htmlByTier[0]):
curParagraph = [''] * nTiers
curParagraph[0] = self.finalize_html_sentence(htmlByTier[0][curPointers[0]]['html'])
curParaIDs = set(htmlByTier[0][curPointers[0]]['para_ids'])
for iTier in range(1, nTiers):
remainingParaIDs = (paraIDsByTier[iTier] & curParaIDs) - usedParaIDsByTier[iTier]
while len(remainingParaIDs) > 0 and curPointers[iTier] < len(htmlByTier[iTier]):
curParagraph[iTier] += self.finalize_html_sentence(htmlByTier[iTier][curPointers[iTier]]['html'])
usedParaIDsByTier[iTier] |= set(htmlByTier[iTier][curPointers[iTier]]['para_ids'])
remainingParaIDs -= set(htmlByTier[iTier][curPointers[iTier]]['para_ids'])
curPointers[iTier] += 1
dataFinal['rows'].append(self.finalize_html_paragraph(curParagraph, colClass, curPointers[0] + 1))
curPointers[0] += 1
if not os.path.exists(os.path.dirname(fnameOut)):
os.makedirs(os.path.dirname(fnameOut))
with open(fnameOut, 'w', encoding='utf-8') as fOut:
json.dump(dataFinal, fOut, indent=1, ensure_ascii=False)
if __name__ == '__main__':
j2h = JSON2HTML()
j2h.process_file('../corpus/beserman_multimedia/json_disamb/2014/LV_AS-2014.08.09-TA_MU-cow_1.json',
'../search/corpus_html/beserman_multimedia/1.json')
j2h.process_file('../corpus/beserman_multimedia/json_disamb/2018/AL_RA-2018.05.01-MU_NF-quest_repeat.json',
'../search/corpus_html/beserman_multimedia/2.json')
| [
11748,
28686,
198,
11748,
25064,
198,
11748,
33918,
198,
11748,
302,
198,
6738,
266,
9587,
2736,
1018,
13,
26791,
1330,
5713,
62,
34345,
198,
6738,
33918,
62,
15390,
62,
46862,
1330,
19449,
23579,
33634,
198,
198,
17597,
13,
6978,
13,
2... | 1.900308 | 3,250 |
import argparse
import prometheus_client
from time import sleep
from . import NCS2Exporter, NCS2DeviceExporter, UsageFormatter
if __name__ == '__main__':
main()
| [
11748,
1822,
29572,
198,
11748,
1552,
36916,
62,
16366,
198,
6738,
640,
1330,
3993,
198,
6738,
764,
1330,
399,
7902,
17,
3109,
26634,
11,
399,
7902,
17,
24728,
3109,
26634,
11,
29566,
8479,
1436,
628,
198,
198,
361,
11593,
3672,
834,
... | 3.111111 | 54 |
import os
import errno
from tqdm import tqdm
import torch
import json
import numpy as np
from PIL import Image
import torch.nn as nn
import torchvision.transforms as transforms
import torch.optim as optim
import torch.nn.functional as F
import sys
d2lzh = sys.modules[__name__]
# Defined in file: ./chapter_preface/preface.md
from matplotlib import pyplot as plt
import d2lzh as d2l
import json
import time
from collections import namedtuple
import cv2
from IPython import display
##################################### Display Functions #################################################
# Defined in file: ./chapter_crashcourse/probability.md
def use_svg_display():
"""Use the svg format to display plot in jupyter."""
display.set_matplotlib_formats('svg')
# Defined in file: ./chapter_crashcourse/probability.md
def set_figsize(figsize=(3.5, 2.5)):
"""Change the default figure size"""
use_svg_display()
plt.rcParams['figure.figsize'] = figsize
# Defined in file: ./chapter_crashcourse/naive-bayes.md
def show_images(imgs, num_rows, num_cols, titles=None, scale=1.5):
"""Plot a list of images."""
figsize = (num_cols * scale, num_rows * scale)
_, axes = d2l.plt.subplots(num_rows, num_cols, figsize=figsize)
axes = axes.flatten()
for i, (ax, img) in enumerate(zip(axes, imgs)):
ax.imshow(img)
ax.axes.get_xaxis().set_visible(False)
ax.axes.get_yaxis().set_visible(False)
if titles:
ax.set_title(titles[i])
return axes
################################ Functions for making an inference on an image using trained model ###########################
# PredBoundingBox = namedtuple("PredBoundingBox", ["probability", "class_id",
# "classname", "bounding_box"
# ])
PredBoundingBox = namedtuple("PredBoundingBox", ["probability", "class_id",
"bounding_box"
])
def invert_transformation(bb_hat, anchors):
"""
Invert the transform from "loc_transformation".
"""
return torch.stack([anchors[:, 0] + bb_hat[:, 0] * anchors[:, 2],
anchors[:, 1] + bb_hat[:, 1] * anchors[:, 3],
anchors[:, 2] * torch.exp(bb_hat[:, 2]),
anchors[:, 3] * torch.exp(bb_hat[:, 3])
], dim=1)
########################################## Functions for downloading and preprocessing data ##############################################
def makedir_exist_ok(dirpath):
"""
Python2 support for os.makedirs(.., exist_ok=True)
"""
try:
os.makedirs(dirpath)
except OSError as e:
if e.errno == errno.EEXIST:
pass
else:
raise
def download_url(url, root, filename=None, md5=None):
"""Download a file from a url and place it in root.
Args:
url (str): URL to download file from
root (str): Directory to place downloaded file in
filename (str, optional): Name to save the file under. If None, use the basename of the URL
md5 (str, optional): MD5 checksum of the download. If None, do not check
"""
from six.moves import urllib
root = os.path.expanduser(root)
if not filename:
filename = os.path.basename(url)
fpath = os.path.join(root, filename)
makedir_exist_ok(root)
# downloads file
if check_integrity(fpath, md5):
print('Using downloaded and verified file: ' + fpath)
else:
try:
print('Downloading ' + url + ' to ' + fpath)
urllib.request.urlretrieve(
url, fpath,
reporthook=gen_bar_updater()
)
except (urllib.error.URLError, IOError) as e:
if url[:5] == 'https':
url = url.replace('https:', 'http:')
print('Failed download. Trying https -> http instead.'
' Downloading ' + url + ' to ' + fpath)
urllib.request.urlretrieve(
url, fpath,
reporthook=gen_bar_updater()
)
else:
raise e
# The following code is used to download pikachu dataset from mxnet aws server in the form of .rec files
# Then this .rec file is converted into png images and json files for annotation data
# This part requires 'mxnet' library which can be downloaded using conda
# using the command 'conda install mxnet'
# Matplotlib is also required for saving the png image files
# Download Pikachu Dataset
# Create dataloaders in mxnet
# Use mxnet dataloaders to convert .rec file to .png images and annotations.json
################################################## PyTorch Dataloader for PIKACHU dataset ########################################################
# Create Dataloaders in Pytorch
###################################################### Functions for showing graph of loss function ##############################################
def set_axes(axes, xlabel, ylabel, xlim, ylim, xscale, yscale, legend):
"""A utility function to set matplotlib axes"""
axes.set_xlabel(xlabel)
axes.set_ylabel(ylabel)
axes.set_xscale(xscale)
axes.set_yscale(yscale)
axes.set_xlim(xlim)
axes.set_ylim(ylim)
if legend: axes.legend(legend)
axes.grid()
################################## Functions for finding the overlap between anchors and predicted bounding boxes using IoU #######################
def center_2_hw(box: torch.Tensor) -> float:
"""
Converting (cx, cy, w, h) to (x1, y1, x2, y2)
"""
return torch.cat(
[box[:, 0, None] - box[:, 2, None]/2,
box[:, 1, None] - box[:, 3, None]/2,
box[:, 0, None] + box[:, 2, None]/2,
box[:, 1, None] + box[:, 3, None]/2
], dim=1)
#################################### Functions for saving and loading trained models #############################################
############################ Object Detection Related Functions ##############################
import itertools
import math
############################ Functions for Multi-Scale Object Detection Jupyter Notebook ##########################
def bbox_to_rect(bbox, color):
"""Convert bounding box to matplotlib format."""
return d2l.plt.Rectangle(xy=(bbox[0], bbox[1]), width=bbox[2]-bbox[0],
height=bbox[3]-bbox[1], fill=False, edgecolor=color,
linewidth=2)
####################################################################################################################################
| [
11748,
28686,
198,
11748,
11454,
3919,
198,
6738,
256,
80,
36020,
1330,
256,
80,
36020,
198,
11748,
28034,
198,
11748,
33918,
198,
11748,
299,
32152,
355,
45941,
198,
6738,
350,
4146,
1330,
7412,
198,
11748,
28034,
13,
20471,
355,
299,
... | 2.508778 | 2,677 |
"""
test_lexicon: unit tests for Lexicon
Corey Rayburn Yung <coreyrayburnyung@gmail.com>
Copyright 2020-2021, Corey Rayburn Yung
License: Apache-2.0 (https://www.apache.org/licenses/LICENSE-2.0)
"""
import dataclasses
import sourdough
@dataclasses.dataclass
@dataclasses.dataclass
if __name__ == '__main__':
test_lexicon()
| [
37811,
198,
9288,
62,
2588,
4749,
25,
4326,
5254,
329,
17210,
4749,
198,
14055,
88,
7760,
10899,
575,
2150,
1279,
7295,
2417,
323,
10899,
88,
2150,
31,
14816,
13,
785,
29,
198,
15269,
12131,
12,
1238,
2481,
11,
24154,
7760,
10899,
575... | 2.575758 | 132 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Oct 4 13:22:11 2021
@author: mariapalazzi
"""
import random
import numpy as np
from scipy.optimize import root
from required_functions_null_models import avg_degrees_expoRG
#%%
def exponentialRG_proportional_proportional(M):
'''
Solve the system of equations of the maximum log-likelihood problem.
The system of equations is solved using scipy's root function with
the solver defined by 'method'.
The solutions correspond to the Lagrange multipliers. We used the Least-squares
method='lm' for the Levenberg-Marquardt approach.
It can happen that the solver ``method`` used by ``scipy.root``
does not converge to a solution.
Payrato et al (PRX 2019) and F. Saracco et al (Sci Rep 2015)
Parameters
----------
M : array
the binary biadjacency matrix.
Returns
-------
Pij : array, floats
The probability matrix.
Given the biadjacency matrix M, which describes the probabilities of
row-node r and column-node c being linked.
'''
R,C=M.shape #rows and cols
#degrees of "real" matrix
cols_degr = M.sum(axis=0)
rows_degr = M.sum(axis=1)
x0=np.random.uniform(0,1,size=(R+C))
Pij = np.zeros((R,C));
solution_vector = root(avg_degrees_expoRG, x0,args=(rows_degr,cols_degr),method='lm')
print("Solver successful for lagrange multipliers?", solution_vector.success)
print(solution_vector.message)
if solution_vector.success==False:
Pij=Pij*np.nan
print('Solution did not converge, returning P_ij with NAN')
return Pij
x = solution_vector.x[0:R]
y = solution_vector.x[R:R+C]
for p_index in range(len(rows_degr)):
for a_index in range(len(cols_degr)):
Pij[p_index,a_index] = (x[p_index]*y[a_index])/(1+x[p_index]*y[a_index])
return Pij
#%%
def bascompte_probabilistic_proportional(M):
'''
Bascopmte's probabilistic null model. Bacompte PNAS 2003.
Parameters
----------
M : array
the binary biadjacency matrix.
Returns
-------
p_ij : array
The probability matrix.
Given the biadjacency matrix M, which describes the probabilities of
row-node r and column-node c being linked.
'''
rows,cols=M.shape
# dregrees of the rows and cols nodes
cols_degr = M.sum(axis=0)
rows_degr = M.sum(axis=1)
# normalized degrees
rows_degr_norm = rows_degr/cols
cols_degr_norm = cols_degr/rows
# M_null=np.zeros((rows,cols),dtype=int)
M_n=np.zeros((rows,cols))
# M_rand_ij=np.random.uniform(0,1,size=(rows,cols))
#obtaining the matrix of probabilities
for i in range(rows):
for j in range(cols):
p_ij=0.5*(cols_degr_norm[j] + rows_degr_norm[i])
M_n[i,j]=p_ij
#null matrix
#M_null=(M_n>=M_rand_ij).astype(int)
return M_n
#%%
def corrected_probabilistic_proportional(M):
'''
corrected probabilistic null model, a variation of Bascompte's model
Parameters
----------
M : array
the binary biadjacency matrix.
Returns
-------
p_ij : array
The probability matrix.
Given the biadjacency matrix M, which describes the probabilities of
row-node r and column-node c being linked.
'''
rows,cols=M.shape
# dregrees of the rows and cols nodes
cols_degr = M.sum(axis=0)
rows_degr = M.sum(axis=1)
# normalized dregrees
rows_degr_norm = rows_degr/cols
cols_degr_norm = cols_degr/rows
degr_rows_from_cols_sampling = (cols_degr_norm.sum())/cols
degr_cols_from_rows_sampling = (rows_degr_norm.sum())/rows
#M_null=np.zeros((rows,cols),dtype=int)
M_n=np.zeros((rows,cols))
#M_rand_ij=np.random.uniform(0,1,size=(rows,cols))
#obtaining the matrix of probabilities
for i in range(rows):
for j in range(cols):
p_ij=0.5*(cols_degr_norm[j] +
((1 - cols_degr_norm[j])*(rows_degr_norm[i] - degr_rows_from_cols_sampling)) +
rows_degr_norm[i] +
((1 - rows_degr_norm[i])*(cols_degr_norm[j] - degr_cols_from_rows_sampling)) )
if p_ij<0:
p_ij=0
M_n[i,j]=p_ij
#null matrix
# M_null=(M_n>=M_rand_ij).astype(int)
# if symmetric==True:
# np.fill_diagonal(M_null, 0)
# M_=np.triu(M_null,k=1)+(np.triu(M_null,k=1)).T
# else:
# M_=M_null
return M_n
#%%
def equiprobable_equiprobable(matrix):
'''
Function to generate randomization with equiprobable degrees.
Inputs:
----------
input_matrix: array
the binary biadjacency matrix
output:
----------
result: array
randomized version of the original matrix with equiprobable degrees
'''
matrix=np.array(matrix)
R,C=matrix.shape
occs=matrix.sum()
fill=occs/float(R*C)
rm=np.zeros([R,C])
while rm.sum()<occs:
rr,rc=random.randrange(R),random.randrange(C)
if random.random()<=fill:
rm[rr][rc]=1
return rm.astype(int)
#%%
def curve_ball(m,presences,num_iterations=-1):
'''
Function to generate randomization with fixed degrees.
FF null model, Curveball (Strona et al. 2014)
Inputs:
----------
input_matrix: array
the binary biadjacency matrix
num_iterations: int
Number of pair comparisons for each matrix generation, if empty, it takes
the smallets dimension times five.
presences: list of lists (int)
a list of list containing the indices of column (or rows) nodes to
wich each row (or column) node has a link
output:
----------
result: array
randomized version of the original matrix with fixed degrees
'''
r_hp=presences.copy()
num_rows, num_cols = m.shape
num_iters = 5 * min(num_rows, num_cols) if num_iterations == -1 else num_iterations
for rep in range(num_iters):
ab = random.sample(range(len(r_hp)), 2) #randomly select two nodes
a = ab[0]
b = ab[1]
ab = set(r_hp[a]) & set(r_hp[b])# overlap between the two nodes
a_ba = set(r_hp[a]) - ab #other links of node a
if len(a_ba) != 0:
b_aa = set(r_hp[b]) - ab #other links of node b
if len(b_aa) != 0:
ab = list(ab)
a_ba = list(a_ba)
b_aa = list(b_aa)
random.shuffle(a_ba)
random.shuffle(b_aa)
max_swap_extent=min(len(a_ba), len(b_aa)) #max value for pair extractions
swap_extent = random.randint(1,max_swap_extent)
#pair extractions
r_hp[a] = ab+a_ba[:-swap_extent]+b_aa[-swap_extent:]
r_hp[b] = ab+b_aa[:-swap_extent]+a_ba[-swap_extent:]
out_mat = np.zeros([num_rows, num_cols], dtype='int8') if num_cols >= num_rows else np.zeros([num_cols,num_rows], dtype='int8')
for r in range(min(num_rows, num_cols)):
out_mat[r, r_hp[r]] = 1
result = out_mat if num_cols >= num_rows else out_mat.T
return result
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
18,
198,
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
37811,
198,
41972,
319,
2892,
2556,
220,
604,
1511,
25,
1828,
25,
1157,
33448,
198,
198,
31,
9800,
25,
1667,
72,
... | 2.118506 | 3,426 |
from Tkinter import *
import os
import TabbedUI
from MyTab import MyTab
import Log
# # # #
if __name__ == '__main__':
main()
| [
6738,
309,
74,
3849,
1330,
1635,
198,
11748,
28686,
198,
198,
11748,
16904,
3077,
10080,
198,
6738,
2011,
33349,
1330,
2011,
33349,
198,
11748,
5972,
628,
198,
198,
2,
1303,
1303,
1303,
628,
198,
361,
11593,
3672,
834,
6624,
705,
834,
... | 2.647059 | 51 |
"""
Define Database Configuration
"""
class Config:
"""
Common configurations
"""
SQLALCHEMY_DATABASE_URI = 'postgresql://pdfbuilder:FKQMoXB7BQyU1zp59DGu@db:5432/pdfbuilder'
SQLALCHEMY_BINDS = {
'backup': 'postgresql://pdfbuilderbackup:FKQMoXB7BQyU1zp59DGu@backupdb:5432'
'/pdfbuilderbackup '
}
SQLALCHEMY_TRACK_MODIFICATIONS = False
SECRET_KEY = 'p9Bv<3Eid9dQW#$&sdER25wSF2w4fs$i01' # Secret API key
class DevelopmentConfig(Config):
    """
    Development configurations
    """
    # Enable Flask debug mode and make SQLAlchemy log every SQL statement.
    DEBUG = True
    SQLALCHEMY_ECHO = True
class ProductionConfig(Config):
    """
    Production configurations
    """
    # Debug must stay off in production (avoids leaking stack traces).
    DEBUG = False
# Map environment names to their configuration classes -- presumably looked
# up by the application factory; verify the key used by callers.
APP_CONFIG = {
    'development': DevelopmentConfig,
    'production': ProductionConfig
}
| [
37811,
198,
7469,
500,
24047,
28373,
198,
37811,
198,
4871,
17056,
25,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
8070,
25412,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
16363,
1847,
3398,
3620,
56,
62,
35,
1404,
6242,
11159,
... | 2.327327 | 333 |
import time
from unittest.mock import MagicMock
import optuna
from optuna import Trial
from optuna.distributions import CategoricalDistribution, UniformDistribution, IntUniformDistribution
from optuna.trial import BaseTrial
from controllers.searchspacemodel import SearchSpaceModel
| [
11748,
640,
198,
6738,
555,
715,
395,
13,
76,
735,
1330,
6139,
44,
735,
198,
198,
11748,
2172,
9613,
198,
6738,
2172,
9613,
1330,
21960,
198,
6738,
2172,
9613,
13,
17080,
2455,
507,
1330,
327,
2397,
12409,
20344,
3890,
11,
35712,
2034... | 3.763158 | 76 |
from rest_framework import serializers
from .models import *
| [
6738,
1334,
62,
30604,
1330,
11389,
11341,
198,
6738,
764,
27530,
1330,
1635,
628
] | 4.428571 | 14 |
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import tables
from distutils.version import LooseVersion
import ska_helpers
__version__ = ska_helpers.get_version(__package__)
def test(*args, **kwargs):
    """Run py.test unit tests.

    All positional and keyword arguments are forwarded unchanged to
    :func:`testr.test`, whose result is returned.
    """
    # Imported lazily so that importing this package does not require the
    # testr test runner to be installed.
    import testr
    return testr.test(*args, **kwargs)
if LooseVersion(tables.__version__) < LooseVersion('3.0'):
    # alias some methods to their pytables3 names
    # Back-compat shim: PyTables < 3.0 used camelCase API names. Installing
    # snake_case aliases lets the rest of the package use the PyTables 3.x
    # spelling regardless of the installed version.
    # NOTE(review): distutils.version.LooseVersion is deprecated (distutils
    # was removed in Python 3.12); consider packaging.version.Version.
    tables.open_file = tables.openFile
    tables.table.Table.read_where = tables.table.Table.readWhere
    tables.table.Table.get_where_list = tables.table.Table.getWhereList
    tables.table.Table.read_coordinates = tables.table.Table.readCoordinates
    tables.table.Table.modify_coordinates = tables.table.Table.modifyCoordinates
    tables.table.Table.remove_rows = tables.table.Table.removeRows
    tables.table.Column.create_csindex = tables.table.Column.createCSIndex
    tables.table.Column.create_index = tables.table.Column.createIndex
    tables.file.File.get_node = tables.file.File.getNode
    tables.file.File.create_table = tables.file.File.createTable
    tables.file.File.create_earray = tables.file.File.createEArray
    tables.file.File.copy_file = tables.file.File.copyFile
| [
2,
49962,
739,
257,
513,
12,
565,
682,
347,
10305,
3918,
5964,
532,
766,
38559,
24290,
13,
81,
301,
198,
11748,
8893,
198,
6738,
1233,
26791,
13,
9641,
1330,
6706,
577,
14815,
198,
11748,
1341,
64,
62,
16794,
364,
198,
198,
834,
964... | 2.887324 | 426 |
import os
import constants
import errorHandler
| [
11748,
28686,
201,
198,
11748,
38491,
201,
198,
11748,
4049,
25060,
201,
198,
201,
198,
201,
198
] | 3.176471 | 17 |
from django.shortcuts import render
from django.core.paginator import Paginator
from django.db.models import Count
from data.models import Recipe, Ingredient, Tag
from data.search import recipe_search
from time import time
# Ingredient titles used to drive search; computed once at import time, so
# newly added ingredients require a process restart to appear.
search_ingredients = list(Ingredient.objects.values_list("title", flat=True))
# Drop the empty title if present. Bug fix: the unconditional
# list.remove("") raised ValueError (crashing module import) whenever no
# ingredient had an empty title.
if "" in search_ingredients:
    search_ingredients.remove("")
| [
6738,
42625,
14208,
13,
19509,
23779,
1330,
8543,
198,
6738,
42625,
14208,
13,
7295,
13,
79,
363,
20900,
1330,
31525,
20900,
198,
6738,
42625,
14208,
13,
9945,
13,
27530,
1330,
2764,
198,
198,
6738,
1366,
13,
27530,
1330,
26694,
11,
175... | 3.510417 | 96 |
#!/usr/bin/env python2.3
import logging
import optparse
import socket
import sys
import time
import traceback
import ZEO.ClientStorage
from six.moves import map
from six.moves import zip
usage = """Usage: %prog [options] [servers]
Pack one or more storages hosted by ZEO servers.
The positional arguments specify 0 or more tcp servers to pack, where
each is of the form:
    host:port[:name]
"""
WAIT = 10 # wait no more than 10 seconds for client to connect
if __name__ == "__main__":
    # main() is defined elsewhere in this script (not visible in this excerpt).
    main()
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
17,
13,
18,
198,
198,
11748,
18931,
198,
11748,
2172,
29572,
198,
11748,
17802,
198,
11748,
25064,
198,
11748,
640,
198,
11748,
12854,
1891,
198,
11748,
1168,
4720,
13,
11792,
31425,
198,
673... | 3.18239 | 159 |
import rospy
import subprocess
import threading
import re
from lg_common.msg import WindowGeometry
import awesome
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
| [
11748,
686,
2777,
88,
198,
11748,
850,
14681,
198,
11748,
4704,
278,
198,
11748,
302,
198,
198,
6738,
300,
70,
62,
11321,
13,
19662,
1330,
26580,
10082,
15748,
198,
11748,
7427,
628,
198,
2,
43907,
25,
7400,
11338,
28,
23,
4292,
8658,... | 3.288462 | 52 |
# coding: utf-8
# Copyright 2009 Alexandre Fiori
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import txredisapi as redis
from twisted.internet import defer
from twisted.trial import unittest
from .mixins import RedisVersionCheckMixin, REDIS_HOST, REDIS_PORT
| [
2,
19617,
25,
3384,
69,
12,
23,
198,
2,
15069,
3717,
21000,
260,
40040,
72,
198,
2,
198,
2,
49962,
739,
262,
24843,
13789,
11,
10628,
362,
13,
15,
357,
1169,
366,
34156,
15341,
198,
2,
345,
743,
407,
779,
428,
2393,
2845,
287,
1... | 3.702439 | 205 |
import os
import pathlib
import shutil
from django.test import TestCase
from django.contrib.auth.models import User
from django.conf import settings
from .helpers import _create_userdir, _safe_to_create, create_repository
base_dir = settings.GIT_REPOS_ROOT
| [
11748,
28686,
198,
11748,
3108,
8019,
198,
11748,
4423,
346,
198,
6738,
42625,
14208,
13,
9288,
1330,
6208,
20448,
198,
6738,
42625,
14208,
13,
3642,
822,
13,
18439,
13,
27530,
1330,
11787,
198,
6738,
42625,
14208,
13,
10414,
1330,
6460,
... | 3.197531 | 81 |
# Read two integers and print their sum, difference and product, one per line.
a = int(input())
b = int(input())
# Bug fix: the original used a plain (non-f) triple-quoted string, so the
# program printed the literal text "{a+b}" etc. instead of the computed values.
print(f"{a + b}\n{a - b}\n{a * b}")
| [
64,
28,
600,
7,
15414,
28955,
198,
65,
28,
600,
7,
15414,
28955,
198,
4798,
5855,
15931,
90,
64,
10,
65,
92,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1391,
64,
12,
65,
92,
198,
220,
220,
220,
220,
220,
220,
220,
220,
... | 1.509091 | 55 |
from ..DB.Repositorio_Turistas_Entrantes_INE import RepositoryTuristasEntrantesINE as DBRepository
from ..Utilidades.Conversores import Conversores as Conversor
def obtener_porcentaje_turistas_entrantes_en_ciudad_destino_desde_ciudad_origen_en_rango_anio_mensualmente(CiudadDestino, CiudadOrigen, AnioInicio, AnioFin):
    """Monthly percentage of incoming tourists from an origin city to a
    destination city over a range of years.

    :param CiudadDestino: destination city
    :type CiudadDestino: str
    :param CiudadOrigen: origin city
    :type CiudadOrigen: str
    :param AnioInicio: first year of the range
    :type AnioInicio: int
    :param AnioFin: last year of the range
    :type AnioFin: int
    :return: extended-JSON payload as produced by Conversor.ObtenerDataJSONExtendido
    """
    conversor = Conversor()
    repository = DBRepository()
    cursor, labels = repository.ObtenerPorcentajeTuristasEntrantesEnCiudadDestinoDesdeCiudadOrigenEnRangoAniosMensualmente(CiudadDestino, CiudadOrigen, AnioInicio, AnioFin)
    tuplas = conversor.ConvertirCursorToTuplas(cursor)
    # The row list returned alongside the matrix is not needed here.
    matriz, _ = conversor.ConvertirTuplasToMatriz(tuplas, labels)
    return conversor.ObtenerDataJSONExtendido(matriz)
def obtener_cantidad_total_turistas_entrantes_en_ciudad_anio(Ciudad, Anio):
    """Total number of incoming tourists for a city in a given year.

    :param Ciudad: destination city
    :type Ciudad: str
    :param Anio: year
    :type Anio: int
    :return: extended-JSON payload as produced by Conversor.ObtenerDataJSONExtendido
    """
    conversor = Conversor()
    repository = DBRepository()
    cursor, labels = repository.ObtenerNumeroTotalTuristasEntrantesEnCiudadEnAnio(Ciudad, Anio)
    tuplas = conversor.ConvertirCursorToTuplas(cursor)
    # The row list returned alongside the matrix is not needed here.
    matriz, _ = conversor.ConvertirTuplasToMatriz(tuplas, labels)
    return conversor.ObtenerDataJSONExtendido(matriz)
def obtener_cantidad_turistas_entrantes_de_ciudad_origen_hacia_ciudad_destino_en_rango_anio_en_mes(CiudadDestino, CiudadOrigen, AnioInicio, AnioFin, Mes):
    """Number of tourists travelling from an origin city to a destination
    city, per year over a range of years, restricted to one month.

    :param CiudadDestino: destination city
    :type CiudadDestino: str
    :param CiudadOrigen: origin city
    :type CiudadOrigen: str
    :param AnioInicio: first year of the range
    :type AnioInicio: int
    :param AnioFin: last year of the range
    :type AnioFin: int
    :param Mes: month to restrict the query to
    :type Mes: int
    :return: extended-JSON payload as produced by Conversor.ObtenerDataJSONExtendido
    """
    conversor = Conversor()
    repository = DBRepository()
    cursor, labels = repository.ObtenerNumeroTotalTuristasEntrantesEnCiudadDestinoDesdeCiudadOrigenEnRangoAniosEnMes(CiudadDestino, CiudadOrigen, AnioInicio, AnioFin, Mes)
    tuplas = conversor.ConvertirCursorToTuplas(cursor)
    # The row list returned alongside the matrix is not needed here.
    matriz, _ = conversor.ConvertirTuplasToMatriz(tuplas, labels)
    return conversor.ObtenerDataJSONExtendido(matriz)
def obtener_cantidad_turistas_entrantes_en_ciudad_anio_mensualmente(Ciudad, Anio):
    """Number of incoming tourists for a city in a given year, broken down
    by month.

    :param Ciudad: destination city
    :type Ciudad: str
    :param Anio: year
    :type Anio: int
    :return: extended-JSON payload as produced by Conversor.ObtenerDataJSONExtendido
    """
    conversor = Conversor()
    repository = DBRepository()
    cursor, labels = repository.ObtenerNumeroTotalTuristasEntrantesEnCiudadEnAnioMensualmente(Ciudad, Anio)
    tuplas = conversor.ConvertirCursorToTuplas(cursor)
    # The row list returned alongside the matrix is not needed here.
    matriz, _ = conversor.ConvertirTuplasToMatriz(tuplas, labels)
    return conversor.ObtenerDataJSONExtendido(matriz)
def obtener_porcentaje_turistas_entrantes_en_ciudad_destino_desde_ciudad_origen_en_anio(CiudadDestino, CiudadOrigen, Anio):
    """Percentage of incoming tourists arriving at a destination city from
    an origin city during a given year.

    :param CiudadDestino: destination city
    :type CiudadDestino: str
    :param CiudadOrigen: origin city
    :type CiudadOrigen: str
    :param Anio: year
    :type Anio: int
    :return: extended-JSON payload as produced by Conversor.ObtenerDataJSONExtendido
    """
    conversor = Conversor()
    repository = DBRepository()
    cursor, labels = repository.ObtenerPorcentajeTuristasEntrantesEnCiudadDestinoDesdeCiudadOrigenEnAnio(CiudadDestino, CiudadOrigen, Anio)
    tuplas = conversor.ConvertirCursorToTuplas(cursor)
    # The row list returned alongside the matrix is not needed here.
    matriz, _ = conversor.ConvertirTuplasToMatriz(tuplas, labels)
    return conversor.ObtenerDataJSONExtendido(matriz)
def obtener_cantidad_turistas_entrantes_en_ciudad_destino_desde_ciudad_origen_en_rango_anio_en_mes(CiudadDestino, CiudadOrigen, AnioInicio, AnioFin, Mes):
    """Number of tourists leaving an origin city for a destination city,
    per year over a range of years, restricted to one month.

    NOTE: duplicates
    obtener_cantidad_turistas_entrantes_de_ciudad_origen_hacia_ciudad_destino_en_rango_anio_en_mes,
    which calls the same repository method; consider consolidating.

    :param CiudadDestino: destination city
    :type CiudadDestino: str
    :param CiudadOrigen: origin city
    :type CiudadOrigen: str
    :param AnioInicio: first year of the range
    :type AnioInicio: int
    :param AnioFin: last year of the range
    :type AnioFin: int
    :param Mes: month to restrict the query to
    :type Mes: int
    :return: extended-JSON payload as produced by Conversor.ObtenerDataJSONExtendido
    """
    conversor = Conversor()
    repository = DBRepository()
    cursor, labels = repository.ObtenerNumeroTotalTuristasEntrantesEnCiudadDestinoDesdeCiudadOrigenEnRangoAniosEnMes(CiudadDestino, CiudadOrigen, AnioInicio, AnioFin, Mes)
    tuplas = conversor.ConvertirCursorToTuplas(cursor)
    # The row list returned alongside the matrix is not needed here.
    matriz, _ = conversor.ConvertirTuplasToMatriz(tuplas, labels)
    return conversor.ObtenerDataJSONExtendido(matriz)
def obtener_porcentaje_turistas_entrantes_en_ciudad_destino_desde_ciudad_origen_en_rango_anios(CiudadDestino, CiudadOrigen, AnioInicio, AnioFin):
    """Percentage of incoming tourists arriving at a destination city from
    an origin city, per year over a range of years.

    :param CiudadDestino: destination city
    :type CiudadDestino: str
    :param CiudadOrigen: origin city
    :type CiudadOrigen: str
    :param AnioInicio: first year of the range
    :type AnioInicio: int
    :param AnioFin: last year of the range
    :type AnioFin: int
    :return: extended-JSON payload as produced by Conversor.ObtenerDataJSONExtendido
    """
    conversor = Conversor()
    repository = DBRepository()
    cursor, labels = repository.ObtenerPorcentajeTotalTuristasEntrantesEnCiudadDestinoDesdeCiudadOrigenEnRangoAnios(CiudadDestino, CiudadOrigen, AnioInicio, AnioFin)
    tuplas = conversor.ConvertirCursorToTuplas(cursor)
    # The row list returned alongside the matrix is not needed here.
    matriz, _ = conversor.ConvertirTuplasToMatriz(tuplas, labels)
    return conversor.ObtenerDataJSONExtendido(matriz)
def obtener_cantidad_turistas_entrantes_en_ciudad_rango_anios(Ciudad, AnioInicio, AnioFin):
    """Total number of incoming tourists for a city over a range of years.

    :param Ciudad: destination city
    :type Ciudad: str
    :param AnioInicio: first year of the range
    :type AnioInicio: int
    :param AnioFin: last year of the range
    :type AnioFin: int
    :return: extended-JSON payload as produced by Conversor.ObtenerDataJSONExtendido
    """
    conversor = Conversor()
    repository = DBRepository()
    cursor, labels = repository.ObtenerNumeroTotalTuristasEntrantesEnCiudadEnRangoAnios(Ciudad, AnioInicio, AnioFin)
    tuplas = conversor.ConvertirCursorToTuplas(cursor)
    # The row list returned alongside the matrix is not needed here.
    matriz, _ = conversor.ConvertirTuplasToMatriz(tuplas, labels)
    return conversor.ObtenerDataJSONExtendido(matriz)
def obtener_cantidad_turistas_entrantes_en_ciudad_rango_anios_en_mes(Ciudad, AnioInicio, AnioFin, Mes):
    """Number of incoming tourists for a city over a range of years,
    restricted to one month.

    :param Ciudad: destination city
    :type Ciudad: str
    :param AnioInicio: first year of the range
    :type AnioInicio: int
    :param AnioFin: last year of the range
    :type AnioFin: int
    :param Mes: month to restrict the query to
    :type Mes: str
    :return: extended-JSON payload as produced by Conversor.ObtenerDataJSONExtendido
    """
    conversor = Conversor()
    repository = DBRepository()
    cursor, labels = repository.ObtenerNumeroTotalTuristasEntrantesEnCiudadEnRangoAniosEnMes(Ciudad, AnioInicio, AnioFin, Mes)
    tuplas = conversor.ConvertirCursorToTuplas(cursor)
    # The row list returned alongside the matrix is not needed here.
    matriz, _ = conversor.ConvertirTuplasToMatriz(tuplas, labels)
    return conversor.ObtenerDataJSONExtendido(matriz)
def obtener_cantidad_turistas_entrantes_en_ciudad_rango_anios_mensualmente(Ciudad, AnioInicio, AnioFin):
    """Number of incoming tourists for a city over a range of years, broken
    down by month.

    :param Ciudad: destination city
    :type Ciudad: str
    :param AnioInicio: first year of the range
    :type AnioInicio: int
    :param AnioFin: last year of the range
    :type AnioFin: int
    :return: extended-JSON payload as produced by Conversor.ObtenerDataJSONExtendido
    """
    conversor = Conversor()
    repository = DBRepository()
    cursor, labels = repository.ObtenerNumeroTotalTuristasEntrantesEnCiudadEnRangoAniosMensualmente(Ciudad, AnioInicio, AnioFin)
    tuplas = conversor.ConvertirCursorToTuplas(cursor)
    # The row list returned alongside the matrix is not needed here.
    matriz, _ = conversor.ConvertirTuplasToMatriz(tuplas, labels)
    return conversor.ObtenerDataJSONExtendido(matriz)
| [
6738,
11485,
11012,
13,
6207,
418,
2072,
952,
62,
17483,
37503,
62,
14539,
5250,
274,
62,
8881,
1330,
1432,
13264,
17483,
37503,
14539,
5250,
274,
8881,
355,
20137,
6207,
13264,
198,
6738,
11485,
18274,
346,
312,
2367,
13,
3103,
690,
28... | 2.443279 | 4,575 |
"""Unit tests for the OJAudit violations collector."""
from ..source_collector_test_case import SourceCollectorTestCase
class OJAuditViolationsTest(SourceCollectorTestCase):
    """Unit tests for the OJAudit violations collector."""
    # Source/metric pair exercised by every test in this class.
    SOURCE_TYPE = "ojaudit"
    METRIC_TYPE = "violations"
    async def test_violations(self):
        """Test that the number of violations is returned."""
        # Fixture: one warning nested two <construct> levels deep and one
        # exception at the top level; total violation-count is 2.
        ojaudit_xml = """<audit xmlns="http://xmlns.oracle.com/jdeveloper/1013/audit">
          <violation-count>2</violation-count>
          <exception-count>1</exception-count>
          <error-count>0</error-count>
          <warning-count>1</warning-count>
          <incomplete-count>0</incomplete-count>
          <advisory-count>0</advisory-count>
          <models>
            <model id="a">
              <file>
                <path>a</path>
              </file>
            </model>
            <model id="b">
              <file>
                <path>b</path>
              </file>
            </model>
          </models>
          <construct>
            <children>
              <construct>
                <children>
                  <violation>
                    <message>a</message>
                    <location model="a">
                      <line-number>20</line-number>
                      <column-offset>4</column-offset>
                    </location>
                    <values>
                      <value>warning</value>
                    </values>
                  </violation>
                </children>
              </construct>
              <violation>
                <message>b</message>
                <location model="b">
                  <line-number>10</line-number>
                  <column-offset>2</column-offset>
                </location>
                <values>
                  <value>exception</value>
                </values>
              </violation>
            </children>
          </construct>
        </audit>"""
        response = await self.collect(get_request_text=ojaudit_xml)
        # component is "<model path>:<line>:<column>"; key is the stable hash
        # the collector derives for the entity -- TODO confirm hashing scheme.
        expected_entities = [
            dict(
                component="a:20:4",
                key="894756a0231a17f66b33d0ac18570daa193beea3",
                message="a",
                severity="warning",
                count="1",
            ),
            dict(
                component="b:10:2",
                key="2bdb532d49f0bf2252e85dc2d41e034c8c3e1af3",
                message="b",
                severity="exception",
                count="1",
            ),
        ]
        self.assert_measurement(response, value="2", entities=expected_entities)
    async def test_missing_location(self):
        """Test that an exception is raised if the violation location is missing."""
        # Fixture: a violation without a <location> element.
        ojaudit_xml = """<audit xmlns="http://xmlns.oracle.com/jdeveloper/1013/audit">
          <violation-count>2</violation-count>
          <models>
            <model id="a">
              <file>
                <path>a</path>
              </file>
            </model>
            <model id="b">
              <file>
                <path>b</path>
              </file>
            </model>
          </models>
          <construct>
            <violation>
              <message>a</message>
              <values>
                <value>medium</value>
              </values>
            </violation>
          </construct>
        </audit>"""
        response = await self.collect(get_request_text=ojaudit_xml)
        # The collector should report a parse error instead of a measurement.
        self.assertTrue("has no location element" in response["sources"][0]["parse_error"])
    async def test_filter_violations(self):
        """Test that violations of types the user doesn't want to see are not included."""
        # Fixture: a single "exception" violation, filtered out below by
        # restricting the severities parameter to "error".
        ojaudit_xml = """<audit xmlns="http://xmlns.oracle.com/jdeveloper/1013/audit">
          <violation-count>1</violation-count>
          <exception-count>1</exception-count>
          <error-count>0</error-count>
          <warning-count>0</warning-count>
          <incomplete-count>0</incomplete-count>
          <advisory-count>0</advisory-count>
          <models>
            <model id="a">
              <file>
                <path>a</path>
              </file>
            </model>
          </models>
          <construct>
            <violation>
              <message>a</message>
              <location model="a">
                <line-number>20</line-number>
                <column-offset>4</column-offset>
              </location>
              <values>
                <value>exception</value>
              </values>
            </violation>
          </construct>
        </audit>"""
        self.set_source_parameter("severities", ["error"])
        response = await self.collect(get_request_text=ojaudit_xml)
        self.assert_measurement(response, value="0", entities=[])
    async def test_ignore_duplicated_violations(self):
        """Test that violations with the same model, message, location, etc. are ignored."""
        # Fixture: the identical warning appears twice; it must be collapsed
        # into one entity whose count is "2".
        ojaudit_xml = """<audit xmlns="http://xmlns.oracle.com/jdeveloper/1013/audit">
          <violation-count>2</violation-count>
          <exception-count>1</exception-count>
          <error-count>0</error-count>
          <warning-count>1</warning-count>
          <incomplete-count>0</incomplete-count>
          <advisory-count>0</advisory-count>
          <models>
            <model id="a">
              <file>
                <path>a</path>
              </file>
            </model>
          </models>
          <construct>
            <children>
              <construct>
                <children>
                  <violation>
                    <message>a</message>
                    <location model="a">
                      <line-number>20</line-number>
                      <column-offset>4</column-offset>
                    </location>
                    <values>
                      <value>warning</value>
                    </values>
                  </violation>
                  <violation>
                    <message>a</message>
                    <location model="a">
                      <line-number>20</line-number>
                      <column-offset>4</column-offset>
                    </location>
                    <values>
                      <value>warning</value>
                    </values>
                  </violation>
                </children>
              </construct>
            </children>
          </construct>
        </audit>"""
        response = await self.collect(get_request_text=ojaudit_xml)
        expected_entities = [
            dict(
                component="a:20:4",
                key="894756a0231a17f66b33d0ac18570daa193beea3",
                message="a",
                severity="warning",
                count="2",
            )
        ]
        self.assert_measurement(response, value="2", entities=expected_entities)
| [
37811,
26453,
5254,
329,
262,
440,
41,
16353,
270,
11734,
22967,
526,
15931,
198,
198,
6738,
11485,
10459,
62,
33327,
273,
62,
9288,
62,
7442,
1330,
8090,
31337,
273,
14402,
20448,
628,
198,
4871,
440,
41,
16353,
270,
33894,
602,
14402,... | 2.044876 | 2,830 |
# -*- coding: utf-8 -*-
# Copyright 2019, IBM.
#
# This source code is licensed under the Apache License, Version 2.0 found in
# the LICENSE.txt file in the root directory of this source tree.
"""
Test circuits and reference outputs for initialize instruction.
"""
from numpy import array, sqrt
from qiskit import QuantumRegister, ClassicalRegister, QuantumCircuit
# ==========================================================================
# Deterministic output
# ==========================================================================
def initialize_circuits_deterministic(final_measure=True):
    """Build the test circuits whose measurement counts are deterministic.

    Every circuit starts from the |+++> state (Hadamard on all three qubits)
    and then re-initializes one, two, or three qubits:
      * qr[i] -> |1> for each i in 0..2 (3 circuits)
      * the pair state [0, 1, 0, 0] on every ordered pair of distinct
        qubits (6 circuits)
      * the triple state [0, 1, 0, 0, 0, -1, 0, 0]/sqrt(2) on every ordered
        triple of distinct qubits (6 circuits)
    When ``final_measure`` is True a barrier plus a full measurement is
    appended to each circuit.
    """
    qr = QuantumRegister(3)
    if final_measure:
        cr = ClassicalRegister(3)
        regs = (qr, cr)
    else:
        regs = (qr, )

    def _fresh_plus_state():
        # A new circuit prepared in |+++>.
        circ = QuantumCircuit(*regs)
        for q in range(3):
            circ.h(qr[q])
        return circ

    def _finish(circ):
        # Optionally measure, then record the circuit.
        if final_measure:
            circ.barrier(qr)
            circ.measure(qr, cr)
        circuits.append(circ)

    circuits = []
    # Re-initialize a single qubit to |1>.
    for target in range(3):
        circ = _fresh_plus_state()
        circ.initialize([0, 1], [qr[target]])
        _finish(circ)
    # Re-initialize every ordered pair of distinct qubits.
    for first in range(3):
        for second in range(3):
            if first == second:
                continue
            circ = _fresh_plus_state()
            circ.initialize([0, 1, 0, 0], [qr[first], qr[second]])
            _finish(circ)
    # Re-initialize every ordered triple of distinct qubits.
    for first in range(3):
        for second in range(3):
            for third in range(3):
                if first == second or first == third or second == third:
                    continue
                circ = _fresh_plus_state()
                circ.initialize([0, 1, 0, 0, 0, -1, 0, 0] / sqrt(2),
                                [qr[first], qr[second], qr[third]])
                _finish(circ)
    return circuits
def initialize_statevector_deterministic():
    """Initialize test circuits reference counts.

    Returns the reference statevectors, in the same order as the circuits
    built by ``initialize_circuits_deterministic`` -- keep the two functions
    in sync (the amplitude ordering presumably follows the simulator's
    qubit-ordering convention; verify before editing any entry).
    """
    targets = []
    # Start with |+++> state
    # Initialize qr[i] to |1> for i=0,1,2
    targets.append(array([0. +0.j, 0.5+0.j, 0. +0.j, 0.5+0.j, 0. +0.j, 0.5+0.j, 0. +0.j, 0.5+0.j]))
    targets.append(array([0. +0.j, 0. +0.j, 0.5+0.j, 0.5+0.j, 0. +0.j, 0. +0.j, 0.5+0.j, 0.5+0.j]))
    targets.append(array([0. +0.j, 0. +0.j, 0. +0.j, 0. +0.j, 0.5+0.j, 0.5+0.j, 0.5+0.j, 0.5+0.j]))
    # Start with |+++> state
    # Initialize qr[i] to |1> and qr[j] to |0>
    # For [i,j] = [0,1], [1, 0], [0, 2], [2, 0], [1, 2], [2, 1]
    targets.append(array([0. + 0.j, 1.0 + 0.j, 0. + 0.j, 0. + 0.j, \
                          0. + 0.j, 1.0 + 0.j, 0. + 0.j, 0. + 0.j] / sqrt(2)))
    targets.append(array([0. + 0.j, 1.0 + 0.j, 0. + 0.j, 1.0 + 0.j, \
                          0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j] / sqrt(2)))
    targets.append(array([0. + 0.j, 0. + 0.j, 1.0 + 0.j, 0. + 0.j, \
                          0. + 0.j, 0. + 0.j, 1.0 + 0.j, 0. + 0.j] / sqrt(2)))
    targets.append(array([0. + 0.j, 0. + 0.j, 1.0 + 0.j, 1.0 + 0.j, \
                          0. + 0.j, 0. + 0.j, 0 + 0.j, 0. + 0.j] / sqrt(2)))
    targets.append(array([0. + 0.j, 0. + 0.j, 0 + 0.j, 0. + 0.j, \
                          1.0 + 0.j, 0. + 0.j, 1.0 + 0.j, 0. + 0.j] / sqrt(2)))
    targets.append(array([0. + 0.j, 0. + 0.j, 0 + 0.j, 0. + 0.j, \
                          1.0 + 0.j, 1.0 + 0.j, 0. + 0.j, 0. + 0.j] / sqrt(2)))
    # Start with |+++> state
    # Initialize qr[i] to |1>, qr[j] to |0> and qr[k] to |->
    # For [i,j,k] = [0, 1, 2], [0, 2, 1], [1, 0, 2], [1, 2, 0], [2, 0, 1], [2, 1, 0]
    targets.append(array([0. + 0.j, 1.0 + 0.j, 0. + 0.j, 0. + 0.j, \
                          0. + 0.j, -1.0 + 0.j, 0. + 0.j, 0. + 0.j] / sqrt(2)))
    targets.append(array([0. + 0.j, 1.0 + 0.j, 0. + 0.j, -1.0 + 0.j,
                          0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j] / sqrt(2)))
    targets.append(array([0. + 0.j, 0. + 0.j, 1.0 + 0.j, 0. + 0.j, \
                          0. + 0.j, 0. + 0.j, -1.0 + 0.j, 0. + 0.j] / sqrt(2)))
    targets.append(array([0. + 0.j, 0. + 0.j, 1.0 + 0.j, -1.0 + 0.j, \
                          0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j] / sqrt(2)))
    targets.append(array([0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, \
                          1.0 + 0.j, 0. + 0.j, -1.0 + 0.j, 0. + 0.j] / sqrt(2)))
    targets.append(array([0. + 0.j, 0. + 0.j, 0. + 0.j, 0. + 0.j, \
                          1.0 + 0.j, -1.0 + 0.j, 0. + 0.j, 0. + 0.j] / sqrt(2)))
    return targets
# ==========================================================================
# Non-Deterministic output
# ==========================================================================
def initialize_circuits_nondeterministic(final_measure=True):
    """Build the test circuits whose measurement counts are non-deterministic.

    Two 2-qubit circuits are returned: qubit 0 is put into superposition
    with a Hadamard and then re-initialized to |+> (first circuit) or |->
    (second circuit).

    Fix: the original computed ``regs`` but ignored it, re-creating the
    registers per circuit and always attaching a classical register even
    when ``final_measure`` was False; it now mirrors
    ``initialize_circuits_deterministic``.
    """
    circuits = []
    qr = QuantumRegister(2)
    if final_measure:
        cr = ClassicalRegister(2)
        regs = (qr, cr)
    else:
        regs = (qr, )
    # One circuit per target single-qubit state: |+> then |->.
    for amplitudes in ([1, 1], [1, -1]):
        circuit = QuantumCircuit(*regs)
        circuit.h(qr[0])
        # numpy's sqrt makes the division broadcast over the plain list.
        circuit.initialize(amplitudes / sqrt(2), [qr[0]])
        if final_measure:
            circuit.barrier(qr)
            circuit.measure(qr, cr)
        circuits.append(circuit)
    return circuits
def initialize_statevector_nondeterministic():
"""Initialize test circuits reference counts."""
targets = []
# Start with a state (|00>+|11>)/sqrt(2)
# Initialize qubit 0 to |+>
targets.append(array([1, 1, 0, 0]) / sqrt(2))
# Initialize qubit 0 to |->
targets.append(array([1, -1, 0, 0]) / sqrt(2))
return targets | [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
198,
2,
15069,
13130,
11,
19764,
13,
198,
2,
198,
2,
770,
2723,
2438,
318,
11971,
739,
262,
24843,
13789,
11,
10628,
362,
13,
15,
1043,
287,
198,
2,
262,
38559,
2429... | 1.860665 | 3,732 |
import itertools
import random
import six
from geodata.addresses.config import address_config
from geodata.addresses.directions import RelativeDirection, LateralDirection, AnteroposteriorDirection
from geodata.addresses.floors import Floor
from geodata.addresses.numbering import NumberedComponent, sample_alphabet, latin_alphabet
from geodata.configs.utils import nested_get
from geodata.encoding import safe_decode
from geodata.math.sampling import weighted_choice, zipfian_distribution, cdf
from geodata.text.utils import is_numeric_strict
| [
11748,
340,
861,
10141,
198,
11748,
4738,
198,
11748,
2237,
198,
198,
6738,
4903,
375,
1045,
13,
2860,
16746,
13,
11250,
1330,
2209,
62,
11250,
198,
6738,
4903,
375,
1045,
13,
2860,
16746,
13,
12942,
507,
1330,
45344,
35,
4154,
11,
40... | 3.385093 | 161 |
# coding: utf-8
import pytest
from jinja2 import PackageLoader, Environment
from jinja2schema.config import Config
from jinja2schema.core import infer
from jinja2schema.model import Dictionary, String
@pytest.fixture
# noinspection PyUnusedLocal
# noinspection PyUnusedLocal
@pytest.fixture
| [
2,
19617,
25,
3384,
69,
12,
23,
198,
11748,
12972,
9288,
198,
6738,
474,
259,
6592,
17,
1330,
15717,
17401,
11,
9344,
198,
198,
6738,
474,
259,
6592,
17,
15952,
2611,
13,
11250,
1330,
17056,
198,
6738,
474,
259,
6592,
17,
15952,
261... | 3.050505 | 99 |
#!/usr/bin/env python
import sys
import numpy as np
import unittest
from path_finder import PathFinder
from path_validator import PathValidator
from path_visualizer import PathVisualizer
from waypoint import Waypoint
if __name__ == '__main__':
    # The optional --viz flag turns on path visualization in the test case;
    # it must be stripped from argv so unittest.main() does not try to
    # interpret it as a test name.
    if '--viz' in sys.argv:
        Challenge1TestCase.VIZ = True
        # Fix: sys.argv.pop() removed the *last* argument, which is only
        # correct when --viz happens to be last on the command line; remove
        # the flag itself instead.
        sys.argv.remove('--viz')
    unittest.main()
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
11748,
25064,
198,
11748,
299,
32152,
355,
45941,
198,
198,
11748,
555,
715,
395,
198,
198,
6738,
3108,
62,
22805,
1330,
10644,
37,
5540,
198,
6738,
3108,
62,
12102,
1352,
1330,
10644,
... | 2.712121 | 132 |
# -*- coding: utf-8 -*-
"""
Created on Mon Mar 15 15:15:56 2021
@author: Shiv
"""
from math import sqrt
from numpy import arange
import datetime
from operator import itemgetter as ig
# define the cell class
# K-nearest Neighbors(Choose 4 nearest APs out of the whole list we have)
# Calculate the barryCenter of the 4 points we found with the fingerprinting database
# ------------------------------------------main function--------------------------------------------
# Anchor datasets of (Location, measured distance) pairs for N-lateration.
# NOTE(review): `dataset` (4 anchors) is built but never used below; only
# `dataset1` (3 anchors) is passed to NLateration.
dataset = [(Location(.5,.5,.5), 3.0), (Location(4.0,.0,.0), 2.0), (Location(4.0,5.0,5.0), 4.2), (Location(3.0,3.0,3.0), 2.5)]
dataset1 = [(Location(.5,.5,.5), 3.0), (Location(4.0,.0,.0), 2.0), (Location(4.0,5.0,5.0), 4.2)]
Tf = []
# cells to calculate fingerprint database
# 3x3 grid of fingerprint cells; each newCell presumably takes four RSS
# readings plus the cell's (x, y) position -- TODO confirm against newCell.
Tf = [[newCell(-38,-27,-54,-13,2,2),newCell(-74,-62,-48,-33,6,2),newCell(-13,-28,-12,-40,10,2) ],\
      [newCell(-34,-27,-38,-41,2,6), newCell(-64,-48,-72,-35,6,6), newCell(-45,-37,-20,-15,10,6)], \
      [newCell(-17,-50,-44,-33,2,10), newCell(-27,-28,-32,-45,6,10), newCell(-30,-20,-60,-40,10,10)]]
# RSS readings of the mobile terminal ("MoblieCell" sic -- kept as-is).
MoblieCell = RSSVector(-26, -42, -13, -46)
# N-Lateration
start = datetime.datetime.now()
result = NLateration(dataset1, step=.1)
print("\r\nLocation : " + result[0].toString())
end = datetime.datetime.now()
# print (end-start)
# K-Neighbours
print("\nK-neighbors of Terminal Mobile : ")
neighborsCells = KNeighbors(Tf, MoblieCell)
for k in neighborsCells:
    print("(", k.location.x, ";", k.location.y, ")")
# Result Calculated
print("\r\nResult ax the localization estimate :")
center = solve_center(neighborsCells, MoblieCell.distances)
print(center.toString())
print (end-start) | [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
201,
198,
37811,
201,
198,
41972,
319,
2892,
1526,
1315,
1315,
25,
1314,
25,
3980,
33448,
201,
198,
201,
198,
31,
9800,
25,
43305,
201,
198,
37811,
201,
198,
201,
198,
6738... | 2.424069 | 698 |
#
# Car sensor demo using OpenCV
#
# Author: vswamina, Sept 2017.
# 2017_09_09 Created from code in globalfish/FaceTracker
# With a lot of help from the Internet
import cv2
import json
import threading
import time
from espeak import espeak
#
# Type of camera you want to use: we keep the code here but we shall use the PiCamera
#
DLINK930 = 1 # D-Link camera model 930. URL = user:pass@ip:port/video.cgi
DLINK2312 = 2 # D-Link camera model 2312. URL = user:pass@ip/video1.mjpg
BUILTINCAMERA = 3 # USB or built in camera on laptop
PICAMERA = 4 # Pi camera if running on Raspberry Pi
#
# if you have multiple cameras on your device then just use appropriate device
# if you have just one camera on your device then DEFAULT_CAM should work
DEFAULT_CAM = 0
REAR_CAM = 1
XTNL_CAM = 2
flywheelPin = 14 #broadcom pin to start flywheel
triggerPin = 15
#
# colors to use for bounding box and text
RED = 0, 0, 255
BLUE = 255, 0, 0
GREEN = 0, 255, 0
WHITE = 255,255,255
BLACK=10,10,10
# which camera do we use
camera_port = DEFAULT_CAM
#
# generic class that's called with a parameter and this then instantiates the
# correct type of implementation. The video recognition logic is in this class
#
# Thread for voice prompts. Run in separate thread to avoid blocking main thread
# and also to prevent irritating repetition and chatter
x = 400
y = 250
frameArea = x*y
vs = VideoCamera(PICAMERA, (x,y), 15)
vs.start()
vp = VoicePrompts().start()
cv2.setMouseCallback("Cars", vs.MouseTrack)
carsIdentified = False
try:
while True:
if( vs.foundNearCarsInFrame() ):
cars = vs.readNearCars()
for car in cars:
(x1,y1,w, h) = car
centerX = x1+w/2
centerY = y1+h/2
if ( w*h > 3000):
vp.setPhrase("Watch out. Vehicle ahead!")
else:
vp.setPhrase("")
if( w*h > 4000 and abs(centerX-200)<50 and abs(centerY-150) < 50):
vp.setPhrase("Approaching vehicle")
else:
vp.setPhrase("")
if( vs.isStopped() ):
vs.stop()
vp.stop()
except (KeyboardInterrupt): # expect to be here when keyboard interrupt
#vs.stop()
vp.stop()
| [
2,
198,
2,
1879,
12694,
13605,
1262,
4946,
33538,
198,
2,
198,
2,
6434,
25,
410,
2032,
18891,
11,
2362,
2177,
13,
198,
2,
2177,
62,
2931,
62,
2931,
15622,
422,
2438,
287,
15095,
1604,
680,
14,
32388,
35694,
198,
198,
2,
2080,
257,... | 2.329353 | 1,005 |
import pandas as pd
import os
from time import ctime
dataset = "camcan"
if os.path.exists("/home/parietal/"):
results_path = "/home/parietal/hjanati/csvs/%s/" % dataset
else:
data_path = "~/Dropbox/neuro_transport/code/mtw_experiments/meg/"
data_path = os.path.expanduser(data_path)
results_path = data_path + "results/%s/" % dataset
savedir_names = ["5sources_camcan"]
df = []
for savedir_name in savedir_names:
datadir = results_path + savedir_name + "/"
for root, dirs, files in os.walk(datadir):
for f in files:
if f.split('.')[-1] == "csv":
d = pd.read_csv(root + f, header=0, index_col=0)
df.append(d)
if len(df):
df = pd.concat(df, ignore_index=True)
if savedir_name == "5sources_camcan":
df["day"] = df["save_time"].apply(lambda x: int(ctime(x / 1e5)[7:10]))
df = df[(df.model != "mtw") + (df.day != 2)]
df.loc[df.model == "mtw", "model"] = "mtw"
df.loc[df.model == "mtw2", "model"] = "mtw"
df.to_csv("data/%s.csv" % savedir_name)
else:
print("No data for %s" % savedir_name)
| [
11748,
19798,
292,
355,
279,
67,
198,
11748,
28686,
198,
6738,
640,
1330,
269,
2435,
628,
198,
19608,
292,
316,
796,
366,
20991,
5171,
1,
198,
361,
28686,
13,
6978,
13,
1069,
1023,
7203,
14,
11195,
14,
1845,
21587,
30487,
2599,
198,
... | 2.044248 | 565 |
"""
api/urls.py
"""
from django.urls import path
from rest_framework.urlpatterns import format_suffix_patterns
from rest_framework.authtoken.views import obtain_auth_token
from api import views
urlpatterns = [
#---------------------------#
# UNPROTECTED API ENDPOINTS #
# --------------------------#
#btc_price caculations
path('stats/tsd', views.GetStatistics.as_view()name= 'btc_stadstics'),
path('api/version', views.VersionAPI.as_view(), name='version_api'),
# GATEWAY
#path('api/register', views.RegisterAPI.as_view(), name='register_api'),
path('api/login', obtain_auth_token, name='api_token_auth'),
#path('api/logout', views.post_logout_api, name='logout_api'),
]
urlpatterns = format_suffix_patterns(urlpatterns)
| [
37811,
201,
198,
15042,
14,
6371,
82,
13,
9078,
201,
198,
37811,
201,
198,
6738,
42625,
14208,
13,
6371,
82,
1330,
3108,
201,
198,
6738,
1334,
62,
30604,
13,
6371,
33279,
82,
1330,
5794,
62,
37333,
844,
62,
33279,
82,
201,
198,
6738... | 2.645485 | 299 |
from django.shortcuts import render, HttpResponse
from .models import Report
from .spark_funcs import process_json_data, get_json_covid_our_data
from .serializers import ReportSerializer
from rest_framework import viewsets
from rest_framework import permissions
class ReportViewSet(viewsets.ModelViewSet):
"""
API endpoint that allows reports to be viewed.
"""
queryset = Report.objects.all().order_by('location')
serializer_class = ReportSerializer
permission_classes = [permissions.IsAuthenticated]
filterset_fields = ['location', 'date']
def update_database(request):
"""
API endpoint that executes database update.
"""
json_data = get_json_covid_our_data()
df = process_json_data(json_data)
for row in df.rdd.collect():
report = Report(date=row.date,
location=row.location,
total_cases=row.total_cases,
new_cases=row.new_cases,
new_cases_smoothed=row.new_cases_smoothed,
total_deaths=row.total_deaths,
new_deaths=row.new_deaths,
new_deaths_smoothed=row.new_deaths_smoothed,
total_cases_per_million=row.total_cases_per_million,
new_cases_per_million=row.new_cases_per_million,
new_cases_smoothed_per_million=row.new_cases_smoothed_per_million,
total_deaths_per_million=row.total_deaths_per_million,
new_deaths_per_million=row.new_deaths_per_million,
new_deaths_smoothed_per_million=row.new_deaths_smoothed_per_million,
reproduction_rate=row.reproduction_rate
)
if not Report.objects.filter(location=report.location, date=report.date):
report.save()
return HttpResponse(b'DONE')
| [
6738,
42625,
14208,
13,
19509,
23779,
1330,
8543,
11,
367,
29281,
31077,
198,
6738,
764,
27530,
1330,
6358,
198,
6738,
764,
2777,
668,
62,
12543,
6359,
1330,
1429,
62,
17752,
62,
7890,
11,
651,
62,
17752,
62,
66,
709,
312,
62,
454,
... | 2.113786 | 914 |
import re
from collections import defaultdict
from enum import unique, Enum
from pathlib import Path
from typing import Union
import numpy as np
import tensorflow as tf
import tensorrtserver.api.model_config_pb2 as model_config
import torch
from tensorrtserver.api import ServerStatusContext
from modelci.types.bo import Framework, Engine, ModelVersion
from modelci.types.trtis_objects import DataType, ModelInputFormat, ServerStatus, ModelStatus
def parse_path(path: Path):
"""Obtain filename, framework and engine from saved path.
"""
if re.match(r'^.*?[!/]*/[a-z]+-[a-z]+/\d+$', str(path.with_suffix(''))):
filename = path.name
architecture = path.parent.parent.stem
info = path.parent.name.split('-')
framework = Framework[info[0].upper()]
engine = Engine[info[1].upper()]
version = ModelVersion(Path(filename).stem)
return {
'architecture': architecture,
'framework': framework,
'engine': engine,
'version': version,
'filename': filename,
'base_dir': path.parent
}
else:
raise ValueError('Incorrect model path pattern')
def generate_path(model_name: str, framework: Framework, engine: Engine, version: Union[ModelVersion, str, int]):
"""Generate saved path from model
"""
model_name = str(model_name)
if not isinstance(framework, Framework):
raise ValueError(f'Expecting framework type to be `Framework`, but got {type(framework)}')
if not isinstance(engine, Engine):
raise ValueError(f'Expecting engine type to be `Engine`, but got {type(engine)}')
if not isinstance(version, ModelVersion):
version = ModelVersion(str(version))
return Path.home() / '.modelci' / model_name / f'{framework.name.lower()}-{engine.name.lower()}' / str(version)
def parse_trt_model(url, protocol, model_name, batch_size, verbose=False):
"""
Check the configuration of a model to make sure it meets the
requirements for an image classification network (as expected by
this client)
"""
ctx = ServerStatusContext(url, protocol, model_name, verbose)
server_status: ServerStatus = ctx.get_server_status()
# print(server_status.model_status)
if model_name not in server_status.model_status:
raise Exception("unable to get status for '" + model_name + "'")
status: ModelStatus = server_status.model_status[model_name]
config = status.config
if len(config.input) != 1:
raise Exception("expecting 1 input, got {}".format(len(config.input)))
if len(config.output) != 1:
raise Exception("expecting 1 output, got {}".format(len(config.output)))
input = config.input[0]
output = config.output[0]
if output.data_type != model_config.TYPE_FP32:
raise Exception("expecting output datatype to be TYPE_FP32, model '" +
model_name + "' output type is " +
model_config.DataType.Name(output.data_type))
# Output is expected to be a vector. But allow any number of
# dimensions as long as all but 1 is size 1 (e.g. { 10 }, { 1, 10
# }, { 10, 1, 1 } are all ok). Variable-size dimensions are not
# currently supported.
non_one_cnt = 0
for dim in output.dims:
if dim == -1:
raise Exception("variable-size dimension in model output not supported")
if dim > 1:
non_one_cnt += 1
if non_one_cnt > 1:
raise Exception("expecting model output to be a vector")
# Model specifying maximum batch size of 0 indicates that batching
# is not supported and so the input tensors do not expect an "N"
# dimension (and 'batch_size' should be 1 so that only a single
# image instance is inferred at a time).
max_batch_size = config.max_batch_size
if max_batch_size == 0:
if batch_size != 1:
raise Exception("batching not supported for model '" + model_name + "'")
else: # max_batch_size > 0
if batch_size > max_batch_size:
raise Exception("expecting batch size <= {} for model {}".format(max_batch_size, model_name))
# Model input must have 3 dims, either CHW or HWC
if len(input.dims) != 3:
raise Exception(
"expecting input to have 3 dimensions, model '{}' input has {}".format(
model_name, len(input.dims)))
# Variable-size dimensions are not currently supported.
for dim in input.dims:
if dim == -1:
raise Exception("variable-size dimension in model input not supported")
if ((input.format != model_config.ModelInput.FORMAT_NCHW) and
(input.format != model_config.ModelInput.FORMAT_NHWC)):
raise Exception("unexpected input format " + model_config.ModelInput.Format.Name(input.format) +
", expecting " +
model_config.ModelInput.Format.Name(model_config.ModelInput.FORMAT_NCHW) +
" or " +
model_config.ModelInput.Format.Name(model_config.ModelInput.FORMAT_NHWC))
if input.format == model_config.ModelInput.FORMAT_NHWC:
h = input.dims[0]
w = input.dims[1]
c = input.dims[2]
else:
c = input.dims[0]
h = input.dims[1]
w = input.dims[2]
return input.name, output.name, c, h, w, input.format, model_dtype_to_np(input.data_type)
@unique
class TensorRTPlatform(Enum):
"""TensorRT platform type for model configuration
"""
TENSORRT_PLAN = 0
TENSORFLOW_GRAPHDEF = 1
TENSORFLOW_SAVEDMODEL = 2
CAFFE2_NETDEF = 3
ONNXRUNTIME_ONNX = 4
PYTORCH_LIBTORCH = 5
CUSTOM = 6
TensorRTModelInputFormat = ModelInputFormat
| [
11748,
302,
198,
6738,
17268,
1330,
4277,
11600,
198,
6738,
33829,
1330,
3748,
11,
2039,
388,
198,
6738,
3108,
8019,
1330,
10644,
198,
6738,
19720,
1330,
4479,
198,
198,
11748,
299,
32152,
355,
45941,
198,
11748,
11192,
273,
11125,
355,
... | 2.543134 | 2,272 |
version = "__VERSION__"
| [
198,
9641,
796,
366,
834,
43717,
834,
1,
628
] | 2.888889 | 9 |
import random
import consul
import requests
from shop_services.common_service.register import base
| [
11748,
4738,
198,
198,
11748,
762,
377,
198,
11748,
7007,
198,
198,
6738,
6128,
62,
30416,
13,
11321,
62,
15271,
13,
30238,
1330,
2779,
628
] | 4.08 | 25 |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'Main.ui'
#
# Created by: PyQt5 UI code generator 5.15.4
#
# WARNING: Any manual changes made to this file will be lost when pyuic5 is
# run again. Do not edit this file unless you know what you are doing.
from PyQt5 import QtCore, QtGui, QtWidgets
import resources_rc
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
201,
198,
201,
198,
2,
5178,
7822,
7560,
422,
3555,
334,
72,
2393,
705,
13383,
13,
9019,
6,
201,
198,
2,
201,
198,
2,
15622,
416,
25,
9485,
48,
83,
20,
12454,
2438,
173... | 2.821705 | 129 |
# -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html
import os
from urllib import request
from scrapy.pipelines.images import ImagesPipeline
from bmw import settings
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
198,
2,
2896,
500,
534,
2378,
31108,
994,
198,
2,
198,
2,
2094,
470,
6044,
284,
751,
534,
11523,
284,
262,
7283,
3620,
62,
47,
4061,
3698,
1268,
1546,
4634,
198,
2,
... | 2.933333 | 105 |
__version__ = '0.0.0'
from ._atomstoys import ffi as _ffi
from ._atomstoys import lib as _lib
| [
834,
9641,
834,
796,
705,
15,
13,
15,
13,
15,
6,
198,
198,
6738,
47540,
37696,
301,
19417,
1330,
277,
12463,
355,
4808,
487,
72,
198,
6738,
47540,
37696,
301,
19417,
1330,
9195,
355,
4808,
8019,
628
] | 2.594595 | 37 |
import asyncio
import functools
import logging
logger = logging.getLogger(__name__)
user_cnt = 0
HELP = "%Commandes: !help, !quit, !join:pseudo, !list\n"
async def main():
"""
Main function, may be called from shotcut command
"""
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('-l', '--logging', type=int, default=10, help='python logging level')
parser.add_argument('-a', '--address', default='0.0.0.0', help='server address')
parser.add_argument('-p', '--port', type=int, default=9999, help='server port')
args = parser.parse_args()
logging.basicConfig()
logger.setLevel(args.logging)
logger.info('start serving on port %s', args.port)
room = {}
server = await asyncio.start_server(functools.partial(handle_client, room), host=args.address, port=args.port)
try:
await server.serve_forever()
finally:
server.close()
if __name__ == '__main__':
asyncio.run(main())
| [
11748,
30351,
952,
198,
11748,
1257,
310,
10141,
198,
11748,
18931,
198,
198,
6404,
1362,
796,
18931,
13,
1136,
11187,
1362,
7,
834,
3672,
834,
8,
198,
198,
7220,
62,
66,
429,
796,
657,
198,
198,
39,
3698,
47,
796,
36521,
21575,
274... | 2.647849 | 372 |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'loginmain.ui'
#
# Created by: PyQt5 UI code generator 5.15.4
#
# WARNING: Any manual changes made to this file will be lost when pyuic5 is
# run again. Do not edit this file unless you know what you are doing.
from PyQt5 import QtCore, QtGui, QtWidgets
if __name__ == "__main__":
import sys
app = QtWidgets.QApplication(sys.argv)
MainWindow = QtWidgets.QMainWindow()
ui = Ui_MainWindow()
ui.setupUi(MainWindow)
MainWindow.show()
sys.exit(app.exec_()) | [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
198,
2,
5178,
7822,
7560,
422,
3555,
334,
72,
2393,
705,
38235,
12417,
13,
9019,
6,
198,
2,
198,
2,
15622,
416,
25,
9485,
48,
83,
20,
12454,
2438,
17301,
642,
13,
... | 2.663507 | 211 |
from zeus.config import db
from zeus.db.func import array_agg_row
from zeus.models import Build, FailureReason
from .base_build import BaseBuildResource
from ..schemas import AggregateFailureReasonSchema
failurereasons_schema = AggregateFailureReasonSchema(many=True)
| [
6738,
1976,
27650,
13,
11250,
1330,
20613,
198,
6738,
1976,
27650,
13,
9945,
13,
20786,
1330,
7177,
62,
9460,
62,
808,
198,
6738,
1976,
27650,
13,
27530,
1330,
10934,
11,
25743,
45008,
198,
198,
6738,
764,
8692,
62,
11249,
1330,
7308,
... | 3.474359 | 78 |
import bpy
import bmesh
is_blender_28 = bpy.app.version[1] >= 80
def delete_all(obj_type: str='MESH'):
"""Delete all objects of the given type from the current scene"""
for obj in bpy.data.objects:
if is_blender_28:
pass
else:
obj.hide = False
obj.select = obj.type == obj_type
bpy.ops.object.delete(use_global=True)
"""
# Get object
bpy.context.scene.objects.active
bpy.context.scene.objects['object_name']
bpy.data.objects['Camera'] ??difference with context
# Frame handlers
bpy.app.handlers.frame_change_pre.clear()
bpy.app.handlers.frame_change_pre.append(lambda scene : scene.frame_current)
# Frames setting
bpy.context.scene.frame_start = 0
bpy.context.scene.frame_end = NUM_FRAMES
bpy.context.scene.frame_set(frame)
# Keyframing
target.location = (0, 0, 0)
target.keyframe_insert("location")
target.rotation_euler = (0, 0, 0)
target.keyframe_insert("rotation_euler")
# Create vertex group
vg = target.vertex_groups.new(name="vg_name")
vg.add([1], 1.0, 'ADD')
# Particles system
target.modifiers.new("name", type='PARTICLE_SYSTEM')
ps = target.particle_systems[0]
ps.settings.emit_from = 'VERT'
ps.vertex_group_density = "emitter"
"""
###################################
### Greasy Pencil
###################################
"""
# Frames and Strokes
gp_frame = gp_layer.frames.new(i) # notice that index in the frames list does not match frame number in the timeline
gp_stroke = gp_frame.strokes.new()
gp_stroke.points.add(count=4)
gp_stroke.points[0].co = (0, 0, 0)
"""
"""
# Blender import system clutter
import bpy
import bmesh
from mathutils import Vector
import numpy as np
import sys
from pathlib import Path
UTILS_PATH = Path.home() / "Documents/python_workspace/data-science-learning"
SRC_PATH = UTILS_PATH / "graphics/agents"
sys.path.append(str(UTILS_PATH))
sys.path.append(str(SRC_PATH))
import importlib
import <cls_example>
import utils.blender_utils
importlib.reload(<cls_example>)
importlib.reload(utils.blender_utils)
from <cls_example> import <cls_example>
from utils.blender_utils import delete_all
""" | [
11748,
275,
9078,
198,
11748,
275,
76,
5069,
198,
198,
271,
62,
2436,
2194,
62,
2078,
796,
275,
9078,
13,
1324,
13,
9641,
58,
16,
60,
18189,
4019,
628,
198,
4299,
12233,
62,
439,
7,
26801,
62,
4906,
25,
965,
11639,
44,
44011,
6,
... | 2.714286 | 784 |
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.path import Path
from matplotlib.patches import PathPatch
plt.figure(figsize=(5,5))
# Determine the x and y values to use for sample points.
num_edge_pts = 50 # Change this to change the resolution of the result.
xy_rng = (0, 1)
pt_sp = (xy_rng[1] - xy_rng[0]) / (num_edge_pts - 1)
x = np.linspace(xy_rng[0], xy_rng[1] + 0.001, num_edge_pts)
y = np.linspace(xy_rng[0], xy_rng[1] + 0.001, num_edge_pts)
X, Y = np.meshgrid(x, y)
min_x, max_x, min_y, max_y = np.min(x), np.max(x), np.min(y), np.max(y)
median_x = np.median(x)
prm_color_val_func = lambda x, y: 1 - np.sqrt((X-x)**2 + (Y-y)**2) / \
(xy_rng[1] - xy_rng[0])
# The lower left of the triangle is red.
z_red = prm_color_val_func(min_x, min_y)
# The top of the triangle is green.
z_green = prm_color_val_func(median_x, max_y)
# The lower right of the triangle is blue.
z_blue = prm_color_val_func(max_x, min_y)
del x, y, X, Y
Z = np.stack((z_red, z_green, z_blue), axis=-1)
del z_red, z_green, z_blue
# Form the perimeter of the triangle
# (lower left, top, lower right, back to lower left).
path = Path([[min_x, min_y], [median_x, max_y],
[max_x, min_y], [min_x, min_y]])
patch = PathPatch(path, facecolor='none', alpha=0)
plt.gca().add_patch(patch)
im = plt.imshow(Z, interpolation='nearest',
origin='lower', extent=[*xy_rng, *xy_rng],
clip_path=patch)
# Label the corners.
plt.text(min_x+0.03, min_y, "BS", fontsize=24)
plt.text(median_x-0.06, max_y-0.18, "PV", fontsize=24)
plt.text(max_x-0.22, min_y, "NPV", fontsize=24)
plt.axis('off')
plt.title("Fractional Cover Color Scale")
plt.tight_layout()
plt.savefig('fractional_cover_color_scale.png') | [
11748,
299,
32152,
355,
45941,
198,
11748,
2603,
29487,
8019,
13,
9078,
29487,
355,
458,
83,
198,
6738,
2603,
29487,
8019,
13,
6978,
1330,
10644,
198,
6738,
2603,
29487,
8019,
13,
8071,
2052,
1330,
10644,
33952,
198,
198,
489,
83,
13,
... | 2.119277 | 830 |
from klampt import vectorops
import math
class CartesianSpace(GeodesicSpace):
"""The standard geodesic on R^d"""
class MultiGeodesicSpace:
"""This forms the cartesian product of one or more GeodesicSpace's.
Distances are simply added together."""
| [
6738,
479,
2543,
457,
1330,
15879,
2840,
198,
11748,
10688,
198,
198,
4871,
13690,
35610,
14106,
7,
10082,
4147,
291,
14106,
2599,
198,
220,
220,
220,
37227,
464,
3210,
4903,
4147,
291,
319,
371,
61,
67,
37811,
198,
198,
4871,
15237,
... | 3.2625 | 80 |
import os
import glob
import random
import cv2
import numpy as np
import pandas as pd
import torch
from PIL import Image
from torch.utils.data import Dataset, DataLoader
from torchvision import datasets, transforms
| [
11748,
28686,
220,
198,
11748,
15095,
198,
11748,
4738,
198,
198,
11748,
269,
85,
17,
198,
11748,
299,
32152,
355,
45941,
198,
11748,
19798,
292,
355,
279,
67,
198,
11748,
28034,
198,
6738,
350,
4146,
1330,
7412,
198,
6738,
28034,
13,
... | 3.393939 | 66 |
import configparser
import json
from telethon.sync import TelegramClient
from telethon import connection
# для корректного переноса времени сообщений в json
from datetime import date, datetime
# классы для работы с каналами
from telethon.tl.functions.channels import GetParticipantsRequest
from telethon.tl.types import ChannelParticipantsSearch
# класс для работы с сообщениями
from telethon.tl.functions.messages import GetHistoryRequest
# Считываем учетные данные
config = configparser.ConfigParser()
config.read("/Users/romakindmitriy/PycharmProjects/TelegramParser/configs/config.ini")
# Присваиваем значения внутренним переменным
api_id = int(config['Telegram']['api_id'])
api_hash = config['Telegram']['api_hash']
username = config['Telegram']['username']
# print(username, api_id, api_hash)
# 1:
# https://t.me/proxy?server=ru.tgproxy.today&port=443&secret=ddfb175d6d7f820cdc73ab11edbdcdbd74
# proxy_server = "proxy.digitalresistance.dog"
# proxy_port = 443
# proxy_key = "d41d8cd98f00b204e9800998ecf8427e"
proxy_server = config['Proxy']['server']
proxy_port = int(config['Proxy']['port'])
proxy_key = config['Proxy']['key']
proxy = (proxy_server, proxy_port, proxy_key)
client = TelegramClient(username, api_id, api_hash,
connection=connection.ConnectionTcpMTProxyRandomizedIntermediate,
proxy=proxy)
# client = TelegramClient(username, api_id, api_hash)
client.start()
async def dump_all_participants(channel):
"""Записывает json-файл с информацией о всех участниках канала/чата"""
offset_user = 0 # номер участника, с которого начинается считывание
limit_user = 100 # максимальное число записей, передаваемых за один раз
all_participants = [] # список всех участников канала
filter_user = ChannelParticipantsSearch('')
while True:
participants = await client(GetParticipantsRequest(channel,
filter_user, offset_user, limit_user,
hash=0))
if not participants.users:
break
all_participants.extend(participants.users)
offset_user += len(participants.users)
all_users_details = [] # список словарей с интересующими параметрами участников канала
for participant in all_participants:
all_users_details.append({"id": participant.id,
"first_name": participant.first_name,
"last_name": participant.last_name,
"user": participant.username,
"phone": participant.phone,
"is_bot": participant.bot})
with open('channel_users.json', 'w', encoding='utf8') as outfile:
json.dump(all_users_details, outfile, ensure_ascii=False)
async def dump_all_messages(channel):
"""Записывает json-файл с информацией о всех сообщениях канала/чата"""
offset_msg = 0 # номер записи, с которой начинается считывание
limit_msg = 1 # максимальное число записей, передаваемых за один раз
all_messages = [] # список всех сообщений
total_messages = 0
total_count_limit = 1 # поменяйте это значение, если вам нужны не все сообщения
class DateTimeEncoder(json.JSONEncoder):
'''Класс для сериализации записи дат в JSON'''
while True:
history = await client(GetHistoryRequest(
peer=channel,
offset_id=offset_msg,
offset_date=None, add_offset=0,
limit=limit_msg, max_id=0, min_id=0,
hash=0))
if not history.messages:
break
messages = history.messages
for message in messages:
all_messages.append(message.to_dict())
offset_msg = messages[len(messages) - 1].id
total_messages = len(all_messages)
if total_count_limit != 0 and total_messages >= total_count_limit:
break
with open('channel_messages.json', 'w', encoding='utf8') as outfile:
json.dump(all_messages, outfile, ensure_ascii=False, cls=DateTimeEncoder)
with client:
client.loop.run_until_complete(main())
# self.channels = {"https://t.me/alexmakus": 3337,
# "https://t.me/CicadaHere": 5429,
# "https://t.me/spherechat": 191007,
# "https://t.me/spherechatflood": 455246} | [
11748,
4566,
48610,
198,
11748,
33918,
198,
198,
6738,
5735,
400,
261,
13,
27261,
1330,
50203,
11792,
198,
6738,
5735,
400,
261,
1330,
4637,
198,
198,
2,
12466,
112,
30143,
40623,
12466,
118,
15166,
21169,
21169,
16843,
31583,
20375,
2217... | 1.790109 | 2,487 |
__author__ = 'abhisheksh'
import json
import meetup.api
import pandas as pd
| [
834,
9800,
834,
796,
705,
397,
14363,
258,
50133,
6,
628,
198,
198,
11748,
33918,
198,
11748,
1826,
929,
13,
15042,
198,
11748,
220,
19798,
292,
355,
279,
67,
628,
628,
628,
198
] | 2.606061 | 33 |
"""
Code to extract a series of positions + their next moves from an SGF.
Most of the complexity here is dealing with two features of SGF:
- Stones can be added via "play move" or "add move", the latter being used
to configure L+D puzzles, but also for initial handicap placement.
- Plays don't necessarily alternate colors; they can be repeated B or W moves
This feature is used to handle free handicap placement.
"""
from collections import namedtuple
import numpy as np
import go
from go import Position
from utils import parse_sgf_coords as pc, unparse_sgf_coords as upc
import sgf
SGF_TEMPLATE = '''(;GM[1]FF[4]CA[UTF-8]AP[MuGo_sgfgenerator]RU[{ruleset}]
SZ[{boardsize}]KM[{komi}]PW[{white_name}]PB[{black_name}]RE[{result}]
{game_moves})'''
PROGRAM_IDENTIFIER = "MuGo"
def make_sgf(
move_history,
score,
ruleset="Chinese",
boardsize=19,
komi=7.5,
white_name=PROGRAM_IDENTIFIER,
black_name=PROGRAM_IDENTIFIER,
):
'''Turn a game into SGF.
Doesn't handle handicap games or positions with incomplete history.
'''
game_moves = ''.join(map(translate_sgf_move, move_history))
if score == 0:
result = 'Draw'
elif score > 0:
result = 'B+%s' % score
else:
result = 'W+%s' % -score
return SGF_TEMPLATE.format(**locals())
class PositionWithContext(namedtuple("SgfPosition", "position next_move metadata")):
'''
Wrapper around go.Position.
Stores a position, the move that came next, and the eventual result.
'''
def sgf_prop(value_list):
'Converts raw sgf library output to sensible value'
if value_list is None:
return None
if len(value_list) == 1:
return value_list[0]
else:
return value_list
def handle_node(pos, node):
'A node can either add B+W stones, play as B, or play as W.'
props = node.properties
black_stones_added = [pc(coords) for coords in props.get('AB', [])]
white_stones_added = [pc(coords) for coords in props.get('AW', [])]
if black_stones_added or white_stones_added:
return add_stones(pos, black_stones_added, white_stones_added)
# If B/W props are not present, then there is no move. But if it is present and equal to the empty string, then the move was a pass.
elif 'B' in props:
black_move = pc(props.get('B', [''])[0])
return pos.play_move(black_move, color=go.BLACK)
elif 'W' in props:
white_move = pc(props.get('W', [''])[0])
return pos.play_move(white_move, color=go.WHITE)
else:
return pos
def replay_sgf(sgf_contents):
'''
Wrapper for sgf files, exposing contents as position_w_context instances
with open(filename) as f:
for position_w_context in replay_sgf(f.read()):
print(position_w_context.position)
'''
collection = sgf.parse(sgf_contents)
game = collection.children[0]
props = game.root.properties
assert int(sgf_prop(props.get('GM', ['1']))) == 1, "Not a Go SGF!"
komi = 0
if props.get('KM') != None:
komi = float(sgf_prop(props.get('KM')))
metadata = GameMetadata(
result=sgf_prop(props.get('RE')),
handicap=int(sgf_prop(props.get('HA', [0]))),
board_size=int(sgf_prop(props.get('SZ'))))
go.set_board_size(metadata.board_size)
pos = Position(komi=komi)
current_node = game.root
while pos is not None and current_node is not None:
pos = handle_node(pos, current_node)
maybe_correct_next(pos, current_node.next)
next_move = get_next_move(current_node)
yield PositionWithContext(pos, next_move, metadata)
current_node = current_node.next
def replay_position(position):
'''
Wrapper for a go.Position which replays its history.
Assumes an empty start position! (i.e. no handicap, and history must be exhaustive.)
for position_w_context in replay_position(position):
print(position_w_context.position)
'''
assert position.n == len(position.recent), "Position history is incomplete"
metadata = GameMetadata(
result=position.result(),
handicap=0,
board_size=position.board.shape[0]
)
go.set_board_size(metadata.board_size)
pos = Position(komi=position.komi)
for player_move in position.recent:
color, next_move = player_move
yield PositionWithContext(pos, next_move, metadata)
pos = pos.play_move(next_move, color=color)
# return the original position, with unknown next move
yield PositionWithContext(pos, None, metadata)
| [
37811,
198,
10669,
284,
7925,
257,
2168,
286,
6116,
1343,
511,
1306,
6100,
422,
281,
311,
21713,
13,
198,
198,
6943,
286,
262,
13357,
994,
318,
7219,
351,
734,
3033,
286,
311,
21713,
25,
198,
12,
26596,
460,
307,
2087,
2884,
366,
17... | 2.519071 | 1,809 |
# -*- coding: utf-8 -*-
# Copyright 2017-2019 ControlScan, Inc.
#
# This file is part of Cyphon Engine.
#
# Cyphon Engine is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 3 of the License.
#
# Cyphon Engine is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Cyphon Engine. If not, see <http://www.gnu.org/licenses/>.
"""
Defines forms for configurations.
"""
# third party
from django import forms
from django.utils.safestring import mark_safe
from django.template.loader import render_to_string
# these values should correspond to the field in ConfigToolForm
CONFIG_TEST_VALUE_FIELD = 'config_test_string'
CONFIG_TEST_BUTTON = 'config_test_button'
CONFIG_TEST_RESULT_FIELD = 'config_test_result'
CONFIG_TOOL_INPUTS = (
CONFIG_TEST_VALUE_FIELD,
CONFIG_TEST_BUTTON,
CONFIG_TEST_RESULT_FIELD,
)
class ConfigToolButtonWidget(forms.Widget):
"""
A form Widget for a "Run Test" button for a configuration test.
"""
template_name = 'config_test_button_widget.html'
class ConfigToolForm(forms.ModelForm):
"""
Defines a ModelForm with a tool for testing a configuration.
"""
config_test_string = forms.CharField(
widget=forms.Textarea,
required=False,
help_text='Enter a test string.',
label=''
)
config_test_button = forms.CharField(
widget=ConfigToolButtonWidget,
required=False,
label=''
)
config_test_result = forms.CharField(
widget=forms.Textarea(attrs={'readonly':'readonly'}),
required=False,
label='Test result'
)
def get_test_value(self):
"""
Returns the test value from the form.
"""
return self.cleaned_data[CONFIG_TEST_VALUE_FIELD]
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
2,
15069,
2177,
12,
23344,
6779,
33351,
11,
3457,
13,
198,
2,
198,
2,
770,
2393,
318,
636,
286,
5934,
746,
261,
7117,
13,
198,
2,
198,
2,
5934,
746,
261,
7117,
318... | 2.821477 | 745 |
"""
BFS - Breadth First Search
1) In graphs - we may visit same node more than once
2) Use a boolean array
3) DefaultDict - Representation
"""
from collections import defaultdict
if __name__ == "__main__":
g = Graph()
g.add_edge(0, 1)
g.add_edge(0, 2)
g.add_edge(1, 2)
g.add_edge(2, 0)
g.add_edge(2, 3)
g.add_edge(3, 3)
print("Following is Breadth First Traversal"
" (starting from vertex 2)")
g.print_bfs_graph(2) | [
37811,
198,
33,
10652,
532,
28731,
400,
3274,
11140,
198,
16,
8,
554,
28770,
532,
356,
743,
3187,
976,
10139,
517,
621,
1752,
198,
17,
8,
5765,
257,
25131,
7177,
198,
18,
8,
15161,
35,
713,
532,
10858,
341,
198,
37811,
198,
198,
6... | 2.31 | 200 |
from abc import abstractmethod
import io | [
6738,
450,
66,
1330,
12531,
24396,
198,
11748,
33245,
220
] | 4.1 | 10 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
''' FILE:
AUTHOR: jan
'''
import re
import xml.etree.ElementTree as ET
from functools import cached_property
if __name__=='__main__':
main()
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
18,
198,
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
198,
7061,
6,
45811,
25,
220,
198,
220,
220,
220,
44746,
25,
42897,
198,
7061,
6,
198,
198,
11748,
302,
198,
11... | 2.344828 | 87 |
import os
from medperf.ui import UI
from medperf.comms import Comms
from medperf.entities import Benchmark, Cube, Registration
from medperf.utils import (
check_cube_validity,
generate_tmp_datapath,
init_storage,
cleanup,
pretty_error,
)
| [
11748,
28686,
198,
198,
6738,
1117,
525,
69,
13,
9019,
1330,
12454,
198,
6738,
1117,
525,
69,
13,
785,
907,
1330,
955,
907,
198,
6738,
1117,
525,
69,
13,
298,
871,
1330,
25187,
4102,
11,
23315,
11,
24610,
198,
6738,
1117,
525,
69,
... | 2.736842 | 95 |
def export(w2v, ws_child, out_dir):
"""
Exports the word-to-vec file to tsv files as input for tensorflow.
:param w2v: word-to-vec file
:param ws_child: tree file
:param out_dir: output directory for the files to generate
"""
if not out_dir.endswith('/'):
out_dir = out_dir+'/'
tmeta = out_dir+'meta.tsv'
tdata = out_dir+'data.tsv'
words = set()
tsv = {}
# read data into memory
with open(ws_child, 'r') as ws, open(w2v, 'r') as vecs:
for line in ws:
wsenses = [elem.split('.')[0] for elem in line.split()]
words.update(wsenses)
for line in vecs:
word, vec = line.split()[0], "\t".join(line.split()[1:])
if word in words:
tsv[word] = vec
# write to file
with open(tdata, 'w+') as data, open(tmeta, 'w+') as meta:
for key in tsv:
tvec = tsv[key]
data.write(tvec+'\n')
meta.write(key+'\n') | [
198,
4299,
10784,
7,
86,
17,
85,
11,
266,
82,
62,
9410,
11,
503,
62,
15908,
2599,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
1475,
3742,
262,
1573,
12,
1462,
12,
35138,
2393,
284,
256,
21370,
3696,
355,
5128,
329,
11192,
273,
... | 1.983871 | 496 |
from __future__ import print_function
import os
import re
import sys
import json
import errno
import plistlib
import datetime
import sublime
from .colors import names_to_hex, xterm_to_hex, xterm8_to_hex, xterm8b_to_hex, xterm8f_to_hex
DEFAULT_COLOR_SCHEME = 'Monokai.sublime-color-scheme'
all_names_to_hex = dict(names_to_hex, **xterm_to_hex)
log = Log()
if sys.version_info[0] == 3:
if not hasattr(plistlib, 'loads'):
plistlib.loads = lambda data: plistlib.readPlistFromBytes(data)
plistlib.dumps = lambda value: plistlib.writePlistToBytes(value)
else:
plistlib.loads = lambda data: plistlib.readPlistFromString(data)
plistlib.dumps = lambda value: plistlib.writePlistToString(value)
| [
6738,
11593,
37443,
834,
1330,
3601,
62,
8818,
198,
198,
11748,
28686,
198,
11748,
302,
198,
11748,
25064,
198,
11748,
33918,
198,
11748,
11454,
3919,
198,
11748,
458,
396,
8019,
198,
11748,
4818,
8079,
198,
198,
11748,
41674,
198,
198,
... | 2.529617 | 287 |
from . import diamond_c
| [
6738,
764,
1330,
15291,
62,
66,
198
] | 3.428571 | 7 |
import sys
import random
import pygame
if __name__ == "__main__":
# Initilize PyGame
pygame.init()
# Set screen size
SCREEN_SIZE = pygame.display.Info().current_w, pygame.display.Info().current_h
# Object dimensions
PADDLE1_WIDTH = SCREEN_SIZE[1]//10
PADDLE1_HEIGHT = SCREEN_SIZE[0]//100
PADDLE2_WIDTH = SCREEN_SIZE[1]//10
PADDLE2_HEIGHT = SCREEN_SIZE[0]//100
BALL_DIAMETER = PADDLE1_WIDTH//2
MAX_PADDLE1_Y = SCREEN_SIZE[1] - PADDLE1_WIDTH
MAX_PADDLE2_Y = SCREEN_SIZE[1] - PADDLE2_WIDTH
MAX_BALL_Y = SCREEN_SIZE[1] - BALL_DIAMETER
# Paddle X coordinate
PADDLE2_X = SCREEN_SIZE[0] - PADDLE2_HEIGHT - (BALL_DIAMETER*3)//2
PADDLE1_X = (BALL_DIAMETER*3)//2
# Color constants
BLACK = (0,0,0)
WHITE = (255,255,255)
DARK_GREEN = (0,100,0)
FIREBRICK = (178,34,34)
MEDIUM_PURPLE = (147,112,219)
# Game states
STATE_PLAYING = 0
PLAYER2_WINS = 1
PLAYER1_WINS = 2
Pong().run()
| [
11748,
25064,
198,
11748,
4738,
198,
198,
11748,
12972,
6057,
628,
198,
198,
361,
11593,
3672,
834,
6624,
366,
834,
12417,
834,
1298,
628,
220,
220,
220,
1303,
44707,
346,
1096,
9485,
8777,
198,
220,
220,
220,
12972,
6057,
13,
15003,
... | 2.018443 | 488 |
##############################################################################
#
# Copyright (c) 2002 Zope Foundation and Contributors.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE
#
##############################################################################
# Implement the manage_addProduct method of object managers
import sys
import types
from AccessControl.class_init import InitializeClass
from AccessControl.owner import UnownableOwner
from AccessControl.SecurityInfo import ClassSecurityInfo
from AccessControl.PermissionMapping import aqwrap
from Acquisition import Acquired
from Acquisition import aq_base
from Acquisition import Implicit
from ExtensionClass import Base
from zExceptions import Redirect
from OFS.metaconfigure import get_registered_packages
def _product_packages():
"""Returns all product packages including the regularly defined
zope2 packages and those without the Products namespace package.
"""
import Products
_packages = {}
for x in dir(Products):
m = getattr(Products, x)
if isinstance(m, types.ModuleType):
_packages[x] = m
for m in get_registered_packages():
_packages[m.__name__] = m
return _packages
class Product(Base):
"""Model a non-persistent product wrapper.
"""
security = ClassSecurityInfo()
meta_type = 'Product'
version = ''
thisIsAnInstalledProduct = True
title = 'This is a non-persistent product wrapper.'
security.declarePublic('Destination')
def Destination(self):
"Return the destination for factory output"
return self
InitializeClass(Product)
class ProductDispatcher(Implicit):
" "
# Allow access to factory dispatchers
__allow_access_to_unprotected_subobjects__ = 1
class FactoryDispatcher(Implicit):
"""Provide a namespace for product "methods"
"""
security = ClassSecurityInfo()
_owner = UnownableOwner
security.declarePublic('Destination')
def Destination(self):
"Return the destination for factory output"
return self.__dict__['_d'] # we don't want to wrap the result!
security.declarePublic('this')
this = Destination
security.declarePublic('DestinationURL')
def DestinationURL(self):
"Return the URL for the destination for factory output"
url = getattr(self, '_u', None)
if url is None:
url = self.Destination().absolute_url()
return url
# Provide acquired indicators for critical OM methods:
_setObject = _getOb = Acquired
# Make sure factory methods are unowned:
_owner = UnownableOwner
# Provide a replacement for manage_main that does a redirection:
def manage_main(trueself, self, REQUEST, update_menu=0):
"""Implement a contents view by redirecting to the true view
"""
REQUEST.RESPONSE.redirect(self.DestinationURL() + '/manage_main')
InitializeClass(FactoryDispatcher)
| [
29113,
29113,
7804,
4242,
2235,
198,
2,
198,
2,
15069,
357,
66,
8,
6244,
1168,
3008,
5693,
290,
25767,
669,
13,
198,
2,
198,
2,
770,
3788,
318,
2426,
284,
262,
8617,
286,
262,
1168,
3008,
5094,
13789,
11,
198,
2,
10628,
362,
13,
... | 3.228739 | 1,023 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
from datetime import date
from .templates import template_container_xml
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
6738,
11593,
37443,
834,
1330,
28000,
1098,
62,
17201,
874,
198,
198,
11748,
28686,
198,
6738,
4818,
8079,
1330,
3128,
198,
198,
6738,
764,
11498,
17041,
1330,
11055,
62,
... | 3.125 | 48 |
#!/usr/bin/env python3
import sys
src = open('../blobs/bl602_demo_wifi.ghidra.c').readlines()
feature = 'DAT_44c0'
if len(sys.argv) == 2:
feature = sys.argv[1]
outs = []
for i in sys.stdin.readlines():
fname, begin, end = i.strip().split(' ')
begin = int(begin) - 1
end = int(end)
slocal = src[begin:end]
found = False
for i in slocal:
if i.find(feature) != -1:
found = True
break
if found:
outs.extend(slocal)
print(''.join(outs)) | [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
18,
198,
11748,
25064,
198,
10677,
796,
1280,
10786,
40720,
2436,
8158,
14,
2436,
31418,
62,
9536,
78,
62,
86,
22238,
13,
456,
312,
430,
13,
66,
27691,
961,
6615,
3419,
198,
198,
30053,
7... | 2.082305 | 243 |
from datetime import date
data_atual = date.today()#guarda a dada atual
#print(data_atual.strftime('%d/%m/%y'))#converte a data em uma str que o programador quiser
data_atual_str = data_atual.strftime('%A %B %Y')
print(type(data_atual))
print(data_atual_str)
print(type(data_atual_str)) | [
6738,
4818,
8079,
1330,
3128,
198,
198,
7890,
62,
265,
723,
796,
3128,
13,
40838,
3419,
2,
14864,
64,
257,
288,
4763,
379,
723,
198,
2,
4798,
7,
7890,
62,
265,
723,
13,
2536,
31387,
10786,
4,
67,
14,
4,
76,
14,
4,
88,
6,
4008,... | 2.452991 | 117 |
import pytest
from profile import get_profile
| [
11748,
12972,
9288,
198,
198,
6738,
7034,
1330,
651,
62,
13317,
628,
628,
628,
628,
628
] | 3.5 | 16 |
#mintbot.py
#Automatically queries Intuit Mint for display on 5in Raspberry Pi Screen
#Copyright A. Thornton 2021
import tkinter as tk
import pandas as pd
import os
import mintapi
import time
import matplotlib.pyplot as plt
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg
import numpy as np
if __name__ == '__main__':
#First call to Mint API
Net_Worth = try_mint()
Running_nw = Net_Worth*np.ones(60) #show last month, updates 2x a day, init all to current NW
#Main loop
while 1:
print(Running_nw[-1])
w = GUI(Running_nw) #Init new GUI
Running_nw = w.Running_nw #Save updated Net Worth history
#Run GUI until reset every 12 hours
w.tk.mainloop()
#Clear Matplotlib Buffer
plt.close(1)
#Update running Net Worth table
#Pop oldest value, Push fresh value
Running_nw[:-1] = Running_nw[1:]
Running_nw[-1] = try_mint()
| [
2,
34289,
13645,
13,
9078,
201,
198,
2,
38062,
4142,
20743,
2558,
5013,
20873,
329,
3359,
319,
642,
259,
24244,
13993,
15216,
201,
198,
2,
15269,
317,
13,
37519,
33448,
201,
198,
201,
198,
11748,
256,
74,
3849,
355,
256,
74,
201,
19... | 2.391198 | 409 |
from abc import ABCMeta, abstractmethod
from dataclasses import dataclass, field
from typing import AbstractSet, Iterable, Iterator, List, Optional, Protocol, Set, Tuple
EMPTY_SET: AbstractSet[str] = frozenset()
@dataclass(frozen=True)
@dataclass
@dataclass
| [
6738,
450,
66,
1330,
9738,
48526,
11,
12531,
24396,
198,
6738,
4818,
330,
28958,
1330,
4818,
330,
31172,
11,
2214,
198,
6738,
19720,
1330,
27741,
7248,
11,
40806,
540,
11,
40806,
1352,
11,
7343,
11,
32233,
11,
20497,
11,
5345,
11,
309... | 3.116279 | 86 |
from masonite.request import Request
from masonite.view import View
from masonite.auth.Csrf import Csrf
from masonite.app import App
from masonite.middleware import CsrfMiddleware
from masonite.testsuite.TestSuite import generate_wsgi
import pytest
from masonite.exceptions import InvalidCSRFToken
| [
6738,
285,
888,
578,
13,
25927,
1330,
19390,
198,
6738,
285,
888,
578,
13,
1177,
1330,
3582,
198,
6738,
285,
888,
578,
13,
18439,
13,
34,
27891,
69,
1330,
327,
27891,
69,
198,
6738,
285,
888,
578,
13,
1324,
1330,
2034,
198,
6738,
... | 3.25 | 92 |
from __future__ import print_function, unicode_literals
import json
from unittest import mock
from unittest.mock import PropertyMock
import os
import shutil
from django.contrib.auth.models import User
from django.core import mail
from django.test import Client
from django.test import override_settings
from os.path import dirname
from rest_framework import status
from rest_framework.test import APITestCase
from archive_api.models import DataSetDownloadLog, DataSet
from ngt_archive import settings
# Mock methods
def get_max_size(size):
""" Return a get_size method for the size given"""
return get_size()
@override_settings(EMAIL_NGEET_TEAM='ngeet-team@testserver',
EMAIL_SUBJECT_PREFIX='[ngt-archive-test]')
| [
6738,
11593,
37443,
834,
1330,
3601,
62,
8818,
11,
28000,
1098,
62,
17201,
874,
198,
198,
11748,
33918,
198,
6738,
555,
715,
395,
1330,
15290,
198,
6738,
555,
715,
395,
13,
76,
735,
1330,
14161,
44,
735,
198,
198,
11748,
28686,
198,
... | 3.056911 | 246 |
import numpy as np
import sys
from operator import mul
class point:
"""A simple point class, initialize with coordinates xyz"""
xyz = np.zeros(3,np.double)
class unitCell:
"""
The unitCell is the building block of a lattice. To initialize
the kind of the unit cell needs to be defined. The choices are
cub (stands for cubic) / bcc / fcc
"""
kind = '' # stores the unitCell kind cub/bcc/fcc
p = [] # stores the coordinates of the unitCell (edge is always 1.0)
NNodes = 0 # Nodes per unit cel
def __call__(self, iNode):
"""
Calling a unitCell requires a Node index and returns
the corresponding coords
"""
return self.p[iNode].xyz
class lattice:
"""
A lattice is initialized using the desired kind (cub/bcc/fcc),
a vector with the edge length of the unitary cell and the
minimum number of nodes in the lattice.
Note that the lattice will always be cubic in node numbers, but
it can be trapezoidal by using different unitCell edges in each
direction.
"""
kind = ""
NCells = 0
NCells_1D = 0
NNodes = 0
uCell = 0
edge = 0.0
scaleUC = 0.0
| [
11748,
299,
32152,
355,
45941,
198,
11748,
25064,
198,
198,
6738,
10088,
1330,
35971,
198,
198,
4871,
966,
25,
198,
197,
37811,
32,
2829,
966,
1398,
11,
41216,
351,
22715,
2124,
45579,
37811,
198,
197,
5431,
89,
796,
45941,
13,
9107,
... | 2.816121 | 397 |
import numpy as np
def euclideanDistance(src, targ):
'''
Calculates euclidean distance between two arrays
'''
try:
return np.linalg.norm(a-b)
except:
return "Error while calculating the euclidean distances for the above data types"
def isStringPalindrome(data):
'''
Checks if input string or corresponding string from input is a palindrome
'''
try:
# Convert input type to string
data = str(data)
return True if data==data[::-1] else False
except:
print("Error while processing, please check the input is correct")
def isPrime(n):
    '''
    Return True when the given number is prime.

    Parameters:
        n: anything convertible to int.

    Returns:
        bool; False (after printing a message) when the input cannot be
        converted to an int -- behavior kept from the original.
    '''
    # Local import: this file does not import math at module level.
    from math import isqrt

    try:
        n = int(n)
    except Exception:
        print("Error Occured While Checking for prime, please check the input is correct")
        return False
    if n < 2:
        return False
    # isqrt avoids the float rounding of ``n ** 0.5``, which can miss the
    # largest candidate divisor for very large n.
    return all(n % j for j in range(2, isqrt(n) + 1))
| [
11748,
299,
32152,
355,
45941,
198,
198,
4299,
304,
36616,
485,
272,
45767,
7,
10677,
11,
2103,
2599,
198,
220,
220,
220,
705,
7061,
198,
220,
220,
220,
220,
220,
220,
220,
27131,
689,
304,
36616,
485,
272,
5253,
1022,
734,
26515,
1... | 2.457801 | 391 |
#
# PySNMP MIB module ALCATEL-IND1-SYSTEM-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/ALCATEL-IND1-SYSTEM-MIB
# Produced by pysmi-0.3.4 at Wed May 1 11:18:27 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
# NOTE(review): machine-generated by pysmi -- do not hand-edit logic.
# ``mibBuilder`` is presumably injected by the pysnmp MIB-loading machinery
# when this module is executed; it is not defined in this module itself.
# Anchor of this MIB subtree in the ALCATEL-IND1-BASE enterprise tree.
hardentIND1System, = mibBuilder.importSymbols("ALCATEL-IND1-BASE", "hardentIND1System")
# Core ASN.1 base types.
ObjectIdentifier, OctetString, Integer = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "OctetString", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
# Subtyping constraints used to refine value ranges / string sizes below.
ConstraintsUnion, ValueSizeConstraint, ConstraintsIntersection, ValueRangeConstraint, SingleValueConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsUnion", "ValueSizeConstraint", "ConstraintsIntersection", "ValueRangeConstraint", "SingleValueConstraint")
Ipv6Address, = mibBuilder.importSymbols("IPV6-TC", "Ipv6Address")
SnmpAdminString, = mibBuilder.importSymbols("SNMP-FRAMEWORK-MIB", "SnmpAdminString")
# SNMPv2 conformance macros, SMI core object classes, and textual conventions.
ModuleCompliance, ObjectGroup, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "ObjectGroup", "NotificationGroup")
Integer32, iso, Counter32, MibIdentifier, IpAddress, ModuleIdentity, MibScalar, MibTable, MibTableRow, MibTableColumn, Bits, Counter64, TimeTicks, Unsigned32, Gauge32, NotificationType, ObjectIdentity = mibBuilder.importSymbols("SNMPv2-SMI", "Integer32", "iso", "Counter32", "MibIdentifier", "IpAddress", "ModuleIdentity", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Bits", "Counter64", "TimeTicks", "Unsigned32", "Gauge32", "NotificationType", "ObjectIdentity")
TextualConvention, DisplayString, TruthValue, RowStatus = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString", "TruthValue", "RowStatus")
alcatelIND1SystemMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1))
alcatelIND1SystemMIB.setRevisions(('2011-01-23 00:00',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
if mibBuilder.loadTexts: alcatelIND1SystemMIB.setRevisionsDescriptions(('The latest version of this MIB Module.',))
if mibBuilder.loadTexts: alcatelIND1SystemMIB.setLastUpdated('200709040000Z')
if mibBuilder.loadTexts: alcatelIND1SystemMIB.setOrganization('Alcatel-Lucent')
if mibBuilder.loadTexts: alcatelIND1SystemMIB.setContactInfo('Please consult with Customer Service to ensure the most appropriate version of this document is used with the products in question: Alcatel-Lucent, Enterprise Solutions Division (Formerly Alcatel Internetworking, Incorporated) 26801 West Agoura Road Telephone: North America +1 800 995 2696 Latin America +1 877 919 9526 Europe +31 23 556 0100 Asia +65 394 7933 All Other +1 818 878 4507 Electronic Mail: support@ind.alcatel.com World Wide Web: http://alcatel-lucent.com/wps/portal/enterprise File Transfer Protocol: ftp://ftp.ind.alcatel.com/pub/products/mibs')
if mibBuilder.loadTexts: alcatelIND1SystemMIB.setDescription('This module describes an authoritative enterprise-specific Simple Network Management Protocol (SNMP) Management Information Base (MIB): For the Birds Of Prey Product Line Proprietary System Subsystem. No liability shall be assumed for any incidental, indirect, special, or consequential damages whatsoever arising from or related to this document or the information contained herein. Vendors, end-users, and other interested parties are granted non-exclusive license to use this specification in connection with management of the products for which it is intended to be used. Copyright (C) 1995-2007 Alcatel-Lucent ALL RIGHTS RESERVED WORLDWIDE')
alcatelIND1SystemMIBObjects = ObjectIdentity((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1))
if mibBuilder.loadTexts: alcatelIND1SystemMIBObjects.setStatus('current')
if mibBuilder.loadTexts: alcatelIND1SystemMIBObjects.setDescription('Branch For BOP Proprietary System Subsystem Managed Objects.')
alcatelIND1SystemMIBConformance = ObjectIdentity((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 2))
if mibBuilder.loadTexts: alcatelIND1SystemMIBConformance.setStatus('current')
if mibBuilder.loadTexts: alcatelIND1SystemMIBConformance.setDescription('Branch For Alcatel IND BOP Proprietary System Subsystem Conformance Information.')
alcatelIND1SystemMIBGroups = ObjectIdentity((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 2, 1))
if mibBuilder.loadTexts: alcatelIND1SystemMIBGroups.setStatus('current')
if mibBuilder.loadTexts: alcatelIND1SystemMIBGroups.setDescription('Branch For Alcatel IND BOP Proprietary System Subsystem Units Of Conformance.')
alcatelIND1SystemMIBCompliances = ObjectIdentity((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 2, 2))
if mibBuilder.loadTexts: alcatelIND1SystemMIBCompliances.setStatus('current')
if mibBuilder.loadTexts: alcatelIND1SystemMIBCompliances.setDescription('Branch For Alcatel IND BOP Proprietary System Subsystem Compliance Statements.')
systemMicrocode = MibIdentifier((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 1))
systemBootParams = MibIdentifier((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 2))
systemHardware = MibIdentifier((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 3))
systemFileSystem = MibIdentifier((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 4))
systemServices = MibIdentifier((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 5))
systemSwitchLogging = MibIdentifier((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 6))
systemDNS = MibIdentifier((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 7))
systemMicrocodePackageTable = MibTable((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 1, 1), )
if mibBuilder.loadTexts: systemMicrocodePackageTable.setStatus('current')
if mibBuilder.loadTexts: systemMicrocodePackageTable.setDescription('the microcode package table')
systemMicrocodePackageEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 1, 1, 1), ).setIndexNames((0, "ALCATEL-IND1-SYSTEM-MIB", "systemMicrocodePackageDirectoryIndex"), (0, "ALCATEL-IND1-SYSTEM-MIB", "systemMicrocodePackageDirectory"), (0, "ALCATEL-IND1-SYSTEM-MIB", "systemMicrocodePackageIndex"))
if mibBuilder.loadTexts: systemMicrocodePackageEntry.setStatus('current')
if mibBuilder.loadTexts: systemMicrocodePackageEntry.setDescription('a row in the microcode package table')
systemMicrocodePackageDirectoryIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1), Unsigned32())
if mibBuilder.loadTexts: systemMicrocodePackageDirectoryIndex.setStatus('current')
if mibBuilder.loadTexts: systemMicrocodePackageDirectoryIndex.setDescription('The package index for directories. Values 1-4 are pre-defined as: loaded(1), certified(2), working(3), issu(4). Values 5+ are user working directories in /flash assigned by the system. Values 5+ remain associated with a directory until reboot, takeover, the directory no longer contains valid images or the directory no longer exists.')
systemMicrocodePackageDirectory = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 1, 1, 1, 2), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(1, 108)))
if mibBuilder.loadTexts: systemMicrocodePackageDirectory.setStatus('current')
if mibBuilder.loadTexts: systemMicrocodePackageDirectory.setDescription('The directory name under flash where the microcode package is found. Directories with names longer than 108 characters will be skipped.')
systemMicrocodePackageIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 1, 1, 1, 3), Unsigned32())
if mibBuilder.loadTexts: systemMicrocodePackageIndex.setStatus('current')
if mibBuilder.loadTexts: systemMicrocodePackageIndex.setDescription('The index to the package sub table in the microcode table')
systemMicrocodePackageVersion = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 1, 1, 1, 4), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemMicrocodePackageVersion.setStatus('current')
if mibBuilder.loadTexts: systemMicrocodePackageVersion.setDescription('The version of the microcode package (Fos.img, Fbase.img, etc.)')
systemMicrocodePackageName = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 1, 1, 1, 5), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemMicrocodePackageName.setStatus('current')
if mibBuilder.loadTexts: systemMicrocodePackageName.setDescription('The name of the microcode package')
systemMicrocodePackageDescription = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 1, 1, 1, 6), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemMicrocodePackageDescription.setStatus('current')
if mibBuilder.loadTexts: systemMicrocodePackageDescription.setDescription('The description of the microcode package')
systemMicrocodePackageStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 1, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("undefined", 1), ("ok", 2), ("inuse", 3))).clone('undefined')).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemMicrocodePackageStatus.setStatus('current')
if mibBuilder.loadTexts: systemMicrocodePackageStatus.setDescription('The status of the microcode package')
systemMicrocodePackageSize = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 1, 1, 1, 8), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemMicrocodePackageSize.setStatus('current')
if mibBuilder.loadTexts: systemMicrocodePackageSize.setDescription('The size of the microcode package')
systemBootNetwork = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 2, 1), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemBootNetwork.setStatus('current')
if mibBuilder.loadTexts: systemBootNetwork.setDescription('this object is the base IP address of the EMP for this CMM')
systemBootNetworkGateway = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 2, 2), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemBootNetworkGateway.setStatus('current')
if mibBuilder.loadTexts: systemBootNetworkGateway.setDescription('this object is the gateway of the EMP for this CMM')
systemBootNetworkNetmask = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 2, 3), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemBootNetworkNetmask.setStatus('current')
if mibBuilder.loadTexts: systemBootNetworkNetmask.setDescription('This is the Netmask of the EMP that is used when this CMM boots.')
systemHardwareFlashMfg = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 3, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 7, 8, 9, 10, 13))).clone(namedValues=NamedValues(("other", 1), ("amd", 2), ("intel", 3), ("atmel", 4), ("toshiba", 7), ("sandisk", 8), ("sst", 9), ("spansion", 10), ("wintec", 13)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemHardwareFlashMfg.setStatus('current')
if mibBuilder.loadTexts: systemHardwareFlashMfg.setDescription('This object identifies the manufacturer of the Flash memory used on this product. toshiba(7) is for hawk only. The reason 7 is used is because 5 is already used for micron and 6 is for kingston. toshiba, sandisk, and sst are compact flashes for the hawk only.')
systemHardwareFlashSize = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 3, 2), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemHardwareFlashSize.setStatus('current')
if mibBuilder.loadTexts: systemHardwareFlashSize.setDescription('This object identifies the size of the flash memory available on this CMM. It is the total flash hardware memory and does not indicate how much of the memory is free, used, etc.')
systemHardwareMemoryMfg = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 3, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 10, 11, 12))).clone(namedValues=NamedValues(("other", 1), ("amd", 2), ("intel", 3), ("atmel", 4), ("micron", 5), ("kingston", 6), ("dataram", 10), ("interward", 11), ("notreadable", 12)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemHardwareMemoryMfg.setStatus('current')
if mibBuilder.loadTexts: systemHardwareMemoryMfg.setDescription('This object identifies the manufacturer of the RAM memory used on this CMM.')
systemHardwareMemorySize = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 3, 4), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemHardwareMemorySize.setStatus('current')
if mibBuilder.loadTexts: systemHardwareMemorySize.setDescription('This object identifies the size of the RAM memory available on this CMM. It is the total RAM hardware memory and does not indicate how much of the memory is free, used, etc.')
systemHardwareNVRAMBatteryLow = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 3, 5), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemHardwareNVRAMBatteryLow.setStatus('current')
if mibBuilder.loadTexts: systemHardwareNVRAMBatteryLow.setDescription('This object identifies the whether the NVRAM battery is low. true(1), false(2)')
systemHardwareBootCpuType = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 3, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("other", 1), ("sparc380", 2), ("sparcV9", 3), ("ppc", 4), ("ppc8540", 5), ("ppc8572", 6)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemHardwareBootCpuType.setStatus('current')
if mibBuilder.loadTexts: systemHardwareBootCpuType.setDescription('Indicates the manufacturer and model number of the CPU. Currently, only two types of processors are enumerated.')
systemHardwareJumperInterruptBoot = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 3, 7), TruthValue().clone('false')).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemHardwareJumperInterruptBoot.setStatus('current')
if mibBuilder.loadTexts: systemHardwareJumperInterruptBoot.setDescription('This object identifies whether the Interrupt Boot Jumper on this CMM is set: True(1), False(2)')
systemHardwareJumperForceUartDefaults = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 3, 8), TruthValue().clone('false')).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemHardwareJumperForceUartDefaults.setStatus('current')
if mibBuilder.loadTexts: systemHardwareJumperForceUartDefaults.setDescription('This object identifies whether the Force Uart Defaults Jumper on this CMM is set: True(1), False(2)')
systemHardwareJumperRunExtendedMemoryDiagnostics = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 3, 9), TruthValue().clone('false')).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemHardwareJumperRunExtendedMemoryDiagnostics.setStatus('current')
if mibBuilder.loadTexts: systemHardwareJumperRunExtendedMemoryDiagnostics.setDescription('This object identifies whether the Run Extended Memory Diagnostics Jumper on this CMM is set: True(1), False(2)')
systemHardwareJumperSpare = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 3, 10), TruthValue().clone('false')).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemHardwareJumperSpare.setStatus('current')
if mibBuilder.loadTexts: systemHardwareJumperSpare.setDescription('This object identifies whether the Spare Jumper on this CMM is set: True(1), False(2)')
systemHardwareFpgaVersionTable = MibTable((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 3, 11), )
if mibBuilder.loadTexts: systemHardwareFpgaVersionTable.setStatus('current')
if mibBuilder.loadTexts: systemHardwareFpgaVersionTable.setDescription('This table contains the FPGA version for each FPGA on this CMM')
systemHardwareFpgaVersionEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 3, 11, 1), ).setIndexNames((0, "ALCATEL-IND1-SYSTEM-MIB", "systemHardwareFpgaVersionIndex"))
if mibBuilder.loadTexts: systemHardwareFpgaVersionEntry.setStatus('current')
if mibBuilder.loadTexts: systemHardwareFpgaVersionEntry.setDescription('a row in the systemHardwareFpgaVersionTable')
systemHardwareFpgaVersionIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 3, 11, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 3))).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemHardwareFpgaVersionIndex.setStatus('current')
if mibBuilder.loadTexts: systemHardwareFpgaVersionIndex.setDescription('This is the index to one of the FPGA versions on this CMM')
systemHardwareFpgaVersion = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 3, 11, 1, 2), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemHardwareFpgaVersion.setStatus('current')
if mibBuilder.loadTexts: systemHardwareFpgaVersion.setDescription('This is the major version of one of the FPGA devices on this CMM. refer to the systemHardwareMinorFpgaVersion for the minor number.')
systemHardwareBootRomVersion = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 3, 12), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemHardwareBootRomVersion.setStatus('current')
if mibBuilder.loadTexts: systemHardwareBootRomVersion.setDescription('A string that identifies the boot rom version')
systemHardwareBackupMiniBootVersion = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 3, 13), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemHardwareBackupMiniBootVersion.setStatus('current')
if mibBuilder.loadTexts: systemHardwareBackupMiniBootVersion.setDescription('A string that identifies the backup miniboot version.')
systemHardwareDefaultMiniBootVersion = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 3, 14), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemHardwareDefaultMiniBootVersion.setStatus('current')
if mibBuilder.loadTexts: systemHardwareDefaultMiniBootVersion.setDescription('A string that identifies the default miniboot version.')
systemHardwareMinorFpgaVersion = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 3, 15), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemHardwareMinorFpgaVersion.setStatus('current')
if mibBuilder.loadTexts: systemHardwareMinorFpgaVersion.setDescription('A string that identifies the minor FPGA version. Refer to the systemHardwareFpgaVersion for the major FPGA version number.')
systemHardwareCpldVersion = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 3, 16), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemHardwareCpldVersion.setStatus('current')
if mibBuilder.loadTexts: systemHardwareCpldVersion.setDescription('A string that identifies the CPLD version.')
systemHardwareUbootVersion = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 3, 17), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemHardwareUbootVersion.setStatus('current')
if mibBuilder.loadTexts: systemHardwareUbootVersion.setDescription('A string that identifies the Uboot version.')
systemHardwareProdRegId = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 3, 18), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemHardwareProdRegId.setStatus('current')
if mibBuilder.loadTexts: systemHardwareProdRegId.setDescription('A string that identifies the product ID register')
systemHardwareRevisionRegister = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 3, 19), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemHardwareRevisionRegister.setStatus('current')
if mibBuilder.loadTexts: systemHardwareRevisionRegister.setDescription('A string that identifies the hardware revision register')
systemHardwareXfpId = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 3, 20), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemHardwareXfpId.setStatus('current')
if mibBuilder.loadTexts: systemHardwareXfpId.setDescription('A string that identifies the XFP ID')
systemHardwareUbootMinibootVersion = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 3, 21), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemHardwareUbootMinibootVersion.setStatus('current')
if mibBuilder.loadTexts: systemHardwareUbootMinibootVersion.setDescription('A string that identifies the Uboot-miniboot version.')
systemServicesDate = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 5, 1), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: systemServicesDate.setStatus('current')
if mibBuilder.loadTexts: systemServicesDate.setDescription('This object contains the current System Date in the following format: MM/DD/YYYY')
systemServicesTime = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 5, 2), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: systemServicesTime.setStatus('current')
if mibBuilder.loadTexts: systemServicesTime.setDescription('This object contains the current System Time in the following format: HH:MM:SS')
systemServicesTimezone = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 5, 3), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: systemServicesTimezone.setStatus('current')
if mibBuilder.loadTexts: systemServicesTimezone.setDescription('This object contains the current Hour Offset from UTC in the following format: -13:00 to +12:00 OR a well known timezone (PST,CST,etc.)')
systemServicesTimezoneStartWeek = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 5, 4), Unsigned32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: systemServicesTimezoneStartWeek.setStatus('current')
if mibBuilder.loadTexts: systemServicesTimezoneStartWeek.setDescription('first, second, third, fourth, fifth, last = 1,2,3,4,5,6')
systemServicesTimezoneStartDay = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 5, 5), Unsigned32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: systemServicesTimezoneStartDay.setStatus('current')
if mibBuilder.loadTexts: systemServicesTimezoneStartDay.setDescription('Sunday, Monday, Tues... = 1,2,3,4,5,6,7')
systemServicesTimezoneStartMonth = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 5, 6), Unsigned32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: systemServicesTimezoneStartMonth.setStatus('current')
if mibBuilder.loadTexts: systemServicesTimezoneStartMonth.setDescription('January, February, march... = 1,2,3,4,5,67,8,9,10,11,12')
systemServicesTimezoneStartTime = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 5, 7), Unsigned32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: systemServicesTimezoneStartTime.setStatus('current')
if mibBuilder.loadTexts: systemServicesTimezoneStartTime.setDescription('2:00, 3:00, 4:00... = 200, 300, 400, etc.')
systemServicesTimezoneOffset = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 5, 8), Unsigned32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: systemServicesTimezoneOffset.setStatus('current')
if mibBuilder.loadTexts: systemServicesTimezoneOffset.setDescription('60 minutes = 60')
systemServicesTimezoneEndWeek = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 5, 9), Unsigned32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: systemServicesTimezoneEndWeek.setStatus('current')
if mibBuilder.loadTexts: systemServicesTimezoneEndWeek.setDescription('first, second, third, fourth, fifth, last = 1,2,3,4,5,6')
systemServicesTimezoneEndDay = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 5, 10), Unsigned32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: systemServicesTimezoneEndDay.setStatus('current')
if mibBuilder.loadTexts: systemServicesTimezoneEndDay.setDescription('Sunday, Monday, Tues... = 1,2,3,4,5,6,7')
systemServicesTimezoneEndMonth = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 5, 11), Unsigned32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: systemServicesTimezoneEndMonth.setStatus('current')
if mibBuilder.loadTexts: systemServicesTimezoneEndMonth.setDescription('January, February, march... = 1,2,3,4,5,6,7,8,9,10,11,12')
systemServicesTimezoneEndTime = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 5, 12), Unsigned32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: systemServicesTimezoneEndTime.setStatus('current')
if mibBuilder.loadTexts: systemServicesTimezoneEndTime.setDescription('2:00, 3:00, 4:00... = 200, 300, 400, etc.')
systemServicesEnableDST = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 5, 13), Enable().clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: systemServicesEnableDST.setStatus('current')
if mibBuilder.loadTexts: systemServicesEnableDST.setDescription('This object enables and disables the DST.')
systemServicesWorkingDirectory = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 5, 14), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 255)).clone('/flash')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: systemServicesWorkingDirectory.setStatus('current')
if mibBuilder.loadTexts: systemServicesWorkingDirectory.setDescription('This object contains the current file system working directory for this CMM. For example, /flash/certified')
systemServicesArg1 = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 5, 15), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: systemServicesArg1.setStatus('current')
if mibBuilder.loadTexts: systemServicesArg1.setDescription('the 1st argument for system services action routines')
systemServicesArg2 = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 5, 16), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: systemServicesArg2.setStatus('current')
if mibBuilder.loadTexts: systemServicesArg2.setDescription('the 2nd argument for system services action routines')
systemServicesArg3 = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 5, 17), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: systemServicesArg3.setStatus('current')
if mibBuilder.loadTexts: systemServicesArg3.setDescription('the 3rd argument for system services action routines')
systemServicesArg4 = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 5, 18), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: systemServicesArg4.setStatus('current')
if mibBuilder.loadTexts: systemServicesArg4.setDescription('the 4th argument for system services action routines')
systemServicesArg5 = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 5, 19), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: systemServicesArg5.setStatus('current')
if mibBuilder.loadTexts: systemServicesArg5.setDescription('the 5th argument for system services action routines')
systemServicesArg6 = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 5, 20), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: systemServicesArg6.setStatus('current')
if mibBuilder.loadTexts: systemServicesArg6.setDescription('the 6th argument for system services action routines')
systemServicesArg7 = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 5, 21), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: systemServicesArg7.setStatus('current')
if mibBuilder.loadTexts: systemServicesArg7.setDescription('the 7th argument for system services action routines')
systemServicesArg8 = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 5, 22), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: systemServicesArg8.setStatus('current')
if mibBuilder.loadTexts: systemServicesArg8.setDescription('the 8th argument for system services action routines')
systemServicesArg9 = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 5, 23), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: systemServicesArg9.setStatus('current')
if mibBuilder.loadTexts: systemServicesArg9.setDescription('the 9th argument for system services action routines')
systemServicesAction = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 5, 24), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49))).clone(namedValues=NamedValues(("noaction", 0), ("mkdir", 1), ("rmdir", 2), ("mv", 3), ("rm", 4), ("rmr", 5), ("cp", 6), ("cpr", 7), ("chmodpw", 8), ("chmodmw", 9), ("fsck", 10), ("ftp", 11), ("rz", 12), ("vi", 13), ("telnet", 14), ("install", 15), ("ed", 16), ("more", 17), ("newfs", 18), ("dshell", 19), ("view", 20), ("modbootparams", 21), ("filedir", 22), ("ssh", 23), ("sftp", 24), ("debugPmdNi", 25), ("bootrom", 26), ("defaultminiboot", 27), ("backupminiboot", 28), ("fpgacmm", 29), ("ubootcmm", 30), ("ubootni", 31), ("scp", 32), ("aclman", 33), ("ubootMinibootAllSlots", 34), ("miniboot", 35), ("upgradeLicence", 36), ("restoreLicence", 37), ("updateDSineXtroller", 38), ("ftp6", 39), ("telnet6", 40), ("ssh6", 41), ("sftp6", 42), ("mount", 43), ("unmount", 44), ("backup", 45), ("restore", 46), ("tftp", 47), ("fscollect", 48), ("fpgani", 49)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: systemServicesAction.setStatus('current')
if mibBuilder.loadTexts: systemServicesAction.setDescription('This object identifies which of the above Actions is to be performed by the System Services Application. Most Actions require one or more parameters be set before the Action is executed. systemServicesAction - 26 for bootrom, 27 for default miniboot, and 28 for backup miniboot use systemServicesArg1 for name of the file scp- the first 2 arguments are going to be used. Set arg 1 with the source and the arg 2 with the destination file(s). E.g. scp LocalDir/testfileX.txt admin@10.255.11.1:RemoteDir/testfileX.txt For mount and umount, systemServicesArg1 specifies the mount point (such as /uflash). For newfs, systemServicesArg1 contains the name of the file-system (/uflash or /flash) which will be created. On /uflash, a FAT16 file system is created with long name naming conventions. For fsck, systemServicesArg1 contains the name of the file-system (/uflash or /flash) which will be checked and systemServicesArg2 will contain the string repair or no-repair depending on if autocorrection is requested. To see all file systems currently mounted, refer to the systemFileSystemTable. For backup and restore, systemServicesArg1 contains the archive name and systemServicesArg2 through Arg9 contains the directories and/or files to be archived/extracted. For newfs, fsck, backup and restore, the systemServicesActionPercentComplete variable can be inspected to see a progress indication.')
# --- Action results and misc service scalars ---------------------------------
# Read-only numeric result of the last system-services action.
systemServicesResultCode = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 5, 25), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemServicesResultCode.setStatus('current')
if mibBuilder.loadTexts: systemServicesResultCode.setDescription('the result of a system services application')
# Read-only human-readable result string (0..255 chars).
systemServicesResultString = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 5, 26), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemServicesResultString.setStatus('current')
if mibBuilder.loadTexts: systemServicesResultString.setDescription('the string result of a system services application')
# Ktrace/Systrace enables are marked 'obsolete' in the MIB but are still
# instantiated here (PySMI emits obsolete nodes too).
systemServicesKtraceEnable = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 5, 27), Enable().clone('enabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: systemServicesKtraceEnable.setStatus('obsolete')
if mibBuilder.loadTexts: systemServicesKtraceEnable.setDescription('enables/disables the Ktrace facility')
systemServicesSystraceEnable = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 5, 28), Enable().clone('enabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: systemServicesSystraceEnable.setStatus('obsolete')
if mibBuilder.loadTexts: systemServicesSystraceEnable.setDescription('enables/disables the Systrace facility')
# Console geometry: defaults 24 lines x 80 columns, read-only.
systemServicesTtyLines = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 5, 29), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 255)).clone(24)).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemServicesTtyLines.setStatus('current')
if mibBuilder.loadTexts: systemServicesTtyLines.setDescription('the number of tty lines for a console screen')
systemServicesTtyColumns = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 5, 30), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 255)).clone(80)).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemServicesTtyColumns.setStatus('current')
if mibBuilder.loadTexts: systemServicesTtyColumns.setDescription('the number of tty columns for a console screen')
# Kernel memory-monitor feature toggle (default enabled).
systemServicesMemMonitorEnable = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 5, 31), Enable().clone('enabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: systemServicesMemMonitorEnable.setStatus('current')
if mibBuilder.loadTexts: systemServicesMemMonitorEnable.setDescription('disables/enables the kernel Memory Monitor feature')
# --- Obsolete Ktrace / Systrace per-application severity tables --------------
# Both tables are indexed by application ID (AppIdIndex) and expose one
# read-write SeverityLevel column. Status is 'obsolete' throughout.
systemServicesKtraceLevelTable = MibTable((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 5, 32), )
if mibBuilder.loadTexts: systemServicesKtraceLevelTable.setStatus('obsolete')
if mibBuilder.loadTexts: systemServicesKtraceLevelTable.setDescription('the table of Ktrace severity level settings')
systemServicesKtraceLevelEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 5, 32, 1), ).setIndexNames((0, "ALCATEL-IND1-SYSTEM-MIB", "systemServicesKtraceLevelAppId"))
if mibBuilder.loadTexts: systemServicesKtraceLevelEntry.setStatus('obsolete')
if mibBuilder.loadTexts: systemServicesKtraceLevelEntry.setDescription('A row in the system services ktrace level table. There is one entry for each possible Application ID')
systemServicesKtraceLevelAppId = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 5, 32, 1, 1), AppIdIndex()).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemServicesKtraceLevelAppId.setStatus('obsolete')
if mibBuilder.loadTexts: systemServicesKtraceLevelAppId.setDescription('the index into the ktrace level table')
systemServicesKtraceLevel = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 5, 32, 1, 2), SeverityLevel().clone('severityLevelDbg3')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: systemServicesKtraceLevel.setStatus('obsolete')
if mibBuilder.loadTexts: systemServicesKtraceLevel.setDescription('the ktrace level for a specific Application ID')
# Systrace twin of the Ktrace table above (same shape, OID .33).
systemServicesSystraceLevelTable = MibTable((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 5, 33), )
if mibBuilder.loadTexts: systemServicesSystraceLevelTable.setStatus('obsolete')
if mibBuilder.loadTexts: systemServicesSystraceLevelTable.setDescription('the table of Systrace severity level settings')
systemServicesSystraceLevelEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 5, 33, 1), ).setIndexNames((0, "ALCATEL-IND1-SYSTEM-MIB", "systemServicesSystraceLevelAppId"))
if mibBuilder.loadTexts: systemServicesSystraceLevelEntry.setStatus('obsolete')
if mibBuilder.loadTexts: systemServicesSystraceLevelEntry.setDescription('A row in the system services systrace level table. There is one entry for each possible Application ID')
systemServicesSystraceLevelAppId = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 5, 33, 1, 1), AppIdIndex()).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemServicesSystraceLevelAppId.setStatus('obsolete')
if mibBuilder.loadTexts: systemServicesSystraceLevelAppId.setDescription('the Systrace level for a specific Application ID.')
systemServicesSystraceLevel = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 5, 33, 1, 2), SeverityLevel().clone('severityLevelDbg3')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: systemServicesSystraceLevel.setStatus('obsolete')
if mibBuilder.loadTexts: systemServicesSystraceLevel.setDescription('the Systrace level for a specific Application ID.')
# --- Firmware update status table --------------------------------------------
# Indexed by CMM slot (systemUpdateIndex, 1..72); reports per-slot update
# progress (systemUpdateStatus) and, on failure, a detailed error code.
systemUpdateStatusTable = MibTable((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 5, 34), )
if mibBuilder.loadTexts: systemUpdateStatusTable.setStatus('current')
if mibBuilder.loadTexts: systemUpdateStatusTable.setDescription('Provides update status for firmware updates')
systemUpdateStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 5, 34, 1), ).setIndexNames((0, "ALCATEL-IND1-SYSTEM-MIB", "systemUpdateIndex"))
if mibBuilder.loadTexts: systemUpdateStatusEntry.setStatus('current')
if mibBuilder.loadTexts: systemUpdateStatusEntry.setDescription('A row in the update status table.')
systemUpdateIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 5, 34, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 72)))
if mibBuilder.loadTexts: systemUpdateIndex.setStatus('current')
if mibBuilder.loadTexts: systemUpdateIndex.setDescription('The index to the CMM for which status is required.')
# Status enum: inProgress(1), doneOk(2), doneNok(3), noOp(4).
systemUpdateStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 5, 34, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("inProgress", 1), ("doneOk", 2), ("doneNok", 3), ("noOp", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemUpdateStatus.setStatus('current')
if mibBuilder.loadTexts: systemUpdateStatus.setDescription('Status of a firmware update. In the case of doneNok, further information can be obtained from systemUpdateErrorCode.')
# 28 distinct failure reasons for doneNok (transfer, install, key, NI errors).
systemUpdateErrorCode = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 5, 34, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28))).clone(namedValues=NamedValues(("msgSendIpcErr", 1), ("fXferOPenErr", 2), ("fXferFtpErr", 3), ("fXferReadErr", 4), ("fXferWriteErr", 5), ("fXferReplyErr", 6), ("fXferQuitErr", 7), ("fXferFcloseErr", 8), ("fileNameErr", 9), ("rmFileErr", 10), ("noInstallComp", 11), ("notSysResource", 12), ("notSupported", 13), ("invalidValue", 14), ("waitMsgMaxTry", 15), ("installDrvErr", 16), ("fileNotFound", 17), ("notPrimary", 18), ("commandBlocked", 19), ("noError", 20), ("invalidNi", 21), ("niNotPresent", 22), ("dupSerialNum", 23), ("upToDate", 24), ("invalidModType", 25), ("maxFaiCount", 26), ("invalidKey", 27), ("niLocked", 28)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemUpdateErrorCode.setStatus('current')
if mibBuilder.loadTexts: systemUpdateErrorCode.setDescription('Error codes for done_nok.')
# --- Action progress and backup-archive browsing -----------------------------
# Percent-complete of the currently running systemServicesAction.
systemServicesActionPercentComplete = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 5, 35), CommandPercentComplete()).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemServicesActionPercentComplete.setStatus('current')
if mibBuilder.loadTexts: systemServicesActionPercentComplete.setDescription('This object identifies the percent completion of the currently executing systemServicesAction.')
# Full pathname of the archive to browse; must be set before reading
# systemServicesArchiveTable below.
systemServicesCurrentArchivePathName = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 5, 36), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: systemServicesCurrentArchivePathName.setStatus('current')
if mibBuilder.loadTexts: systemServicesCurrentArchivePathName.setDescription('This object identifies the archive currently being read via the systemServicesArchiveTable. This object is the complete pathname to the archive and must be set prior to reading the systemServicesArchiveTable.')
# Archive-content table, populated by the 'restore ... display-only' action;
# one row per archived file (name, type, size, attributes).
systemServicesArchiveTable = MibTable((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 5, 37), )
if mibBuilder.loadTexts: systemServicesArchiveTable.setStatus('current')
if mibBuilder.loadTexts: systemServicesArchiveTable.setDescription('This table contains the contents of a backup archive. This table is used by the restore action command to display (rather than backup) an archive. The restore command will populate this table with archive information read from the archive specified by the systemServicesAction restore command. This is done as follows. Set the systemServicesArg1 object to the archive name to be read. Set the systemServicesArg2 object to the string: display-only. Set the systemServicesAction object to restore. Then read this table.')
systemServicesArchiveEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 5, 37, 1), ).setIndexNames((0, "ALCATEL-IND1-SYSTEM-MIB", "systemServicesArchiveIndex"))
if mibBuilder.loadTexts: systemServicesArchiveEntry.setStatus('current')
if mibBuilder.loadTexts: systemServicesArchiveEntry.setDescription('A row in the system services archive table.')
systemServicesArchiveIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 5, 37, 1, 1), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemServicesArchiveIndex.setStatus('current')
if mibBuilder.loadTexts: systemServicesArchiveIndex.setDescription('Index to a specific entry in the backup archive file.')
systemServicesArchiveName = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 5, 37, 1, 2), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemServicesArchiveName.setStatus('current')
if mibBuilder.loadTexts: systemServicesArchiveName.setDescription('The name of a file in the backup archive file.')
systemServicesArchiveType = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 5, 37, 1, 3), SystemFileType().clone('undefined')).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemServicesArchiveType.setStatus('current')
if mibBuilder.loadTexts: systemServicesArchiveType.setDescription('The type of a file in the backup archive file.')
systemServicesArchiveSize = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 5, 37, 1, 4), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemServicesArchiveSize.setStatus('current')
if mibBuilder.loadTexts: systemServicesArchiveSize.setDescription('The size of a file in the backup archive file.')
systemServicesArchiveAttr = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 5, 37, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("undefined", 1), ("readOnly", 2), ("readWrite", 3), ("writeOnly", 4))).clone('undefined')).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemServicesArchiveAttr.setStatus('current')
if mibBuilder.loadTexts: systemServicesArchiveAttr.setDescription('The attributes of a file in the backup archive file.')
# --- USB interface scalars ---------------------------------------------------
# Enable/disable the USB port, the auto-copy facility, and (read-only)
# whether a USB device is currently mounted.
systemServicesUsbEnable = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 5, 38), Enable()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: systemServicesUsbEnable.setStatus('current')
if mibBuilder.loadTexts: systemServicesUsbEnable.setDescription('disable/enable the USB interface')
systemServicesUsbAutoCopyEnable = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 5, 39), Enable()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: systemServicesUsbAutoCopyEnable.setStatus('current')
if mibBuilder.loadTexts: systemServicesUsbAutoCopyEnable.setDescription('disable/enable the USB auto-copy facility')
systemServicesUsbMounted = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 5, 40), Enable()).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemServicesUsbMounted.setStatus('current')
if mibBuilder.loadTexts: systemServicesUsbMounted.setDescription('determinse if usb is mounted')
# --- File-system inventory and directory selection ---------------------------
# Per-file-system table (index, name, free space) for this CMM.
systemFileSystemTable = MibTable((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 4, 1), )
if mibBuilder.loadTexts: systemFileSystemTable.setStatus('current')
if mibBuilder.loadTexts: systemFileSystemTable.setDescription('system file system table for this CMM.')
systemFileSystemEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 4, 1, 1), ).setIndexNames((0, "ALCATEL-IND1-SYSTEM-MIB", "systemFileSystemIndex"))
if mibBuilder.loadTexts: systemFileSystemEntry.setStatus('current')
if mibBuilder.loadTexts: systemFileSystemEntry.setDescription('A row in the system file system table')
systemFileSystemIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 4, 1, 1, 1), FileSystemIndex()).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemFileSystemIndex.setStatus('current')
if mibBuilder.loadTexts: systemFileSystemIndex.setDescription('Index to a specific file system.')
systemFileSystemName = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 4, 1, 1, 2), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemFileSystemName.setStatus('current')
if mibBuilder.loadTexts: systemFileSystemName.setDescription('The name of the file system.')
systemFileSystemFreeSpace = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 4, 1, 1, 3), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemFileSystemFreeSpace.setStatus('current')
if mibBuilder.loadTexts: systemFileSystemFreeSpace.setDescription('the free space in octets of this file system')
# Writable directory name: setting it (via an Action) triggers population of
# systemFileSystemFileTable with that directory's contents.
systemFileSystemDirectoryName = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 4, 2), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: systemFileSystemDirectoryName.setStatus('current')
if mibBuilder.loadTexts: systemFileSystemDirectoryName.setDescription('The name of a file system directory. This object is used in conjunction with an Action command. The Action command will set this directory name to the name of a specific directory. Information for all of the files in that directory will then be read from the file system and the appropriate values written in the entries in the systemFileSystemFile table. All this is being done to give snmp access to the file system files.')
systemFileSystemDirectoryDateTime = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 4, 3), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemFileSystemDirectoryDateTime.setStatus('current')
if mibBuilder.loadTexts: systemFileSystemDirectoryDateTime.setDescription('the date and time (in system format) of the last access to this directory')
# --- Per-file listing table --------------------------------------------------
# Populated by an Action command from the directory selected via
# systemFileSystemDirectoryName; one row per file (name, type, size,
# attributes, modification time).
systemFileSystemFileTable = MibTable((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 4, 4), )
if mibBuilder.loadTexts: systemFileSystemFileTable.setStatus('current')
if mibBuilder.loadTexts: systemFileSystemFileTable.setDescription('system file system File table for this CMM. This table is used by an Action command which will populate it with file information read from the files in the specified directory.')
systemFileSystemFileEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 4, 4, 1), ).setIndexNames((0, "ALCATEL-IND1-SYSTEM-MIB", "systemFileSystemFileIndex"))
if mibBuilder.loadTexts: systemFileSystemFileEntry.setStatus('current')
if mibBuilder.loadTexts: systemFileSystemFileEntry.setDescription('A row in the system file system File table')
systemFileSystemFileIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 4, 4, 1, 1), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemFileSystemFileIndex.setStatus('current')
if mibBuilder.loadTexts: systemFileSystemFileIndex.setDescription('Index to a specific file system File.')
systemFileSystemFileName = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 4, 4, 1, 2), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemFileSystemFileName.setStatus('current')
if mibBuilder.loadTexts: systemFileSystemFileName.setDescription('The name of a file system File')
systemFileSystemFileType = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 4, 4, 1, 3), SystemFileType().clone('undefined')).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemFileSystemFileType.setStatus('current')
if mibBuilder.loadTexts: systemFileSystemFileType.setDescription('The Type of a file system File')
systemFileSystemFileSize = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 4, 4, 1, 4), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemFileSystemFileSize.setStatus('current')
if mibBuilder.loadTexts: systemFileSystemFileSize.setDescription('size of this file')
systemFileSystemFileAttr = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 4, 4, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("undefined", 1), ("readOnly", 2), ("readWrite", 3), ("writeOnly", 4))).clone('undefined')).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemFileSystemFileAttr.setStatus('current')
if mibBuilder.loadTexts: systemFileSystemFileAttr.setDescription('attributes of this file')
systemFileSystemFileDateTime = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 4, 4, 1, 6), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemFileSystemFileDateTime.setStatus('current')
if mibBuilder.loadTexts: systemFileSystemFileDateTime.setDescription('the modification date and time of a file')
# --- Switch logging: global output controls ----------------------------------
# Output selector plus global / per-destination (flash, socket, console)
# enables. systemSwitchLoggingSocketIpAddr is deprecated in favour of the
# host tables further below.
systemSwitchLoggingIndex = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 6, 1), SwitchLoggingIndex().clone('flash')).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemSwitchLoggingIndex.setStatus('current')
if mibBuilder.loadTexts: systemSwitchLoggingIndex.setDescription('A small positive integer used to identify a switch logging output')
systemSwitchLoggingEnable = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 6, 2), Enable().clone('enabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: systemSwitchLoggingEnable.setStatus('current')
if mibBuilder.loadTexts: systemSwitchLoggingEnable.setDescription('Global switch logging enable/disable')
systemSwitchLoggingFlash = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 6, 3), Enable().clone('enabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: systemSwitchLoggingFlash.setStatus('current')
if mibBuilder.loadTexts: systemSwitchLoggingFlash.setDescription('Enable/disable switch logging to flash')
systemSwitchLoggingSocket = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 6, 4), Enable().clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: systemSwitchLoggingSocket.setStatus('current')
if mibBuilder.loadTexts: systemSwitchLoggingSocket.setDescription('Enable/disable switch logging to a socket')
systemSwitchLoggingSocketIpAddr = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 6, 5), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: systemSwitchLoggingSocketIpAddr.setStatus('deprecated')
if mibBuilder.loadTexts: systemSwitchLoggingSocketIpAddr.setDescription('The IP Address of a remote host that can be used to send switch logging records to as an option')
systemSwitchLoggingConsole = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 6, 6), Enable().clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: systemSwitchLoggingConsole.setStatus('current')
if mibBuilder.loadTexts: systemSwitchLoggingConsole.setDescription('Enable/disable switch logging to the console')
# --- Switch logging: registered-application severity table -------------------
# Three-part index: application ID, sub-application ID, and a VRF severity
# level index (0..8). Rows expose names, the sub-app severity level and a
# comma-delimited VRF-ID string — all read-only.
systemSwitchLoggingApplicationTable = MibTable((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 6, 7), )
if mibBuilder.loadTexts: systemSwitchLoggingApplicationTable.setStatus('current')
if mibBuilder.loadTexts: systemSwitchLoggingApplicationTable.setDescription('The table of switch logging registered applications,one for each Application ID')
systemSwitchLoggingLevelEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 6, 7, 1), ).setIndexNames((0, "ALCATEL-IND1-SYSTEM-MIB", "systemSwitchLoggingApplicationAppId"), (0, "ALCATEL-IND1-SYSTEM-MIB", "systemSwitchLoggingApplicationSubAppId"), (0, "ALCATEL-IND1-SYSTEM-MIB", "systemSwitchLoggingApplicationSubAppVrfLevelIndex"))
if mibBuilder.loadTexts: systemSwitchLoggingLevelEntry.setStatus('current')
if mibBuilder.loadTexts: systemSwitchLoggingLevelEntry.setDescription('A row in the system switch logging application table')
systemSwitchLoggingApplicationAppId = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 6, 7, 1, 1), AppIdIndex()).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemSwitchLoggingApplicationAppId.setStatus('current')
if mibBuilder.loadTexts: systemSwitchLoggingApplicationAppId.setDescription('A specific application ID which has a registered application associated with it. This application ID is used as an index into the application table.')
systemSwitchLoggingApplicationSubAppId = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 6, 7, 1, 2), AppIdIndex()).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemSwitchLoggingApplicationSubAppId.setStatus('current')
if mibBuilder.loadTexts: systemSwitchLoggingApplicationSubAppId.setDescription('A specific sub-application ID which belongs to a registered application associated with it. This sub-application ID is used as part of the index into the application table.')
systemSwitchLoggingApplicationSubAppVrfLevelIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 6, 7, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 8))).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemSwitchLoggingApplicationSubAppVrfLevelIndex.setStatus('current')
if mibBuilder.loadTexts: systemSwitchLoggingApplicationSubAppVrfLevelIndex.setDescription("The specific sub-application's VRF severity level. This severity level is used as part of the index into the application table.")
systemSwitchLoggingApplicationAppName = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 6, 7, 1, 4), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemSwitchLoggingApplicationAppName.setStatus('current')
if mibBuilder.loadTexts: systemSwitchLoggingApplicationAppName.setDescription('Application name in an entry in the table')
systemSwitchLoggingApplicationSubAppName = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 6, 7, 1, 5), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemSwitchLoggingApplicationSubAppName.setStatus('current')
if mibBuilder.loadTexts: systemSwitchLoggingApplicationSubAppName.setDescription('The name of the specific sub-application ID.')
systemSwitchLoggingApplicationSubAppLevel = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 6, 7, 1, 6), SeverityLevel().clone('severityLevelOff')).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemSwitchLoggingApplicationSubAppLevel.setStatus('current')
if mibBuilder.loadTexts: systemSwitchLoggingApplicationSubAppLevel.setDescription('The severity level of the specific sub-application ID.')
systemSwitchLoggingApplicationSubAppVrfLevelString = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 6, 7, 1, 7), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemSwitchLoggingApplicationSubAppVrfLevelString.setStatus('current')
if mibBuilder.loadTexts: systemSwitchLoggingApplicationSubAppVrfLevelString.setDescription("A string with comma-delimited VRF IDs or ranges of VRF IDs that belong to this sub-application's VRF severity level.")
# --- Switch logging: maintenance scalars -------------------------------------
# Write-triggered clearing of swlog entries, plus the swlog file size limit.
systemSwitchLoggingClear = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 6, 8), Unsigned32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: systemSwitchLoggingClear.setStatus('current')
if mibBuilder.loadTexts: systemSwitchLoggingClear.setDescription('Enable clearing of switch logging entries')
systemSwitchLoggingFileSize = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 6, 9), Unsigned32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: systemSwitchLoggingFileSize.setStatus('current')
if mibBuilder.loadTexts: systemSwitchLoggingFileSize.setDescription('Set size of swlog logging file')
# --- Switch logging: IPv4 remote syslog hosts --------------------------------
# RowStatus-managed table keyed by host IP; per-host UDP port (default 514,
# the standard syslog port), user-command forwarding flag and VRF name.
systemSwitchLoggingHostTable = MibTable((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 6, 10), )
if mibBuilder.loadTexts: systemSwitchLoggingHostTable.setStatus('current')
if mibBuilder.loadTexts: systemSwitchLoggingHostTable.setDescription('The table of switch logging remote hosts.')
systemSwitchLoggingHostEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 6, 10, 1), ).setIndexNames((0, "ALCATEL-IND1-SYSTEM-MIB", "systemSwitchLoggingHostIpAddr"))
if mibBuilder.loadTexts: systemSwitchLoggingHostEntry.setStatus('current')
if mibBuilder.loadTexts: systemSwitchLoggingHostEntry.setDescription('A remote switch logging server entry.')
systemSwitchLoggingHostIpAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 6, 10, 1, 1), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemSwitchLoggingHostIpAddr.setStatus('current')
if mibBuilder.loadTexts: systemSwitchLoggingHostIpAddr.setDescription('The IP Address of a remote host that can be used to send switch logging records to.')
systemSwitchLoggingHostPort = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 6, 10, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535)).clone(514)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: systemSwitchLoggingHostPort.setStatus('current')
if mibBuilder.loadTexts: systemSwitchLoggingHostPort.setDescription('The port number of a remote host that can be used to send switch logging records to.')
systemSwitchLoggingHostStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 6, 10, 1, 3), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: systemSwitchLoggingHostStatus.setStatus('current')
if mibBuilder.loadTexts: systemSwitchLoggingHostStatus.setDescription('Provides the ability to add or remove a remote host entry.')
systemSwitchLoggingHostUserCommandHost = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 6, 10, 1, 4), Enable().clone('disabled')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: systemSwitchLoggingHostUserCommandHost.setStatus('current')
if mibBuilder.loadTexts: systemSwitchLoggingHostUserCommandHost.setDescription('Indicates whether this host may receive user command data.')
systemSwitchLoggingHostVrfName = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 6, 10, 1, 5), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 20))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: systemSwitchLoggingHostVrfName.setStatus('current')
if mibBuilder.loadTexts: systemSwitchLoggingHostVrfName.setDescription('The name or number of the VRF to be used to send switch logging records to.')
# --- Switch logging: IPv6 remote syslog hosts --------------------------------
# IPv6 twin of the host table above (same columns, Ipv6Address index).
systemSwitchLoggingHostv6Table = MibTable((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 6, 11), )
if mibBuilder.loadTexts: systemSwitchLoggingHostv6Table.setStatus('current')
if mibBuilder.loadTexts: systemSwitchLoggingHostv6Table.setDescription('The table of switch logging remote hosts.')
systemSwitchLoggingHostv6Entry = MibTableRow((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 6, 11, 1), ).setIndexNames((0, "ALCATEL-IND1-SYSTEM-MIB", "systemSwitchLoggingHostv6IpAddr"))
if mibBuilder.loadTexts: systemSwitchLoggingHostv6Entry.setStatus('current')
if mibBuilder.loadTexts: systemSwitchLoggingHostv6Entry.setDescription('A remote switch logging server entry.')
systemSwitchLoggingHostv6IpAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 6, 11, 1, 1), Ipv6Address()).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemSwitchLoggingHostv6IpAddr.setStatus('current')
if mibBuilder.loadTexts: systemSwitchLoggingHostv6IpAddr.setDescription('The IP Address of a remote host that can be used to send switch logging records to.')
systemSwitchLoggingHostv6Port = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 6, 11, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535)).clone(514)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: systemSwitchLoggingHostv6Port.setStatus('current')
if mibBuilder.loadTexts: systemSwitchLoggingHostv6Port.setDescription('The port number of a remote host that can be used to send switch logging records to.')
systemSwitchLoggingHostv6Status = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 6, 11, 1, 3), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: systemSwitchLoggingHostv6Status.setStatus('current')
if mibBuilder.loadTexts: systemSwitchLoggingHostv6Status.setDescription('Provides the ability to add or remove a remote host entry.')
systemSwitchLoggingHostv6UserCommandHost = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 6, 11, 1, 4), Enable().clone('disabled')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: systemSwitchLoggingHostv6UserCommandHost.setStatus('current')
if mibBuilder.loadTexts: systemSwitchLoggingHostv6UserCommandHost.setDescription('Indicates whether this host may receive user command data.')
systemSwitchLoggingHostv6VrfName = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 6, 11, 1, 5), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 20))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: systemSwitchLoggingHostv6VrfName.setStatus('current')
if mibBuilder.loadTexts: systemSwitchLoggingHostv6VrfName.setDescription('The name or number of the VRF to be used to send switch logging records to.')
# --- Switch logging: remaining scalars ---------------------------------------
# Count of configured remote hosts (0..4), console severity threshold,
# user-command forwarding switch, and the syslog facility code (default
# 'uucp') applied to emitted records.
systemSwitchLoggingHostCount = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 6, 12), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4))).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemSwitchLoggingHostCount.setStatus('current')
if mibBuilder.loadTexts: systemSwitchLoggingHostCount.setDescription('The number of remote hosts currently defined.')
systemSwitchLoggingConsoleLevel = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 6, 13), SeverityLevel().clone('severityLevelWarn')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: systemSwitchLoggingConsoleLevel.setStatus('current')
if mibBuilder.loadTexts: systemSwitchLoggingConsoleLevel.setDescription('Messages whose severity level is equal to or more severe than this value will be displayed to the console.')
systemSwitchLoggingUserCommandStatus = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 6, 14), Enable().clone('disabled')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: systemSwitchLoggingUserCommandStatus.setStatus('current')
if mibBuilder.loadTexts: systemSwitchLoggingUserCommandStatus.setDescription('User commands will be logged to remote hosts when enabled.')
systemSwitchLoggingSysLogFacilityId = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 6, 15), SysLogFacilityId().clone('uucp')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: systemSwitchLoggingSysLogFacilityId.setStatus('current')
if mibBuilder.loadTexts: systemSwitchLoggingSysLogFacilityId.setDescription("This textual convention enumerates the facilities that originate syslog messages. The Facilities of syslog messages are numerically coded with decimal values. Some of the operating system daemons and processes are traditionally designated by the Facility values given below. Daemons and processes that do not have an explicitly assigned Facility may use any of the 'local use' Facilities or they may use the 'user-level' Facility. For interoperability and backwards compatibility reasons, mapping specified in this document between a label which represents a Facility and the value which represents the corresponding code, is normative. So the mapping from a label configured by operators in syslog.conf or equivalent will consistently map to the same Facility code regardless of implementation, but the label itself is often semantically meaningless, because it is impractical to attempt to enumerate all possible facilities, and the enumeration (label and corresponding value) that is used by an actual Facility is, and has historically been, implementation-dependent. For example, the foobar application might log messages as having come from local7, even though there is no 'local' process on the device, and the operator can configure syslog.conf to have local7.critical messages be relayed, even though there might be multiple facilities using Facility local7. This is typical current practice, and originators, relays and collectors know how to handle this situation. For improved accuracy, the foobar application can also include an APPNAME Structured Data Element.")
systemSwitchLoggingLevel = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 6, 16), SeverityLevel()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: systemSwitchLoggingLevel.setStatus('current')
if mibBuilder.loadTexts: systemSwitchLoggingLevel.setDescription('The logging level for a specific application id.')
systemSwitchLoggingAppName = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 6, 17), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: systemSwitchLoggingAppName.setStatus('current')
if mibBuilder.loadTexts: systemSwitchLoggingAppName.setDescription('Application name used as an index into the table')
systemSwitchLoggingDuplicateDetect = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 6, 18), Enable().clone('enabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: systemSwitchLoggingDuplicateDetect.setStatus('current')
if mibBuilder.loadTexts: systemSwitchLoggingDuplicateDetect.setDescription('Global switch logging enable/disable duplicate detection')
systemSwitchLoggingPreamble = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 6, 19), Enable().clone('enabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: systemSwitchLoggingPreamble.setStatus('current')
if mibBuilder.loadTexts: systemSwitchLoggingPreamble.setDescription('Global switch logging enable/disable printing preamble')
systemSwitchLoggingDebug = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 6, 20), Enable().clone('enabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: systemSwitchLoggingDebug.setStatus('current')
if mibBuilder.loadTexts: systemSwitchLoggingDebug.setDescription('Global switch logging enable/disable debug information')
systemSwitchLoggingVrf = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 6, 21), VrfId().clone(1)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: systemSwitchLoggingVrf.setStatus('current')
if mibBuilder.loadTexts: systemSwitchLoggingVrf.setDescription('Global switch logging subapplicastion VRF ID')
systemSwitchLoggingHashAgeLimit = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 6, 22), AgeLimit().clone(1)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: systemSwitchLoggingHashAgeLimit.setStatus('current')
if mibBuilder.loadTexts: systemSwitchLoggingHashAgeLimit.setDescription('Global switch logging Hash Table Age Limit')
systemSwitchLoggingTty = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 6, 23), Enable().clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: systemSwitchLoggingTty.setStatus('current')
if mibBuilder.loadTexts: systemSwitchLoggingTty.setDescription('Enable/disable switch logging to tty')
systemSwitchLoggingSubAppNbr = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 6, 24), AppIdIndex()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: systemSwitchLoggingSubAppNbr.setStatus('current')
if mibBuilder.loadTexts: systemSwitchLoggingSubAppNbr.setDescription('The subapplication number of a given application')
systemSwitchLoggingLibraryName = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 6, 25), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: systemSwitchLoggingLibraryName.setStatus('current')
if mibBuilder.loadTexts: systemSwitchLoggingLibraryName.setDescription('Library name used as an index into the table of registered libraries')
systemSwitchLoggingLoopback0 = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 6, 26), Enable().clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: systemSwitchLoggingLoopback0.setStatus('current')
if mibBuilder.loadTexts: systemSwitchLoggingLoopback0.setDescription('Enable/disable switch logging using Loopback0 interface as the source ip address when logging to syslog server')
# --- DNS resolver configuration scalars (generated pysnmp MIB objects) ---
# OID prefix 1.3.6.1.4.1.6486.801.1.1.1.2.1.1.7.x (Alcatel IND1 system DNS group).
# NOTE(review): these bindings appear to be machine-generated from
# ALCATEL-IND1-SYSTEM-MIB; regenerate from the MIB source rather than
# hand-editing if definitions need to change.

# Master enable/disable switch for the DNS resolver (defaults to 'disabled').
systemDNSEnableDnsResolver = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 7, 1), Enable().clone('disabled')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: systemDNSEnableDnsResolver.setStatus('current')
if mibBuilder.loadTexts: systemDNSEnableDnsResolver.setDescription('Global Domain Name Service enable/disable')
# DNS search domain, constrained to 0..255 octets.
systemDNSDomainName = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 7, 2), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: systemDNSDomainName.setStatus('current')
if mibBuilder.loadTexts: systemDNSDomainName.setDescription('current domain name used by the Domain Name Service')
# IPv4 name-server addresses 1-3; hexValue "00000000" is 0.0.0.0 (unset).
systemDNSNsAddr1 = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 7, 3), IpAddress().clone(hexValue="00000000")).setMaxAccess("readcreate")
if mibBuilder.loadTexts: systemDNSNsAddr1.setStatus('current')
if mibBuilder.loadTexts: systemDNSNsAddr1.setDescription('1st part of address used by the Domain Name Service')
systemDNSNsAddr2 = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 7, 4), IpAddress().clone(hexValue="00000000")).setMaxAccess("readcreate")
if mibBuilder.loadTexts: systemDNSNsAddr2.setStatus('current')
if mibBuilder.loadTexts: systemDNSNsAddr2.setDescription('2nd part of address used by the Domain Name Service')
systemDNSNsAddr3 = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 7, 5), IpAddress().clone(hexValue="00000000")).setMaxAccess("readcreate")
if mibBuilder.loadTexts: systemDNSNsAddr3.setStatus('current')
if mibBuilder.loadTexts: systemDNSNsAddr3.setDescription('3rd part of address used by the Domain Name Service')
# IPv6 name-server addresses 1-3; all-zero hexValue means unset (::).
systemDNSNsIPv6Addr1 = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 7, 6), Ipv6Address().clone(hexValue="00000000000000000000000000000000")).setMaxAccess("readcreate")
if mibBuilder.loadTexts: systemDNSNsIPv6Addr1.setStatus('current')
if mibBuilder.loadTexts: systemDNSNsIPv6Addr1.setDescription('IPv6 address of the Primary DNS server')
systemDNSNsIPv6Addr2 = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 7, 7), Ipv6Address().clone(hexValue="00000000000000000000000000000000")).setMaxAccess("readcreate")
if mibBuilder.loadTexts: systemDNSNsIPv6Addr2.setStatus('current')
if mibBuilder.loadTexts: systemDNSNsIPv6Addr2.setDescription('IPv6 address of the Secondary DNS server')
systemDNSNsIPv6Addr3 = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 1, 7, 8), Ipv6Address().clone(hexValue="00000000000000000000000000000000")).setMaxAccess("readcreate")
if mibBuilder.loadTexts: systemDNSNsIPv6Addr3.setStatus('current')
if mibBuilder.loadTexts: systemDNSNsIPv6Addr3.setDescription('IPv6 address of the third DNS server')
alcatelIND1SystemMIBCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 2, 2, 1)).setObjects(("ALCATEL-IND1-SYSTEM-MIB", "systemMicrocodeGroup"), ("ALCATEL-IND1-SYSTEM-MIB", "systemBootParamsGroup"), ("ALCATEL-IND1-SYSTEM-MIB", "systemHardwareGroup"), ("ALCATEL-IND1-SYSTEM-MIB", "systemServicesGroup"), ("ALCATEL-IND1-SYSTEM-MIB", "systemFileSystemGroup"), ("ALCATEL-IND1-SYSTEM-MIB", "systemSwitchLoggingGroup"), ("ALCATEL-IND1-SYSTEM-MIB", "systemDNSGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
alcatelIND1SystemMIBCompliance = alcatelIND1SystemMIBCompliance.setStatus('current')
if mibBuilder.loadTexts: alcatelIND1SystemMIBCompliance.setDescription('Compliance statement for Alcatel BOP Proprietary System Subsystem.')
systemMicrocodeGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 2, 1, 1)).setObjects(("ALCATEL-IND1-SYSTEM-MIB", "systemMicrocodePackageVersion"), ("ALCATEL-IND1-SYSTEM-MIB", "systemMicrocodePackageName"), ("ALCATEL-IND1-SYSTEM-MIB", "systemMicrocodePackageDescription"), ("ALCATEL-IND1-SYSTEM-MIB", "systemMicrocodePackageStatus"), ("ALCATEL-IND1-SYSTEM-MIB", "systemMicrocodePackageSize"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
systemMicrocodeGroup = systemMicrocodeGroup.setStatus('current')
if mibBuilder.loadTexts: systemMicrocodeGroup.setDescription('Group all the system microcode objects together')
systemBootParamsGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 2, 1, 2)).setObjects(("ALCATEL-IND1-SYSTEM-MIB", "systemBootNetwork"), ("ALCATEL-IND1-SYSTEM-MIB", "systemBootNetworkGateway"), ("ALCATEL-IND1-SYSTEM-MIB", "systemBootNetworkNetmask"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
systemBootParamsGroup = systemBootParamsGroup.setStatus('current')
if mibBuilder.loadTexts: systemBootParamsGroup.setDescription('Group all the system boot parameters together')
systemHardwareGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 2, 1, 3)).setObjects(("ALCATEL-IND1-SYSTEM-MIB", "systemHardwareFlashMfg"), ("ALCATEL-IND1-SYSTEM-MIB", "systemHardwareFlashSize"), ("ALCATEL-IND1-SYSTEM-MIB", "systemHardwareMemoryMfg"), ("ALCATEL-IND1-SYSTEM-MIB", "systemHardwareMemorySize"), ("ALCATEL-IND1-SYSTEM-MIB", "systemHardwareNVRAMBatteryLow"), ("ALCATEL-IND1-SYSTEM-MIB", "systemHardwareBootCpuType"), ("ALCATEL-IND1-SYSTEM-MIB", "systemHardwareJumperInterruptBoot"), ("ALCATEL-IND1-SYSTEM-MIB", "systemHardwareJumperForceUartDefaults"), ("ALCATEL-IND1-SYSTEM-MIB", "systemHardwareJumperRunExtendedMemoryDiagnostics"), ("ALCATEL-IND1-SYSTEM-MIB", "systemHardwareJumperSpare"), ("ALCATEL-IND1-SYSTEM-MIB", "systemHardwareFpgaVersionIndex"), ("ALCATEL-IND1-SYSTEM-MIB", "systemHardwareFpgaVersion"), ("ALCATEL-IND1-SYSTEM-MIB", "systemHardwareBootRomVersion"), ("ALCATEL-IND1-SYSTEM-MIB", "systemHardwareDefaultMiniBootVersion"), ("ALCATEL-IND1-SYSTEM-MIB", "systemHardwareBackupMiniBootVersion"), ("ALCATEL-IND1-SYSTEM-MIB", "systemHardwareCpldVersion"), ("ALCATEL-IND1-SYSTEM-MIB", "systemHardwareMinorFpgaVersion"), ("ALCATEL-IND1-SYSTEM-MIB", "systemHardwareProdRegId"), ("ALCATEL-IND1-SYSTEM-MIB", "systemHardwareRevisionRegister"), ("ALCATEL-IND1-SYSTEM-MIB", "systemHardwareUbootMinibootVersion"), ("ALCATEL-IND1-SYSTEM-MIB", "systemHardwareUbootVersion"), ("ALCATEL-IND1-SYSTEM-MIB", "systemHardwareXfpId"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
systemHardwareGroup = systemHardwareGroup.setStatus('current')
if mibBuilder.loadTexts: systemHardwareGroup.setDescription('Group all the system Hardware Data together')
systemServicesGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 2, 1, 4)).setObjects(("ALCATEL-IND1-SYSTEM-MIB", "systemServicesDate"), ("ALCATEL-IND1-SYSTEM-MIB", "systemServicesTime"), ("ALCATEL-IND1-SYSTEM-MIB", "systemServicesTimezone"), ("ALCATEL-IND1-SYSTEM-MIB", "systemServicesTimezoneStartWeek"), ("ALCATEL-IND1-SYSTEM-MIB", "systemServicesTimezoneStartDay"), ("ALCATEL-IND1-SYSTEM-MIB", "systemServicesTimezoneStartMonth"), ("ALCATEL-IND1-SYSTEM-MIB", "systemServicesTimezoneStartTime"), ("ALCATEL-IND1-SYSTEM-MIB", "systemServicesTimezoneOffset"), ("ALCATEL-IND1-SYSTEM-MIB", "systemServicesTimezoneEndWeek"), ("ALCATEL-IND1-SYSTEM-MIB", "systemServicesTimezoneEndDay"), ("ALCATEL-IND1-SYSTEM-MIB", "systemServicesTimezoneEndMonth"), ("ALCATEL-IND1-SYSTEM-MIB", "systemServicesTimezoneEndTime"), ("ALCATEL-IND1-SYSTEM-MIB", "systemServicesEnableDST"), ("ALCATEL-IND1-SYSTEM-MIB", "systemServicesWorkingDirectory"), ("ALCATEL-IND1-SYSTEM-MIB", "systemServicesArg1"), ("ALCATEL-IND1-SYSTEM-MIB", "systemServicesArg2"), ("ALCATEL-IND1-SYSTEM-MIB", "systemServicesArg3"), ("ALCATEL-IND1-SYSTEM-MIB", "systemServicesArg4"), ("ALCATEL-IND1-SYSTEM-MIB", "systemServicesArg5"), ("ALCATEL-IND1-SYSTEM-MIB", "systemServicesArg6"), ("ALCATEL-IND1-SYSTEM-MIB", "systemServicesArg7"), ("ALCATEL-IND1-SYSTEM-MIB", "systemServicesArg8"), ("ALCATEL-IND1-SYSTEM-MIB", "systemServicesArg9"), ("ALCATEL-IND1-SYSTEM-MIB", "systemServicesAction"), ("ALCATEL-IND1-SYSTEM-MIB", "systemServicesResultCode"), ("ALCATEL-IND1-SYSTEM-MIB", "systemServicesResultString"), ("ALCATEL-IND1-SYSTEM-MIB", "systemServicesKtraceEnable"), ("ALCATEL-IND1-SYSTEM-MIB", "systemServicesSystraceEnable"), ("ALCATEL-IND1-SYSTEM-MIB", "systemServicesTtyLines"), ("ALCATEL-IND1-SYSTEM-MIB", "systemServicesTtyColumns"), ("ALCATEL-IND1-SYSTEM-MIB", "systemServicesMemMonitorEnable"), ("ALCATEL-IND1-SYSTEM-MIB", "systemServicesKtraceLevelAppId"), ("ALCATEL-IND1-SYSTEM-MIB", "systemServicesKtraceLevel"), 
("ALCATEL-IND1-SYSTEM-MIB", "systemServicesSystraceLevelAppId"), ("ALCATEL-IND1-SYSTEM-MIB", "systemServicesSystraceLevel"), ("ALCATEL-IND1-SYSTEM-MIB", "systemUpdateStatus"), ("ALCATEL-IND1-SYSTEM-MIB", "systemUpdateErrorCode"), ("ALCATEL-IND1-SYSTEM-MIB", "systemServicesActionPercentComplete"), ("ALCATEL-IND1-SYSTEM-MIB", "systemServicesArchiveName"), ("ALCATEL-IND1-SYSTEM-MIB", "systemServicesArchiveType"), ("ALCATEL-IND1-SYSTEM-MIB", "systemServicesArchiveSize"), ("ALCATEL-IND1-SYSTEM-MIB", "systemServicesArchiveAttr"), ("ALCATEL-IND1-SYSTEM-MIB", "systemServicesUsbEnable"), ("ALCATEL-IND1-SYSTEM-MIB", "systemServicesUsbAutoCopyEnable"), ("ALCATEL-IND1-SYSTEM-MIB", "systemServicesUsbMounted"), ("ALCATEL-IND1-SYSTEM-MIB", "systemServicesArchiveIndex"), ("ALCATEL-IND1-SYSTEM-MIB", "systemServicesCurrentArchivePathName"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
systemServicesGroup = systemServicesGroup.setStatus('current')
if mibBuilder.loadTexts: systemServicesGroup.setDescription('Group all the system services parameters together')
systemFileSystemGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 2, 1, 5)).setObjects(("ALCATEL-IND1-SYSTEM-MIB", "systemFileSystemIndex"), ("ALCATEL-IND1-SYSTEM-MIB", "systemFileSystemFreeSpace"), ("ALCATEL-IND1-SYSTEM-MIB", "systemFileSystemName"), ("ALCATEL-IND1-SYSTEM-MIB", "systemFileSystemDirectoryName"), ("ALCATEL-IND1-SYSTEM-MIB", "systemFileSystemDirectoryDateTime"), ("ALCATEL-IND1-SYSTEM-MIB", "systemFileSystemFileIndex"), ("ALCATEL-IND1-SYSTEM-MIB", "systemFileSystemFileName"), ("ALCATEL-IND1-SYSTEM-MIB", "systemFileSystemFileType"), ("ALCATEL-IND1-SYSTEM-MIB", "systemFileSystemFileSize"), ("ALCATEL-IND1-SYSTEM-MIB", "systemFileSystemFileAttr"), ("ALCATEL-IND1-SYSTEM-MIB", "systemFileSystemFileDateTime"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
systemFileSystemGroup = systemFileSystemGroup.setStatus('current')
if mibBuilder.loadTexts: systemFileSystemGroup.setDescription('Group all the system flash file parameters together')
systemSwitchLoggingGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 2, 1, 6)).setObjects(("ALCATEL-IND1-SYSTEM-MIB", "systemSwitchLoggingIndex"), ("ALCATEL-IND1-SYSTEM-MIB", "systemSwitchLoggingEnable"), ("ALCATEL-IND1-SYSTEM-MIB", "systemSwitchLoggingFlash"), ("ALCATEL-IND1-SYSTEM-MIB", "systemSwitchLoggingSocket"), ("ALCATEL-IND1-SYSTEM-MIB", "systemSwitchLoggingSocketIpAddr"), ("ALCATEL-IND1-SYSTEM-MIB", "systemSwitchLoggingConsole"), ("ALCATEL-IND1-SYSTEM-MIB", "systemSwitchLoggingClear"), ("ALCATEL-IND1-SYSTEM-MIB", "systemSwitchLoggingFileSize"), ("ALCATEL-IND1-SYSTEM-MIB", "systemSwitchLoggingLevel"), ("ALCATEL-IND1-SYSTEM-MIB", "systemSwitchLoggingApplicationAppId"), ("ALCATEL-IND1-SYSTEM-MIB", "systemSwitchLoggingApplicationAppName"), ("ALCATEL-IND1-SYSTEM-MIB", "systemSwitchLoggingApplicationSubAppId"), ("ALCATEL-IND1-SYSTEM-MIB", "systemSwitchLoggingApplicationSubAppName"), ("ALCATEL-IND1-SYSTEM-MIB", "systemSwitchLoggingApplicationSubAppLevel"), ("ALCATEL-IND1-SYSTEM-MIB", "systemSwitchLoggingApplicationSubAppVrfLevelIndex"), ("ALCATEL-IND1-SYSTEM-MIB", "systemSwitchLoggingApplicationSubAppVrfLevelString"), ("ALCATEL-IND1-SYSTEM-MIB", "systemSwitchLoggingAppName"), ("ALCATEL-IND1-SYSTEM-MIB", "systemSwitchLoggingDuplicateDetect"), ("ALCATEL-IND1-SYSTEM-MIB", "systemSwitchLoggingPreamble"), ("ALCATEL-IND1-SYSTEM-MIB", "systemSwitchLoggingDebug"), ("ALCATEL-IND1-SYSTEM-MIB", "systemSwitchLoggingVrf"), ("ALCATEL-IND1-SYSTEM-MIB", "systemSwitchLoggingHashAgeLimit"), ("ALCATEL-IND1-SYSTEM-MIB", "systemSwitchLoggingTty"), ("ALCATEL-IND1-SYSTEM-MIB", "systemSwitchLoggingSubAppNbr"), ("ALCATEL-IND1-SYSTEM-MIB", "systemSwitchLoggingLibraryName"), ("ALCATEL-IND1-SYSTEM-MIB", "systemSwitchLoggingLoopback0"), ("ALCATEL-IND1-SYSTEM-MIB", "systemSwitchLoggingConsoleLevel"), ("ALCATEL-IND1-SYSTEM-MIB", "systemSwitchLoggingHostCount"), ("ALCATEL-IND1-SYSTEM-MIB", "systemSwitchLoggingUserCommandStatus"), ("ALCATEL-IND1-SYSTEM-MIB", 
"systemSwitchLoggingSysLogFacilityId"), ("ALCATEL-IND1-SYSTEM-MIB", "systemSwitchLoggingHostIpAddr"), ("ALCATEL-IND1-SYSTEM-MIB", "systemSwitchLoggingHostPort"), ("ALCATEL-IND1-SYSTEM-MIB", "systemSwitchLoggingHostStatus"), ("ALCATEL-IND1-SYSTEM-MIB", "systemSwitchLoggingHostUserCommandHost"), ("ALCATEL-IND1-SYSTEM-MIB", "systemSwitchLoggingHostVrfName"), ("ALCATEL-IND1-SYSTEM-MIB", "systemSwitchLoggingHostv6IpAddr"), ("ALCATEL-IND1-SYSTEM-MIB", "systemSwitchLoggingHostv6Port"), ("ALCATEL-IND1-SYSTEM-MIB", "systemSwitchLoggingHostv6Status"), ("ALCATEL-IND1-SYSTEM-MIB", "systemSwitchLoggingHostv6UserCommandHost"), ("ALCATEL-IND1-SYSTEM-MIB", "systemSwitchLoggingHostv6VrfName"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
systemSwitchLoggingGroup = systemSwitchLoggingGroup.setStatus('current')
if mibBuilder.loadTexts: systemSwitchLoggingGroup.setDescription('Group all the switch logging parameters together')
systemDNSGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 6486, 801, 1, 1, 1, 2, 1, 2, 1, 7)).setObjects(("ALCATEL-IND1-SYSTEM-MIB", "systemDNSEnableDnsResolver"), ("ALCATEL-IND1-SYSTEM-MIB", "systemDNSDomainName"), ("ALCATEL-IND1-SYSTEM-MIB", "systemDNSNsAddr1"), ("ALCATEL-IND1-SYSTEM-MIB", "systemDNSNsAddr2"), ("ALCATEL-IND1-SYSTEM-MIB", "systemDNSNsAddr3"), ("ALCATEL-IND1-SYSTEM-MIB", "systemDNSNsIPv6Addr1"), ("ALCATEL-IND1-SYSTEM-MIB", "systemDNSNsIPv6Addr2"), ("ALCATEL-IND1-SYSTEM-MIB", "systemDNSNsIPv6Addr3"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
systemDNSGroup = systemDNSGroup.setStatus('current')
if mibBuilder.loadTexts: systemDNSGroup.setDescription('Group all the systemDNS parameters together')
mibBuilder.exportSymbols("ALCATEL-IND1-SYSTEM-MIB", systemSwitchLoggingAppName=systemSwitchLoggingAppName, systemServicesArg9=systemServicesArg9, SwitchLoggingIndex=SwitchLoggingIndex, systemHardwareDefaultMiniBootVersion=systemHardwareDefaultMiniBootVersion, systemServicesTimezoneEndMonth=systemServicesTimezoneEndMonth, systemServicesWorkingDirectory=systemServicesWorkingDirectory, systemServicesSystraceLevelEntry=systemServicesSystraceLevelEntry, systemHardwareProdRegId=systemHardwareProdRegId, systemFileSystemFileType=systemFileSystemFileType, SysLogFacilityId=SysLogFacilityId, systemHardware=systemHardware, systemFileSystemGroup=systemFileSystemGroup, systemServicesArg3=systemServicesArg3, systemUpdateErrorCode=systemUpdateErrorCode, systemMicrocode=systemMicrocode, systemServicesTimezoneOffset=systemServicesTimezoneOffset, systemBootParamsGroup=systemBootParamsGroup, systemServicesUsbEnable=systemServicesUsbEnable, systemHardwareBootCpuType=systemHardwareBootCpuType, systemServicesAction=systemServicesAction, systemServicesTimezoneStartMonth=systemServicesTimezoneStartMonth, systemHardwareCpldVersion=systemHardwareCpldVersion, systemSwitchLoggingIndex=systemSwitchLoggingIndex, systemSwitchLoggingLevel=systemSwitchLoggingLevel, systemHardwareXfpId=systemHardwareXfpId, systemDNS=systemDNS, systemMicrocodePackageDirectory=systemMicrocodePackageDirectory, alcatelIND1SystemMIBConformance=alcatelIND1SystemMIBConformance, systemHardwareFpgaVersionIndex=systemHardwareFpgaVersionIndex, systemSwitchLoggingApplicationAppName=systemSwitchLoggingApplicationAppName, systemDNSEnableDnsResolver=systemDNSEnableDnsResolver, systemBootNetworkNetmask=systemBootNetworkNetmask, systemServicesSystraceLevel=systemServicesSystraceLevel, systemUpdateStatusEntry=systemUpdateStatusEntry, CommandPercentComplete=CommandPercentComplete, systemSwitchLoggingLevelEntry=systemSwitchLoggingLevelEntry, systemSwitchLoggingConsoleLevel=systemSwitchLoggingConsoleLevel, 
systemMicrocodeGroup=systemMicrocodeGroup, systemSwitchLogging=systemSwitchLogging, systemHardwareUbootMinibootVersion=systemHardwareUbootMinibootVersion, systemServicesActionPercentComplete=systemServicesActionPercentComplete, systemMicrocodePackageDescription=systemMicrocodePackageDescription, systemUpdateIndex=systemUpdateIndex, systemSwitchLoggingHostUserCommandHost=systemSwitchLoggingHostUserCommandHost, systemSwitchLoggingVrf=systemSwitchLoggingVrf, systemUpdateStatus=systemUpdateStatus, systemHardwareFpgaVersionTable=systemHardwareFpgaVersionTable, systemServicesSystraceLevelAppId=systemServicesSystraceLevelAppId, systemFileSystemDirectoryDateTime=systemFileSystemDirectoryDateTime, systemServicesTimezoneEndDay=systemServicesTimezoneEndDay, systemServicesArg1=systemServicesArg1, systemServicesArg6=systemServicesArg6, systemMicrocodePackageVersion=systemMicrocodePackageVersion, systemServicesTimezoneEndWeek=systemServicesTimezoneEndWeek, systemServicesArchiveType=systemServicesArchiveType, systemSwitchLoggingFlash=systemSwitchLoggingFlash, systemServicesEnableDST=systemServicesEnableDST, systemDNSNsIPv6Addr2=systemDNSNsIPv6Addr2, systemServicesDate=systemServicesDate, systemServicesArchiveName=systemServicesArchiveName, alcatelIND1SystemMIBCompliance=alcatelIND1SystemMIBCompliance, systemMicrocodePackageTable=systemMicrocodePackageTable, systemHardwareJumperForceUartDefaults=systemHardwareJumperForceUartDefaults, systemSwitchLoggingFileSize=systemSwitchLoggingFileSize, systemSwitchLoggingApplicationAppId=systemSwitchLoggingApplicationAppId, systemSwitchLoggingHostv6Status=systemSwitchLoggingHostv6Status, systemHardwareRevisionRegister=systemHardwareRevisionRegister, systemSwitchLoggingApplicationSubAppId=systemSwitchLoggingApplicationSubAppId, systemSwitchLoggingUserCommandStatus=systemSwitchLoggingUserCommandStatus, systemServicesResultString=systemServicesResultString, AgeLimit=AgeLimit, systemFileSystemFileEntry=systemFileSystemFileEntry, 
systemFileSystemDirectoryName=systemFileSystemDirectoryName, systemSwitchLoggingApplicationSubAppName=systemSwitchLoggingApplicationSubAppName, FileSystemIndex=FileSystemIndex, systemHardwareNVRAMBatteryLow=systemHardwareNVRAMBatteryLow, systemFileSystemIndex=systemFileSystemIndex, systemHardwareMinorFpgaVersion=systemHardwareMinorFpgaVersion, systemServicesKtraceLevelTable=systemServicesKtraceLevelTable, systemSwitchLoggingHostCount=systemSwitchLoggingHostCount, systemFileSystemFreeSpace=systemFileSystemFreeSpace, alcatelIND1SystemMIBObjects=alcatelIND1SystemMIBObjects, systemFileSystemFileSize=systemFileSystemFileSize, systemSwitchLoggingHostPort=systemSwitchLoggingHostPort, systemSwitchLoggingLibraryName=systemSwitchLoggingLibraryName, systemServicesKtraceEnable=systemServicesKtraceEnable, systemServicesGroup=systemServicesGroup, systemServicesMemMonitorEnable=systemServicesMemMonitorEnable, systemServicesResultCode=systemServicesResultCode, systemUpdateStatusTable=systemUpdateStatusTable, alcatelIND1SystemMIBCompliances=alcatelIND1SystemMIBCompliances, systemDNSNsAddr2=systemDNSNsAddr2, systemFileSystemFileDateTime=systemFileSystemFileDateTime, systemHardwareBootRomVersion=systemHardwareBootRomVersion, systemSwitchLoggingGroup=systemSwitchLoggingGroup, systemSwitchLoggingApplicationSubAppLevel=systemSwitchLoggingApplicationSubAppLevel, systemFileSystemEntry=systemFileSystemEntry, systemMicrocodePackageEntry=systemMicrocodePackageEntry, Enable=Enable, systemServicesKtraceLevelEntry=systemServicesKtraceLevelEntry, systemFileSystemFileIndex=systemFileSystemFileIndex, systemSwitchLoggingApplicationTable=systemSwitchLoggingApplicationTable, systemServicesArchiveEntry=systemServicesArchiveEntry, systemFileSystemTable=systemFileSystemTable, systemServicesArchiveAttr=systemServicesArchiveAttr, systemSwitchLoggingTty=systemSwitchLoggingTty, systemServicesTimezone=systemServicesTimezone, systemSwitchLoggingDuplicateDetect=systemSwitchLoggingDuplicateDetect, 
systemSwitchLoggingHostIpAddr=systemSwitchLoggingHostIpAddr, systemFileSystemFileAttr=systemFileSystemFileAttr, systemSwitchLoggingClear=systemSwitchLoggingClear, systemServicesArchiveSize=systemServicesArchiveSize, systemHardwareFpgaVersionEntry=systemHardwareFpgaVersionEntry, systemMicrocodePackageDirectoryIndex=systemMicrocodePackageDirectoryIndex, systemServicesArg8=systemServicesArg8, systemServicesCurrentArchivePathName=systemServicesCurrentArchivePathName, VrfId=VrfId, systemServicesTtyColumns=systemServicesTtyColumns, systemSwitchLoggingEnable=systemSwitchLoggingEnable, systemDNSNsAddr1=systemDNSNsAddr1, systemHardwareJumperRunExtendedMemoryDiagnostics=systemHardwareJumperRunExtendedMemoryDiagnostics, systemServicesUsbMounted=systemServicesUsbMounted, systemSwitchLoggingApplicationSubAppVrfLevelIndex=systemSwitchLoggingApplicationSubAppVrfLevelIndex, systemSwitchLoggingPreamble=systemSwitchLoggingPreamble, systemServicesArg2=systemServicesArg2, systemFileSystemFileName=systemFileSystemFileName, systemMicrocodePackageName=systemMicrocodePackageName, systemServicesTimezoneEndTime=systemServicesTimezoneEndTime, systemDNSNsIPv6Addr3=systemDNSNsIPv6Addr3, systemSwitchLoggingDebug=systemSwitchLoggingDebug, systemDNSGroup=systemDNSGroup, systemMicrocodePackageSize=systemMicrocodePackageSize, systemDNSNsAddr3=systemDNSNsAddr3, systemSwitchLoggingHostVrfName=systemSwitchLoggingHostVrfName, PYSNMP_MODULE_ID=alcatelIND1SystemMIB, systemFileSystemFileTable=systemFileSystemFileTable, systemSwitchLoggingHostv6Table=systemSwitchLoggingHostv6Table, systemSwitchLoggingLoopback0=systemSwitchLoggingLoopback0, systemServicesTimezoneStartTime=systemServicesTimezoneStartTime, systemSwitchLoggingHostv6IpAddr=systemSwitchLoggingHostv6IpAddr, systemBootParams=systemBootParams, systemServicesTtyLines=systemServicesTtyLines, systemHardwareFlashSize=systemHardwareFlashSize, systemServicesTimezoneStartDay=systemServicesTimezoneStartDay, 
systemSwitchLoggingConsole=systemSwitchLoggingConsole, systemBootNetworkGateway=systemBootNetworkGateway, systemMicrocodePackageIndex=systemMicrocodePackageIndex, alcatelIND1SystemMIB=alcatelIND1SystemMIB, systemServicesArg4=systemServicesArg4, systemHardwareBackupMiniBootVersion=systemHardwareBackupMiniBootVersion, systemServices=systemServices, systemBootNetwork=systemBootNetwork, systemSwitchLoggingHostTable=systemSwitchLoggingHostTable, systemSwitchLoggingApplicationSubAppVrfLevelString=systemSwitchLoggingApplicationSubAppVrfLevelString, systemSwitchLoggingHostv6Port=systemSwitchLoggingHostv6Port, systemHardwareFlashMfg=systemHardwareFlashMfg, systemServicesArchiveIndex=systemServicesArchiveIndex, systemSwitchLoggingSocket=systemSwitchLoggingSocket, systemDNSDomainName=systemDNSDomainName, alcatelIND1SystemMIBGroups=alcatelIND1SystemMIBGroups, systemServicesSystraceLevelTable=systemServicesSystraceLevelTable, systemSwitchLoggingHostv6UserCommandHost=systemSwitchLoggingHostv6UserCommandHost, AppIdIndex=AppIdIndex, systemServicesUsbAutoCopyEnable=systemServicesUsbAutoCopyEnable, systemHardwareJumperSpare=systemHardwareJumperSpare, systemHardwareJumperInterruptBoot=systemHardwareJumperInterruptBoot, systemServicesTimezoneStartWeek=systemServicesTimezoneStartWeek, systemFileSystemName=systemFileSystemName, systemSwitchLoggingHostv6Entry=systemSwitchLoggingHostv6Entry, systemSwitchLoggingSysLogFacilityId=systemSwitchLoggingSysLogFacilityId, systemServicesTime=systemServicesTime, systemHardwareMemoryMfg=systemHardwareMemoryMfg, SeverityLevel=SeverityLevel, systemSwitchLoggingHostv6VrfName=systemSwitchLoggingHostv6VrfName, systemServicesArchiveTable=systemServicesArchiveTable, systemSwitchLoggingSocketIpAddr=systemSwitchLoggingSocketIpAddr, systemSwitchLoggingHashAgeLimit=systemSwitchLoggingHashAgeLimit, systemServicesArg7=systemServicesArg7, systemHardwareGroup=systemHardwareGroup, systemServicesKtraceLevelAppId=systemServicesKtraceLevelAppId, 
systemSwitchLoggingHostStatus=systemSwitchLoggingHostStatus, systemDNSNsIPv6Addr1=systemDNSNsIPv6Addr1, systemHardwareUbootVersion=systemHardwareUbootVersion, systemHardwareFpgaVersion=systemHardwareFpgaVersion, systemHardwareMemorySize=systemHardwareMemorySize, systemServicesKtraceLevel=systemServicesKtraceLevel, systemMicrocodePackageStatus=systemMicrocodePackageStatus, systemServicesSystraceEnable=systemServicesSystraceEnable, systemSwitchLoggingSubAppNbr=systemSwitchLoggingSubAppNbr, systemFileSystem=systemFileSystem, SystemFileType=SystemFileType, systemSwitchLoggingHostEntry=systemSwitchLoggingHostEntry, systemServicesArg5=systemServicesArg5)
| [
2,
198,
2,
9485,
15571,
7378,
337,
9865,
8265,
317,
5639,
1404,
3698,
12,
12115,
16,
12,
23060,
25361,
12,
8895,
33,
357,
4023,
1378,
16184,
76,
489,
8937,
13,
785,
14,
79,
893,
11632,
8,
198,
2,
7054,
45,
13,
16,
2723,
2393,
13... | 3.028576 | 31,215 |
#!/usr/bin/env python
"""Exercises using Netmiko"""
from getpass import getpass
from netmiko import ConnectHandler
def save_file(filename, show_run):
    """Write the captured 'show run' output to *filename*.

    Any existing file at that path is overwritten.
    """
    out_file = open(filename, "w")
    try:
        out_file.write(show_run)
    finally:
        out_file.close()
def main():
    """Connect to a Cisco router and an Arista switch, display 'show arp',
    and save each device's running configuration to <base_prompt>.txt.

    Passwords are collected interactively with getpass so they never appear
    on the command line or in shell history.
    """
    rtr1_pass = getpass("Enter router password: ")
    sw1_pass = getpass("Enter switch password: ")

    # Device definitions in the dict form netmiko's ConnectHandler expects.
    pynet_rtr1 = {
        'device_type': 'cisco_ios',
        'ip': '184.105.247.70',
        'username': 'pyclass',
        'password': rtr1_pass,
    }
    pynet_sw1 = {
        'device_type': 'arista_eos',
        'ip': '184.105.247.72',
        'username': 'admin1',
        'password': sw1_pass,
    }

    for a_device in (pynet_rtr1, pynet_sw1):
        net_connect = ConnectHandler(**a_device)
        # print is called as a function so the script runs on Python 3
        # (current netmiko releases are Python-3-only); the single-argument
        # parenthesized form is also valid Python 2 syntax, so behavior is
        # unchanged on either interpreter.
        print("Current Prompt: " + net_connect.find_prompt())

        show_arp = net_connect.send_command("show arp")
        print("")
        print('#' * 80)
        print(show_arp)
        print('#' * 80)
        print("")

        # Persist the full running config under the device's prompt name.
        show_run = net_connect.send_command("show run")
        filename = net_connect.base_prompt + ".txt"
        print("Save show run output: {}\n".format(filename))
        save_file(filename, show_run)
# Run the exercises only when executed directly (not when imported).
if __name__ == "__main__":
    main()
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
37811,
3109,
2798,
2696,
1262,
3433,
76,
12125,
37811,
198,
6738,
651,
6603,
1330,
651,
6603,
198,
6738,
2010,
76,
12125,
1330,
8113,
25060,
198,
198,
4299,
3613,
62,
7753,
7,
34345,
1... | 2.178694 | 582 |
# This is allowed
from ..person import api as person_api
# This is NOT allowed
from ..person import _reading
# This is allowed
from ._legal import api as legal_api
# This is NOT allowed
from ._legal import _compliance
| [
198,
198,
2,
770,
318,
3142,
198,
6738,
11485,
6259,
1330,
40391,
355,
1048,
62,
15042,
198,
198,
2,
770,
318,
5626,
3142,
198,
6738,
11485,
6259,
1330,
4808,
25782,
198,
198,
2,
770,
318,
3142,
198,
6738,
47540,
18011,
1330,
40391,
... | 3.655738 | 61 |
import time
import requests
from bs4 import BeautifulSoup
import smtplib
from email.message import EmailMessage
# Credentials and target URL are placeholders the user must fill in.
email_id = "Enter your Email address over here"
email_pass = "Enter your password over here"
URL = "Enter the URL of the product here"
# x is a continue/stop flag returned by check_price(); prev carries the
# previously observed price between iterations.
x = 1
prev = 0
while x:
    # NOTE(review): check_price() is not defined in this chunk -- presumably
    # defined elsewhere in the file (it should return (flag, price)); verify.
    x, prev = check_price(prev)
    if x == 1:
        # NOTE(review): time.sleep() takes seconds, so 1440 is 24 minutes and
        # 10080 is 2.8 hours. If "minutes in a day"/"minutes in a week" was
        # intended, these should be 86400 and 604800 -- confirm.
        time.sleep(1440)
    else:
        time.sleep(10080)
| [
11748,
640,
198,
11748,
7007,
198,
6738,
275,
82,
19,
1330,
23762,
50,
10486,
198,
11748,
895,
83,
489,
571,
198,
6738,
3053,
13,
20500,
1330,
9570,
12837,
198,
198,
12888,
62,
312,
796,
366,
17469,
534,
9570,
2209,
625,
994,
1,
198... | 2.742857 | 140 |
# misc.py, GUI helper functions for QT lab environment
# Reinier Heeres, <reinier@heeres.eu>, 2008
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
import gtk
def build_menu(tree, accelgroup=None, root=True):
    """Build a gtk menu, including submenu's.

    tree is an array of items, each item being a dictionary with:
    -'name': the visible item name
    -'icon': an (optional) icon
    -'submenu': an array of items representing a submenu
    -'action': an (optional) callback connected to 'activate'
    -'accel': an (optional) accelerator string, used when accelgroup is given

    Returns a gtk.MenuBar at the root level, a gtk.Menu for submenus.
    """
    if root:
        menu = gtk.MenuBar()
    else:
        menu = gtk.Menu()

    for element in tree:
        item = gtk.MenuItem(element['name'])
        # dict.has_key() was removed in Python 3; 'in' works on both 2 and 3.
        if 'icon' in element:
            pass  # Icons are recognized but not rendered yet.
        if 'submenu' in element:
            # Recurse: submenus are plain gtk.Menu instances (root=False).
            item.set_submenu(build_menu(element['submenu'],
                            root=False, accelgroup=accelgroup))
        if 'action' in element:
            item.connect('activate', element['action'])
        if 'accel' in element and accelgroup is not None:
            (key, mod) = gtk.accelerator_parse(element['accel'])
            item.add_accelerator('activate', accelgroup, key, mod,
                            gtk.ACCEL_VISIBLE)
        menu.add(item)

    return menu
def pack_hbox(items, expand=True, fill=True):
    '''Create a gtk.HBox, pack all given widgets into it and return it.'''
    box = gtk.HBox()
    for widget in items:
        box.pack_start(widget, expand, fill)
    return box
def pack_vbox(items, expand=True, fill=True):
    '''Create a gtk.VBox, pack all given widgets into it and return it.'''
    box = gtk.VBox()
    for widget in items:
        box.pack_start(widget, expand, fill)
    return box
| [
2,
12747,
13,
9078,
11,
25757,
31904,
5499,
329,
1195,
51,
2248,
2858,
198,
2,
22299,
959,
679,
68,
411,
11,
1279,
260,
259,
959,
31,
21067,
411,
13,
12496,
22330,
3648,
198,
2,
198,
2,
770,
1430,
318,
1479,
3788,
26,
345,
460,
... | 2.640904 | 841 |
import importlib
import re
import os
import yaml
from yaml.constructor import ConstructorError
from wopmars.utils.various import get_current_time
try:
from yaml import CLoader as Loader
except ImportError:
from yaml import Loader
from sqlalchemy.orm.exc import NoResultFound, ObjectDeletedError
from wopmars.SQLManager import SQLManager
from wopmars.models.Execution import Execution
from wopmars.models.TableInputOutputInformation import TableInputOutputInformation
from wopmars.models.FileInputOutputInformation import FileInputOutputInformation
from wopmars.models.TableModificationTime import TableModificationTime
from wopmars.models.Option import Option
from wopmars.models.TypeInputOrOutput import TypeInputOrOutput
from wopmars.utils.DictUtils import DictUtils
from wopmars.utils.Logger import Logger
from wopmars.utils.OptionManager import OptionManager
from wopmars.utils.WopMarsException import WopMarsException
class Reader:
    """
    This class is responsible of the parsing of the user's entries:

    - the arguments given with the ``tool`` command
    - the content of the workflow definition file, in normal mode

    This module uses the ``yaml`` library in order to parse the workflow definition file. Some additional rules have been
    added to the ``yaml`` library in order to prevent duplicate rules.

    Also, once the ``Reader`` has gotten the workflow definition informations, it'll check for eventual errors and then
    store them in the database. Those stored informations are what we call the "history" of **WoPMaRS**.
    """

    def load_wopfile_as_yml_dic(self, wopfile_path):
        """
        Open the definition file and load it's content in a dictionnary thanks to the ``yaml`` library. ``yaml`` can
        raise an exception if the yaml specifications are not respected or if there is duplicates at the same level of
        hierarchy in the definition file. If so, the exception is caught then wrapped into a ``WopMarsException``.

        The check of the grammar of the definition file is done during this step but no tests are performed regarding
        to the actual content of the definition file.

        :param wopfile_path: Path to the definition file
        :type wopfile_path: str
        :raises WopMarsException: The yaml specifications are not respected
        """
        # Tests about grammar and syntax are performed here (file's existence is also tested here)
        try:
            with open(wopfile_path, 'r') as def_file:
                wopfile_content_str = def_file.read()
            try:
                # The workflow definition file is loaded as-it in memory by the pyyaml library
                Logger.instance().info("Reading the Wopfile.yml: " + str(wopfile_path))
                # Replace jinja2 variables with environment variable values
                # s_def_file_content = jinja2.Environment().from_string(s_def_file_content).render(os.environ)
                # Parse the file to find duplicates rule names (it is a double check with the following step)
                Reader.check_duplicate_rules(wopfile_content_str)
                # Allows to raise an exception if duplicate keys are found on the same document hirearchy level.
                yaml.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, Reader.no_duplicates_constructor)
                # The whole content of the definition file is loaded in this dict.
                # yaml.load return None if there is no content in the String
                self.__wopfile_yml_dict = yaml.load(wopfile_content_str, Loader=yaml.SafeLoader) or {}
                if self.__wopfile_yml_dict == {}:
                    Logger.instance().warning("The workflow definition file is empty")
                Logger.instance().debug("\n" + DictUtils.pretty_repr(self.__wopfile_yml_dict))
                Logger.instance().debug("Read complete.")
                Logger.instance().debug("Checking whether the file is well formed...")
                # raise an exception if there is a problem with the grammar
                self.is_grammar_respected()
                Logger.instance().debug("File well formed.")
            # YAMLError is thrown if the YAML specifications are not respected by the definition file
            except yaml.YAMLError as exc:
                raise WopMarsException("Error while parsing the configuration file: \n\t"
                                       "The YAML specification is not respected:", str(exc))
            except ConstructorError as CE:
                raise WopMarsException("Error while parsing the configuration file: \n\t",
                                       str(CE))
        except FileNotFoundError:
            raise WopMarsException("Error while parsing the configuration file: \n\tInput error:",
                                   "The specified file at " + wopfile_path + " doesn't exist.")

    # Code from the github: https://gist.github.com/pypt/94d747fe5180851196eb
    @staticmethod
    def no_duplicates_constructor(loader, node, deep=False):
        """
        Make the yaml constructor to check for duplicate keys.

        :raises ConstructorError: Two keys at the same hierarchy level are identical.
        """
        mapping = {}
        for key_node, value_node in node.value:
            key = loader.construct_object(key_node, deep=deep)
            value = loader.construct_object(value_node, deep=deep)
            if key in mapping:
                raise ConstructorError("while constructing a mapping", node.start_mark,
                                       "found duplicate key (%s)" % key, key_node.start_mark)
            mapping[key] = value
        return loader.construct_mapping(node, deep)

    @staticmethod
    def check_duplicate_rules(wopfile_content_str):
        """
        This method raises an exception if the workflow definition file contains duplicate rule names.

        The workflow definition file should contain rules with different is_input. It is therefore recommended to not
        call rules with tool names but functionality instead. Example:

        .. code-block:: yaml

            rule get_snp:
                tool: SNPGetter
                input:
                    file:
                        etc..
                    table:
                        etc..
                output:
                    file:
                        etc..
                    table:
                        etc..
                params:
                    etc..

        :param wopfile_content_str: The content of the definition file
        :type wopfile_content_str: str
        :raises WopMarsException: There is a duplicate rule is_input
        """
        Logger.instance().debug("Looking for duplicate rules...")
        # All rules are found using this regex.
        rules = re.findall(r'rule (.+?):', str(wopfile_content_str))
        seen = set()
        # for each rule is_input
        for r in rules:
            # if the rule has not been seen before
            if r not in seen:
                # add it to the set of seen rules
                seen.add(r)
            else:
                # There is a duplicate rule is_input
                raise WopMarsException("Error while parsing the configuration file:\n\t",
                                       "The rule " + r + " is duplicated.")
        Logger.instance().debug("No Duplicate.")

    def is_grammar_respected(self):
        """
        Check if the definition file respects the grammar. Throw a WopMarsException exception if not.

        The formal representation of the grammar is::

            WoPMaRS       = rule
            identifier    = String
            ni            = NEWLINE INDENT
            rule          = "rule" identifier ":" ruleparams
            ruleparams    = [ni tool] [ni input] [ni output] [ni params]
            filesortables = (ni files|ni models){0-2}
            files         = "file"  ":" (ni identifier ”:” stringliteral)+
            models        = "table" ":" (ni identifier ”:” stringliteral)+
            tool          = "tool"   ":" stringliteral
            input         = "input"  ":" ni filesortables
            output        = "output" ":" ni filesortables
            params        = "params" ":" (ni identifier ”:” stringliteral)+
            (NEWLINE WoPMaRS)+

        :raises WopMarsException: The grammar is not respected
        """
        exemple_file_def = """
    rule RULENAME:
        tool: TOOLNAME
        input:
            file:
                INPUTNAME: INPUTVALUE
            table:
                - path.to.table
        output:
            file:
                OUTPUTNAME: OUTPUTVALUE
            table:
                - path.to.table
        params:
            OPTIONNAME: OPTIONVALUE

    rule ...etc...
    """
        # recognize the rule blocks
        regex_step1 = re.compile(r"(^rule [^\s]+$)")
        # recognize the elements of the rule
        regex_step2 = re.compile(r"(^params$)|(^tool$)|(^input$)|(^output$)")
        # recognize the file/table blocks
        regex_step3 = re.compile(r"(^file$)|(^table$)")

        # The words found are tested against the regex to see if they match or not
        for s_key_step1 in self.__wopfile_yml_dict:
            bool_toolwrapper = False
            # The first level of indentation should only contain rules
            if not regex_step1.search(s_key_step1):
                raise WopMarsException("Error while parsing the configuration file: \n\t"
                                       "The grammar of the WopMars's definition file is not respected:",
                                       "The line containing:\'" +
                                       str(s_key_step1) +
                                       "\' doesn't match the grammar: it should start with 'rule'" +
                                       "and contains only one word after the 'rule' keyword" +
                                       "\nexemple:" + exemple_file_def)
            for s_key_step2 in self.__wopfile_yml_dict[s_key_step1]:
                # the second level of indentation should only contain elements of rule
                if not regex_step2.search(s_key_step2):
                    raise WopMarsException("Error while parsing the configuration file: \n\t"
                                           "The grammar of the WopMars's definition file is not respected:",
                                           "The line containing:'" + str(s_key_step2) + "'" +
                                           " for rule '" + str(s_key_step1) + "'" +
                                           " doesn't match the grammar: it should be " +
                                           "'tool', 'params', 'input' or 'output'" +
                                           "\nexemple:" + exemple_file_def)
                elif s_key_step2 == "input" or s_key_step2 == "output":
                    for s_key_step3 in self.__wopfile_yml_dict[s_key_step1][s_key_step2]:
                        if not regex_step3.search(s_key_step3):
                            raise WopMarsException("Error while parsing the configuration file: \n\t"
                                                   "The grammar of the WopMars's definition file is not respected:",
                                                   "The line containing:'" + str(s_key_step3) + "'" +
                                                   " for rule '" + str(s_key_step1) + "'" +
                                                   " doesn't match the grammar: it should be " +
                                                   "'file' or 'table'" +
                                                   "\nexemple:" + exemple_file_def)
                        elif s_key_step3 == "file":
                            for s_variable_name in self.__wopfile_yml_dict[s_key_step1][s_key_step2][s_key_step3]:
                                if type(self.__wopfile_yml_dict[s_key_step1][s_key_step2][s_key_step3][s_variable_name]) != str:
                                    raise WopMarsException("Error while parsing the configuration file: \n\t" +
                                                           "The grammar of the WopMars's definition file is not respected:",
                                                           "The line containing:'" + str(s_variable_name) + "'" +
                                                           " for rule '" + str(s_key_step1) + "'" +
                                                           " doesn't match the grammar: it should be the string containing the path to the file."
                                                           "\nexemple:" + exemple_file_def)
                        elif s_key_step3 == "table":
                            for s_tablename in self.__wopfile_yml_dict[s_key_step1][s_key_step2][s_key_step3]:
                                if type(s_tablename) != str:
                                    # Bug fix: this error message previously referenced
                                    # s_variable_name (a stale name from the "file" branch,
                                    # unbound if that branch never ran) instead of s_tablename.
                                    raise WopMarsException("Error while parsing the configuration file: \n\t"
                                                           "The grammar of the WopMars's definition file is not respected:",
                                                           "The line containing:'" + str(s_tablename) + "'" +
                                                           " for rule '" + str(s_key_step1) + "'" +
                                                           " doesn't match the grammar: it should be the string containing the is_input of the Model."
                                                           "\nexemple:" + exemple_file_def)
                # There should be one tool at max in each rule
                elif s_key_step2 == "tool":
                    if not bool_toolwrapper:
                        bool_toolwrapper = True
                    else:
                        raise WopMarsException("Error while parsing the configuration file: \n\t",
                                               "There is multiple tools specified for the " + str(s_key_step1))
            # All rules should contain a tool
            if not bool_toolwrapper:
                raise WopMarsException("Error while parsing the configuration file: \n\t"
                                       "The grammar of the WopMars's definition file is not respected:",
                                       "The rule '" + str(s_key_step1) + "' doesn't contain any tool." +
                                       "\nexemple:" + exemple_file_def
                                       )

    def load_one_toolwrapper(self, s_toolwrapper, s_dict_inputs, s_dict_outputs, s_dict_params):
        """
        Method called when the ``tool`` command is used. It is equivalent to the :meth:`~.wopmars.framework.parsing.Reader.Reader.iterate_wopfile_yml_dic_and_insert_rules_in_db` method but create a workflow
        with only one tool_python_path. The workflow is also stored inside the database.

        :param s_toolwrapper: The is_input of the tool_python_path (will be imported)
        :type s_toolwrapper: str
        :param s_dict_inputs: A string containing the dict of input files
        :type s_dict_inputs: str
        :param s_dict_outputs: A string containing the dict of output files
        :type s_dict_outputs: str
        :param s_dict_params: A string containing the dict of params
        :type s_dict_params: str

        :raise WopMarsException: There is an error while accessing the database
        """
        session = SQLManager.instance().get_session()
        # NOTE: eval() of user-provided strings is dangerous on untrusted input;
        # kept here because the CLI contract feeds these strings deliberately.
        dict_inputs = dict(eval(s_dict_inputs))
        dict_outputs = dict(eval(s_dict_outputs))
        dict_params = dict(eval(s_dict_params))
        try:
            # The same execution entry for the whole workflow-related database entries.
            time_unix_ms, time_human = get_current_time()
            execution = Execution(started_at=time_human)
            # get the types that should have been created previously
            input_entry = session.query(TypeInputOrOutput).filter(TypeInputOrOutput.is_input == True).one()
            output_entry = session.query(TypeInputOrOutput).filter(TypeInputOrOutput.is_input == False).one()

            Logger.instance().debug("Loading unique tool_python_path " + s_toolwrapper)
            dict_dict_dict_elm = dict(dict_input={"file": {}, "table": {}},
                                      dict_params={},
                                      dict_output={"file": {}, "table": {}})
            # io_type is "file" or "table" (renamed from 'type' to avoid
            # shadowing the builtin).
            for io_type in dict_inputs:
                if io_type == "file":
                    for s_input in dict_inputs[io_type]:
                        obj_created = FileInputOutputInformation(file_key=s_input,
                                                                 path=os.path.join(OptionManager.instance()["--directory"],
                                                                                   dict_inputs[io_type][s_input]))
                        dict_dict_dict_elm["dict_input"][io_type][s_input] = obj_created
                        Logger.instance().debug("Object input file: " + s_input + " created.")
                elif io_type == "table":
                    for s_input in dict_inputs[io_type]:
                        model_py_path = dict_inputs[io_type][s_input]
                        table_name = model_py_path.split('.')[-1]
                        obj_created = TableInputOutputInformation(model_py_path=model_py_path, table_key=s_input,
                                                                  table_name=table_name)
                        dict_dict_dict_elm["dict_input"][io_type][s_input] = obj_created
                        Logger.instance().debug("Object input table: " + s_input + " created.")
            for io_type in dict_outputs:
                if io_type == "file":
                    for s_output in dict_outputs[io_type]:
                        obj_created = FileInputOutputInformation(file_key=s_output, path=dict_outputs[io_type][s_output])
                        dict_dict_dict_elm["dict_output"]["file"][s_output] = obj_created
                        Logger.instance().debug("Object output file: " + s_output + " created.")
                elif io_type == "table":
                    for s_output in dict_outputs[io_type]:
                        model_py_path = dict_outputs[io_type][s_output]
                        table_name = model_py_path.split('.')[-1]
                        obj_created = TableInputOutputInformation(model_py_path=model_py_path, table_key=s_output,
                                                                  table_name=table_name)
                        dict_dict_dict_elm["dict_output"]["table"][s_output] = obj_created
                        Logger.instance().debug("Object output table: " + s_output + " created.")
            for s_param in dict_params:
                obj_created = Option(name=s_param,
                                     value=dict_params[s_param])
                dict_dict_dict_elm["dict_params"][s_param] = obj_created
                Logger.instance().debug("Object option: " + s_param + " created.")

            # Instantiate the refered class
            wrapper_entry = self.create_tool_wrapper_inst("rule_" + s_toolwrapper, s_toolwrapper,
                                                          dict_dict_dict_elm, input_entry, output_entry)
            wrapper_entry.relation_toolwrapper_to_execution = execution
            Logger.instance().debug("Object tool_python_path: " + s_toolwrapper + " created.")
            session.add(wrapper_entry)
            session.commit()
            session.rollback()
            TableInputOutputInformation.set_tables_properties(TableInputOutputInformation.get_execution_tables())
            # commit /rollback trick to clean the session
            # totodo LucG ask lionel: have you ever had this problem of not being able
            # to run queries and inserts in the same session?
            session.commit()
            session.rollback()
            # if not SQLManager.instance().d_database_config['db_connection'] == 'postgresql':
            # This command will create all the triggers that will create timestamp after modification
            TableModificationTime.create_triggers()
            # This create_all will create all models that have been found in the tool_python_path
            SQLManager.instance().create_all()
            wrapper_entry.is_content_respected()
        except NoResultFound as e:
            session.rollback()
            raise WopMarsException("Error while parsing the configuration file. The database has not been setUp Correctly.",
                                   str(e))

    def iterate_wopfile_yml_dic_and_insert_rules_in_db(self, wopfile_path):
        """
        Reads the file given and insert the rules of the workflow in the database.

        The definition file is supposed to be properly formed. The validation of the content of the definition is done
        during the instanciation of the tools.

        :param: s_definition_file: String containing the path to the definition file.
        :type wopfile_path: str
        :raise: WopmarsException: The content is not validated
        """
        self.load_wopfile_as_yml_dic(wopfile_path)

        session = SQLManager.instance().get_session()

        # The dict_workflow_definition is assumed to be well formed
        try:
            # The same execution entry for the whole workflow-related database entries.
            time_unix_ms, time_human = get_current_time()
            execution = Execution(started_at=time_human)
            # get the types database entries that should have been created previously
            input_entry = session.query(TypeInputOrOutput).filter(TypeInputOrOutput.is_input == True).one()
            output_entry = session.query(TypeInputOrOutput).filter(TypeInputOrOutput.is_input == False).one()
            tool_wrapper_set = set()

            # Encounter a rule block
            for yml_key_level1 in self.__wopfile_yml_dict:
                tool_wrapper_py_path = None
                # the is_input of the rule is extracted after the "rule" keyword. There shouldn't be a ":" but it costs nothing.
                rule_name_str = yml_key_level1.split()[-1].strip(":")
                Logger.instance().debug("Encounter rule " + rule_name_str + ": \n" +
                                        str(DictUtils.pretty_repr(self.__wopfile_yml_dict[yml_key_level1])))
                # The dict of "input"s, "output"s and "params" is re-initialized for each tool wrapper
                tool_wrapper_inst_dic = dict(dict_input={"file": {}, "table": {}}, dict_params={}, dict_output={"file": {}, "table": {}})
                for yml_key_level2 in self.__wopfile_yml_dict[yml_key_level1]:
                    # key_second_step is supposed to be "tool", "input", "output" or "params"
                    # if type(self.__wopfile_yml_dict[rule_header][yml_key_level_2nd]) == dict:
                    if yml_key_level2 in {"input", "output", "params"}:
                        # if it is a dict, then inputs, outputs or params are coming
                        for yml_key_level3 in self.__wopfile_yml_dict[yml_key_level1][yml_key_level2]:
                            if yml_key_level2 == "params":
                                # yml_key = yml_key_level3
                                value = self.__wopfile_yml_dict[yml_key_level1][yml_key_level2][yml_key_level3]
                                option_inst = Option(name=yml_key_level3, value=value)
                                tool_wrapper_inst_dic["dict_params"][yml_key_level3] = option_inst
                            else:  # file or table
                                for yml_key_level4 in self.__wopfile_yml_dict[yml_key_level1][yml_key_level2][yml_key_level3]:
                                    file_or_table_inst = None
                                    if yml_key_level3 == "file":
                                        # yml_key = yml_key_level4
                                        # str_path_to_file = os.path.join(OptionManager.instance()["--directory"],
                                        #                                self.__wopfile_yml_dict[rule][
                                        #                                    key_second_step][key_third_step][key])
                                        str_path_to_file = self.__wopfile_yml_dict[yml_key_level1][yml_key_level2][yml_key_level3][yml_key_level4]
                                        file_or_table_inst = FileInputOutputInformation(file_key=yml_key_level4, path=str_path_to_file)
                                    elif yml_key_level3 == "table":
                                        yml_key = yml_key_level4
                                        modelname = self.__wopfile_yml_dict[yml_key_level1][yml_key_level2][
                                            yml_key_level3][
                                            yml_key]
                                        model_py_path = modelname
                                        table_name = model_py_path.split('.')[-1]
                                        file_or_table_inst = TableInputOutputInformation(model_py_path=model_py_path,
                                                                                         table_key=yml_key_level4, table_name=table_name)

                                    # all elements of the current rule block are stored in there
                                    # key_second_step is input or output here
                                    # tool_wrapper_inst_dic["dict_" + yml_key_level2][yml_key_level3][yml_key] = obj_created
                                    tool_wrapper_inst_dic["dict_" + yml_key_level2][yml_key_level3][yml_key_level4] \
                                        = file_or_table_inst
                                    Logger.instance().debug("Object " + yml_key_level2 + " " + yml_key_level3 + ": " +
                                                            yml_key_level4 + " created.")
                    else:
                        # if the step is not a dict, then it is supposed to be the "tool" line
                        tool_wrapper_py_path = self.__wopfile_yml_dict[yml_key_level1][yml_key_level2]
                # At this point, "tool_wrapper_inst_dic" is like this:
                # {
                #     'dict_params': {
                #         'option1': Option('option1', 'valueofoption1')
                #     },
                #     'dict_input': {
                #         'file' : {
                #             'input1': FileInputOutputInformation('input1', 'path/to/input1')
                #         }
                #         'table': {
                #             'table1': TableInputOutputInformation('table1', 'package.of.table1')
                #         }
                #     },
                # }

                # Instantiate the referred class and add it to the set of objects
                tool_wrapper_inst = self.create_tool_wrapper_inst(rule_name_str, tool_wrapper_py_path, tool_wrapper_inst_dic,
                                                                  input_entry, output_entry)
                # Associating a tool_python_path to an execution
                tool_wrapper_inst.relation_toolwrapper_to_execution = execution
                tool_wrapper_set.add(tool_wrapper_inst)
                Logger.instance().debug("Instance tool_python_path: " + tool_wrapper_py_path + " created.")
                # commit/rollback trick to clean the session - SQLAchemy bug suspected
                session.commit()
                session.rollback()
                # totodo LucG set_table_properties outside the rules loop to take into account all the models at once
                # (error if one tool has a foreign key refering to a table that is not in its I/O put
            TableInputOutputInformation.set_tables_properties(TableInputOutputInformation.get_execution_tables())
            session.commit()
            session.rollback()
            # This command is creating the triggers that will update the modification
            TableModificationTime.create_triggers()
            # This create_all will create all models that have been found in the tool_python_path
            SQLManager.instance().create_all()
            session.add_all(tool_wrapper_set)
            # save all operations done so far.
            session.commit()
            for tool_wrapper in tool_wrapper_set:
                tool_wrapper.is_content_respected()
        except NoResultFound as e:
            session.rollback()
            raise WopMarsException("Error while parsing the configuration file. The database has not been setUp Correctly.",
                                   str(e))

    def create_tool_wrapper_inst(self, rule_name, tool_python_path, dict_dict_dict_elm, input_entry, output_entry):
        """
        Actual creating of the Toolwrapper object.

        The tool_python_path object is an entry of the table rule in the resulting database.

        If the scoped_session has current modification, they probably will be commited during this method:
        models are created and this can only be done with clean session.

        :param rule_name: Contains the is_input of the rule in which the tool_python_path will be used.
        :type rule_name: str
        :param tool_python_path: Contains the is_input of the tool_python_path. It will be used for importing the correct module and then for creating the class
        :type tool_python_path: str
        :param dict_dict_dict_elm: "input"s "output"s and "params" and will be used to make relations between options / input / output and the tool_python_path.
        :type dict_dict_dict_elm: dict(dict(dict()))
        :param input_entry: input entry
        :type input_entry: :class:`wopmars.framework.bdd.models.TypeInputOrOutput.TypeInputOrOutput`
        :param output_entry: output entry
        :type output_entry: :class:`wopmars.framework.bdd.models.TypeInputOrOutput.TypeInputOrOutput`

        :return: TooLWrapper instance
        """
        session = SQLManager.instance().get_session()
        # Importing the module in the mod variable
        try:
            mod = importlib.import_module(tool_python_path)
            # Building the class object
            ToolWrapper_class = eval("mod." + tool_python_path.split('.')[-1])
        except AttributeError:
            raise WopMarsException("Error while parsing the configuration file: \n\t",
                                   "The class " + tool_python_path + " doesn't exist.")
        except ImportError as IE:
            if tool_python_path in str(IE):
                raise WopMarsException("Error while parsing the configuration file:",
                                       tool_python_path + " module is not in the pythonpath. ")
            else:
                raise WopMarsException("Error while parsing the configuration file:",
                                       tool_python_path + " module contains an ImportError: " + str(IE))
        # Initialize the instance of the user ToolWrapper
        tool_wrapper_inst = ToolWrapper_class(rule_name=rule_name)

        # associating ToolWrapper instances with their files / models
        for elm in dict_dict_dict_elm["dict_input"]:
            if elm == "file":
                for input_f in dict_dict_dict_elm["dict_input"][elm]:
                    # set the type of FileInputOutputInformation object
                    iofileput_entry = dict_dict_dict_elm["dict_input"][elm][input_f]
                    iofileput_entry.relation_file_or_tableioinfo_to_typeio = input_entry
                    try:
                        # associating file and tool_python_path
                        tool_wrapper_inst.relation_toolwrapper_to_fileioinfo.append(iofileput_entry)
                    except ObjectDeletedError as e:
                        raise WopMarsException("Error in the tool_python_path class declaration. Please, notice the developer",
                                               "The error is probably caused by the lack of the 'polymorphic_identity' attribute"
                                               " in the tool_python_path. Error message: \n" + str(e))
            elif elm == "table":
                for input_t in dict_dict_dict_elm["dict_input"][elm]:
                    # input_t is the is_input of the table (not the model)
                    # this is a preventing commit because next statement will create a new table and the session has to
                    # be clean. I think it is a bug in SQLAlchemy which not allows queries then insert statements in
                    # the same session
                    session.commit()
                    iodbput_entry = dict_dict_dict_elm["dict_input"][elm][input_t]
                    # the user-side models are created during the reading of the definition file
                    # table_entry = TableInputOutputInformation(is_input=dict_dict_dict_elm["dict_input"][elm][input_t], tablename=input_t)
                    # insert in the database the mtime_epoch_millis of last modification of a developper-side table
                    time_unix_ms, time_human = get_current_time()
                    model_py_path_suffix = dict_dict_dict_elm["dict_input"][elm][input_t].model_py_path.split('.')[-1]
                    modification_table_entry, created = session.get_or_create(TableModificationTime,
                                                                              defaults={
                                                                                  "mtime_epoch_millis": time_unix_ms,
                                                                                  "mtime_human": time_human},
                                                                              table_name=model_py_path_suffix)
                    iodbput_entry.relation_tableioinfo_to_tablemodiftime = modification_table_entry
                    iodbput_entry.relation_file_or_tableioinfo_to_typeio = input_entry
                    try:
                        tool_wrapper_inst.relation_toolwrapper_to_tableioinfo.append(iodbput_entry)
                    except ObjectDeletedError as e:
                        raise WopMarsException("Error in the tool_python_path class declaration. Please, notice the developer",
                                               "The error is probably caused by the lack of the 'polymorphic_identity' attribute"
                                               " in the tool_python_path. Error message: \n" + str(e))

        for elm in dict_dict_dict_elm["dict_output"]:
            if elm == "file":
                for output_f in dict_dict_dict_elm["dict_output"][elm]:
                    iofileput_entry = dict_dict_dict_elm["dict_output"][elm][output_f]
                    iofileput_entry.relation_file_or_tableioinfo_to_typeio = output_entry
                    try:
                        tool_wrapper_inst.relation_toolwrapper_to_fileioinfo.append(iofileput_entry)
                    except ObjectDeletedError as e:
                        raise WopMarsException("Error in the tool_python_path class declaration. Please, notice the developer",
                                               "The error is probably caused by the lack of the 'polymorphic_identity' attribute"
                                               " in the tool_python_path. Error message: \n" + str(e))
            elif elm == "table":
                for output_t in dict_dict_dict_elm["dict_output"][elm]:
                    # output_t is the table is_input (not the model)
                    session.commit()
                    iodbput_entry = dict_dict_dict_elm["dict_output"][elm][output_t]
                    time_unix_ms, time_human = get_current_time()
                    # This corresponds the __tablename__ of the database in the database
                    model_py_path_suffix = dict_dict_dict_elm["dict_output"][elm][output_t].model_py_path.split('.')[-1]
                    modification_table_entry, created = session.get_or_create(TableModificationTime,
                                                                              defaults={
                                                                                  "mtime_epoch_millis": time_unix_ms,
                                                                                  "mtime_human": time_human},
                                                                              table_name=model_py_path_suffix)
                    iodbput_entry.relation_tableioinfo_to_tablemodiftime = modification_table_entry
                    iodbput_entry.relation_file_or_tableioinfo_to_typeio = output_entry
                    try:
                        tool_wrapper_inst.relation_toolwrapper_to_tableioinfo.append(iodbput_entry)
                    except ObjectDeletedError as e:
                        raise WopMarsException(
                            "Error in the tool_python_path class declaration. Please, notice the developer",
                            "The error is probably caused by the lack of the 'polymorphic_identity' attribute"
                            " in the tool_python_path. Error message: \n" + str(
                                e))

        for opt in dict_dict_dict_elm["dict_params"]:
            # associating option and tool_python_path
            tool_wrapper_inst.relation_toolwrapper_to_option.append(dict_dict_dict_elm["dict_params"][opt])

        # toolwrapper_wrapper.is_content_respected()
        return tool_wrapper_inst
| [
11748,
1330,
8019,
198,
11748,
302,
198,
11748,
28686,
198,
198,
11748,
331,
43695,
198,
6738,
331,
43695,
13,
41571,
273,
1330,
28407,
273,
12331,
198,
198,
6738,
266,
404,
76,
945,
13,
26791,
13,
7785,
699,
1330,
651,
62,
14421,
62,... | 1.987332 | 19,024 |
from flask import request
from flask_restx import Resource
from app.main.service.auth_service import *
from app.main.representation.auth import AuthRepresentation
from app.main.decorator.auth_decorator import token_required
# Shared API namespace and request/response models exposed by the
# representation layer; reused by the route definitions below.
api = AuthRepresentation.api
user_auth = AuthRepresentation.user_auth
auth_details = AuthRepresentation.auth_details
@api.route('/login')
@api.route('/verify')
@api.route('/logout')
| [
6738,
42903,
1330,
2581,
198,
6738,
42903,
62,
2118,
87,
1330,
20857,
198,
198,
6738,
598,
13,
12417,
13,
15271,
13,
18439,
62,
15271,
1330,
1635,
198,
6738,
598,
13,
12417,
13,
15603,
341,
13,
18439,
1330,
26828,
40171,
341,
198,
673... | 3.312 | 125 |
"""
mbed CMSIS-DAP debugger
Copyright (c) 2015-2018 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from enum import Enum
import six
from functools import total_ordering
class MemoryType(Enum):
    """! @brief Known types of memory."""
    OTHER = 0     # Unclassified memory.
    RAM = 1       # RAM region.
    ROM = 2       # ROM region.
    FLASH = 3     # Flash memory region.
    DEVICE = 4    # Device or peripheral memory.
    EXTERNAL = 5  # Memory external to the device.
## @brief A range of memory within a region.
@total_ordering
## @brief A range of memory within a region.
## @brief One contiguous range of memory.
## @brief Contiguous region of RAM.
## @brief Contiguous region of ROM.
## @brief Contiguous region of flash memory.
## @brief Contiguous region of external memory.
## @brief Device or peripheral memory.
## @brief Memory map consisting of memory regions.
| [
37811,
198,
285,
3077,
40773,
1797,
12,
35,
2969,
49518,
198,
15069,
357,
66,
8,
1853,
12,
7908,
20359,
15302,
628,
49962,
739,
262,
24843,
13789,
11,
10628,
362,
13,
15,
357,
1169,
366,
34156,
15341,
198,
345,
743,
407,
779,
428,
2... | 3.45 | 360 |
import json
if __name__ == '__main__':
    # Script entry point. NOTE(review): `save` and `load` are not visible in
    # this chunk -- presumably defined earlier in this module; confirm that
    # calling save() before load() is the intended round-trip order.
    save()
    load()
11748,
33918,
628,
198,
361,
11593,
3672,
834,
6624,
705,
834,
12417,
834,
10354,
198,
220,
220,
220,
3613,
3419,
198,
220,
220,
220,
3440,
3419
] | 2.384615 | 26 |
from src.utils.program3.node import Node
from .class_block import ClassBlock | [
6738,
12351,
13,
26791,
13,
23065,
18,
13,
17440,
1330,
19081,
198,
6738,
764,
4871,
62,
9967,
1330,
5016,
12235
] | 3.8 | 20 |
import distutils.spawn
import os.path
import sys
import subprocess
import pprint
import json
import base64
import signal
import pprint
import time
import http.cookiejar
import urllib.parse
import ChromeController.filter_funcs as filter_funcs
from ChromeController.cr_exceptions import ChromeResponseNotReceived
from ChromeController.cr_exceptions import ChromeNavigateTimedOut
from ChromeController.cr_exceptions import ChromeError
from ChromeController.resources import js
# We use the generated wrapper. If you want a different version, use the CLI interface to update.
from ChromeController.Generator.Generated import ChromeRemoteDebugInterface as ChromeRemoteDebugInterface_base
# Default number of seconds to wait for remote-debug responses before
# giving up (used by the blocking navigation helpers below).
DEFAULT_TIMEOUT_SECS = 10
class ChromeRemoteDebugInterface(ChromeRemoteDebugInterface_base):
	'''
	Remote control class for Chromium.

	Wraps the auto-generated Chrome DevTools Protocol bindings
	(ChromeRemoteDebugInterface_base) with higher-level helpers for
	header/cookie management, blocking navigation, content extraction
	and javascript execution.
	'''
	def update_headers(self, header_args):
		'''
		Given a set of headers, update both the user-agent
		and additional headers for the remote browser.
		header_args must be a dict. Keys are the names of
		the corresponding HTTP header.
		return value is a 2-tuple of the results of the user-agent
		update, as well as the extra headers update.
		If no 'User-Agent' key is present in the new headers,
		the first item in the tuple will be None
		'''
		assert isinstance(header_args, dict), "header_args must be a dict, passed type was %s" \
			% (type(header_args), )
		# The user-agent has a dedicated CDP call; everything else goes
		# through Network.setExtraHTTPHeaders.
		ua = header_args.pop('User-Agent', None)
		ret_1 = None
		if ua:
			ret_1 = self.Network_setUserAgentOverride(userAgent=ua)
		ret_2 = self.Network_setExtraHTTPHeaders(headers = header_args)
		return (ret_1, ret_2)
	def __exec_js(self, script, args=None, **extra_params):
		'''
		Execute the passed javascript statement, optionally with passed
		arguments.
		Note that if `script` is not a function, it must be a single statement.
		The presence of semicolons not enclosed in a bracket scope will produce
		an error.
		'''
		if args is None:
			args = {}
		# How chromedriver does this:
		# std::unique_ptr<base::Value>* result) {
		#   std::string json;
		#   base::JSONWriter::Write(args, &json);
		#   // TODO(zachconrad): Second null should be array of shadow host ids.
		#   std::string expression = base::StringPrintf(
		#       "(%s).apply(null, [null, %s, %s])",
		#       kCallFunctionScript,
		#       function.c_str(),
		#       json.c_str());
		expression = "({}).apply(null, [null, {}, {}])".format(
				js.kCallFunctionScript,
				script,
				json.dumps(args)
			)
		resp3 = self.Runtime_evaluate(expression=expression, **extra_params)
		return resp3
	# Interact with http.cookiejar.Cookie() instances
	def get_cookies(self):
		'''
		Retreive the cookies from the remote browser.
		Return value is a list of http.cookiejar.Cookie() instances.
		These can be directly used with the various http.cookiejar.XXXCookieJar
		cookie management classes.
		'''
		ret = self.Network_getAllCookies()
		assert 'result' in ret, "No return value in function response!"
		assert 'cookies' in ret['result'], "No 'cookies' key in function response"
		cookies = []
		for raw_cookie in ret['result']['cookies']:
			# Chromium seems to support the following key values for the cookie dict:
			# 	"name"
			# 	"value"
			# 	"domain"
			# 	"path"
			# 	"expires"
			# 	"httpOnly"
			# 	"session"
			# 	"secure"
			#
			#  This seems supported by the fact that the underlying chromium cookie implementation has
			#  the following members:
			#    std::string name_;
			#    std::string value_;
			#    std::string domain_;
			#    std::string path_;
			#    base::Time creation_date_;
			#    base::Time expiry_date_;
			#    base::Time last_access_date_;
			#    bool secure_;
			#    bool httponly_;
			#    CookieSameSite same_site_;
			#    CookiePriority priority_;
			#
			# See chromium/net/cookies/canonical_cookie.h for more.
			#
			# I suspect the python cookie implementation is derived exactly from the standard, while the
			# chromium implementation is more of a practically derived structure.
			# Network.setCookie
			baked_cookie = http.cookiejar.Cookie(
					# We assume V0 cookies, principally because I don't think I've /ever/ actually encountered a V1 cookie.
					# Chromium doesn't seem to specify it.
					version = 0,
					name = raw_cookie['name'],
					value = raw_cookie['value'],
					port = None,
					port_specified = False,
					domain = raw_cookie['domain'],
					domain_specified = True,
					domain_initial_dot = False,
					path = raw_cookie['path'],
					path_specified = False,
					secure = raw_cookie['secure'],
					expires = raw_cookie['expires'],
					discard = raw_cookie['session'],
					comment = None,
					comment_url = None,
					rest = {"httponly":"%s" % raw_cookie['httpOnly']},
					rfc2109 = False
				)
			cookies.append(baked_cookie)
		return cookies
	def set_cookie(self, cookie):
		'''
		Add a cookie to the remote chromium instance.
		Passed value `cookie` must be an instance of `http.cookiejar.Cookie()`.
		'''
		# Function path: Network.setCookie
		# 	Domain: Network
		# 	Method name: setCookie
		# 	WARNING: This function is marked 'Experimental'!
		# 	Parameters:
		# 		Required arguments:
		# 			'url' (type: string) -> The request-URI to associate with the setting of the cookie. This value can affect the default domain and path values of the created cookie.
		# 			'name' (type: string) -> The name of the cookie.
		# 			'value' (type: string) -> The value of the cookie.
		# 		Optional arguments:
		# 			'domain' (type: string) -> If omitted, the cookie becomes a host-only cookie.
		# 			'path' (type: string) -> Defaults to the path portion of the url parameter.
		# 			'secure' (type: boolean) -> Defaults ot false.
		# 			'httpOnly' (type: boolean) -> Defaults to false.
		# 			'sameSite' (type: CookieSameSite) -> Defaults to browser default behavior.
		# 			'expirationDate' (type: Timestamp) -> If omitted, the cookie becomes a session cookie.
		# 	Returns:
		# 		'success' (type: boolean) -> True if successfully set cookie.
		# 	Description: Sets a cookie with the given cookie data; may overwrite equivalent cookies if they exist.
		assert isinstance(cookie, http.cookiejar.Cookie), 'The value passed to `set_cookie` must be an instance of http.cookiejar.Cookie().' + \
			' Passed: %s ("%s").' % (type(cookie), cookie)
		# Yeah, the cookielib stores this attribute as a string, despite it containing a
		# boolean value. No idea why.
		is_http_only = str(cookie.get_nonstandard_attr('httponly', 'False')).lower() == "true"
		# I'm unclear what the "url" field is actually for. A cookie only needs the domain and
		# path component to be fully defined. Considering the API apparently allows the domain and
		# path parameters to be unset, I think it forms a partially redundant, with some
		# strange interactions with mode-changing between host-only and more general
		# cookies depending on what's set where.
		# Anyways, given we need a URL for the API to work properly, we produce a fake
		# host url by building it out of the relevant cookie properties.
		fake_url = urllib.parse.urlunsplit((
				"http" if is_http_only else "https",  # Scheme
				cookie.domain,                        # netloc
				cookie.path,                          # path
				'',                                   # query
				'',                                   # fragment
			))
		params = {
				'url' : fake_url,
				'name' : cookie.name,
				'value' : cookie.value if cookie.value else "",
				'domain' : cookie.domain,
				'path' : cookie.path,
				'secure' : cookie.secure,
				'expires' : float(cookie.expires) if cookie.expires else float(2**32),
				'httpOnly' : is_http_only,
				# The "sameSite" flag appears to be a chromium-only extension for controlling
				# cookie sending in non-first-party contexts. See:
				# https://bugs.chromium.org/p/chromium/issues/detail?id=459154
				# Anyways, we just use the default here, whatever that is.
				# sameSite = cookie.xxx
			}
		ret = self.Network_setCookie(**params)
		return ret
	def clear_cookies(self):
		'''
		At this point, this is just a thin shim around the Network_clearBrowserCookies() operation.
		That function postdates the clear_cookies() call here.
		'''
		self.Network_clearBrowserCookies()
	def navigate_to(self, url):
		'''
		Trigger a page navigation to url `url`.
		Note that this is done via javascript injection, and as such results in
		the `referer` header being sent with the url of the network location.
		This is useful when a page's navigation is stateful, or for simple
		cases of referrer spoofing.
		'''
		# Single quotes would terminate the injected string literal early.
		assert "'" not in url
		return self.__exec_js("window.location.href = '{}'".format(url))
	def get_current_url(self):
		'''
		Probe the remote session for the current window URL.
		This is primarily used to do things like unwrap redirects,
		or circumvent outbound url wrappers.
		'''
		res = self.Page_getNavigationHistory()
		assert 'result' in res
		assert 'currentIndex' in res['result']
		assert 'entries' in res['result']
		return res['result']['entries'][res['result']['currentIndex']]['url']
	def get_page_url_title(self):
		'''
		Get the title and current url from the remote session.
		Return is a 2-tuple: (page_title, page_url).
		'''
		cr_tab_id = self.transport._get_cr_tab_meta_for_key(self.tab_id)['id']
		targets = self.Target_getTargets()
		assert 'result' in targets
		assert 'targetInfos' in targets['result']
		# NOTE(review): if no target matches this tab id, the method falls
		# through and implicitly returns None -- confirm callers handle that.
		for tgt in targets['result']['targetInfos']:
			if tgt['targetId'] == cr_tab_id:
				# {
				# 	'title': 'Page Title 1',
				# 	'targetId': '9d2c503c-e39e-42cc-b950-96db073918ee',
				# 	'attached': True,
				# 	'url': 'http://localhost:47181/with_title_1',
				# 	'type': 'page'
				# }
				title = tgt['title']
				cur_url = tgt['url']
				return title, cur_url
	def click_link_containing_url(self, url):
		'''
		TODO
		'''
		# NOTE(review): stub -- navigates to a hard-coded '/test' path and
		# ignores the `url` argument entirely.
		# exec_func =
		self.__exec_js("window.location.href = '/test'")
		# js.kCallFunctionScript
		# "window.history.back();"
		# elem = self.find_element("//a".format(url))
		# print(elem)
	def execute_javascript(self, *args, **kwargs):
		'''
		Execute a javascript string in the context of the browser tab.
		'''
		ret = self.__exec_js(*args, **kwargs)
		return ret
	def find_element(self, search):
		'''
		DOM_performSearch(self, query, includeUserAgentShadowDOM)
		Python Function: DOM_performSearch
		        Domain: DOM
		        Method name: performSearch
		        WARNING: This function is marked 'Experimental'!
		        Parameters:
		                'query' (type: string) -> Plain text or query selector or XPath search query.
		                'includeUserAgentShadowDOM' (type: boolean) -> True to search in user agent shadow DOM.
		        Returns:
		                'searchId' (type: string) -> Unique search session identifier.
		                'resultCount' (type: integer) -> Number of search results.
		        Description: Searches for a given string in the DOM tree. Use <code>getSearchResults</code> to access search results or <code>cancelSearch</code> to end this search session.
		Python Function: DOM_getSearchResults
		        Domain: DOM
		        Method name: getSearchResults
		        WARNING: This function is marked 'Experimental'!
		        Parameters:
		                'searchId' (type: string) -> Unique search session identifier.
		                'fromIndex' (type: integer) -> Start index of the search result to be returned.
		                'toIndex' (type: integer) -> End index of the search result to be returned.
		        Returns:
		                'nodeIds' (type: array) -> Ids of the search result nodes.
		        Description: Returns search results from given <code>fromIndex</code> to given <code>toIndex</code> from the sarch with the given identifier.
		DOM_discardSearchResults(self, searchId)
		Python Function: DOM_discardSearchResults
		        Domain: DOM
		        Method name: discardSearchResults
		        WARNING: This function is marked 'Experimental'!
		        Parameters:
		                'searchId' (type: string) -> Unique search session identifier.
		        No return value.
		        Description: Discards search results from the session with the given id. <code>getSearchResults</code> should no longer be called for that search.
		'''
		res = self.DOM_performSearch(search, includeUserAgentShadowDOM=False)
		assert 'result' in res
		assert 'searchId' in res['result']
		searchid = res['result']['searchId']
		res_cnt = res['result']['resultCount']
		self.log.debug("%s", res)
		self.log.debug("%s", searchid)
		if res_cnt == 0:
			return None
		items = self.DOM_getSearchResults(searchId=searchid, fromIndex=0, toIndex=res_cnt)
		self.log.debug("Results:")
		self.log.debug("%s", items)
		# BUGFIX: the fetched results were previously discarded, so this
		# method always returned None. Return the matched DOM node IDs so
		# callers can actually use the search results.
		assert 'result' in items
		assert 'nodeIds' in items['result']
		return items['result']['nodeIds']
	def click_element(self, contains_url):
		'''
		TODO
		ChromeDriver source for how to click an element:
			Status ExecuteClickElement(Session* session,
			                           WebView* web_view,
			                           const std::string& element_id,
			                           const base::DictionaryValue& params,
			                           std::unique_ptr<base::Value>* value) {
			  std::string tag_name;
			  Status status = GetElementTagName(session, web_view, element_id, &tag_name);
			  if (status.IsError())
			    return status;
			  if (tag_name == "option") {
			    bool is_toggleable;
			    status = IsOptionElementTogglable(
			        session, web_view, element_id, &is_toggleable);
			    if (status.IsError())
			      return status;
			    if (is_toggleable)
			      return ToggleOptionElement(session, web_view, element_id);
			    else
			      return SetOptionElementSelected(session, web_view, element_id, true);
			  } else {
			    WebPoint location;
			    status = GetElementClickableLocation(
			        session, web_view, element_id, &location);
			    if (status.IsError())
			      return status;
			    std::list<MouseEvent> events;
			    events.push_back(
			        MouseEvent(kMovedMouseEventType, kNoneMouseButton,
			                   location.x, location.y, session->sticky_modifiers, 0));
			    events.push_back(
			        MouseEvent(kPressedMouseEventType, kLeftMouseButton,
			                   location.x, location.y, session->sticky_modifiers, 1));
			    events.push_back(
			        MouseEvent(kReleasedMouseEventType, kLeftMouseButton,
			                   location.x, location.y, session->sticky_modifiers, 1));
			    status =
			        web_view->DispatchMouseEvents(events, session->GetCurrentFrameId());
			    if (status.IsOk())
			      session->mouse_position = location;
			    return status;
			  }
			}
		'''
		# NOTE(review): unimplemented stub.
		pass
	def get_unpacked_response_body(self, requestId, mimetype="application/unknown"):
		'''
		Return a unpacked, decoded resposne body from Network_getResponseBody()
		'''
		content = self.Network_getResponseBody(requestId)
		assert 'result' in content
		result = content['result']
		assert 'base64Encoded' in result
		assert 'body' in result
		# Binary bodies come back base64-encoded; text bodies are plain strings.
		if result['base64Encoded']:
			content = base64.b64decode(result['body'])
		else:
			content = result['body']
		self.log.info("Navigate complete. Received %s byte response with type %s.", len(content), mimetype)
		return {'binary' : result['base64Encoded'], 'mimetype' : mimetype, 'content' : content}
	def handle_page_location_changed(self, timeout=None):
		'''
		If the chrome tab has internally redirected (generally because jerberscript), this
		will walk the page navigation responses and attempt to fetch the response body for
		the tab's latest location.
		'''
		# In general, this is often called after other mechanisms have confirmed
		# that the tab has already navigated. As such, we want to not wait a while
		# to discover something went wrong, so use a timeout that basically just
		# results in checking the available buffer, and nothing else.
		if not timeout:
			timeout = 0.1
		self.log.debug("We may have redirected. Checking.")
		messages = self.transport.recv_all_filtered(filter_funcs.capture_loading_events, tab_key=self.tab_id)
		if not messages:
			raise ChromeError("Couldn't track redirect! No idea what to do!")
		last_message = messages[-1]
		self.log.info("Probably a redirect! New content url: '%s'", last_message['params']['documentURL'])
		resp = self.transport.recv_filtered(filter_funcs.network_response_recieved_for_url(last_message['params']['documentURL'], last_message['params']['frameId']), tab_key=self.tab_id)
		resp = resp['params']
		ctype = 'application/unknown'
		resp_response = resp['response']
		if 'mimeType' in resp_response:
			ctype = resp_response['mimeType']
		if 'headers' in resp_response and 'content-type' in resp_response['headers']:
			ctype = resp_response['headers']['content-type'].split(";")[0]
		# We assume the last document request was the redirect.
		# This is /probably/ kind of a poor practice, but what the hell.
		# I have no idea what this would do if there are non-html documents (or if that can even happen.)
		return self.get_unpacked_response_body(last_message['params']['requestId'], mimetype=ctype)
	def blocking_navigate_and_get_source(self, url, timeout=DEFAULT_TIMEOUT_SECS):
		'''
		Do a blocking navigate to url `url`, and then extract the
		response body and return that.
		This effectively returns the *unrendered* page content that's sent over the wire. As such,
		if the page does any modification of the contained markup during rendering (via javascript), this
		function will not reflect the changes made by the javascript.
		The rendered page content can be retreived by calling `get_rendered_page_source()`.
		Due to the remote api structure, accessing the raw content after the content has been loaded
		is not possible, so any task requiring the raw content must be careful to request it
		before it actually navigates to said content.
		Return value is a dictionary with two keys:
		{
			'binary' : (boolean, true if content is binary, false if not)
			'content' : (string of bytestring, depending on whether `binary` is true or not)
		}
		'''
		resp = self.blocking_navigate(url, timeout)
		assert 'requestId' in resp
		assert 'response' in resp
		# self.log.debug('blocking_navigate Response %s', pprint.pformat(resp))
		ctype = 'application/unknown'
		resp_response = resp['response']
		if 'mimeType' in resp_response:
			ctype = resp_response['mimeType']
		if 'headers' in resp_response and 'content-type' in resp_response['headers']:
			ctype = resp_response['headers']['content-type'].split(";")[0]
		self.log.debug("Trying to get response body")
		try:
			ret = self.get_unpacked_response_body(resp['requestId'], mimetype=ctype)
		except ChromeError:
			# The response body may no longer be available if the tab
			# redirected; fall back to walking the navigation responses.
			ret = self.handle_page_location_changed(timeout)
		return ret
	def get_rendered_page_source(self, dom_idle_requirement_secs=3, max_wait_timeout=30):
		'''
		Get the HTML markup for the current page.
		This is done by looking up the root DOM node, and then requesting the outer HTML
		for that node ID.
		This calls return will reflect any modifications made by javascript to the
		page. For unmodified content, use `blocking_navigate_and_get_source()`
		dom_idle_requirement_secs specifies the period of time for which there must have been no
		DOM modifications before treating the rendered output as "final". This call will therefore block for
		at least dom_idle_requirement_secs seconds.
		'''
		# There are a bunch of events which generally indicate a page is still doing *things*.
		# I have some concern about how this will handle things like advertisements, which
		# basically load crap forever. That's why we have the max_wait_timeout.
		target_events = [
			"Page.frameResized",
			"Page.frameStartedLoading",
			"Page.frameNavigated",
			"Page.frameAttached",
			"Page.frameStoppedLoading",
			"Page.frameScheduledNavigation",
			"Page.domContentEventFired",
			"Page.frameClearedScheduledNavigation",
			"Page.loadEventFired",
			"DOM.documentUpdated",
			"DOM.childNodeInserted",
			"DOM.childNodeRemoved",
			"DOM.childNodeCountUpdated",
		]
		start_time = time.time()
		try:
			while 1:
				if time.time() - start_time > max_wait_timeout:
					self.log.debug("Page was not idle after waiting %s seconds. Giving up and extracting content now.", max_wait_timeout)
					# BUGFIX: without this break, a page that keeps firing DOM
					# events would keep this loop running forever, despite the
					# log line above claiming we give up.
					break
				self.transport.recv_filtered(filter_funcs.wait_for_methods(target_events),
					tab_key=self.tab_id, timeout=dom_idle_requirement_secs)
		except ChromeResponseNotReceived:
			# We timed out, the DOM is probably idle.
			pass
		# We have to find the DOM root node ID
		dom_attr = self.DOM_getDocument(depth=-1, pierce=False)
		assert 'result' in dom_attr
		assert 'root' in dom_attr['result']
		assert 'nodeId' in dom_attr['result']['root']
		# Now, we have the root node ID.
		root_node_id = dom_attr['result']['root']['nodeId']
		# Use that to get the HTML for the specified node
		response = self.DOM_getOuterHTML(nodeId=root_node_id)
		assert 'result' in response
		assert 'outerHTML' in response['result']
		return response['result']['outerHTML']
	def take_screeshot(self):
		'''
		Take a screenshot of the virtual viewport content.
		Return value is a png image as a bytestring.
		'''
		# NOTE(review): method name typo ("screeshot") is kept for
		# backwards compatibility with existing callers.
		resp = self.Page_captureScreenshot()
		assert 'result' in resp
		assert 'data' in resp['result']
		imgdat = base64.b64decode(resp['result']['data'])
		return imgdat
	def blocking_navigate(self, url, timeout=DEFAULT_TIMEOUT_SECS):
		'''
		Do a blocking navigate to url `url`.
		This function triggers a navigation, and then waits for the browser
		to claim the page has finished loading.
		Roughly, this corresponds to the javascript `DOMContentLoaded` event,
		meaning the dom for the page is ready.
		Internals:
		A navigation command results in a sequence of events:
		 - Page.frameStartedLoading" (with frameid)
		 - Page.frameStoppedLoading" (with frameid)
		 - Page.loadEventFired" (not attached to an ID)
		Therefore, this call triggers a navigation option,
		and then waits for the expected set of response event messages.
		'''
		self.transport.flush(tab_key=self.tab_id)
		self.log.debug("Blocking navigate to URL: '%s'", url)
		ret = self.Page_navigate(url = url)
		assert("result" in ret), "Missing return content"
		assert("frameId" in ret['result']), "Missing 'frameId' in return content"
		assert("loaderId" in ret['result']), "Missing 'loaderId' in return content"
		expected_id = ret['result']['frameId']
		loader_id = ret['result']['loaderId']
		try:
			self.log.debug("Waiting for frame navigated command response.")
			self.transport.recv_filtered(filter_funcs.check_frame_navigated_command(expected_id), tab_key=self.tab_id, timeout=timeout)
			self.log.debug("Waiting for frameStartedLoading response.")
			self.transport.recv_filtered(filter_funcs.check_frame_load_command("Page.frameStartedLoading"), tab_key=self.tab_id, timeout=timeout)
			self.log.debug("Waiting for frameStoppedLoading response.")
			self.transport.recv_filtered(filter_funcs.check_frame_load_command("Page.frameStoppedLoading"), tab_key=self.tab_id, timeout=timeout)
			# self.transport.recv_filtered(check_load_event_fired, tab_key=self.tab_id, timeout=timeout)
			self.log.debug("Waiting for responseReceived response.")
			resp = self.transport.recv_filtered(filter_funcs.network_response_recieved_for_url(url=None, expected_id=expected_id), tab_key=self.tab_id, timeout=timeout)
			if resp is None:
				raise ChromeNavigateTimedOut("Blocking navigate timed out!")
			return resp['params']
		# The `Page.frameNavigated ` event does not get fired for non-markup responses.
		# Therefore, if we timeout on waiting for that, check to see if we received a binary response.
		except ChromeResponseNotReceived:
			# So this is basically broken, fix is https://bugs.chromium.org/p/chromium/issues/detail?id=831887
			# but that bug report isn't fixed yet.
			# Siiiigh.
			self.log.warning("Failed to receive expected response to navigate command. Checking if response is a binary object.")
			resp = self.transport.recv_filtered(
					keycheck = filter_funcs.check_frame_loader_command(
						method_name = "Network.responseReceived",
						loader_id   = loader_id
					),
					tab_key  = self.tab_id,
					timeout  = timeout)
			return resp['params']
| [
198,
11748,
1233,
26791,
13,
48183,
198,
11748,
28686,
13,
6978,
198,
11748,
25064,
198,
11748,
850,
14681,
198,
11748,
279,
4798,
198,
11748,
33918,
198,
11748,
2779,
2414,
198,
11748,
6737,
198,
11748,
279,
4798,
198,
11748,
640,
198,
... | 2.711956 | 9,075 |
#
# Copyright (c) 2015, Nikolay Polyarnyi
# All rights reserved.
#
import numpy as np
from triangulum.rendering.entities.abstract import Renderable
from triangulum.rendering.entities.points_cloud import PointsCloud
from triangulum.rendering.entities.camera import Camera
from triangulum.rendering.entities.stripes_projector import StripesProjector
| [
2,
198,
2,
15069,
357,
66,
8,
1853,
11,
48127,
323,
12280,
1501,
48111,
198,
2,
1439,
2489,
10395,
13,
198,
2,
198,
198,
11748,
299,
32152,
355,
45941,
198,
198,
6738,
1333,
648,
14452,
13,
13287,
278,
13,
298,
871,
13,
397,
8709,... | 3.407767 | 103 |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""
Module that defines that implements different types of actions
"""
from __future__ import print_function, division, absolute_import
from Qt.QtCore import Qt
from Qt.QtWidgets import QSizePolicy, QFrame, QMenu, QWidgetAction
from tpDcc.libs.qt.widgets import layouts, label, checkbox, sliders, color, icon
| [
2,
0,
1220,
14629,
14,
8800,
14,
24330,
21015,
198,
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
198,
37811,
198,
26796,
326,
15738,
326,
23986,
1180,
3858,
286,
4028,
198,
37811,
198,
198,
6738,
11593,
37443,
834,... | 3.041322 | 121 |
import argparse
import copy
from distutils import ccompiler
from distutils import errors
from distutils import msvccompiler
from distutils import sysconfig
from distutils import unixccompiler
import os
from os import path
import shutil
import sys
import pkg_resources
import setuptools
from setuptools.command import build_ext
from setuptools.command import sdist
from install import build
from install.build import PLATFORM_DARWIN
from install.build import PLATFORM_LINUX
from install.build import PLATFORM_WIN32
# Minimum Cython version required to cythonize the .pyx sources below.
required_cython_version = pkg_resources.parse_version('0.28.0')
# Known-bad Cython releases to reject even if they satisfy the minimum.
# (Currently empty.)
ignore_cython_versions = [
]
# When CUPY_INSTALL_USE_HIP=1 is set, build against AMD ROCm/HIP rather
# than NVIDIA CUDA.
use_hip = bool(int(os.environ.get('CUPY_INSTALL_USE_HIP', '0')))
# The value of the key 'file' is a list that contains extension names
# or tuples of an extension name and a list of other souces files
# required to build the extension such as .cpp files and .cu files.
#
#   <extension name> | (<extension name>, a list of <other source>)
#
# The extension name is also interpreted as the name of the Cython
# source file required to build the extension with appending '.pyx'
# file extension.
MODULES = []
# Extensions that make up the core 'cuda' module; shared between the
# CUDA and HIP builds (see the MODULES.append calls below).
cuda_files = [
    'cupy_backends.cuda.api.driver',
    'cupy_backends.cuda.api.runtime',
    'cupy_backends.cuda.libs.cublas',
    'cupy_backends.cuda.libs.curand',
    'cupy_backends.cuda.libs.cusparse',
    'cupy_backends.cuda.libs.nvrtc',
    'cupy_backends.cuda.libs.profiler',
    'cupy_backends.cuda.stream',
    'cupy.core._accelerator',
    'cupy.core._carray',
    'cupy.core._cub_reduction',
    'cupy.core._dtype',
    'cupy.core._fusion_kernel',
    'cupy.core._fusion_thread_local',
    'cupy.core._fusion_trace',
    'cupy.core._fusion_variable',
    'cupy.core._kernel',
    'cupy.core._memory_range',
    'cupy.core._optimize_config',
    'cupy.core._reduction',
    'cupy.core._routines_binary',
    'cupy.core._routines_indexing',
    'cupy.core._routines_linalg',
    'cupy.core._routines_logic',
    'cupy.core._routines_manipulation',
    'cupy.core._routines_math',
    'cupy.core._routines_sorting',
    'cupy.core._routines_statistics',
    'cupy.core._scalar',
    'cupy.core.core',
    'cupy.core.dlpack',
    'cupy.core.flags',
    'cupy.core.internal',
    'cupy.core.fusion',
    'cupy.core.new_fusion',
    'cupy.core.raw',
    'cupy.cuda.common',
    'cupy.cuda.cufft',
    'cupy.cuda.device',
    'cupy.cuda.memory',
    'cupy.cuda.memory_hook',
    'cupy.cuda.pinned_memory',
    'cupy.cuda.function',
    'cupy.cuda.stream',
    'cupy.cuda.texture',
    'cupy.fft._cache',
    'cupy.lib.polynomial',
    'cupy._util'
]
# Populate MODULES with the core module plus optional library modules,
# choosing HIP or CUDA variants based on `use_hip`.
if use_hip:
    # We handle nvtx (and likely any other future support) here, because
    # the HIP stubs (hip/cupy_*.h) would cause many symbols
    # to leak into all these modules even if unused. It's easier for all of
    # them to link to the same set of shared libraries.
    MODULES.append({
        'name': 'cuda',
        'file': cuda_files + [
            'cupy.cuda.nvtx',
            'cupy_backends.cuda.libs.cusolver',
        ],
        'include': [
            'hip/hip_runtime_api.h',
            'hip/hiprtc.h',
            'hipblas.h',
            'hiprand/hiprand.h',
            'hipfft.h',
            'roctx.h',
            'rocsolver.h',
        ],
        'libraries': [
            'hiprtc',
            'hip_hcc',
            'hipblas',
            'hiprand',
            'rocfft',
            'roctx64',
            'rocblas',
            'rocsolver',
        ],
    })
else:
    # CUDA build of the core module.
    MODULES.append({
        'name': 'cuda',
        'file': cuda_files,
        'include': [
            'cublas_v2.h',
            'cuda.h',
            'cuda_profiler_api.h',
            'cuda_runtime.h',
            'cufft.h',
            'curand.h',
            'cusparse.h',
            'nvrtc.h',
        ],
        'libraries': [
            'cublas',
            'cuda',
            'cudart',
            'cufft',
            'curand',
            'cusparse',
            'nvrtc',
        ],
        'check_method': build.check_cuda_version,
        'version_method': build.get_cuda_version,
    })
# cuSOLVER is CUDA-only (the HIP build folds rocsolver into 'cuda' above).
if not use_hip:
    MODULES.append({
        'name': 'cusolver',
        'file': [
            'cupy_backends.cuda.libs.cusolver',
        ],
        'include': [
            'cusolverDn.h',
        ],
        'libraries': [
            'cusolver',
        ],
        'check_method': build.check_cuda_version,
    })
# CUDA-only optional libraries: cuDNN, NCCL, NVTX, cuTENSOR and CUB.
if not use_hip:
    MODULES.append({
        'name': 'cudnn',
        'file': [
            'cupy_backends.cuda.libs.cudnn',
            'cupy.cudnn',
        ],
        'include': [
            'cudnn.h',
        ],
        'libraries': [
            'cudnn',
        ],
        'check_method': build.check_cudnn_version,
        'version_method': build.get_cudnn_version,
    })
    MODULES.append({
        'name': 'nccl',
        'file': [
            'cupy.cuda.nccl',
        ],
        'include': [
            'nccl.h',
        ],
        'libraries': [
            'nccl',
        ],
        'check_method': build.check_nccl_version,
        'version_method': build.get_nccl_version,
    })
    MODULES.append({
        'name': 'nvtx',
        'file': [
            'cupy.cuda.nvtx',
        ],
        'include': [
            'nvToolsExt.h',
        ],
        'libraries': [
            # On Windows the NVTX import library carries a versioned name.
            'nvToolsExt' if not PLATFORM_WIN32 else 'nvToolsExt64_1',
        ],
        'check_method': build.check_nvtx,
    })
    MODULES.append({
        'name': 'cutensor',
        'file': [
            'cupy_backends.cuda.libs.cutensor',
            'cupy.cutensor',
        ],
        'include': [
            'cutensor.h',
        ],
        'libraries': [
            'cutensor',
            'cublas',
        ],
        'check_method': build.check_cutensor_version,
        'version_method': build.get_cutensor_version,
    })
    MODULES.append({
        'name': 'cub',
        'file': [
            ('cupy.cuda.cub', ['cupy/cuda/cupy_cub.cu']),
        ],
        'include': [
            'cub/util_namespace.cuh',  # dummy
        ],
        'libraries': [
            'cudart',
        ],
        'check_method': build.check_cub_version,
        'version_method': build.get_cub_version,
    })
else:
    # HIP build uses hipCUB instead of CUB.
    MODULES.append({
        'name': 'cub',
        'file': [
            ('cupy.cuda.cub', ['cupy/cuda/cupy_cub.cu']),
        ],
        'include': [
            'hipcub/hipcub_version.hpp',  # dummy
        ],
        'libraries': [
            'hiprtc',
            'hip_hcc',
        ],
        'check_method': build.check_cub_version,
        'version_method': build.get_cub_version,
    })
# Thrust support can be disabled via CUPY_SETUP_ENABLE_THRUST=0.
if bool(int(os.environ.get('CUPY_SETUP_ENABLE_THRUST', 1))):
    if use_hip:
        MODULES.append({
            'name': 'thrust',
            'file': [
                ('cupy.cuda.thrust', ['cupy/cuda/cupy_thrust.cu']),
            ],
            'include': [
                'thrust/version.h',
            ],
            'libraries': [
                'hiprtc',
                'hip_hcc',
            ],
        })
    else:
        MODULES.append({
            'name': 'thrust',
            'file': [
                ('cupy.cuda.thrust', ['cupy/cuda/cupy_thrust.cu']),
            ],
            'include': [
                'thrust/device_ptr.h',
                'thrust/sequence.h',
                'thrust/sort.h',
            ],
            'libraries': [
                'cudart',
            ],
            'check_method': build.check_thrust_version,
            'version_method': build.get_thrust_version,
        })
def preconfigure_modules(compiler, settings):
    """Returns a list of modules buildable in given environment and settings.

    For each module in MODULES list, this function checks if the module
    can be built in the current environment and reports it.
    Returns a list of module names available.

    A human-readable configuration summary is printed to stdout as a side
    effect. Return value is a 2-tuple ``(module_names, settings)`` where
    ``settings`` may have been augmented with cuTENSOR paths.
    """
    nvcc_path = build.get_nvcc_path()
    summary = [
        '',
        '************************************************************',
        '* CuPy Configuration Summary *',
        '************************************************************',
        '',
        'Build Environment:',
        ' Include directories: {}'.format(str(settings['include_dirs'])),
        ' Library directories: {}'.format(str(settings['library_dirs'])),
        ' nvcc command : {}'.format(
            nvcc_path if nvcc_path else '(not found)'),
        '',
        'Environment Variables:',
    ]
    # Record the build-relevant environment variables for the summary.
    for key in ['CFLAGS', 'LDFLAGS', 'LIBRARY_PATH',
                'CUDA_PATH', 'NVTOOLSEXT_PATH', 'NVCC',
                'ROCM_HOME']:
        summary += [' {:<16}: {}'.format(key, os.environ.get(key, '(none)'))]
    summary += [
        '',
        'Modules:',
    ]
    ret = []
    for module in MODULES:
        installed = False
        status = 'No'
        errmsg = []
        # cuTENSOR lives outside the CUDA toolkit tree; pick up its include
        # and per-CUDA-version lib directories from CUTENSOR_PATH.
        if module['name'] == 'cutensor':
            cuda_version = build.get_cuda_version()
            # e.g. 10010 -> '10.1'
            cuda_version = str(cuda_version // 1000) + '.' + \
                str((cuda_version // 10) % 100)
            cutensor_path = os.environ.get('CUTENSOR_PATH', '')
            inc_path = os.path.join(cutensor_path, 'include')
            if os.path.exists(inc_path):
                settings['include_dirs'].append(inc_path)
            lib_path = os.path.join(cutensor_path, 'lib', cuda_version)
            if os.path.exists(lib_path):
                settings['library_dirs'].append(lib_path)
        print('')
        print('-------- Configuring Module: {} --------'.format(
            module['name']))
        sys.stdout.flush()
        # Probe in order: headers compile -> libraries link -> version check.
        if not check_library(
                compiler,
                includes=module['include'],
                include_dirs=settings['include_dirs'],
                define_macros=settings['define_macros'],
                extra_compile_args=settings['extra_compile_args']):
            errmsg = ['Include files not found: %s' % module['include'],
                      'Check your CFLAGS environment variable.']
        elif not check_library(
                compiler,
                libraries=module['libraries'],
                library_dirs=settings['library_dirs'],
                define_macros=settings['define_macros'],
                extra_compile_args=settings['extra_compile_args']):
            errmsg = ['Cannot link libraries: %s' % module['libraries'],
                      'Check your LDFLAGS environment variable.']
        elif ('check_method' in module and
                not module['check_method'](compiler, settings)):
            # Fail on per-library condition check (version requirements etc.)
            installed = True
            errmsg = ['The library is installed but not supported.']
        elif module['name'] in ('thrust', 'cub') and nvcc_path is None:
            # These modules contain .cu sources and need nvcc to build.
            installed = True
            errmsg = ['nvcc command could not be found in PATH.',
                      'Check your PATH environment variable.']
        else:
            installed = True
            status = 'Yes'
            ret.append(module['name'])
        if installed and 'version_method' in module:
            status += ' (version {})'.format(module['version_method'](True))
        summary += [
            ' {:<10}: {}'.format(module['name'], status)
        ]
        # If error message exists...
        if len(errmsg) != 0:
            summary += [' -> {}'.format(m) for m in errmsg]
            # Skip checking other modules when CUDA is unavailable.
            if module['name'] == 'cuda':
                break
    # Append a warning/error footer when some modules were not available.
    if len(ret) != len(MODULES):
        if 'cuda' in ret:
            lines = [
                'WARNING: Some modules could not be configured.',
                'CuPy will be installed without these modules.',
            ]
        else:
            lines = [
                'ERROR: CUDA could not be found on your system.',
            ]
        summary += [
            '',
        ] + lines + [
            'Please refer to the Installation Guide for details:',
            'https://docs.cupy.dev/en/stable/install.html',
            '',
        ]
    summary += [
        '************************************************************',
        '',
    ]
    print('\n'.join(summary))
    return ret, settings
def make_extensions(options, compiler, use_cython):
"""Produce a list of Extension instances which passed to cythonize()."""
no_cuda = options['no_cuda']
use_hip = not no_cuda and options['use_hip']
settings = build.get_compiler_setting(use_hip)
include_dirs = settings['include_dirs']
settings['include_dirs'] = [
x for x in include_dirs if path.exists(x)]
settings['library_dirs'] = [
x for x in settings['library_dirs'] if path.exists(x)]
# Adjust rpath to use CUDA libraries in `cupy/.data/lib/*.so`) from CuPy.
use_wheel_libs_rpath = (
0 < len(options['wheel_libs']) and not PLATFORM_WIN32)
# In the environment with CUDA 7.5 on Ubuntu 16.04, gcc5.3 does not
# automatically deal with memcpy because string.h header file has
# been changed. This is a workaround for that environment.
# See details in the below discussions:
# https://github.com/BVLC/caffe/issues/4046
# https://groups.google.com/forum/#!topic/theano-users/3ihQYiTRG4E
settings['define_macros'].append(('_FORCE_INLINES', '1'))
if options['linetrace']:
settings['define_macros'].append(('CYTHON_TRACE', '1'))
settings['define_macros'].append(('CYTHON_TRACE_NOGIL', '1'))
if no_cuda:
settings['define_macros'].append(('CUPY_NO_CUDA', '1'))
if use_hip:
settings['define_macros'].append(('CUPY_USE_HIP', '1'))
settings['define_macros'].append(('__HIP_PLATFORM_HCC__', '1'))
available_modules = []
if no_cuda:
available_modules = [m['name'] for m in MODULES]
else:
available_modules, settings = preconfigure_modules(compiler, settings)
if 'cuda' not in available_modules:
raise Exception('Your CUDA environment is invalid. '
'Please check above error log.')
ret = []
for module in MODULES:
if module['name'] not in available_modules:
continue
s = settings.copy()
if not no_cuda:
s['libraries'] = module['libraries']
compile_args = s.setdefault('extra_compile_args', [])
link_args = s.setdefault('extra_link_args', [])
if module['name'] == 'cusolver':
compile_args = s.setdefault('extra_compile_args', [])
link_args = s.setdefault('extra_link_args', [])
# openmp is required for cusolver
if use_hip:
pass
elif compiler.compiler_type == 'unix' and not PLATFORM_DARWIN:
# In mac environment, openmp is not required.
compile_args.append('-fopenmp')
link_args.append('-fopenmp')
elif compiler.compiler_type == 'msvc':
compile_args.append('/openmp')
original_s = s
for f in module['file']:
s = copy.deepcopy(original_s)
name = module_extension_name(f)
rpath = []
if not options['no_rpath']:
# Add library directories (e.g., `/usr/local/cuda/lib64`) to
# RPATH.
rpath += s['library_dirs']
if use_wheel_libs_rpath:
# Add `cupy/.data/lib` (where shared libraries included in
# wheels reside) to RPATH.
# The path is resolved relative to the module, e.g., use
# `$ORIGIN/../cupy/.data/lib` for `cupy/cudnn.so` and
# `$ORIGIN/../../../cupy/.data/lib` for
# `cupy_backends/cuda/libs/cudnn.so`.
depth = name.count('.')
rpath.append(
'{}{}/cupy/.data/lib'.format(_rpath_base(), '/..' * depth))
if not PLATFORM_WIN32 and not PLATFORM_LINUX:
s['runtime_library_dirs'] = rpath
if (PLATFORM_LINUX and s['library_dirs']) or PLATFORM_DARWIN:
ldflag = '-Wl,'
if PLATFORM_LINUX:
ldflag += '--disable-new-dtags,'
ldflag += ','.join('-rpath,' + p for p in rpath)
args = s.setdefault('extra_link_args', [])
args.append(ldflag)
if PLATFORM_DARWIN:
# -rpath is only supported when targeting Mac OS X 10.5 or
# later
args.append('-mmacosx-version-min=10.5')
sources = module_extension_sources(f, use_cython, no_cuda)
extension = setuptools.Extension(name, sources, **s)
ret.append(extension)
return ret
# TODO(oktua): use enviriment variable
cupy_setup_options = parse_args()
print('Options:', cupy_setup_options)
def prepare_wheel_libs():
"""Prepare shared libraries and include files for wheels.
Shared libraries are placed under `cupy/.data/lib` and
RUNPATH will be set to this directory later (Linux only).
Include files are placed under `cupy/.data/include`.
Returns the list of files (path relative to `cupy` module) to add to
the sdist/wheel distribution.
"""
data_dir = os.path.abspath(os.path.join('cupy', '.data'))
if os.path.exists(data_dir):
print('Removing directory: {}'.format(data_dir))
shutil.rmtree(data_dir)
# Generate list files to copy
# tuple of (src_path, dst_path)
files_to_copy = []
# Library files
for srcpath in cupy_setup_options['wheel_libs']:
relpath = os.path.basename(srcpath)
dstpath = os.path.join(data_dir, 'lib', relpath)
files_to_copy.append((srcpath, dstpath))
# Include files
for include_path_spec in cupy_setup_options['wheel_includes']:
srcpath, relpath = include_path_spec.rsplit(':', 1)
dstpath = os.path.join(data_dir, 'include', relpath)
files_to_copy.append((srcpath, dstpath))
# Wheel meta data
wheel_metadata = cupy_setup_options['wheel_metadata']
if wheel_metadata:
files_to_copy.append(
(wheel_metadata, os.path.join(data_dir, '_wheel.json')))
# Copy
for srcpath, dstpath in files_to_copy:
# Note: symlink is resolved by shutil.copy2.
print('Copying file for wheel: {}'.format(srcpath))
dirpath = os.path.dirname(dstpath)
if not os.path.isdir(dirpath):
os.makedirs(dirpath)
shutil.copy2(srcpath, dstpath)
return [os.path.relpath(x[1], 'cupy') for x in files_to_copy]
try:
import Cython
import Cython.Build
cython_version = pkg_resources.parse_version(Cython.__version__)
cython_available = (
cython_version >= required_cython_version and
cython_version not in ignore_cython_versions)
except ImportError:
cython_available = False
def _nvcc_gencode_options(cuda_version):
"""Returns NVCC GPU code generation options."""
if sys.argv == ['setup.py', 'develop']:
return []
envcfg = os.getenv('CUPY_NVCC_GENERATE_CODE', None)
if envcfg:
return ['--generate-code={}'.format(arch)
for arch in envcfg.split(';') if len(arch) > 0]
# The arch_list specifies virtual architectures, such as 'compute_61', and
# real architectures, such as 'sm_61', for which the CUDA input files are
# to be compiled.
#
# The syntax of an entry of the list is
#
# entry ::= virtual_arch | (virtual_arch, real_arch)
#
# where virtual_arch is a string which means a virtual architecture and
# real_arch is a string which means a real architecture.
#
# If a virtual architecture is supplied, NVCC generates a PTX code for the
# virtual architecture. If a pair of a virtual architecture and a real
# architecture is supplied, NVCC generates a PTX code for the virtual
# architecture as well as a cubin code for the real architecture.
#
# For example, making NVCC generate a PTX code for 'compute_60' virtual
# architecture, the arch_list has an entry of 'compute_60'.
#
# arch_list = ['compute_60']
#
# For another, making NVCC generate a PTX code for 'compute_61' virtual
# architecture and a cubin code for 'sm_61' real architecture, the
# arch_list has an entry of ('compute_61', 'sm_61').
#
# arch_list = [('compute_61', 'sm_61')]
if cuda_version >= 11000:
arch_list = ['compute_50',
('compute_60', 'sm_60'),
('compute_61', 'sm_61'),
('compute_70', 'sm_70'),
('compute_75', 'sm_75'),
('compute_80', 'sm_80'),
'compute_80']
elif cuda_version >= 10000:
arch_list = ['compute_30',
'compute_50',
('compute_60', 'sm_60'),
('compute_61', 'sm_61'),
('compute_70', 'sm_70'),
('compute_75', 'sm_75'),
'compute_70']
elif cuda_version >= 9000:
arch_list = ['compute_30',
'compute_50',
('compute_60', 'sm_60'),
('compute_61', 'sm_61'),
('compute_70', 'sm_70'),
'compute_70']
else:
# This should not happen.
assert False
options = []
for arch in arch_list:
if type(arch) is tuple:
virtual_arch, real_arch = arch
options.append('--generate-code=arch={},code={}'.format(
virtual_arch, real_arch))
else:
options.append('--generate-code=arch={},code={}'.format(
arch, arch))
return options
class sdist_with_cython(sdist.sdist):
"""Custom `sdist` command with cyhonizing."""
class custom_build_ext(build_ext.build_ext):
"""Custom `build_ext` command to include CUDA C source files."""
| [
11748,
1822,
29572,
198,
11748,
4866,
198,
6738,
1233,
26791,
1330,
269,
5589,
5329,
198,
6738,
1233,
26791,
1330,
8563,
198,
6738,
1233,
26791,
1330,
13845,
85,
535,
3361,
5329,
198,
6738,
1233,
26791,
1330,
25064,
11250,
198,
6738,
1233... | 2.037099 | 10,836 |
from django.utils import formats
from wagtail.core.blocks import CharBlock, StructBlock, TextBlock, StreamBlock, PageChooserBlock, \
URLBlock, DateBlock, ListBlock, BooleanBlock
from wagtail.documents.blocks import DocumentChooserBlock
from wagtail.embeds.blocks import EmbedBlock
from wagtail.images.blocks import ImageChooserBlock
from wagtail.contrib.table_block.blocks import TableBlock
from isi_mip.contrib.blocks import EmailBlock, IntegerBlock, HeadingBlock, HRBlock, ImageBlock, RichTextBlock, MonospaceTextBlock
from isi_mip.twitter import TwitterTimeline
BASE_BLOCKS = [
('heading', HeadingBlock()),
('rich_text', RichTextBlock()),
('horizontal_ruler', HRBlock()),
('embed', EmbedBlock()),
('image', ImageBlock()),
('table', TableBlock()),
('monospace_text', MonospaceTextBlock())
]
# def render_basic(self, value):
# ret = super().render_basic(value)
# if ret:
# ret = 'PDF' + ret
# return ret
_COLUMNS_BLOCKS = BASE_BLOCKS + [
('small_teaser', SmallTeaserBlock()),
('big_teaser', BigTeaserBlock()),
('isinumbers', IsiNumbersBlock()),
('link', LinkBlock()),
('faqs', FAQsBlock()),
('pdf', PDFBlock()),
]
COLUMNS_BLOCKS = [
('columns_1_to_1', Columns1To1Block()),
('columns_1_to_2', Columns1To2Block()),
('columns_2_to_1', Columns2To1Block()),
('columns_1_to_1_to_1', Columns1To1To1Block()),
('columns_1_to_1_to_1_to_1', Columns1To1To1To1Block()),
]
| [
6738,
42625,
14208,
13,
26791,
1330,
17519,
198,
6738,
266,
363,
13199,
13,
7295,
13,
27372,
1330,
3178,
12235,
11,
32112,
12235,
11,
8255,
12235,
11,
13860,
12235,
11,
7873,
22164,
13416,
12235,
11,
3467,
198,
220,
220,
220,
10289,
122... | 2.518395 | 598 |
import matplotlib.pyplot as plt
import numpy as np
from pyha.common.util import is_float, is_complex
from pyha.cores.util import SQNR
from pyha.simulation.tracer import Tracer
figsize = (9.75, 5)
| [
11748,
2603,
29487,
8019,
13,
9078,
29487,
355,
458,
83,
198,
11748,
299,
32152,
355,
45941,
198,
198,
6738,
12972,
3099,
13,
11321,
13,
22602,
1330,
318,
62,
22468,
11,
318,
62,
41887,
198,
6738,
12972,
3099,
13,
66,
2850,
13,
22602,... | 2.756757 | 74 |
from config import CMS
from optparse import OptionParser
from config import XSectionConfig
from tools.file_utilities import read_data_from_JSON, make_folder_if_not_exists
from tools.plotting import Histogram_properties
from matplotlib import pyplot as plt
from matplotlib import rc
rc( 'font', **CMS.font )
rc( 'text', usetex = True )
import rootpy.plotting.root2matplotlib as rplt
from rootpy.plotting import Hist2D
import linecache
from config.variable_binning import variable_bins_ROOT
from config.latex_labels import samples_latex
if __name__ == '__main__':
parser = OptionParser()
parser.add_option( "-p", "--path", dest = "path", default = 'data/',
help = "set path to JSON files" )
parser.add_option( "-v", "--variable", dest = "variable", default = 'MET',
help = "set the variable to analyse (MET, HT, ST, MT)" )
parser.add_option( "-f", "--fit-variables", dest = "fit_variables", default = 'absolute_eta',
help = "set the fit variable to use in the minimalisation" +
" (absolute_eta, M3, M_bl, angle_bl) or any" +
" combination separated by commas" )
parser.add_option( "-o", "--output_folder", dest = "output_folder", default = 'plots/fitchecks/',
help = "set path to save plots" )
parser.add_option( "-m", "--metType", dest = "metType", default = 'type1',
help = "set MET type used in the analysis of MET-dependent variables" )
parser.add_option( "-c", "--category", dest = "category", default = 'central',
help = "set the category to take the fit results from (default: central)" )
parser.add_option( "-n", "--normalise_to_fit", dest = "normalise_to_fit", action = "store_true",
help = "normalise the MC to fit results" )
parser.add_option( "-e", "--centre-of-mass-energy", dest = "CoM", default = 8, type = int,
help = "set the centre of mass energy for analysis. Default = 8 [TeV]" )
# parser.add_option("-i", "--use_inputs", dest="use_inputs", action="store_true",
# help="use fit inputs instead of fit results")
( options, args ) = parser.parse_args()
measurement_config = XSectionConfig( options.CoM )
# caching of variables for shorter access
translate_options = measurement_config.translate_options
lumi = measurement_config.new_luminosity
CoM = measurement_config.centre_of_mass_energy
electron_histogram_title = 'CMS Preliminary, $\mathcal{L}$ = %.1f fb$^{-1}$ at $\sqrt{s}$ = %d TeV \n e+jets, $\geq$4 jets' % ( lumi/1000.0, CoM )
muon_histogram_title = 'CMS Preliminary, $\mathcal{L}$ = %.1f fb$^{-1}$ at $\sqrt{s}$ = %d TeV \n $\mu$+jets, $\geq$4 jets' % ( lumi/1000.0, CoM )
path_to_JSON = options.path + '/' + str( CoM ) + 'TeV/'
output_folder = options.output_folder + '/%dTeV/' % CoM
make_folder_if_not_exists(output_folder)
normalise_to_fit = options.normalise_to_fit
category = options.category
met_type = translate_options[options.metType]
fit_variables = options.fit_variables.replace( ',' , '_' )
histogram_files = {
'electron' : measurement_config.data_file_electron,
'muon' : measurement_config.data_file_muon,
'TTJet': measurement_config.ttbar_category_templates[category],
'V+Jets': measurement_config.VJets_category_templates[category],
'QCD': measurement_config.electron_QCD_MC_file, # this should also be category-dependent, but unimportant and not available atm
'SingleTop': measurement_config.SingleTop_category_templates[category]
}
# make correlation plots for electron and muon channel
make_correlation_plot_from_file( channel = 'electron',
variable = options.variable,
fit_variables = fit_variables,
CoM = options.CoM,
title = electron_histogram_title,
x_title = '',
y_title = '',
x_limits = [0, 4],
y_limits = [0, 4],
rebin = 1,
save_folder = output_folder,
save_as = ['pdf'] )
make_correlation_plot_from_file( channel = 'muon',
variable = options.variable,
CoM = options.CoM,
fit_variables = fit_variables,
title = muon_histogram_title,
x_title = '',
y_title = '',
x_limits = [0, 3],
y_limits = [0, 3],
rebin = 1,
save_folder = output_folder,
save_as = ['pdf'] )
| [
6738,
4566,
1330,
40773,
198,
6738,
2172,
29572,
1330,
16018,
46677,
198,
6738,
4566,
1330,
1395,
16375,
16934,
198,
6738,
4899,
13,
7753,
62,
315,
2410,
1330,
1100,
62,
7890,
62,
6738,
62,
40386,
11,
787,
62,
43551,
62,
361,
62,
1662... | 2.053828 | 2,508 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from datetime import date
from django.core.management.base import BaseCommand
from visualSHARK.models import VCSSystem, Commit, Project, File, Issue, MailingList, Message, IssueSystem
from visualSHARK.models import ProjectStats
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
198,
6738,
4818,
8079,
1330,
3128,
198,
198,
6738,
42625,
14208,
13,
7295,
13,
27604,
13,
8692,
1330,
7308,
21575,
198,... | 3.349398 | 83 |