Dataset columns:

  content           string   lengths 1 to 1.04M
  input_ids         list     lengths 1 to 774k
  ratio_char_token  float64  0.38 to 22.9
  token_count       int64    1 to 774k
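ratio_char_token is the character length of content divided by token_count, and the input_ids are consistent with the GPT-2 byte-level BPE vocabulary (id 2 is '#' and id 198 is '\n', matching the samples below). A minimal sketch of how a row could be derived under that assumption; make_row is a hypothetical helper and the use of tiktoken is an assumption, not part of the dataset:

import tiktoken

# Assumption: GPT-2 BPE vocabulary (ids 2 = '#', 198 = '\n' match the rows below).
enc = tiktoken.get_encoding("gpt2")

def make_row(content: str) -> dict:
    # Hypothetical helper, for illustration only.
    input_ids = enc.encode(content)
    token_count = len(input_ids)
    return {
        "content": content,
        "input_ids": input_ids,
        "token_count": token_count,                      # column: token_count
        "ratio_char_token": len(content) / token_count,  # column: ratio_char_token
    }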
#!/usr/bin/env python3
# Copyright 2016-2018 Scality
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Runner for Bert-E Flask server."""

import argparse

from . import setup_bert_e, setup_server


def parse_args():
    """Read command line arguments for server."""
    parser = argparse.ArgumentParser(
        add_help=True,
        description='Bert-E debug server.'
    )
    parser.add_argument('--host', type=str, default='0.0.0.0',
                        help='server host (defaults to 0.0.0.0)')
    parser.add_argument('--port', '-p', type=int, default=5000,
                        help='server port (defaults to 5000)')
    parser.add_argument('--settings-file', '-f', type=str, default='settings',
                        help='settings-file location (defaults to `settings`)')
    parser.add_argument('--verbose', '-v', action='store_true', default=True,
                        help='verbose mode')
    return parser.parse_args()


# Start up Bert-E and server!
args = parse_args()
bert_e = setup_bert_e(args.settings_file, args.verbose)
app = setup_server(bert_e)


def main():
    """Debug server entry point."""
    app.run(host=args.host, port=args.port, debug=args.verbose)


if __name__ == '__main__':
    main()
input_ids: [ 2, 48443, 14629, 14, 8800, 14, 24330, 21015, 18, 198, 198, 2, 15069, 1584, 12, 7908, 1446, 1483, 198, 2, 198, 2, 49962, 739, 262, 24843, 13789, 11, 10628, 362, 13, 15, 357, 1169, 366, 34156, 15341, 198, 2, 345, 743, 407, 779, 428,...
ratio_char_token: 2.735433
token_count: 635
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Dec 16 12:47:50 2021

@author: Sebastian Schäfer
@institution: Martin-Luther-Universität Halle-Wittenberg
@email: sebastian.schaefer@student.uni-halle.de
"""

import os
import sys
import tkinter as tk
import tkinter.ttk as ttk

from .src.classes.install_dnd import InstallDnD
from .src.classes.main_viewer import MainApplication
from .src.classes.profile import ProfileHandler
from .src.classes.update import CheckForUpdates

if __name__ == "__main__":
    topasgraphsim()
input_ids: [ 2, 48443, 14629, 14, 8800, 14, 24330, 21015, 18, 198, 2, 532, 9, 12, 19617, 25, 3384, 69, 12, 23, 532, 9, 12, 198, 37811, 198, 41972, 319, 26223, 4280, 1467, 1105, 25, 2857, 25, 1120, 33448, 198, 198, 31, 9800, 25, 26190, 3059, ...
ratio_char_token: 2.78866
token_count: 194
from aetherling.space_time.type_helpers import *
from aetherling.space_time.nested_counters import *
import fault
from aetherling.helpers.fault_helpers import compile_and_run
input_ids: [ 6738, 257, 6750, 1359, 13, 13200, 62, 2435, 13, 4906, 62, 16794, 364, 1330, 1635, 198, 6738, 257, 6750, 1359, 13, 13200, 62, 2435, 13, 77, 7287, 62, 66, 15044, 1330, 1635, 198, 11748, 8046, 198, 6738, 257, 6750, 1359, 13, 16794, 364...
ratio_char_token: 3.125
token_count: 56
# Generated by Django 3.1.3 on 2020-11-08 06:01

from django.db import migrations, models
input_ids: [ 2, 2980, 515, 416, 37770, 513, 13, 16, 13, 18, 319, 12131, 12, 1157, 12, 2919, 9130, 25, 486, 198, 198, 6738, 42625, 14208, 13, 9945, 1330, 15720, 602, 11, 4981, 628 ]
ratio_char_token: 2.84375
token_count: 32
from django import forms
from .models import Subscribe
from django.db import transaction
from video.models import Video
input_ids: [ 6738, 42625, 14208, 1330, 5107, 198, 6738, 764, 27530, 1330, 19808, 198, 6738, 42625, 14208, 13, 9945, 1330, 8611, 198, 6738, 2008, 13, 27530, 1330, 7623 ]
ratio_char_token: 4.576923
token_count: 26
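Spot check of the schema against the row above: its four import lines total 119 characters and the listed input_ids hold 26 tokens, so 119 / 26 ≈ 4.576923, exactly the stored ratio_char_token.

# Arithmetic check using the values from the row above
assert round(119 / 26, 6) == 4.576923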
""" MIT License Copyright (c) 2018 Rafael Felix Alves Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ import h5py import numpy as np from .experiments import AverageMeter from .tensors import NpJSONEncoder class Bunch(dict): """Container object for datasets copied from scikit-learn Dictionary-like object that exposes its keys as attributes. """ # def as_dict(self): # return self.__dict__ if __name__ == '__main__': print('-'*100) print(':: Testing file: {}'.format(__file__)) print('-'*100)
input_ids: [ 37811, 198, 36393, 13789, 198, 198, 15269, 357, 66, 8, 2864, 31918, 29721, 978, 1158, 198, 198, 5990, 3411, 318, 29376, 7520, 11, 1479, 286, 3877, 11, 284, 597, 1048, 16727, 257, 4866, 198, 1659, 428, 3788, 290, 3917, 10314, 3696, 357...
ratio_char_token: 3.259336
token_count: 482
from decimal import Decimal

import pytest
from freezegun import freeze_time

from beanbot.errors import UserError
from beanbot.models import Action, Event
from beanbot.parser import parse_keyboard_data, parse_message

invalid_messages = [
    'No money',
    'No currency 12.0 USD',
    '3.0',
    '+ no money'
    '+12 stuff',
    '-12 items',
    '# ',
]


@freeze_time()
@freeze_time()
input_ids: [ 6738, 32465, 1330, 4280, 4402, 198, 198, 11748, 12972, 9288, 198, 6738, 1479, 89, 1533, 403, 1330, 16611, 62, 2435, 198, 198, 6738, 26394, 13645, 13, 48277, 1330, 11787, 12331, 198, 6738, 26394, 13645, 13, 27530, 1330, 7561, 11, 8558, 1...
ratio_char_token: 2.689655
token_count: 145
from qtstrap import *
from qtpy.QtSql import *

from .log_table_view import LogTableView
from .log_filter_controls import FilterControls
from .log_database_handler import DatabaseHandler, db_conn_name

try:
    from command_palette import CommandPalette, Command
    command_palette_available = True
except:
    command_palette_available = False
input_ids: [ 6738, 10662, 83, 26418, 1330, 1635, 198, 6738, 10662, 83, 9078, 13, 48, 83, 50, 13976, 1330, 1635, 198, 6738, 764, 6404, 62, 11487, 62, 1177, 1330, 5972, 10962, 7680, 198, 6738, 764, 6404, 62, 24455, 62, 13716, 82, 1330, 25853, 15988,...
ratio_char_token: 3.222222
token_count: 108
# -*- coding: utf-8 -*-
'''
@author: Milan Cermak <milan.cermak@gmail.com>
@author: Moloch

This module implements sessions for Tornado using Memcached.
'''

import re
import os
import time
import base64
import logging
import datetime
import collections
import cPickle as pickle
from os import _exit

SID_SIZE = 16  # Size in bytes


class BaseSession(collections.MutableMapping):
    '''
    The base class for the session object. Work with the session object
    is really simple, just treat it as any other dictionary:

    class Handler(tornado.web.RequestHandler):
        def get(self):
            var = self.session['key']
            self.session['another_key'] = 'value'

    Session is automatically saved on handler finish. Session expiration
    is updated with every request. If configured, session ID is
    regenerated periodically.

    The session_id attribute stores a unique, random, 64 characters long
    string serving as an identifier.

    To create a new storage system for the sessions, subclass BaseSession
    and define save(), load() and delete(). For inspiration, check out any
    of the already available classes and documentation to aforementioned
    functions.
    '''

    def _is_expired(self):
        '''Check if the session has expired.'''
        if self.expires is None:  # never expire
            return False
        return datetime.datetime.utcnow() > self.expires

    def _expires_at(self):
        '''Find out the expiration time. Returns datetime.datetime.'''
        v = self.duration
        if v is None:  # never expire
            return None
        elif isinstance(v, datetime.timedelta):
            pass
        elif isinstance(v, (int, long)):
            self.duration = datetime.timedelta(seconds=v)
        elif isinstance(v, basestring):
            self.duration = datetime.timedelta(seconds=int(v))
        else:
            self.duration = datetime.timedelta(seconds=900)  # 15 mins
        return datetime.datetime.utcnow() + self.duration

    def _serialize_expires(self):
        '''Determines what value of expires is stored to DB during save().'''
        if self.expires is None:
            return None
        else:
            return int(time.mktime(self.expires.timetuple()))

    def _should_regenerate(self):
        '''Determine if the session_id should be regenerated.'''
        if self.regeneration_interval is None:  # never regenerate
            return False
        return datetime.datetime.utcnow() > self.next_regeneration

    def _next_regeneration_at(self):
        '''Return a datetime object when the next session id regeneration
        should occur.'''
        # convert whatever value to an timedelta (period in seconds)
        # store it in self.regeneration_interval to prevent
        # converting in later calls and return the datetime
        # of next planned regeneration
        v = self.regeneration_interval
        if v is None:  # never regenerate
            return None
        elif isinstance(v, datetime.timedelta):
            pass
        elif isinstance(v, (int, long)):
            self.regeneration_interval = datetime.timedelta(seconds=v)
        elif isinstance(v, basestring):
            self.regeneration_interval = datetime.timedelta(seconds=int(v))
        else:
            self.regeneration_interval = datetime.timedelta(seconds=240)
        return datetime.datetime.utcnow() + self.regeneration_interval

    def refresh(self, duration=None, new_session_id=False):
        '''Prolongs the session validity. You can specify for how long
        passing a value in the duration argument (the same rules as for
        session_age apply). Be aware that henceforward this particular
        session may have different expiry date, not respecting the global
        setting. If new_session_id is True, a new session identifier will
        be generated. This should be used e.g. on user authentication for
        security reasons.'''
        if duration:
            self.duration = duration
            self.expires = self._expires_at()
        else:
            self.expires = self._expires_at()
        if new_session_id:
            self.delete()
            self.session_id = self._generate_session_id()
            self.next_regeneration = self._next_regeneration_at()
        self.dirty = True  # force save
        self.save()

    def save(self):
        '''Save the session data and metadata to the backend storage
        if necessary (self.dirty == True). On successful save set
        dirty to False.'''
        pass

    @staticmethod
    def load(session_id, location):
        '''Load the stored session from storage backend or return None
        if the session was not found, in case of stale cookie.'''
        pass

    def delete(self):
        '''Remove all data representing the session from backend storage.'''
        pass

    @staticmethod
    def delete_expired(file_path):
        '''Deletes sessions with timestamps in the past from storage.'''
        pass


try:
    import pylibmc  # lint:ok

    class MemcachedSession(BaseSession):
        '''
        Class responsible for Memcached stored sessions. It uses the
        pylibmc library because it's fast. It communicates with the
        memcached server through the binary protocol and uses async
        I/O (no_block set to 1) to speed things up even more.

        Session ID is used as a key. The value consists of colon
        separated values of serializes session object, expiry timestamp,
        IP address and User-Agent.

        Values are stored with timeout set to the difference between
        saving time and expiry time in seconds. Therefore, no old
        sessions will be held in Memcached memory.
        '''

        def _serialize_expires(self):
            '''Determines what value of expires is stored to DB
            during save().'''
            if self.expires is None:
                return '-1'
            else:
                return str(int(time.mktime(self.expires.timetuple())))

        def save(self):
            '''Write the session to Memcached. Session ID is used as
            key, value is constructed as colon separated values of
            serialized session, session expiry timestamp, ip address
            and User-Agent. The value is not stored indefinitely. Its
            expiration time in seconds is calculated as the difference
            between the saving time and session expiry.'''
            if not self.dirty:
                return
            value = ':'.join((self.serialize(),
                              self._serialize_expires(),
                              self.ip_address,
                              self.user_agent))
            # count how long should it last and then add or rewrite
            if self.expires is None:
                # set expiry 30 days, max for memcache
                # http://code.google.com/p/memcached/wiki/FAQ#What_are_the_limi
                # ts_on_setting_expire_time?_%28why_is_there_a_30_d
                self.connection.set(
                    self.session_id,
                    value,
                    time=datetime.timedelta.max.seconds * 30
                )
            else:
                live_sec = self.expires - datetime.datetime.utcnow()
                self.connection.set(
                    self.session_id,
                    value,
                    time=live_sec.seconds)
            self.dirty = False

        @staticmethod
        def load(session_id, connection):
            '''Load the session from storage.'''
            try:
                value = connection.get(session_id)
                if value:
                    data = value.split(':', 1)[0]
                    kwargs = MemcachedSession.deserialize(data)
                    return MemcachedSession(connection, **kwargs)
            except:
                return None
            return None

        def delete(self):
            '''Delete the session from storage.'''
            self.connection.delete(self.session_id)

except ImportError:
    logging.exception("Failed to import PyLibmc, no session support.")
    _exit(1)
input_ids: [ 2, 532, 9, 12, 19617, 25, 3384, 69, 12, 23, 532, 9, 12, 198, 7061, 6, 198, 31, 9800, 25, 21574, 327, 7780, 461, 1279, 25433, 272, 13, 2189, 76, 461, 31, 14816, 13, 785, 29, 198, 31, 9800, 25, 17958, 5374, 198, 198, 1212, 8265,...
ratio_char_token: 2.367271
token_count: 3,477
from multiprocessing import Process

from bilibili import Bilibili
from config import (enable_youtube, enable_twitcasting, enable_openrec,
                    enable_mirrativ, enable_bilibili, enable_youtube_temp,
                    channel_id, userid, oprec_id, twitcasting_ld, bilibili_id)
from daemon import VideoUpload
from mirrativ import Mirrativ
from openrec import Openrec
from twitcasting import Twitcasting
from youtube import Youtube, start_temp_daemon

if __name__ == '__main__':
    uploader = VideoUpload()
    uploader.start()
    e = Event()
    e.start()
input_ids: [ 6738, 18540, 305, 919, 278, 1330, 10854, 198, 198, 6738, 275, 22282, 2403, 1330, 347, 22282, 2403, 198, 6738, 4566, 1330, 357, 21633, 62, 11604, 11, 7139, 62, 4246, 270, 19913, 11, 7139, 62, 9654, 8344, 11, 7139, 62, 10793, 10366, 452...
ratio_char_token: 2.905759
token_count: 191
__title__ = "dti"
__author__ = "diceroll123"
__license__ = "MIT"
__copyright__ = "Copyright 2020-present diceroll123"
__version__ = "0.0.1a"

import logging

from . import utils
from .client import Client
from .constants import *
from .enums import *
from .errors import *
from .models import *

logging.getLogger(__name__).addHandler(logging.NullHandler())
input_ids: [ 834, 7839, 834, 796, 366, 67, 20259, 1, 198, 834, 9800, 834, 796, 366, 67, 16647, 692, 10163, 1, 198, 834, 43085, 834, 796, 366, 36393, 1, 198, 834, 22163, 4766, 834, 796, 366, 15269, 12131, 12, 25579, 288, 16647, 692, 10163, 1, 1...
ratio_char_token: 2.934426
token_count: 122
import requests, base64
import config

id = config.GAPI_CONFIG['client_id']
secret = config.GAPI_CONFIG['client_secret']
type = config.GAPI_CONFIG['grant_type']

# api1 = GapiClass()
# a = api1.getMember('planning_d')

# if __name__ == "__main__":
#     print(a)
input_ids: [ 11748, 7007, 11, 2779, 2414, 198, 11748, 4566, 198, 198, 312, 796, 4566, 13, 38, 17614, 62, 10943, 16254, 17816, 16366, 62, 312, 20520, 198, 21078, 796, 4566, 13, 38, 17614, 62, 10943, 16254, 17816, 16366, 62, 21078, 20520, 198, 4906, ...
ratio_char_token: 2.558824
token_count: 102
from django.conf import settings
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.contrib.auth.mixins import LoginRequiredMixin, UserPassesTestMixin
from django.core.mail import EmailMessage
from django.shortcuts import redirect, render

from .models import Newsletter
from django.views.generic import (
    CreateView,
    DeleteView,
    DetailView,
    ListView,
    UpdateView,
)
from .models import (
    Blog,
    Business,
    Contact,
    Covid,
    Education,
    Entertainment,
    Health,
    International,
    LifeStyle,
    Literature,
    National,
    Politics,
    Post,
    Sports,
    Tech,
)

# Class BASED VIEWS END HERE


@login_required()
input_ids: [ 6738, 42625, 14208, 13, 10414, 1330, 6460, 198, 6738, 42625, 14208, 13, 3642, 822, 1330, 6218, 198, 6738, 42625, 14208, 13, 3642, 822, 13, 18439, 13, 12501, 273, 2024, 1330, 17594, 62, 35827, 198, 6738, 42625, 14208, 13, 3642, 822, 13, ...
ratio_char_token: 2.838462
token_count: 260
#
# PySNMP MIB module A3COM-HUAWEI-EPON-DEVICE-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/A3COM-HUAWEI-EPON-DEVICE-MIB
# Produced by pysmi-0.3.4 at Wed May 1 11:04:50 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
h3cEpon, = mibBuilder.importSymbols("A3COM-HUAWEI-OID-MIB", "h3cEpon")
ObjectIdentifier, OctetString, Integer = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "OctetString", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsIntersection, ConstraintsUnion, ValueRangeConstraint, ValueSizeConstraint, SingleValueConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "ConstraintsUnion", "ValueRangeConstraint", "ValueSizeConstraint", "SingleValueConstraint")
ifIndex, = mibBuilder.importSymbols("IF-MIB", "ifIndex")
SnmpAdminString, = mibBuilder.importSymbols("SNMP-FRAMEWORK-MIB", "SnmpAdminString")
NotificationGroup, ObjectGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ObjectGroup", "ModuleCompliance")
ObjectIdentity, Unsigned32, Counter32, NotificationType, Gauge32, MibIdentifier, ModuleIdentity, zeroDotZero, Counter64, IpAddress, TimeTicks, MibScalar, MibTable, MibTableRow, MibTableColumn, Bits, iso, Integer32, mib_2 = mibBuilder.importSymbols("SNMPv2-SMI", "ObjectIdentity", "Unsigned32", "Counter32", "NotificationType", "Gauge32", "MibIdentifier", "ModuleIdentity", "zeroDotZero", "Counter64", "IpAddress", "TimeTicks", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Bits", "iso", "Integer32", "mib-2")
TextualConvention, MacAddress, TruthValue, DateAndTime, RowStatus, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "MacAddress", "TruthValue", "DateAndTime", "RowStatus", "DisplayString")
h3cEponDeviceMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4))
h3cEponDeviceMIB.setRevisions(('2004-09-21 00:00',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    if mibBuilder.loadTexts: h3cEponDeviceMIB.setRevisionsDescriptions(('Initial version, published as RFC XXXX.',))
if mibBuilder.loadTexts: h3cEponDeviceMIB.setLastUpdated('200409210000Z')
if mibBuilder.loadTexts: h3cEponDeviceMIB.setOrganization('IETF Ethernet Interfaces and Hub MIB Working Group')
if mibBuilder.loadTexts: h3cEponDeviceMIB.setContactInfo('WG charter: http://www.ietf.org/html.charters/hubmib-charter.html Mailing Lists: General Discussion: hubmib@ietf.org To Subscribe: hubmib-request@ietf.org In Body: subscribe your_email_address Chair: Dan Romascanu Postal: Avaya Inc. Atidim Technology Park, Bldg. 3 Tel Aviv 61131 Israel Tel: +972-3-645-8414 E-mail: dromasca@avaya.com Editor: Lior Khermosh Postal: Passave Technologies Inc. Ackerstein Towers, Tower A, 6th floor, 9 Hamenofim St. Hertzliya Pituach 46725, ISRAEL P.O.Box 2089 Hertzliya Pituach 46120 Israel Tel: +972-9-9717600 Ext: 7181 E-mail: lior.khermosh@passave.com')
if mibBuilder.loadTexts: h3cEponDeviceMIB.setDescription("The objects in this MIB module are used to manage Ethernet Passive Optical Network (EPON) devices which are based on the Ethernet in the First Mile (EFM) PON as defined in IEEE Draft P802.3ah/D3.0 clause 60,64,65. This mib is excerpted from the draft files directly,only changed the object name,added the h3c as prefix. The following reference is used throughout this MIB module: [802.3ah] refers to: IEEE Draft P802.3ah/D3.3: 'Draft amendment to - Information technology - Telecommunications and information exchange between systems - Local and metropolitan area networks - Specific requirements - Part 3: Carrier sense multiple access with collision detection (CSMA/CD) access method and physical layer specifications - Media Access Control Parameters, Physical Layers and Management Parameters for subscriber access networks', 22 April 2004. Of particular interest are Clause 64(MPCP) 65(P2mP RS) and 60 (PON PMDs). Clause 30, 'Management', and Clause 45,'Management Data Input/Output (MDIO) Interface'. Copyright (C) The Internet Society (2004). This version of this MIB module is part of XXXX see the RFC itself for full legal notices.")
h3cEponDeviceObjectMIB = MibIdentifier((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1))
h3cEponDeviceObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1))
h3cEponDeviceConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 2))
h3cEponDeviceControlObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 1))
h3cEponDeviceStatObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 2))
h3cEponDeviceEventObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 3))
h3cEponDeviceControlTable = MibTable((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 1, 1), )
if mibBuilder.loadTexts: h3cEponDeviceControlTable.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceControlTable.setDescription('Table for EPON device MIB modules.')
h3cEponDeviceControlEntry = MibTableRow((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 1, 1, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: h3cEponDeviceControlEntry.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceControlEntry.setDescription('An entry in the EPON device Control table.')
h3cEponDeviceObjectReset = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("running", 1), ("reset", 2))).clone(1)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: h3cEponDeviceObjectReset.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceObjectReset.setDescription('This variable is used to reset the EPON device. The interface may be unavailable while the reset occurs and data may be lost. During reading operation it returns the state of the EPON device. running(1) indicates and operates normal operation, reset(2) indicates and operates reset mode. Writing can be done all the time.')
h3cEponDeviceObjectModes = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 1, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("olt", 1), ("onu", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cEponDeviceObjectModes.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceObjectModes.setDescription('This variable defines the mode of the EPON device. When an olt(1) it is an Optical Line Terminal device (server) and when an onu(2) and Optical Network Unit device (client)')
h3cEponDeviceObjectFecEnabled = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 1, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("noFecEnabled", 1), ("fecTxEnabled", 2), ("fecRxEnabled", 3), ("fecTxRxEnabled", 4))).clone(1)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: h3cEponDeviceObjectFecEnabled.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceObjectFecEnabled.setDescription('This variable defines and provides information whether the EPON device uses FEC as defined in the [802.3ah] clause 65.2 for EPON. When noFECEnabled(1) the device does not support FEC mode When fecTxEnabled(2) the device supports the FEC transmission mode. When fecRxEnabled(3) the device supports the FEC Receive mode. When fecTxRxEnabled(4) the device supports the FEC transmission and receive mode. Writing can be done all the time. This attribute is relevant for an OLT and an ONU.')
h3cEponDeviceObjectOamMode = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 1, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("noOam", 1), ("oamServer", 2), ("oamclient", 3))).clone(1)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: h3cEponDeviceObjectOamMode.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceObjectOamMode.setDescription('This variable defines and provides information on the Operation Administration and Maintenance (OAM) mode of an EPON device as defined by the [802.3ah] clause 57. When noOam(1) the device does not supports the OAM mode. When oamServer(2) the device supports the OAM mode as a server unit. When oamClient(3) the device supports the OAM mode as a client unit. Writing can be done during initialization, h3cEponDeviceObjectDeviceReadyMode is in notReady(1) or inProcess(2). This attribute is relevant for an OLT and an ONU.')
h3cEponDeviceObjectDeviceReadyMode = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 1, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("notReady", 1), ("inProcess", 2), ("ready", 3))).clone(1)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: h3cEponDeviceObjectDeviceReadyMode.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceObjectDeviceReadyMode.setDescription('This variable defines the mode of an EPON device and provides information on the mode in initialization - ready for registration as defined by the [802.3ah] clause 64. When notReady(1) the device is not ready for operation. When inProcess(2) the device is in initialization process. When ready(3) the device is ready for registration. Writing can be done all the time. This attribute is relevant for an OLT and an ONU.')
h3cEponDeviceObjectPowerDown = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 1, 1, 1, 7), TruthValue().clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: h3cEponDeviceObjectPowerDown.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceObjectPowerDown.setDescription('Setting this variable to True(1) will cause Device to be entered into Power down mode where no registration is allowed and only receiving data from the link. Writing can be done all the time. This attribute is relevant for an OLT and an ONU.')
h3cEponDeviceObjectNumberOfLLIDs = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 1, 1, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cEponDeviceObjectNumberOfLLIDs.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceObjectNumberOfLLIDs.setDescription('A read only variable which defines the number of registered LLIDs (as defined by the [802.3ah] clause 65) in a EPON network for an OLT and an ONU. Initialization value is 0. This attribute is relevant for an OLT and an ONU.')
h3cEponDeviceObjectReportThreshold = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 1, 1, 1, 9), Integer32()).setUnits('TQ (16nsec)').setMaxAccess("readwrite")
if mibBuilder.loadTexts: h3cEponDeviceObjectReportThreshold.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceObjectReportThreshold.setDescription('A set of 8 integers, for each LLID, that defines the threshold reporting for each Queue in the REPORT message, as defined in [802.3ah] 64. First Queue set reporting will provide information on the queue occupancy of frames below this Threshold. The value returned shall be in Time quanta (TQ) which is 16nsec or 2 octets increments. Writing can be done all the time. This attribute is relevant for an OLT and an ONU.')
h3cEponDeviceRemoteMACAddressLLIDControl = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 1, 1, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("none", 1), ("resetLog", 2), ("useDefaultReporting", 3))).clone(3)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: h3cEponDeviceRemoteMACAddressLLIDControl.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceRemoteMACAddressLLIDControl.setDescription('Indicates and controls the resetting of the LLID MAC address log. Setting this object to none(1) has no action resetLog(2) empties the LLID MAC address log. All data is deleted. Setting it to useDefaultReporting(3) returns all entries priorities to their factory-default reporting. Reading this object always returns useDefaultReporting(3).')
h3cEponDeviceRemoteMACAddressLLIDTable = MibTable((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 1, 2), )
if mibBuilder.loadTexts: h3cEponDeviceRemoteMACAddressLLIDTable.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceRemoteMACAddressLLIDTable.setDescription('A table of read-only value that identifies the source_address and LLIDs parameter of the remote devices in the network. This MacAddress value, as defined in [802.3ah], 30.3.5.1.5, is updated on reception of a valid frame with a unicast destination Field or (1) a destination Field equal to the reserved multicast address for MAC Control specified in [802.3ah] Annex 31A, (2) lengthOrType field value equal to the reserved Type for MAC Control as specified in [802.3ah] Annex 31A. (3)an MPCP subtype value equal to the subtype reserved for MPCP as specified in [802.3ah] Annex 31A, and an LLID as allocated by the OLT. The table is defined as Remote MAC address - LLID (RMadL) The table is relevant only for an OLT device, and is equivalent from a bridge emulation to the bridge port-MAC address table where the LLIDs are equivalent to virtual bridge ports.')
h3cEponDeviceRemoteMACAddressLLIDEntry = MibTableRow((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 1, 2, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: h3cEponDeviceRemoteMACAddressLLIDEntry.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceRemoteMACAddressLLIDEntry.setDescription('A group of entries. Applications create and delete entries using h3cEponDeviceRMadlEntryStatus. When adding objects to an LLID they are added in the persistent order of their index in this table.')
h3cEponDeviceRemoteMACAddressLLIDName = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 1, 2, 1, 1), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(1, 32))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cEponDeviceRemoteMACAddressLLIDName.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceRemoteMACAddressLLIDName.setDescription('A locally-unique, administratively assigned name for a group of entries.')
h3cEponDeviceRMadlLLID = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 1, 2, 1, 2), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295)).clone(1)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cEponDeviceRMadlLLID.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceRMadlLLID.setDescription('An arbitrary integer for the purpose of identifying the LLID. Writing can be done all the time.')
h3cEponDeviceRMadlLogID = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 1, 2, 1, 3), ObjectIdentifier().clone((0, 0))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cEponDeviceRMadlLogID.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceRMadlLogID.setDescription('The object identifier of a MIB module object to add to an entry, indicating the entry ID in the table. Writing can be done all the time.')
h3cEponDeviceRMadlRemoteAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 1, 2, 1, 4), MacAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cEponDeviceRMadlRemoteAddress.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceRMadlRemoteAddress.setDescription('The remote MAC address of the LLID. Writing can be done all the time.')
h3cEponDeviceRMadlType = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 1, 2, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("notRegister", 1), ("registered", 2))).clone(1)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cEponDeviceRMadlType.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceRMadlType.setDescription('A list of types for entries - LLIDs. Indicates and defines the state of registration. notRegister(1) marks a non registered LID, registered(2) marks a registered LLID. Writing can be done all the time.')
h3cEponDeviceRMadlAction = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 1, 2, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("none", 1), ("register", 2), ("deregister", 3), ("reregister", 4))).clone(1)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cEponDeviceRMadlAction.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceRMadlAction.setDescription('A list of actions for an entry - LLID. Indicates and defines the state of registration for the remote device. none(1) marks no action, register(2) marks to register an LLID, deregister(3) marks to deregister an LLID, reregister(4) marks reregistered LLID. Writing can be done all the time.')
h3cEponDeviceRMadlEntryStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 1, 2, 1, 7), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cEponDeviceRMadlEntryStatus.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceRMadlEntryStatus.setDescription('The control that allows creation and deletion of entries. Once made active an entry MAY not be modified except to delete it.')
h3cEponDeviceStatTable = MibTable((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 2, 1), )
if mibBuilder.loadTexts: h3cEponDeviceStatTable.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceStatTable.setDescription('This table defines the list of statistics counters of EPON devices. The attributes are relevant for an OLT and an ONU.')
h3cEponDeviceStatEntry = MibTableRow((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 2, 1, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: h3cEponDeviceStatEntry.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceStatEntry.setDescription('Table entries for Table of statistics counters of EPON devices.')
h3cEponDeviceStatTxFramesQueue0 = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 2, 1, 1, 1), Counter32()).setUnits('frames').setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cEponDeviceStatTxFramesQueue0.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceStatTxFramesQueue0.setDescription('A count of the number of times a -Queue-0- frames transmission occurs. Increment the counter by one for each frame transmitted which is an output of -Queue-0-. The -Queue-0- marking matched the REPORT MPCP message Queue-0 field, as defined in [802.3ah] clause 64. This counter is mandatory for an ONU.')
h3cEponDeviceStatTxFramesQueue1 = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 2, 1, 1, 2), Counter32()).setUnits('frames').setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cEponDeviceStatTxFramesQueue1.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceStatTxFramesQueue1.setDescription('A count of the number of times a -Queue-1- frames transmission occurs. Increment the counter by one for each frame transmitted which is an output of -Queue-1-. The -Queue-1- marking matched the REPORT MPCP message Queue-1 field, as defined in [802.3ah] clause 64. This counter is mandatory for an ONU.')
h3cEponDeviceStatTxFramesQueue2 = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 2, 1, 1, 3), Counter32()).setUnits('frames').setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cEponDeviceStatTxFramesQueue2.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceStatTxFramesQueue2.setDescription('A count of the number of times a -Queue-2- frames transmission occurs. Increment the counter by one for each frame transmitted which is an output of -Queue-2-. The -Queue-2- marking matched the REPORT MPCP message Queue-2 field, as defined in [802.3ah] clause 64. This counter is mandatory for an ONU.')
h3cEponDeviceStatTxFramesQueue3 = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 2, 1, 1, 4), Counter32()).setUnits('frames').setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cEponDeviceStatTxFramesQueue3.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceStatTxFramesQueue3.setDescription('A count of the number of times a -Queue-3- frames transmission occurs. Increment the counter by one for each frame transmitted which is an output of -Queue-3-. The -Queue-3- marking matched the REPORT MPCP message Queue-3 field, as defined in [802.3ah] clause 64. This counter is mandatory for an ONU.')
h3cEponDeviceStatTxFramesQueue4 = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 2, 1, 1, 5), Counter32()).setUnits('frames').setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cEponDeviceStatTxFramesQueue4.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceStatTxFramesQueue4.setDescription('A count of the number of times a -Queue-4- frames transmission occurs. Increment the counter by one for each frame transmitted which is an output of -Queue-4-. The -Queue-4- marking matched the REPORT MPCP message Queue-4 field, as defined in [802.3ah] clause 64. This counter is mandatory for an ONU.')
h3cEponDeviceStatTxFramesQueue5 = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 2, 1, 1, 6), Counter32()).setUnits('frames').setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cEponDeviceStatTxFramesQueue5.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceStatTxFramesQueue5.setDescription('A count of the number of times a -Queue-5- frames transmission occurs. Increment the counter by one for each frame transmitted which is an output of -Queue-5-. The -Queue-5- marking matched the REPORT MPCP message Queue-5 field, as defined in [802.3ah] clause 64. This counter is mandatory for an ONU.')
h3cEponDeviceStatTxFramesQueue6 = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 2, 1, 1, 7), Counter32()).setUnits('frames').setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cEponDeviceStatTxFramesQueue6.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceStatTxFramesQueue6.setDescription('A count of the number of times a -Queue-6- frames transmission occurs. Increment the counter by one for each frame transmitted which is an output of -Queue-6-. The -Queue-6- marking matched the REPORT MPCP message Queue-6 field, as defined in [802.3ah] clause 64. This counter is mandatory for an ONU.')
h3cEponDeviceStatTxFramesQueue7 = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 2, 1, 1, 8), Counter32()).setUnits('frames').setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cEponDeviceStatTxFramesQueue7.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceStatTxFramesQueue7.setDescription('A count of the number of times a -Queue-7- frames transmission occurs. Increment the counter by one for each frame transmitted which is an output of -Queue-7-. The -Queue-7- marking matched the REPORT MPCP message Queue-7 field, as defined in [802.3ah] clause 64. This counter is mandatory for an ONU.')
h3cEponDeviceStatRxFramesQueue0 = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 2, 1, 1, 9), Counter32()).setUnits('frames').setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cEponDeviceStatRxFramesQueue0.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceStatRxFramesQueue0.setDescription('A count of the number of times a -Queue-0- frames reception occurs. A single counter at the ONU and a set of counters, one for each LLID, at the OLT. Increment the counter by one for each frame received for each LLID, which is an output of -Queue-0-. The -Queue-0- marking matched the REPORT MPCP message Queue-0 field, as defined in [802.3ah] clause 64. This counter is mandatory for an ONU and an OLT.')
h3cEponDeviceStatRxFramesQueue1 = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 2, 1, 1, 10), Counter32()).setUnits('frames').setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cEponDeviceStatRxFramesQueue1.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceStatRxFramesQueue1.setDescription('A count of the number of times a -Queue-1- frames reception occurs. A single counter at the ONU and a set of counters, one for each LLID, at the OLT. Increment the counter by one for each frame received for each LLID, which is an output of -Queue-1-. The -Queue-1- marking matched the REPORT MPCP message Queue-1 field, as defined in [802.3ah] clause 64. This counter is mandatory for an ONU and an OLT.')
h3cEponDeviceStatRxFramesQueue2 = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 2, 1, 1, 11), Counter32()).setUnits('frames').setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cEponDeviceStatRxFramesQueue2.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceStatRxFramesQueue2.setDescription('A count of the number of times a -Queue-2- frames reception occurs. A single counter at the ONU and a set of counters, one for each LLID, at the OLT. Increment the counter by one for each frame received for each LLID, which is an output of -Queue-2-. The -Queue-2- marking matched the REPORT MPCP message Queue-2 field, as defined in [802.3ah] clause 64. This counter is mandatory for an ONU and an OLT.')
h3cEponDeviceStatRxFramesQueue3 = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 2, 1, 1, 12), Counter32()).setUnits('frames').setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cEponDeviceStatRxFramesQueue3.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceStatRxFramesQueue3.setDescription('A count of the number of times a -Queue-3- frames reception occurs. A single counter at the ONU and a set of counters, one for each LLID, at the OLT. Increment the counter by one for each frame received for each LLID, which is an output of -Queue-3-. The -Queue-3- marking matched the REPORT MPCP message Queue-3 field, as defined in [802.3ah] clause 64. This counter is mandatory for an ONU and an OLT.')
h3cEponDeviceStatRxFramesQueue4 = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 2, 1, 1, 13), Counter32()).setUnits('frames').setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cEponDeviceStatRxFramesQueue4.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceStatRxFramesQueue4.setDescription('A count of the number of times a -Queue-4- frames reception occurs. A single counter at the ONU and a set of counters, one for each LLID, at the OLT. Increment the counter by one for each frame received for each LLID, which is an output of -Queue-4-. The -Queue-4- marking matched the REPORT MPCP message Queue-4 field, as defined in [802.3ah] clause 64. This counter is mandatory for an ONU and an OLT.')
h3cEponDeviceStatRxFramesQueue5 = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 2, 1, 1, 14), Counter32()).setUnits('frames').setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cEponDeviceStatRxFramesQueue5.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceStatRxFramesQueue5.setDescription('A count of the number of times a -Queue-5- frames reception occurs. A single counter at the ONU and a set of counters, one for each LLID, at the OLT. Increment the counter by one for each frame received for each LLID, which is an output of -Queue-5-. The -Queue-5- marking matched the REPORT MPCP message Queue-5 field, as defined in [802.3ah] clause 64. This counter is mandatory for an ONU and an OLT.')
h3cEponDeviceStatRxFramesQueue6 = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 2, 1, 1, 15), Counter32()).setUnits('frames').setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cEponDeviceStatRxFramesQueue6.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceStatRxFramesQueue6.setDescription('A count of the number of times a -Queue-6- frames reception occurs. A single counter at the ONU and a set of counters, one for each LLID, at the OLT. Increment the counter by one for each frame received for each LLID, which is an output of -Queue-6-. The -Queue-6- marking matched the REPORT MPCP message Queue-6 field, as defined in [802.3ah] clause 64. This counter is mandatory for an ONU and an OLT.')
h3cEponDeviceStatRxFramesQueue7 = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 2, 1, 1, 16), Counter32()).setUnits('frames').setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cEponDeviceStatRxFramesQueue7.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceStatRxFramesQueue7.setDescription('A count of the number of times a -Queue-7- frames reception occurs. A single counter at the ONU and a set of counters, one for each LLID, at the OLT. Increment the counter by one for each frame received for each LLID, which is an output of -Queue-7-. The -Queue-7- marking matched the REPORT MPCP message Queue-7 field, as defined in [802.3ah] clause 64. This counter is mandatory for an ONU and an OLT.')
h3cEponDeviceStatDroppedFramesQueue0 = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 2, 1, 1, 17), Counter32()).setUnits('frames').setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cEponDeviceStatDroppedFramesQueue0.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceStatDroppedFramesQueue0.setDescription('A count of the number of times a -Queue-0- frames drops occurs. Increment the counter by one for each frame dropped from -Queue-0-. The -Queue-0- marking matched the REPORT MPCP message Queue-0 field, as defined in [802.3ah] clause 64. This counter is mandatory for an ONU.')
h3cEponDeviceStatDroppedFramesQueue1 = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 2, 1, 1, 18), Counter32()).setUnits('frames').setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cEponDeviceStatDroppedFramesQueue1.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceStatDroppedFramesQueue1.setDescription('A count of the number of times a -Queue-1- frames drops occurs. Increment the counter by one for each frame dropped from -Queue-1-. The -Queue-1- marking matched the REPORT MPCP message Queue-1 field, as defined in [802.3ah] clause 64. This counter is mandatory for an ONU.')
h3cEponDeviceStatDroppedFramesQueue2 = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 2, 1, 1, 19), Counter32()).setUnits('frames').setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cEponDeviceStatDroppedFramesQueue2.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceStatDroppedFramesQueue2.setDescription('A count of the number of times a -Queue-2- frames drops occurs. Increment the counter by one for each frame dropped from -Queue-2-. The -Queue-2- marking matched the REPORT MPCP message Queue-2 field, as defined in [802.3ah] clause 64. This counter is mandatory for an ONU.')
h3cEponDeviceStatDroppedFramesQueue3 = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 2, 1, 1, 20), Counter32()).setUnits('frames').setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cEponDeviceStatDroppedFramesQueue3.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceStatDroppedFramesQueue3.setDescription('A count of the number of times a -Queue-3- frames drops occurs. Increment the counter by one for each frame dropped from -Queue-3-. The -Queue-3- marking matched the REPORT MPCP message Queue-3 field, as defined in [802.3ah] clause 64. This counter is mandatory for an ONU.')
h3cEponDeviceStatDroppedFramesQueue4 = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 2, 1, 1, 21), Counter32()).setUnits('frames').setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cEponDeviceStatDroppedFramesQueue4.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceStatDroppedFramesQueue4.setDescription('A count of the number of times a -Queue-4- frames drops occurs. Increment the counter by one for each frame dropped from -Queue-4-. The -Queue-4- marking matched the REPORT MPCP message Queue-4 field, as defined in [802.3ah] clause 64. This counter is mandatory for an ONU.')
h3cEponDeviceStatDroppedFramesQueue5 = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 2, 1, 1, 22), Counter32()).setUnits('frames').setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cEponDeviceStatDroppedFramesQueue5.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceStatDroppedFramesQueue5.setDescription('A count of the number of times a -Queue-5- frames drops occurs. Increment the counter by one for each frame dropped from -Queue-5-. The -Queue-5- marking matched the REPORT MPCP message Queue-5 field, as defined in [802.3ah] clause 64. This counter is mandatory for an ONU.')
h3cEponDeviceStatDroppedFramesQueue6 = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 2, 1, 1, 23), Counter32()).setUnits('frames').setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cEponDeviceStatDroppedFramesQueue6.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceStatDroppedFramesQueue6.setDescription('A count of the number of times a -Queue-6- frames drops occurs. Increment the counter by one for each frame dropped from -Queue-6-. The -Queue-6- marking matched the REPORT MPCP message Queue-6 field, as defined in [802.3ah] clause 64. This counter is mandatory for an ONU.')
h3cEponDeviceStatDroppedFramesQueue7 = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 2, 1, 1, 24), Counter32()).setUnits('frames').setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cEponDeviceStatDroppedFramesQueue7.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceStatDroppedFramesQueue7.setDescription('A count of the number of times a -Queue-7- frames drops occurs. Increment the counter by one for each frame dropped from -Queue-7-. The -Queue-7- marking matched the REPORT MPCP message Queue-7 field, as defined in [802.3ah] clause 64. This counter is mandatory for an ONU.')
h3cEponDeviceEventObjectTable = MibTable((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 3, 1), )
if mibBuilder.loadTexts: h3cEponDeviceEventObjectTable.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceEventObjectTable.setDescription('This table defines the Event Objects for EPON devices. The attributes are relevant for an OLT and an ONU.')
h3cEponDeviceEventObjectEntry = MibTableRow((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 3, 1, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: h3cEponDeviceEventObjectEntry.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceEventObjectEntry.setDescription('Table entries for Table of Event objects for EPON devices.')
h3cEponDeviceSampleMinimum = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 3, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(1)).setUnits('seconds').setMaxAccess("readwrite")
if mibBuilder.loadTexts: h3cEponDeviceSampleMinimum.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceSampleMinimum.setDescription("The minimum Frequency of events this system will accept. A system may use the larger values of this minimum to lessen the impact of constant sampling. For larger sampling intervals the system samples less often and suffers less overhead. Unless explicitly resource limited, a system's value for this object SHOULD be 1, allowing as small as a 1 second interval for ongoing trigger sampling. Writing of the value can be done all the time.")
h3cEponDeviceDyingGaspAlarmState = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 3, 1, 1, 2), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cEponDeviceDyingGaspAlarmState.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceDyingGaspAlarmState.setDescription('A read-only variable, which defines the state of the Dying Gasp indication of the OAM alarm indications as described in the [802.3ah] clause 57. When true the device has a dying gasp alarm asserted. When false the dying gasp alarm is reset ')
h3cEponDeviceDyingGaspAlarmEnabled = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 3, 1, 1, 3), TruthValue().clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: h3cEponDeviceDyingGaspAlarmEnabled.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceDyingGaspAlarmEnabled.setDescription('A control to allow DyingGaspAlarm event to be used. When the value is true the event is sampled. When the value is false the event is not sampled. Writing can be done all the time.')
h3cEponDeviceCriticalEventState = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 3, 1, 1, 4), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cEponDeviceCriticalEventState.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceCriticalEventState.setDescription('A read-only variable, which defines the state of the Critical Event indication of the OAM alarm indications as described in the [802.3ah] clause 57. When true the device has a Critical Event asserted. When false the Critical Event is reset.')
h3cEponDeviceCriticalEventEnabled = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 3, 1, 1, 5), TruthValue().clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: h3cEponDeviceCriticalEventEnabled.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceCriticalEventEnabled.setDescription('A control to allow CriticalEvent event to be used. When the value is true the event is sampled. When the value is false the event is not sampled. Writing can be done all the time.')
h3cEponDeviceLocalLinkFaultAlarmState = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 3, 1, 1, 6), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cEponDeviceLocalLinkFaultAlarmState.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceLocalLinkFaultAlarmState.setDescription('A read-only variable, which defines the state of the Local Link Fault indication of the OAM alarm indications as described in the [802.3ah] clause 57. When true the device has a Local Link Fault alarm asserted. When false the Local Link Fault alarm is reset.')
h3cEponDeviceLocalLinkFaultAlarmEnabled = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 3, 1, 1, 7), TruthValue().clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: h3cEponDeviceLocalLinkFaultAlarmEnabled.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceLocalLinkFaultAlarmEnabled.setDescription('A control to allow LocalLinkFaultAlarm event to be used. When the value is true the event is sampled. When the value is false the event is not sampled. Writing can be done all the time.')
h3cEponDeviceTemperatureEventIndicationState = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 3, 1, 1, 8), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cEponDeviceTemperatureEventIndicationState.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceTemperatureEventIndicationState.setDescription('A read-only variable, which defines the state of the Temperature Event indication of an EPON device. When condition of box temperature is above the threshold defined the alarm is asserted. When the condition is below that threshold the alarm is de-asserted. When true the device has a Temperature Event Indication asserted. When false the Temperature Event Indication is reset.')
h3cEponDeviceTemperatureEventIndicationEnabled = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 3, 1, 1, 9), TruthValue().clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: h3cEponDeviceTemperatureEventIndicationEnabled.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceTemperatureEventIndicationEnabled.setDescription('A control to allow TemperatureEventIndication event to be used. When the value is true the event is sampled. When the value is false the event is not sampled. Writing can be done all the time.')
h3cEponDevicePowerVoltageEventIndicationState = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 3, 1, 1, 10), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cEponDevicePowerVoltageEventIndicationState.setStatus('current')
if mibBuilder.loadTexts: h3cEponDevicePowerVoltageEventIndicationState.setDescription('A read-only variable, which defines the state of the Power/Voltage Event Indication of an EPON device. When condition of box Power/voltage is above the threshold defined the alarm is asserted. When the condition is below that threshold the alarm is de-asserted. When true the device has a Power/Voltage Event Indication asserted. When false the Power/Voltage Event Indication is reset. ')
h3cEponDevicePowerVoltageEventIndicationEnabled = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 3, 1, 1, 11), TruthValue().clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: h3cEponDevicePowerVoltageEventIndicationEnabled.setStatus('current')
if mibBuilder.loadTexts: h3cEponDevicePowerVoltageEventIndicationEnabled.setDescription('A control to allow PowerVoltageEventIndication event to be used. When the value is true the event is sampled. When the value is false the event is not sampled. Writing can be done all the time.')
h3cEponDeviceGlobalEventState = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 3, 1, 1, 12), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cEponDeviceGlobalEventState.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceGlobalEventState.setDescription('A read-only variable, which defines the state of the Global Event indication of an EPON device. When the indication of the event input occurs the event is asserted. When the input is removed that event is de-asserted. When true the device has a Global Event asserted. When false the Global Event Indication is reset.')
h3cEponDeviceGlobalEventEnabled = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 3, 1, 1, 13), TruthValue().clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: h3cEponDeviceGlobalEventEnabled.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceGlobalEventEnabled.setDescription('A control to allow GlobalEvent event to be used. When the value is true the event is sampled. When the value is false the event is not sampled. Writing can be done all the time.')
h3cEponDeviceErroredSymbolPeriodEventState = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 3, 1, 1, 14), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cEponDeviceErroredSymbolPeriodEventState.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceErroredSymbolPeriodEventState.setDescription('A read-only variable, which defines the state of the Errored Symbol Period Event indication of the OAM alarm TLV indications as described in the [802.3ah] clause 57.5.3. When true the device has an Errored Symbol Period Event asserted. When false the Errored Symbol Period Event is reset.')
h3cEponDeviceErroredSymbolPeriodEventEnabled = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 3, 1, 1, 15), TruthValue().clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: h3cEponDeviceErroredSymbolPeriodEventEnabled.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceErroredSymbolPeriodEventEnabled.setDescription('A control to allow ErroredSymbolPeriodEvent event to be used. When the value is true the event is sampled. When the value is false the event is not sampled. Writing can be done all the time.')
h3cEponDeviceErroredFrameEventState = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 3, 1, 1, 16), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cEponDeviceErroredFrameEventState.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceErroredFrameEventState.setDescription('A read-only variable, which defines the state of the Errored Frame Event indication of the OAM alarm TLV indications as described in the [802.3ah] clause 57.5.3. When true the device has an Errored Frame Event asserted. When false the Errored Frame Event is reset.')
h3cEponDeviceErroredFrameEventEnabled = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 3, 1, 1, 17), TruthValue().clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: h3cEponDeviceErroredFrameEventEnabled.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceErroredFrameEventEnabled.setDescription('A control to allow ErroredFrameEvent event to be used. When the value is true the event is sampled. When the value is false the event is not sampled. Writing can be done all the time.')
h3cEponDeviceErroredFramePeriodEventState = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 3, 1, 1, 18), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cEponDeviceErroredFramePeriodEventState.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceErroredFramePeriodEventState.setDescription('A read-only variable, which defines the state of the Errored Frame Period Event indication of the OAM alarm TLV indications as described in the [802.3ah] clause 57.5.3. When true the device has an Errored Frame Period Event asserted. When false the Errored Frame Period Event is reset.')
h3cEponDeviceErroredFramePeriodEventEnabled = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 3, 1, 1, 19), TruthValue().clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: h3cEponDeviceErroredFramePeriodEventEnabled.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceErroredFramePeriodEventEnabled.setDescription('A control to allow ErroredFramePeriodEvent event to be used. When the value is true the event is sampled. When the value is false the event is not sampled. Writing can be done all the time.')
h3cEponDeviceErroredFrameSecondsSummaryEventState = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 3, 1, 1, 20), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cEponDeviceErroredFrameSecondsSummaryEventState.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceErroredFrameSecondsSummaryEventState.setDescription('A read-only variable, which defines the state of the Errored Frame Seconds Summary Event indication of the OAM alarm TLV indications as described in the [802.3ah] clause 57.5.3. When true the device has an Errored Frame Seconds Summary Event asserted. When false the Errored Frame Seconds Summary Event is reset.')
h3cEponDeviceErroredFrameSecondsSummaryEventEnabled = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 3, 1, 1, 21), TruthValue().clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: h3cEponDeviceErroredFrameSecondsSummaryEventEnabled.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceErroredFrameSecondsSummaryEventEnabled.setDescription('A control to allow ErroredFrameSecondsSummaryEvent event to be used. When the value is true the event is sampled. When the value is false the event is not sampled. Writing can be done all the time.')
h3cEponDeviceOrganizationSpecificEventState = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 3, 1, 1, 22), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cEponDeviceOrganizationSpecificEventState.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceOrganizationSpecificEventState.setDescription('A read-only variable, which defines the state of the Organization Specific Event indication of the OAM alarm TLV indications as described in the [802.3ah] clause 57.5.3. When true the device has an Organization Specific Event asserted. When false the Organization Specific Event is reset.')
h3cEponDeviceOrganizationSpecificEventEnabled = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 3, 1, 1, 23), TruthValue().clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: h3cEponDeviceOrganizationSpecificEventEnabled.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceOrganizationSpecificEventEnabled.setDescription('A control to allow OrganizationSpecificEvent event to be used. When the value is true the event is sampled. When the value is false the event is not sampled. Writing can be done all the time.')
h3cEponDeviceEventControl = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 3, 1, 1, 24), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("none", 1), ("resetLog", 2), ("useDefaultReporting", 3))).clone(3)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: h3cEponDeviceEventControl.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceEventControl.setDescription('Indicates and controls the resetting of the Event log. Setting this object to none(1) has no action resetLog(2) empties the event log. All data is deleted. Setting it to useDefaultReporting(3) returns all event priorities to their factory-default reporting. Reading this object always returns useDefaultReporting(3).')
h3cEponDeviceEventsLogTable = MibTable((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 3, 2), )
if mibBuilder.loadTexts: h3cEponDeviceEventsLogTable.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceEventsLogTable.setDescription('A table of objects provides a log of notification based on the event as pointed to by entries in those tables. The intent is a MAC level event log (set of events to when they happened). This attribute is relevant for an OLT and an ONU.')
h3cEponDeviceEventsLogEntry = MibTableRow((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 3, 2, 1), ).setIndexNames((0, "A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceEventsLogName"), (0, "A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceEventsLogIndex"))
if mibBuilder.loadTexts: h3cEponDeviceEventsLogEntry.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceEventsLogEntry.setDescription('A group of Events. Applications create and delete entries using h3cEponDeviceEventsEntryStatus. When adding objects to a notification they are added in the lexical order of their index in this table.')
h3cEponDeviceEventsLogName = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 3, 2, 1, 1), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(1, 32)))
if mibBuilder.loadTexts: h3cEponDeviceEventsLogName.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceEventsLogName.setDescription('A locally-unique, administratively assigned name for a group of Events.')
h3cEponDeviceEventsLogIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 3, 2, 1, 2), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295)))
if mibBuilder.loadTexts: h3cEponDeviceEventsLogIndex.setStatus('current')
if mibBuilder.loadTexts: h3cEponDeviceEventsLogIndex.setDescription('An arbitrary integer for the purpose of identifying individual Events within a h3cEponDeviceEventsLogName group.
Events within a group are placed in the notification in the numerical order of this index.') h3cEponDeviceEventsLogID = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 3, 2, 1, 3), ObjectIdentifier().clone((0, 0))).setMaxAccess("readcreate") if mibBuilder.loadTexts: h3cEponDeviceEventsLogID.setStatus('current') if mibBuilder.loadTexts: h3cEponDeviceEventsLogID.setDescription('The object identifier of a MIB module object to add to a Notification that results from the event. Writing can be done all the time.') h3cEponDeviceEventsLogFirstTime = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 3, 2, 1, 4), DateAndTime()).setMaxAccess("readonly") if mibBuilder.loadTexts: h3cEponDeviceEventsLogFirstTime.setStatus('current') if mibBuilder.loadTexts: h3cEponDeviceEventsLogFirstTime.setDescription('The time that an entry was created.') h3cEponDeviceEventsLogLastTime = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 3, 2, 1, 5), DateAndTime()).setMaxAccess("readonly") if mibBuilder.loadTexts: h3cEponDeviceEventsLogLastTime.setStatus('current') if mibBuilder.loadTexts: h3cEponDeviceEventsLogLastTime.setDescription('If multiple events are reported via the same entry, the time that the last event for this entry occurred, otherwise this should have the same value as h3cEponDeviceEventsLogFirstTime.') h3cEponDeviceEventsLogCounts = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 3, 2, 1, 6), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: h3cEponDeviceEventsLogCounts.setStatus('current') if mibBuilder.loadTexts: h3cEponDeviceEventsLogCounts.setDescription('The number of consecutive event instances reported by this entry. This starts at 1 with the creation of this row and increments by 1 for each subsequent duplicate event.') h3cEponDeviceEventsLogType = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 3, 2, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))).clone(namedValues=NamedValues(("h3cEponDeviceDyingGaspAlarmState", 1), ("h3cEponDeviceCriticalEventState", 2), ("h3cEponDeviceLocalLinkFaultAlarmState", 3), ("h3cEponDeviceTemperatureEventIndicationState", 4), ("h3cEponDevicePowerVoltageEventIndicationState", 5), ("h3cEponDeviceGlobalEventState", 6), ("h3cEponDeviceErroredSymbolPeriodEventState", 7), ("h3cEponDeviceErroredFrameEventState", 8), ("h3cEponDeviceErroredFramePeriodEventState", 9), ("h3cEponDeviceErroredFrameSecondsSummaryEventState", 10), ("h3cEponDeviceOrganizationSpecificEventState", 11)))).setMaxAccess("readonly") if mibBuilder.loadTexts: h3cEponDeviceEventsLogType.setStatus('current') if mibBuilder.loadTexts: h3cEponDeviceEventsLogType.setDescription('A list of types for Events. Events are ordered according to their significance where 1 is the highest severity. 
h3cEponDeviceDyingGaspAlarmState(1) indicates a Dying Gasp Alarm State, h3cEponDeviceCriticalEventState(2) indicates a Critical Event State, h3cEponDeviceLocalLinkFaultAlarmState(3) indicates a Local Link Fault Alarm State, h3cEponDeviceTemperatureEventIndicationState(4) indicates a Temperature Event Indication State, h3cEponDevicePowerVoltageEventIndicationState(5) indicates a Power Voltage Event Indication State, h3cEponDeviceGlobalEventState(6) indicates a Global Event State, h3cEponDeviceErroredSymbolPeriodEventState(7) indicates an Errored Symbol Period Event State, h3cEponDeviceErroredFrameEventState(8) indicates an Errored Frame Event State, h3cEponDeviceErroredFramePeriodEventState(9) indicates an Errored Frame Period Event State, h3cEponDeviceErroredFrameSecondsSummaryEventState(10) indicates an Errored Frame Seconds Summary Event State, h3cEponDeviceOrganizationSpecificEventState(11) indicates an Organization Specific Event State. ') h3cEponDeviceEventsLogEntryStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 3, 2, 1, 8), RowStatus()).setMaxAccess("readcreate") if mibBuilder.loadTexts: h3cEponDeviceEventsLogEntryStatus.setStatus('current') if mibBuilder.loadTexts: h3cEponDeviceEventsLogEntryStatus.setDescription('The control that allows creation and deletion of entries. Once made active an entry MAY not be modified except to delete it.') h3cEponDeviceGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 2, 1)) h3cEponDeviceGroupControl = ObjectGroup((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 2, 1, 1)).setObjects(("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceObjectReset"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceObjectModes"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceObjectFecEnabled"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceObjectOamMode"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceObjectDeviceReadyMode"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceObjectPowerDown"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceObjectNumberOfLLIDs"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceObjectReportThreshold"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceRemoteMACAddressLLIDControl")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): h3cEponDeviceGroupControl = h3cEponDeviceGroupControl.setStatus('current') if mibBuilder.loadTexts: h3cEponDeviceGroupControl.setDescription('A collection of objects of h3cEponDevice control definition.') h3cEponDeviceGroupRMadLTable = ObjectGroup((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 2, 1, 2)).setObjects(("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceRMadlLLID"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceRMadlLogID"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceRMadlRemoteAddress"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceRMadlType"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceRMadlAction"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceRMadlEntryStatus")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): h3cEponDeviceGroupRMadLTable = h3cEponDeviceGroupRMadLTable.setStatus('current') if mibBuilder.loadTexts: h3cEponDeviceGroupRMadLTable.setDescription('A collection of objects of h3cEponDevice remote Mac address to LLID table.') h3cEponDeviceGroupStat = ObjectGroup((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 2, 1, 3)).setObjects(("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceStatTxFramesQueue0"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceStatTxFramesQueue1"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", 
"h3cEponDeviceStatTxFramesQueue2"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceStatTxFramesQueue3"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceStatTxFramesQueue4"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceStatTxFramesQueue5"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceStatTxFramesQueue6"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceStatTxFramesQueue7"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceStatRxFramesQueue0"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceStatRxFramesQueue1"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceStatRxFramesQueue2"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceStatRxFramesQueue3"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceStatRxFramesQueue4"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceStatRxFramesQueue5"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceStatRxFramesQueue6"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceStatRxFramesQueue7"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceStatDroppedFramesQueue0"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceStatDroppedFramesQueue1"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceStatDroppedFramesQueue2"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceStatDroppedFramesQueue3"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceStatDroppedFramesQueue4"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceStatDroppedFramesQueue5"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceStatDroppedFramesQueue6"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceStatDroppedFramesQueue7")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): h3cEponDeviceGroupStat = h3cEponDeviceGroupStat.setStatus('current') if mibBuilder.loadTexts: h3cEponDeviceGroupStat.setDescription('A collection of objects of EPON device Statistics') h3cEponDeviceGroupEvent = ObjectGroup((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 2, 1, 4)).setObjects(("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceSampleMinimum"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceDyingGaspAlarmState"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceDyingGaspAlarmEnabled"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceCriticalEventState"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceCriticalEventEnabled"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceLocalLinkFaultAlarmState"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceLocalLinkFaultAlarmEnabled"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceTemperatureEventIndicationState"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceTemperatureEventIndicationEnabled"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDevicePowerVoltageEventIndicationState"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDevicePowerVoltageEventIndicationEnabled"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceGlobalEventState"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceGlobalEventEnabled"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceErroredSymbolPeriodEventState"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceErroredSymbolPeriodEventEnabled"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceErroredFrameEventState"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceErroredFrameEventEnabled"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceErroredFramePeriodEventState"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceErroredFramePeriodEventEnabled"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceErroredFrameSecondsSummaryEventState"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceErroredFrameSecondsSummaryEventEnabled"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", 
"h3cEponDeviceOrganizationSpecificEventState"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceOrganizationSpecificEventEnabled"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceEventControl")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): h3cEponDeviceGroupEvent = h3cEponDeviceGroupEvent.setStatus('current') if mibBuilder.loadTexts: h3cEponDeviceGroupEvent.setDescription('A collection of objects for EPON device Events') h3cEponDeviceGroupEventLog = ObjectGroup((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 2, 1, 5)).setObjects(("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceEventsLogID"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceEventsLogFirstTime"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceEventsLogLastTime"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceEventsLogCounts"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceEventsLogType"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceEventsLogEntryStatus")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): h3cEponDeviceGroupEventLog = h3cEponDeviceGroupEventLog.setStatus('current') if mibBuilder.loadTexts: h3cEponDeviceGroupEventLog.setDescription('A collection of objects for EPON device Events log') h3cEponDeviceCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 2, 2)) h3cEponDeviceCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 2, 2, 1)).setObjects(("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceGroupControl"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceGroupRMadLTable"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceGroupStat"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceGroupEvent"), ("A3COM-HUAWEI-EPON-DEVICE-MIB", "h3cEponDeviceGroupEventLog")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): h3cEponDeviceCompliance = h3cEponDeviceCompliance.setStatus('current') if mibBuilder.loadTexts: h3cEponDeviceCompliance.setDescription('The compliance statement for EPON Devices.') mibBuilder.exportSymbols("A3COM-HUAWEI-EPON-DEVICE-MIB", h3cEponDeviceStatTxFramesQueue3=h3cEponDeviceStatTxFramesQueue3, h3cEponDeviceObjectReportThreshold=h3cEponDeviceObjectReportThreshold, h3cEponDeviceObjects=h3cEponDeviceObjects, h3cEponDeviceRemoteMACAddressLLIDTable=h3cEponDeviceRemoteMACAddressLLIDTable, h3cEponDeviceEventObjectTable=h3cEponDeviceEventObjectTable, PYSNMP_MODULE_ID=h3cEponDeviceMIB, h3cEponDeviceCompliance=h3cEponDeviceCompliance, h3cEponDeviceErroredFrameSecondsSummaryEventState=h3cEponDeviceErroredFrameSecondsSummaryEventState, h3cEponDeviceGroupStat=h3cEponDeviceGroupStat, h3cEponDeviceStatDroppedFramesQueue4=h3cEponDeviceStatDroppedFramesQueue4, h3cEponDeviceDyingGaspAlarmState=h3cEponDeviceDyingGaspAlarmState, h3cEponDeviceRMadlType=h3cEponDeviceRMadlType, h3cEponDeviceEventsLogEntryStatus=h3cEponDeviceEventsLogEntryStatus, h3cEponDeviceRMadlRemoteAddress=h3cEponDeviceRMadlRemoteAddress, h3cEponDeviceObjectNumberOfLLIDs=h3cEponDeviceObjectNumberOfLLIDs, h3cEponDeviceGroupRMadLTable=h3cEponDeviceGroupRMadLTable, h3cEponDeviceObjectPowerDown=h3cEponDeviceObjectPowerDown, h3cEponDeviceErroredSymbolPeriodEventEnabled=h3cEponDeviceErroredSymbolPeriodEventEnabled, h3cEponDeviceEventsLogType=h3cEponDeviceEventsLogType, h3cEponDeviceObjectDeviceReadyMode=h3cEponDeviceObjectDeviceReadyMode, h3cEponDevicePowerVoltageEventIndicationState=h3cEponDevicePowerVoltageEventIndicationState, h3cEponDeviceStatRxFramesQueue4=h3cEponDeviceStatRxFramesQueue4, h3cEponDeviceStatDroppedFramesQueue5=h3cEponDeviceStatDroppedFramesQueue5, 
h3cEponDeviceEventObjects=h3cEponDeviceEventObjects, h3cEponDeviceStatRxFramesQueue1=h3cEponDeviceStatRxFramesQueue1, h3cEponDeviceGroups=h3cEponDeviceGroups, h3cEponDeviceStatTxFramesQueue1=h3cEponDeviceStatTxFramesQueue1, h3cEponDeviceStatTxFramesQueue4=h3cEponDeviceStatTxFramesQueue4, h3cEponDeviceStatDroppedFramesQueue1=h3cEponDeviceStatDroppedFramesQueue1, h3cEponDeviceStatRxFramesQueue3=h3cEponDeviceStatRxFramesQueue3, h3cEponDeviceDyingGaspAlarmEnabled=h3cEponDeviceDyingGaspAlarmEnabled, h3cEponDeviceConformance=h3cEponDeviceConformance, h3cEponDeviceStatObjects=h3cEponDeviceStatObjects, h3cEponDeviceEventsLogCounts=h3cEponDeviceEventsLogCounts, h3cEponDeviceStatDroppedFramesQueue2=h3cEponDeviceStatDroppedFramesQueue2, h3cEponDeviceStatRxFramesQueue6=h3cEponDeviceStatRxFramesQueue6, h3cEponDeviceGroupEventLog=h3cEponDeviceGroupEventLog, h3cEponDeviceStatDroppedFramesQueue6=h3cEponDeviceStatDroppedFramesQueue6, h3cEponDeviceSampleMinimum=h3cEponDeviceSampleMinimum, h3cEponDeviceObjectMIB=h3cEponDeviceObjectMIB, h3cEponDeviceStatDroppedFramesQueue3=h3cEponDeviceStatDroppedFramesQueue3, h3cEponDeviceEventsLogFirstTime=h3cEponDeviceEventsLogFirstTime, h3cEponDeviceErroredFrameSecondsSummaryEventEnabled=h3cEponDeviceErroredFrameSecondsSummaryEventEnabled, h3cEponDeviceStatRxFramesQueue7=h3cEponDeviceStatRxFramesQueue7, h3cEponDeviceStatRxFramesQueue2=h3cEponDeviceStatRxFramesQueue2, h3cEponDeviceControlTable=h3cEponDeviceControlTable, h3cEponDeviceEventsLogLastTime=h3cEponDeviceEventsLogLastTime, h3cEponDeviceStatRxFramesQueue5=h3cEponDeviceStatRxFramesQueue5, h3cEponDeviceRMadlLLID=h3cEponDeviceRMadlLLID, h3cEponDeviceEventObjectEntry=h3cEponDeviceEventObjectEntry, h3cEponDeviceControlObjects=h3cEponDeviceControlObjects, h3cEponDeviceObjectOamMode=h3cEponDeviceObjectOamMode, h3cEponDeviceEventsLogTable=h3cEponDeviceEventsLogTable, h3cEponDeviceObjectFecEnabled=h3cEponDeviceObjectFecEnabled, h3cEponDeviceErroredFramePeriodEventState=h3cEponDeviceErroredFramePeriodEventState, h3cEponDeviceStatDroppedFramesQueue7=h3cEponDeviceStatDroppedFramesQueue7, h3cEponDeviceCriticalEventState=h3cEponDeviceCriticalEventState, h3cEponDeviceStatTxFramesQueue6=h3cEponDeviceStatTxFramesQueue6, h3cEponDeviceStatTxFramesQueue7=h3cEponDeviceStatTxFramesQueue7, h3cEponDeviceGlobalEventState=h3cEponDeviceGlobalEventState, h3cEponDeviceLocalLinkFaultAlarmState=h3cEponDeviceLocalLinkFaultAlarmState, h3cEponDeviceGlobalEventEnabled=h3cEponDeviceGlobalEventEnabled, h3cEponDeviceOrganizationSpecificEventEnabled=h3cEponDeviceOrganizationSpecificEventEnabled, h3cEponDeviceRMadlAction=h3cEponDeviceRMadlAction, h3cEponDeviceTemperatureEventIndicationState=h3cEponDeviceTemperatureEventIndicationState, h3cEponDeviceCompliances=h3cEponDeviceCompliances, h3cEponDeviceGroupEvent=h3cEponDeviceGroupEvent, h3cEponDeviceRMadlLogID=h3cEponDeviceRMadlLogID, h3cEponDeviceRMadlEntryStatus=h3cEponDeviceRMadlEntryStatus, h3cEponDeviceRemoteMACAddressLLIDName=h3cEponDeviceRemoteMACAddressLLIDName, h3cEponDeviceGroupControl=h3cEponDeviceGroupControl, h3cEponDeviceLocalLinkFaultAlarmEnabled=h3cEponDeviceLocalLinkFaultAlarmEnabled, h3cEponDeviceMIB=h3cEponDeviceMIB, h3cEponDeviceObjectReset=h3cEponDeviceObjectReset, h3cEponDevicePowerVoltageEventIndicationEnabled=h3cEponDevicePowerVoltageEventIndicationEnabled, h3cEponDeviceErroredFrameEventState=h3cEponDeviceErroredFrameEventState, h3cEponDeviceStatTxFramesQueue5=h3cEponDeviceStatTxFramesQueue5, h3cEponDeviceRemoteMACAddressLLIDEntry=h3cEponDeviceRemoteMACAddressLLIDEntry, 
h3cEponDeviceStatTxFramesQueue2=h3cEponDeviceStatTxFramesQueue2, h3cEponDeviceStatDroppedFramesQueue0=h3cEponDeviceStatDroppedFramesQueue0, h3cEponDeviceEventsLogName=h3cEponDeviceEventsLogName, h3cEponDeviceStatRxFramesQueue0=h3cEponDeviceStatRxFramesQueue0, h3cEponDeviceControlEntry=h3cEponDeviceControlEntry, h3cEponDeviceErroredFramePeriodEventEnabled=h3cEponDeviceErroredFramePeriodEventEnabled, h3cEponDeviceStatTxFramesQueue0=h3cEponDeviceStatTxFramesQueue0, h3cEponDeviceStatEntry=h3cEponDeviceStatEntry, h3cEponDeviceEventsLogEntry=h3cEponDeviceEventsLogEntry, h3cEponDeviceObjectModes=h3cEponDeviceObjectModes, h3cEponDeviceErroredSymbolPeriodEventState=h3cEponDeviceErroredSymbolPeriodEventState, h3cEponDeviceStatTable=h3cEponDeviceStatTable, h3cEponDeviceTemperatureEventIndicationEnabled=h3cEponDeviceTemperatureEventIndicationEnabled, h3cEponDeviceOrganizationSpecificEventState=h3cEponDeviceOrganizationSpecificEventState, h3cEponDeviceCriticalEventEnabled=h3cEponDeviceCriticalEventEnabled, h3cEponDeviceEventControl=h3cEponDeviceEventControl, h3cEponDeviceErroredFrameEventEnabled=h3cEponDeviceErroredFrameEventEnabled, h3cEponDeviceEventsLogID=h3cEponDeviceEventsLogID, h3cEponDeviceRemoteMACAddressLLIDControl=h3cEponDeviceRemoteMACAddressLLIDControl, h3cEponDeviceEventsLogIndex=h3cEponDeviceEventsLogIndex)
[ 2, 198, 2, 9485, 15571, 7378, 337, 9865, 8265, 317, 18, 9858, 12, 39, 34970, 8845, 40, 12, 8905, 1340, 12, 7206, 27389, 12, 8895, 33, 357, 4023, 1378, 16184, 76, 489, 8937, 13, 785, 14, 79, 893, 11632, 8, 198, 2, 7054, 45, 13, ...
2.944515
23,484
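A minimal usage sketch for the generated MIB module above (not part of the dataset row): loading it with pysnmp's MibBuilder and resolving one of the exported symbols. This assumes the generated A3COM-HUAWEI-EPON-DEVICE-MIB.py file is already on the builder's MIB search path; the symbol name is taken from the exportSymbols call above.

# Hypothetical usage sketch for the generated module above.
from pysnmp.smi import builder, view

mibBuilder = builder.MibBuilder()
# Assumes the generated module sits on the MIB search path.
mibBuilder.loadModules('A3COM-HUAWEI-EPON-DEVICE-MIB')
mibView = view.MibViewController(mibBuilder)

# Resolve one of the symbols exported above to its OID.
(node,) = mibBuilder.importSymbols('A3COM-HUAWEI-EPON-DEVICE-MIB',
                                   'h3cEponDeviceEventControl')
print(node.getName())   # (1, 3, 6, 1, 4, 1, 43, 45, 1, 10, 2, 42, 4, 1, 1, 3, 1, 1, 24)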
'''
 Author Alumet 2015
 https://github.com/Alumet/Codingame
'''

l, c, n = [int(i) for i in input().split()]

groupes=[]
loop_g=[]
loop_m=[]
looping=True

for i in range(n):
    groupes.append([i,int(input())])

result=0
j=0
while j < c:
    load=0
    sub_group=[]
    for i in range (n):
        if l-load>=groupes[0][1]:
            load+=groupes[0][1]
            groupes.append(groupes.pop(0))
            sub_group.append(groupes[0][0])
    if sub_group in loop_g and looping:
        D_gain=result-loop_m[loop_g.index(sub_group)][0]
        D_loop=(j-loop_m[loop_g.index(sub_group)][1])
        repeat=int((c-j)/D_loop)
        result+=int(D_gain*repeat)
        j=D_loop*(repeat+1)+loop_m[loop_g.index(sub_group)][1]+1
        if j<=c:
            result+=load
        looping=False
    else:
        loop_g.append(sub_group)
        loop_m.append([result,j])
        result+=load
        j+=1

print(result)
[ 7061, 6, 198, 220, 6434, 978, 388, 316, 1853, 198, 220, 3740, 1378, 12567, 13, 785, 14, 2348, 388, 316, 14, 34, 7656, 480, 198, 7061, 6, 198, 198, 75, 11, 269, 11, 299, 796, 685, 600, 7, 72, 8, 329, 1312, 287, 5128, 22446, 353...
1.730835
587
import logging
import random
import secrets
import string
import uuid
import hashlib

from django.conf import settings
from django.core.files.storage import FileSystemStorage
from django.http import Http404
from django.shortcuts import redirect
from django.core.mail import send_mail, EmailMultiAlternatives

from DRC.settings import PROJECT_NAME

__module_name = f'{PROJECT_NAME}.' + __name__ + '::'
logger = logging.getLogger(__module_name)


# Mailer Client
def sendmail(mailData=None):
    """
    template = loader.get_template('mail-view/password-reset-mail-view.html')
    mailData = {
        'sender': 'ABC <no-reply@abc.com>',
        'to': [email, ],
        'subject': 'ABC | Password Reset',
        'msg': template.render({'name': Auth_User.objects.get(username=email).get_full_name(),
                                'tempPass': tempPass.get('passWord')}),
    }
    sendmail(mailData=mailData)
    """
    if mailData is None:
        return {
            'status': 500,
            'error': {
                'msg': 'sendmail :: ' + 'Empty Mail Data',
            }
        }
    from_email = mailData.get('sender') if (type(mailData.get('sender')) is str) else None
    reply_to = mailData.get('reply-to') if (type(mailData.get('reply-to')) is list) else None
    cc = mailData.get('cc') if (type(mailData.get('cc')) is list) else None
    bcc = mailData.get('bcc') if (type(mailData.get('bcc')) is list) else None
    to = mailData.get('to') if (type(mailData.get('to')) is list) else None
    subject = mailData.get('subject') if (type(mailData.get('subject')) is str) else ''
    message = mailData.get('msg')
    try:
        msg = EmailMultiAlternatives(subject=subject, body=message, from_email=from_email,
                                     to=to, cc=cc, bcc=bcc, reply_to=reply_to)
        msg.attach_alternative(message, "text/html")
        res = msg.send()
        if res == 1:
            return {
                'status': 200,
            }
    except Exception as e:
        print("mail exp: ", e)
        return {
            'status': 500,
            'error': {
                'msg': 'sendmail :: ' + 'Mail Client Error!',
                'desc': e,
            }
        }
[ 11748, 18931, 198, 11748, 4738, 198, 11748, 13141, 198, 11748, 4731, 198, 11748, 334, 27112, 198, 11748, 12234, 8019, 198, 198, 6738, 42625, 14208, 13, 10414, 1330, 6460, 198, 6738, 42625, 14208, 13, 7295, 13, 16624, 13, 35350, 1330, 9220...
2.193204
1,030
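A call sketch for the sendmail() helper above. The addresses are placeholders, and actual delivery assumes the Django EMAIL_* settings are configured; nothing here comes from the original file beyond the documented dict shape.

# Hypothetical call sketch for sendmail() above.
result = sendmail(mailData={
    'sender': 'Example <no-reply@example.com>',
    'to': ['user@example.com'],
    'subject': 'Example | Test',
    'msg': '<p>Hello</p>',
})
print(result)   # {'status': 200} when the backend reports one message sent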
# Generated by Django 3.0.5 on 2020-11-02 16:58

from django.db import migrations

import django_mysql.models
[ 2, 2980, 515, 416, 37770, 513, 13, 15, 13, 20, 319, 12131, 12, 1157, 12, 2999, 1467, 25, 3365, 198, 198, 6738, 42625, 14208, 13, 9945, 1330, 15720, 602, 198, 11748, 42625, 14208, 62, 28744, 13976, 13, 27530, 628 ]
2.820513
39
import sys

from java.awt import *
from java.io import *
from java.lang import *
from javax.swing import *

from edu.mines.jtk.awt import *
from edu.mines.jtk.dsp import *
from edu.mines.jtk.io import *
from edu.mines.jtk.mosaic import *
from edu.mines.jtk.util import *
from edu.mines.jtk.util.ArrayMath import *

from dnp import *

seismicDir = "/data/seis/tpd/csm/oldslices/"
ffile = "tp73"
s1 = Sampling(251,0.004,0.500)
s2 = Sampling(357,0.025,0.000)
n1,n2 = s1.count,s2.count
fmin,fmax = -1.0,1.0
emin,emax = -1.0,1.0
pmin,pmax = -0.5,0.5

#############################################################################
# graphics

gray = ColorMap.GRAY
jet = ColorMap.JET

#############################################################################
# read/write files

#############################################################################
# Run the function main on the Swing thread
import sys

run(main)
[ 11748, 25064, 198, 198, 6738, 20129, 13, 707, 83, 1330, 1635, 198, 6738, 20129, 13, 952, 1330, 1635, 198, 6738, 20129, 13, 17204, 1330, 1635, 198, 6738, 474, 615, 897, 13, 46737, 1330, 1635, 198, 198, 6738, 1225, 84, 13, 1084, 274, ...
2.83642
324
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
  A temperature/pressure/humidity server application
'''

# Imports
import os
#from __future__ import division
import sys
import cherrypy
from socket import getfqdn as gethostname
from datetime import datetime

# Jinja2 templating
from jinja2 import Environment, FileSystemLoader

# My libraries
from DatabaseModel import read_value_from_db, read_press_from_db, read_humi_from_db, filter_data

# Globals
version = "4.1.4"
uuid='56ty66fa-6kld-8opb-ak29-0t7f5d294686'
device = None
sensor = None

# ------------------------ AUTHENTICATION --------------------------------
from cherrypy.lib import auth_basic

# Tre ;)
users = {'et': 'et'}

# ------------------------ CLASS --------------------------------

# Cherrypy Management

# Secure headers!

if __name__ == '__main__':
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument('--port', action="store", type=int, default=8805)
    parser.add_argument('--devel', action="store_true", default=False)
    parser.add_argument('--root', action="store", default=".")
    parser.add_argument('--pid', action="store", default="/tmp/8805.pid")
    parser.add_argument('--sensehat', action="store_true", default=False)
    args = parser.parse_args()

    # Where to start, what to get
    root = os.path.abspath(args.root)
    os.chdir(root)
    current_dir = os.path.dirname(os.path.abspath(__file__))
    writemypid(args.pid)

    settings = {'global': {'server.socket_host': "0.0.0.0",
                           'server.socket_port' : args.port,
                           'log.screen': True,
                           },
                }
    conf = {'/static': {'tools.staticdir.on': True,
                        'tools.staticdir.root': current_dir,
                        'tools.staticfile.filename': 'icon.png',
                        'tools.staticdir.dir': 'static'
                        },
            '/': {
                  'tools.auth_basic.on': False,
                  'tools.auth_basic.realm': 'localhost',
                  'tools.auth_basic.checkpassword': validate_password,
                  'tools.secureheaders.on' : True,
                  'tools.sessions.on': True,
                  },
            }

    # This is the sensor
    if args.sensehat :
        from SensorHat import Sensor
    else:
        from SensorDS18b20 import Sensor
    sensor = Sensor()

    #cherrypy.config.update(file = 'configuration.conf')
    cherrypy.config.update(settings)
    cherrypy.config.update({'error_page.404': error_page_404})
    cherrypy.tools.secureheaders = cherrypy.Tool('before_finalize', secureheaders, priority=60)

    # To make it ZERO CPU usage
    if args.devel == False:
        cherrypy.engine.timeout_monitor.unsubscribe()
        cherrypy.engine.autoreload.unsubscribe()

    # Jinja2 templates
    env = Environment(loader=FileSystemLoader('view'))
    hostname = gethostname()
    footer = {'version': version, 'hostname': hostname, 'sensor': sensor.sensorid }

    # Cherry insert pages
    serverroot = Root()

    # Start the CherryPy server.
    cherrypy.quickstart(serverroot, config=conf)
[ 2, 48443, 14629, 14, 8800, 14, 24330, 21015, 198, 2, 532, 9, 12, 19617, 25, 3384, 69, 12, 23, 532, 9, 12, 198, 7061, 6, 198, 220, 317, 5951, 14, 36151, 14, 17047, 17995, 4382, 3586, 198, 7061, 6, 198, 198, 2, 1846, 3742, 198, ...
2.389014
1,329
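The row above references a validate_password helper whose body is absent from the dataset row. A minimal sketch of it, assuming the standard cherrypy auth_basic checkpassword signature (realm, username, password) and the module-level users dict defined in the file:

# Sketch of the missing validate_password helper (an assumption, not the
# original implementation). cherrypy calls it with (realm, username, password)
# and expects a boolean.
def validate_password(realm, username, password):
    # 'users' is the module-level {'et': 'et'} dict defined above.
    return users.get(username) == password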
import datetime
import json

import pandas as pd
from dateutil import relativedelta
from rest_framework.generics import ListCreateAPIView, get_object_or_404
from rest_framework.response import Response
from rest_framework.views import APIView

from analytics.events.utils.dataframe_builders import ProductivityLogEventsDataframeBuilder, \
    SupplementEventsDataframeBuilder, SleepActivityDataframeBuilder
from apis.betterself.v1.constants import DAILY_FREQUENCY, MONTHLY_FREQUENCY
from apis.betterself.v1.events.filters import SupplementLogFilter, UserActivityFilter, UserActivityLogFilter, \
    DailyProductivityLogFilter
from apis.betterself.v1.events.serializers import SupplementLogCreateUpdateSerializer, \
    SupplementLogReadOnlySerializer, ProductivityLogReadSerializer, ProductivityLogCreateSerializer, \
    UserActivitySerializer, UserActivityLogCreateSerializer, UserActivityLogReadSerializer, \
    UserActivityUpdateSerializer, ProductivityLogRequestParametersSerializer, \
    SupplementLogRequestParametersSerializer, SupplementReminderReadSerializer, SupplementReminderCreateSerializer, \
    SupplementStackLogSerializer
from apis.betterself.v1.utils.views import ReadOrWriteSerializerChooser, UUIDDeleteMixin, UUIDUpdateMixin
from betterself.utils.date_utils import get_current_userdate
from betterself.utils.pandas_utils import force_start_end_date_to_series, force_start_end_data_to_dataframe, \
    update_dataframe_to_be_none_instead_of_nan_for_api_responses
from config.pagination import ModifiedPageNumberPagination
from events.models import SupplementLog, DailyProductivityLog, UserActivity, UserActivityLog, SupplementReminder, \
    SleepLog
from supplements.models import Supplement, UserSupplementStack

# TODO - Refactor all of this after Twilio integration!
# TODO - Refactor all of this after Twilio integration!


class AggregatedSupplementLogView(APIView):
    # TODO - Refactor all of this after Twilio integration! Wow, this view sucks
    """ Returns a list of dates that Supplement was taken along with the productivity and sleep of that date"""
[ 11748, 4818, 8079, 198, 11748, 33918, 198, 198, 11748, 19798, 292, 355, 279, 67, 198, 6738, 3128, 22602, 1330, 48993, 1572, 12514, 198, 6738, 1334, 62, 30604, 13, 8612, 873, 1330, 7343, 16447, 2969, 3824, 769, 11, 651, 62, 15252, 62, ...
3.646447
577
import pytest

from distro.distro import load
[ 11748, 12972, 9288, 198, 198, 6738, 1233, 305, 13, 17080, 305, 1330, 3440, 628 ]
3.357143
14
""" Logging interface for the fritzconnection library. On module level an instance of `FritzLogger` gets created as `fritzlogger` that can get imported by: >>> from fritzconnection.core.logger import fritzlogger The fritzlogger instance is preset to report on DEBUG level, the default handler is the NullHandler. To do some logging, a handler must be provided: >>> fritzlogger.add_handler(the_handler) >>> fritzlogger.log("the message") # will get logged now In case that another logger is already in use, the other logger can get set as parent for fritzlogger. fritzlogger will then use the parent handlers. >>> fritzlogger.set_parent(another_logger) >>> fritzlogger.log("the message") # will get logged now For convenience fritzlogger provides the methods `set_streamhandler` and `set_filehandler` to add predefined handler. If logging is activated at debug-level, fritzconnection will log all requests and responses. This can produce a lot of output, especial on initializing a FritzConnection-instance. To suppress output the methods `disable` and `enable` can get called. Default mode is enable. """ import logging class FritzLogger: """ Wrapper for the logging library to reduce executable code on module global level. As multiple instances would use the same logger, to not get confused this class is a singleton. """ _instance = None def __new__(cls, *args, **kwargs): """Takes care to be a singleton.""" if cls._instance is None: cls._instance = super().__new__(cls, *args, **kwargs) return cls._instance def __init__(self, level=logging.DEBUG): """Creates the internal logger state.""" self.logger = logging.getLogger("fritzconnection") self.logger.addHandler(logging.NullHandler()) self.loggers = { logging.CRITICAL: self.logger.critical, logging.ERROR: self.logger.error, logging.WARNING: self.logger.warning, logging.INFO: self.logger.info, logging.DEBUG: self.logger.debug, "critical": self.logger.critical, "error": self.logger.error, "warning": self.logger.warning, "info": self.logger.info, "debug": self.logger.debug, } self.set_level(level) def set_level(self, level): """Set the log-level for the logger.""" self.logger.setLevel(level) def set_parent(self, parent): """ Set a parent manually. After calling all registered handlers FritzLogger will call the handlers of the parent chain (which must also all be loggers). Be careful not to create a closed loop of parents! """ self.logger.parent = parent def delete_parent(self): """Deletes the parent logger.""" self.logger.parent = None def disable(self): """Disables the logger.""" self.logger.disabled = True def enable(self): """Enables the logger.""" self.logger.disabled = False def set_streamhandler(self): """Sets the StreamHandler logging to stderr.""" self.add_handler(logging.StreamHandler()) def set_filehandler(self, filename): """Sets the FileHandler logging to the given filename.""" self.add_handler(logging.FileHandler(filename, encoding="utf-8")) def add_handler(self, handler): """ Add a handler to the logger. Handlers will just added once, even if this method get called multiple times with the same handler. """ self.logger.addHandler(handler) def remove_handler(self, handler): """ Remove the given handler from the list of handler. Unknown handlers are ignored. """ self.logger.removeHandler(handler) def log(self, message, level=logging.DEBUG, **kwargs): """ Send the message to the logger. Unknown levels are ignored. 
""" if isinstance(level, str): level = level.lower() logger = self.loggers.get(level) if logger: logger(message, **kwargs) fritzlogger = FritzLogger()
[ 37811, 198, 11187, 2667, 7071, 329, 262, 277, 29574, 38659, 5888, 13, 198, 198, 2202, 8265, 1241, 281, 4554, 286, 4600, 37, 29574, 11187, 1362, 63, 3011, 2727, 355, 4600, 69, 29574, 6404, 1362, 63, 198, 5562, 460, 651, 17392, 416, 25,...
2.660305
1,572
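A short usage sketch for the module above, built only from what its own docstring and methods provide:

# Usage sketch for fritzlogger, per the module's docstring.
from fritzconnection.core.logger import fritzlogger

fritzlogger.set_streamhandler()               # StreamHandler to stderr
fritzlogger.log("request sent", level="info") # string levels are mapped internally
fritzlogger.disable()                         # suppress further output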
import numpy as np


class EnvMap(object):
    """EnvMap object used to load and render map from file"""
[ 11748, 299, 32152, 355, 45941, 198, 198, 4871, 2039, 85, 13912, 7, 15252, 2599, 198, 220, 220, 220, 37227, 4834, 85, 13912, 2134, 973, 284, 3440, 290, 8543, 3975, 422, 2393, 37811, 628, 628, 628, 628, 198 ]
3.027027
37
""" Write a program that calculates and prints the value according to the given formula: Q = Square root of [(2 * C * D)/H] Following are the fixed values of C and H: C is 50. H is 30. D is the variable whose values should be input to your program in a comma-separated sequence. [EXAMPLE] Let us assume the following comma separated input sequence is given: 100,150,180 The output should be: 18,22,24 """ import math C = 50 D = input("\nList of numbers (separated by commas), please: ") H = 30 numbers = list() for n in D.split(','): calc = int(round(math.sqrt((2 * C * int(n)) / H))) numbers.append(str(calc)) print(','.join(numbers))
[ 37811, 19430, 257, 1430, 326, 43707, 290, 20842, 262, 1988, 1864, 198, 220, 220, 220, 284, 262, 1813, 10451, 25, 198, 220, 220, 220, 220, 220, 220, 220, 1195, 796, 9276, 6808, 286, 47527, 17, 1635, 327, 1635, 360, 20679, 39, 60, 628...
2.625455
275
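A quick numeric check of the formula against the example documented in the row above (C = 50, H = 30, inputs 100,150,180 giving 18,22,24):

# Verifies the documented example: Q = sqrt((2*C*D)/H), rounded.
import math
for d, expected in [(100, 18), (150, 22), (180, 24)]:
    assert int(round(math.sqrt((2 * 50 * d) / 30))) == expected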
# this routine creates a standalone tikz image from the code of a single tikz image
[ 2, 428, 8027, 8075, 257, 27669, 256, 1134, 89, 2939, 422, 262, 2438, 286, 257, 2060, 256, 1134, 89, 2939 ]
4.15
20
from django.conf import settings
[ 6738, 42625, 14208, 13, 10414, 1330, 6460, 628 ]
4.25
8
# emacs: -*- mode: python-mode; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
#
#   See COPYING file distributed along with the NiBabel package for the
#   copyright and license terms.
#
### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
''' Tests for nifti reading package '''
from __future__ import with_statement
import os

from StringIO import StringIO

import numpy as np

from ..tmpdirs import InTemporaryDirectory
from ..spatialimages import HeaderDataError
from .. import nifti1 as nifti1
from ..nifti1 import (load, Nifti1Header, Nifti1PairHeader, Nifti1Image,
                      Nifti1Pair, Nifti1Extension, Nifti1Extensions,
                      data_type_codes, extension_codes, slice_order_codes)

from numpy.testing import assert_array_equal, assert_array_almost_equal
from nose.tools import (assert_true, assert_false, assert_equal,
                        assert_raises)
from nose import SkipTest

from ..tmpdirs import InTemporaryDirectory
from ..testing import parametric, data_path

from . import test_analyze as tana
from .test_analyze import _log_chk

header_file = os.path.join(data_path, 'nifti1.hdr')
image_file = os.path.join(data_path, 'example4d.nii.gz')

# Example transformation matrix
R = [[0, -1, 0], [1, 0, 0], [0, 0, 1]]  # rotation matrix
Z = [2.0, 3.0, 4.0]  # zooms
T = [20, 30, 40]  # translations
A = np.eye(4)
A[:3,:3] = np.array(R) * Z  # broadcasting does the job
A[:3,3] = T


@parametric

@parametric

@parametric

@parametric

@parametric

@parametric

@parametric

@parametric

@parametric
[ 2, 795, 16436, 25, 532, 9, 12, 4235, 25, 21015, 12, 14171, 26, 12972, 12, 521, 298, 12, 28968, 25, 604, 26, 33793, 12, 8658, 82, 12, 14171, 25, 18038, 532, 9, 12, 198, 2, 25357, 25, 900, 10117, 28, 29412, 39747, 28, 19, 40379, ...
2.651633
643
import numpy as np
from networktables import NetworkTables

from utils import lazytalonsrx
[ 11748, 299, 32152, 355, 45941, 198, 6738, 3127, 83, 2977, 1330, 7311, 51, 2977, 198, 198, 6738, 3384, 4487, 1330, 16931, 39240, 684, 40914, 628, 198 ]
3.576923
26
# Copyright 2022 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
from src.dataset.pose import PoseDataset
[ 2, 15069, 33160, 43208, 21852, 1766, 1539, 12052, 198, 2, 198, 2, 49962, 739, 262, 24843, 13789, 11, 10628, 362, 13, 15, 357, 1169, 366, 34156, 15341, 198, 2, 345, 743, 407, 779, 428, 2393, 2845, 287, 11846, 351, 262, 13789, 13, 198...
4.251497
167
# Copyright (c) 2021-2022, NVIDIA CORPORATION.

import numpy as np
import pandas as pd
import pytest

import cudf
from cudf.testing._utils import assert_eq


@pytest.mark.parametrize("ts_resolution", ["ns", "s", "ms"])
@pytest.mark.parametrize("rule", ["2S", "10S"])

@pytest.mark.parametrize("rule", ["2S", "10S"])

@pytest.mark.parametrize("rule", ["2S", "10S"])

@pytest.mark.parametrize(
    "in_freq, sampling_freq, out_freq",
    [
        ("1ns", "1us", "us"),
        ("1us", "10us", "us"),
        ("ms", "100us", "us"),
        ("ms", "1s", "s"),
        ("s", "1T", "s"),
        ("1T", "30s", "s"),
        ("1D", "10D", "s"),
        ("10D", "1D", "s"),
    ],
)
[ 2, 15069, 357, 66, 8, 33448, 12, 1238, 1828, 11, 15127, 23929, 44680, 6234, 13, 198, 198, 11748, 299, 32152, 355, 45941, 198, 11748, 19798, 292, 355, 279, 67, 198, 11748, 12972, 9288, 198, 198, 11748, 269, 463, 69, 198, 6738, 269, 4...
1.962857
350
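The test bodies behind those parametrize decorators are absent from the row. A sketch of the kind of body they likely drive, comparing cudf resampling against pandas with assert_eq; the exact rules and kwargs here are assumptions for illustration:

# Hypothetical test body: cudf vs. pandas time-series resampling.
psr = pd.Series(
    range(10),
    index=pd.date_range("2020-01-01", periods=10, freq="1T"),
)
gsr = cudf.from_pandas(psr)
assert_eq(
    psr.resample("3T").sum(),
    gsr.resample("3T").sum(),
    check_freq=False,   # cudf results do not carry a freq attribute
)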
import argparse
import sys
sys.path.insert(0, '../../')

import cv2
import numpy as np
from flexinfer.misc import Config, set_device
from flexinfer.preprocess import build_preprocess
from flexinfer.model import build_model
from flexinfer.postprocess import build_postprocess

PALETTE = [[0, 0, 0], [128, 0, 0], [0, 128, 0], [128, 128, 0], [0, 0, 128],
           [128, 0, 128], [0, 128, 128], [128, 128, 128], [64, 0, 0],
           [192, 0, 0], [64, 128, 0], [192, 128, 0], [64, 0, 128],
           [192, 0, 128], [64, 128, 128], [192, 128, 128], [0, 64, 0],
           [128, 64, 0], [0, 192, 0], [128, 192, 0], [0, 64, 128]]


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Segmentation Inference Demo')
    parser.add_argument('config', help='config file')
    parser.add_argument('image', type=str, help='image file')
    args = parser.parse_args()
    segment(args)
[ 11748, 1822, 29572, 198, 11748, 25064, 198, 17597, 13, 6978, 13, 28463, 7, 15, 11, 705, 40720, 40720, 11537, 198, 198, 11748, 269, 85, 17, 198, 11748, 299, 32152, 355, 45941, 198, 6738, 7059, 259, 2232, 13, 44374, 1330, 17056, 11, 900...
2.432796
372
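An illustration (not part of the original file) of what a PASCAL-VOC-style palette like the one above is typically used for: mapping a class-index mask to an RGB visualization with numpy fancy indexing, then writing it with cv2:

# Colorize a class-index mask with the PALETTE above; the mask here is fake.
mask = np.random.randint(0, len(PALETTE), size=(4, 4))   # per-pixel class ids
color_mask = np.array(PALETTE, dtype=np.uint8)[mask]     # (4, 4, 3) RGB image
cv2.imwrite('mask_vis.png', color_mask[:, :, ::-1])      # RGB -> BGR for cv2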
import unittest

from server import app
from model import (db, connect_to_db, example_data, User, Review, CriticReview,
                   Game, CurrentGame, Cover, Franchise, Genre, Developer,
                   Platform, Screenshot)
from correlation import pearson
import pull_data
import datetime
import gen_fake_data


class RouteIntegrationTests(unittest.TestCase):
    """Tests for the app routes and URL paths"""


class DatabaseTests(unittest.TestCase):
    """Tests for db units (seed methods/user methods) and integration"""

    def setUp(self):
        """Sets up testdb and connects"""

        connect_to_db(app, "postgresql:///testdb")
        db.create_all()
        example_data()

    def tearDown(self):
        """Ends the db session and drops the whole db"""

        db.session.close()
        db.drop_all()

    def test_reprs(self):
        """Tests represenation methods for each db class"""

        self.assertEqual(repr(User.query.first()),
                         "<User user_id=1 username=testo email=testo@test.com age=35 " +
                         "gender=nb_gf>")
        self.assertEqual(repr(Review.query.first()),
                         "<Review user_id=1 game_id=1 score=95>")
        self.assertEqual(repr(CriticReview.query.first()),
                         "<CriticReview critic_code=ign game_id=1 score=100>")
        self.assertEqual(repr(Game.query.first()),
                         "<Game game_id=1 name=Testo release_date=" +
                         str(Game.query.first().release_date) + ">")
        self.assertEqual(repr(CurrentGame.query.first()),
                         "<CurrentGame game_id=1 user_id=1>")
        self.assertEqual(repr(Cover.query.first()),
                         "<Cover cover_id=1 game_id=1 url=///testo.png>")
        self.assertEqual(repr(Franchise.query.first()),
                         "<Franchise franchise_id=1 name=Testo Stories>")
        self.assertEqual(repr(Genre.query.first()),
                         "<Genre genre_id=1 genre=test>")
        self.assertEqual(repr(Developer.query.first()),
                         "<Developer developer_id=1 name=Testo Games>")
        self.assertEqual(repr(Platform.query.first()),
                         "<Platform platform_id=1 name=Testo360>")
        self.assertEqual(repr(Screenshot.query.first()),
                         "<Screenshot screenshot_id=1 game_id=1 url=///test.png>")

    def test_relationships(self):
        """Tests the relationships between the tables"""

        self.assertEqual(User.query.first().reviews, [Review.query.first()])
        self.assertEqual(User.query.first().currently_playing,
                         [CurrentGame.query.first()])
        self.assertEqual(Game.query.first().reviews, Review.query.all())
        self.assertEqual(Game.query.first().critics,
                         [CriticReview.query.first()])
        self.assertEqual(Game.query.first().franchise, Franchise.query.first())
        self.assertEqual(Game.query.first().covers, [Cover.query.first()])
        self.assertEqual(Game.query.first().genres, [Genre.query.first()])
        self.assertEqual(Game.query.first().developers,
                         [Developer.query.first()])
        self.assertEqual(Game.query.first().platforms,
                         [Platform.query.first()])
        self.assertEqual(Game.query.first().screenshots,
                         [Screenshot.query.first()])

    def test_recommendation(self):
        """Tests the recommendation system"""

        user1 = User.query.filter_by(user_id=1).first()
        user2 = User.query.filter_by(user_id=2).first()
        user3 = User.query.filter_by(user_id=3).first()
        self.assertEqual(user1.recommend(), [2])
        self.assertEqual(user2.recommend(), [1])
        self.assertEqual(user3.recommend(), None)


class PearsonTests(unittest.TestCase):
    """Tests for correlation/pearson"""

    def test_pearson(self):
        """Tests that perfectly matched users have correlation 1.0"""

        match_pairs = [(1,1), (2,2), (3,3), (4,4), (5,5)]
        self.assertEqual(pearson(match_pairs), 1.0)

    def test_anti_pearson(self):
        """Tests that perfectly unmatched users have correlation -1.0"""

        unmatched_pairs = [(1,5), (2,4), (3,3), (4,2), (5,1)]
        self.assertEqual(pearson(unmatched_pairs), -1.0)

    def test_mid_pearson(self):
        """Tests that users with mixed values have expected correlation"""

        mixed_pairs = [(1,5), (2,1), (3,2), (4,4), (5,3)]
        self.assertEqual(pearson(mixed_pairs), -0.1)

    def test_zero_pearson(self):
        """Tests that if the denominator is 0 that correlation is 0.0"""

        pairs = [(0,0), (0,0), (0,0), (0,0), (0,0)]
        self.assertEqual(pearson(pairs), 0.0)


class PullDataTests(unittest.TestCase):
    """Tests for pull_data"""

    def test_gameURL(self):
        """Gets base game URL and checks request returns something"""

        gameURL = ('https://igdbcom-internet-game-database-v1.p.mashape.com/' +
                   'games/?fields=id%2Cname%2Csummary%2Cstoryline%2Cfranchise%2Cgenres%2C' +
                   'first_release_date%2Cvideos%2Ccover%2Cdevelopers%2Cscreenshots&order=' +
                   'first_release_date%3Adesc')
        self.assertEqual(pull_data.get_game_url(), gameURL)
        # Removed from regular testing due to the number of games in std. request
        # self.assertIsNotNone(pull_data.make_request(gameURL))

    def test_franchiseURL(self):
        """Gets base franchise URL and checks request returns something"""

        franchiseURL = ('https://igdbcom-internet-game-database-v1.p.mashape.com' +
                        '/franchises/?fields=id%2Cname')
        self.assertEqual(pull_data.get_franchise_url(), franchiseURL)
        # Removed from regular testing due to the number of franchises in std. request
        # self.assertIsNotNone(pull_data.make_request(franchiseURL))

    def test_companyURL(self):
        """Gets base company URL and checks request returns something"""

        companyURL = ('https://igdbcom-internet-game-database-v1.p.mashape.com' +
                      '/companies/6?fields=name')
        self.assertEqual(pull_data.get_company_url(6), companyURL)
        self.assertIsNotNone(pull_data.make_request(companyURL))

    def test_genreURL(self):
        """Gets base genre URL and checks request returns something"""

        genreURL = ('https://igdbcom-internet-game-database-v1.p.mashape.com' +
                    '/genres/?fields=id%2Cname')
        self.assertEqual(pull_data.get_genre_url(), genreURL)
        self.assertIsNotNone(pull_data.make_request(genreURL))

    def test_platformURL(self):
        """Gets base platform URL and checks request returns something"""

        platformURL = ('https://igdbcom-internet-game-database-v1.p.mashape.com' +
                       '/platforms/?fields=id%2Cname%2Cgames')
        self.assertEqual(pull_data.get_platform_url(), platformURL)
        self.assertIsNotNone(pull_data.make_request(platformURL))


if __name__ == "__main__":
    unittest.main()
[ 11748, 555, 715, 395, 198, 198, 6738, 4382, 1330, 598, 198, 6738, 2746, 1330, 357, 9945, 11, 2018, 62, 1462, 62, 9945, 11, 1672, 62, 7890, 11, 11787, 11, 6602, 11, 10056, 291, 14832, 11, 198, 220, 220, 220, 220, 220, 220, 220, 220...
2.373634
2,837
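The correlation module exercised above is not included in the row. A standard Pearson r over (user_a_score, user_b_score) pairs that is consistent with every expected value in those tests (1.0, -1.0, -0.1, and 0.0 on a zero denominator) would look like this; it is a sketch, not the project's actual implementation:

# Pearson correlation over rating pairs; returns 0.0 when the denominator is 0,
# matching test_zero_pearson above.
import math

def pearson(pairs):
    n = len(pairs)
    xs = [a for a, b in pairs]
    ys = [b for a, b in pairs]
    sum_x, sum_y = sum(xs), sum(ys)
    sum_x2 = sum(x * x for x in xs)
    sum_y2 = sum(y * y for y in ys)
    sum_xy = sum(a * b for a, b in pairs)
    numerator = sum_xy - (sum_x * sum_y / n)
    denominator = math.sqrt((sum_x2 - sum_x ** 2 / n) * (sum_y2 - sum_y ** 2 / n))
    if denominator == 0:
        return 0.0
    return numerator / denominator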
# coding=utf-8
from __future__ import absolute_import, division, print_function, \
    unicode_literals


def with_context(exc, context):
    # type: (Exception, dict) -> Exception
    """
    Attaches a ``context`` value to an Exception.

    Before::

        exc = Exception('Frog blast the vent core!')
        exc.context = { ... }
        raise exc

    After::

        raise with_context(Exception('Frog blast the vent core!'), { ... })
    """
    if not hasattr(exc, 'context'):
        exc.context = {}

    exc.context.update(context)

    return exc
[ 2, 19617, 28, 40477, 12, 23, 198, 6738, 11593, 37443, 834, 1330, 4112, 62, 11748, 11, 7297, 11, 3601, 62, 8818, 11, 3467, 198, 220, 28000, 1098, 62, 17201, 874, 628, 198, 4299, 351, 62, 22866, 7, 41194, 11, 4732, 2599, 198, 220, 1...
2.988636
176
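A short usage sketch showing that the attached context is available on the caught exception, following the pattern from the docstring above:

# The context dict travels with the exception to the handler.
try:
    raise with_context(ValueError('bad input'), {'request_id': 42})
except ValueError as e:
    assert e.context == {'request_id': 42}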
__author__    = 'Jovan Brakus <jovan@brakus.rs>'
__contact__   = 'jovan@brakus.rs'
__date__      = '31 May 2012'

from jinja2 import Environment, PackageLoader

jinja_env = Environment(loader=PackageLoader('configserver', 'static/templates'))
[ 834, 9800, 834, 220, 220, 220, 796, 705, 41, 22590, 9718, 45614, 1279, 73, 22590, 31, 16057, 45614, 13, 3808, 29, 6, 198, 834, 32057, 834, 220, 220, 796, 705, 73, 22590, 31, 16057, 45614, 13, 3808, 6, 198, 834, 4475, 834, 220, 220...
2.738636
88
from enum import Enum
[ 6738, 33829, 1330, 2039, 388, 198 ]
3.666667
6
import sys
from PySide2.QtWidgets import QApplication, QMainWindow
from PySide2.QtCore import QFile
from PySide2 import QtCore
from PySide2.QtGui import QPixmap
from ui_files.main_gui import Ui_MainWindow
from ui_files.image_window_gui import Ui_MainWindow as Ui_ImageWindow
from pathlib import Path


#class ImageWindow(MainWindow):


if __name__ == "__main__":
    # Run the aplication
    app = QApplication(sys.argv)
    window = MainWindow()
    window.show()

    # Start the main loop of the program
    sys.exit(app.exec_())
[ 11748, 25064, 198, 6738, 9485, 24819, 17, 13, 48, 83, 54, 312, 11407, 1330, 1195, 23416, 11, 1195, 13383, 27703, 198, 6738, 9485, 24819, 17, 13, 48, 83, 14055, 1330, 1195, 8979, 198, 6738, 9485, 24819, 17, 1330, 33734, 14055, 198, 673...
2.796875
192
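The MainWindow class instantiated in the row above is missing from the dataset row. A minimal sketch of it, assuming the common PySide2 pattern of wiring a Designer-generated Ui_MainWindow onto a QMainWindow:

# Sketch of the missing MainWindow class (an assumption, not the original).
class MainWindow(QMainWindow):
    def __init__(self):
        super().__init__()
        self.ui = Ui_MainWindow()
        self.ui.setupUi(self)   # populate widgets generated from the .ui file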
import numpy as np
import pandas as pd
import glob
import sys
import os


def find_label(file, path):
    """Find audit label relating to spectrogram file.

    # Arguments
        file: spectrogram file.
        path: spectrogram directory.

    # Returns
        audit label file path relating to spectrogram file.
        If the file has multiple audit label files, the first label will be
        returned.
    """
    label_path = os.path.dirname(os.path.dirname(path)) + '/audit/'
    label_search = ''.join(file.split('.')[0].split('_')[0]) + '_' + file.split('.')[0].split('_')[1]
    labels_list = [l for l in glob.glob(label_path + label_search + '*')]
    return labels_list[0]


def create_label_dataframe(label, begin_time, end_time, window_size,
                           timesteps_per_second):
    """Create dataframe, reformated and containing relevant information.

    # Arguments
        label: label path.
        begin_time: start time for the related spectrogram.
        end_time: end time for the related spectrogram.
        window_size: end time - start time.
        timesteps_per_second: spectrogram timesteps / spectrogram window_size.

    # Returns
        Dataframe with relevant label information for the spectrogram file.
    """
    labels_df = pd.read_csv(label, sep='\t', index_col='Selection')

    if 'Label' in labels_df.columns:
        call_labels = ['GIG', 'SQL', 'GRL', 'GRN', 'SQT', 'MOO', 'RUM', 'WHP']
        labels_df.Label = labels_df.Label.str[0:3]
        labels_df = labels_df[labels_df['Label'].isin(call_labels)]
        labels_df = labels_df[labels_df['Begin Time (s)'] <= end_time]
        labels_df = labels_df[labels_df['End Time (s)'] >= begin_time]
        labels_df.loc[labels_df['End Time (s)'] > end_time, 'End Time (s)'] = end_time
        labels_df.loc[labels_df['Begin Time (s)'] < begin_time, 'Begin Time (s)'] = begin_time
        labels_df['Begin Time(t)'] = ((labels_df['Begin Time (s)'] - begin_time) * timesteps_per_second).apply(np.floor)
        labels_df['End Time(t)'] = ((labels_df['End Time (s)'] - begin_time) * timesteps_per_second).apply(np.ceil)

    return labels_df


def create_label_matrix(dataframe, timesteps):
    """Create label matrix of shape (number of classes, timesteps).

    # Arguments
        dataframe: dataframe of label information.
        timesteps: number of timesteps.

    # Returns
        Matrix of 0s and 1s.
        Each column represents a timestep,
        Each row represents a different call type:
            Row 0 = Giggle (GIG)
            Row 1 = Squeal (SQL)
            Row 2 = Growl (GRL)
            Row 3 = Groan (GRN)
            Row 4 = Squitter (SQT)
            Row 5 = Low / Moo (MOO)
            Row 6 = Alarm rumble (RUM)
            Row 7 = Whoop (WHP)

    # Example:
        [[0, 0, 0, 0, 0, 0 ....],
         [0, 0, 0, 0, 0, 0 ....],
         [0, 0, 0, 1, 1, 1 ....],   This represents a Growl in timesteps 3, 4, 5.
         [0, 0, 0, 0, 0, 0 ....],
         [0, 0, 0, 0, 0, 0 ....],
         [0, 0, 0, 0, 0, 0 ....],
         [0, 0, 0, 0, 0, 0 ....],
         [1, 1, 1, 1, 0, 0 ....],]  This represents a Whoop in timesteps 0, 1, 2, 3.
    """
    label = np.zeros((8, timesteps))

    if 'Label' in list(dataframe):
        # create update list
        update_list = []
        for index, row in dataframe.iterrows():
            update_list.append([row['Begin Time(t)'], row['End Time(t)'], row['Label']])

        # overwrite with ones in correct row based on label
        for l in update_list:
            begin_t = int(l[0])
            end_t = int(l[1])+1
            if l[2] == 'GIG':
                label[0][begin_t:end_t] = 1
            elif l[2] == 'SQL':
                label[1][begin_t:end_t] = 1
            elif l[2] == 'GRL':
                label[2][begin_t:end_t] = 1
            elif l[2] == 'GRN':
                label[3][begin_t:end_t] = 1
            elif l[2] == 'SQT':
                label[4][begin_t:end_t] = 1
            elif l[2] == 'MOO':
                label[5][begin_t:end_t] = 1
            elif l[2] == 'RUM':
                label[6][begin_t:end_t] = 1
            elif l[2] == 'WHP':
                label[7][begin_t:end_t] = 1

    return label


paths = ['cc16_352a_converted/spectro/', 'cc16_352b_converted/spectro/',
         'cc16_354a_converted/spectro/', 'cc16_360a_converted/spectro/',
         'cc16_366a_converted/spectro/']
timesteps = 259
window_size = 6
timesteps_per_second = timesteps / window_size

for path in paths:
    for f in os.listdir(path):
        if 'LABEL' not in f:
            label = find_label(f, path)
            begin_time = int(f.split('_')[2].split('sto')[0])
            end_time = int(f.split('_')[2].split('sto')[1].split('s')[0])
            df = create_label_dataframe(label,
                                        begin_time,
                                        end_time,
                                        window_size,
                                        timesteps_per_second)
            label_matrix = create_label_matrix(df, timesteps)
            np.save(path+f[:-4]+'LABEL', label_matrix)
[ 11748, 299, 32152, 355, 45941, 198, 11748, 19798, 292, 355, 279, 67, 198, 11748, 15095, 198, 11748, 25064, 198, 11748, 28686, 628, 198, 4299, 1064, 62, 18242, 7, 7753, 11, 3108, 2599, 198, 220, 220, 220, 37227, 16742, 14984, 6167, 11270...
2.052079
2,381
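Not part of the original script: a minimal sketch showing how a saved label matrix can be decoded back into (call type, begin, end) intervals, assuming the row layout and the timesteps_per_second value defined above.

import numpy as np

CALL_TYPES = ['GIG', 'SQL', 'GRL', 'GRN', 'SQT', 'MOO', 'RUM', 'WHP']

def decode_label_matrix(matrix, timesteps_per_second):
    """Return a list of (call, begin_s, end_s) tuples from a 0/1 label matrix."""
    intervals = []
    for row_idx, row in enumerate(matrix):
        # pad with zeros so every run of 1s has a rising and a falling edge
        padded = np.concatenate(([0], row, [0]))
        edges = np.flatnonzero(np.diff(padded))
        for start, stop in zip(edges[::2], edges[1::2]):
            intervals.append((CALL_TYPES[row_idx],
                              start / timesteps_per_second,
                              stop / timesteps_per_second))
    return intervals

# Example: intervals = decode_label_matrix(np.load('cc16_352a_..._LABEL.npy'), 259 / 6)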
import sys
import tkinter as tk

l = sys.argv[1:]

js = dict()
for j in range(len(l)):
    i = l[j]
    if i.startswith('-'):
        try:
            js[i[1:]] = l[j+1]
        except:
            js['h'] = 1

if 'help' in js or 'h' in js:
    print('sight -x 1920 -y 1080 -color #ff0000 -name sight -r 20 -wd 6 -len 40')
    exit()


def u(key, default, typ=int):
    # NOTE: u() was called but not defined in this snippet; this reconstruction
    # is an assumption: type-convert a CLI value or fall back to a default.
    js[key] = typ(js[key]) if key in js else default


u('waits', 0.2, float)
u('alpha', 0.4, float)
u('name', 'sight', str)
u('color', '#ff0000', str)

if 'x' not in js and 'cx' not in js:
    import win32api, win32con
    js['x'] = win32api.GetSystemMetrics(win32con.SM_CXSCREEN)
if 'y' not in js and 'cy' not in js:
    import win32api, win32con
    js['y'] = win32api.GetSystemMetrics(win32con.SM_CYSCREEN)

u('cx', js['x']//2)
u('cy', js['y']//2)

mn = js['y'] if js['y'] <= js['x'] else js['x']
u('r', mn//50)
u('len', js['r']*2)
u('wd', js['r']//6*2)

l = [
    str(js['wd'])+'x'+str(js['len'])+'+'+str(js['cx']-js['wd']//2)+'+'+str(js['cy']-js['r']-js['len']),
    str(js['len'])+'x'+str(js['wd'])+'+'+str(js['cx']-js['len']-js['r'])+'+'+str(js['cy']-js['wd']//2),
    str(js['wd'])+'x'+str(js['len'])+'+'+str(js['cx']-js['wd']//2)+'+'+str(js['cy']+js['r']),
    str(js['len'])+'x'+str(js['wd'])+'+'+str(js['cx']+js['r'])+'+'+str(js['cy']-js['wd']//2),
]


def f(geometry):
    # NOTE: f() was passed to downs.nThread but not defined in this snippet;
    # this is an assumed stub that draws one crosshair bar as a borderless,
    # semi-transparent, always-on-top window at the given geometry.
    w = tk.Tk()
    w.title(js['name'])
    w.geometry(geometry)
    w.configure(bg=js['color'])
    w.overrideredirect(True)
    w.attributes('-alpha', js['alpha'])
    w.attributes('-topmost', True)
    w.mainloop()


import downs
a = downs.nThread(waits=js['waits'], f=f, args=l, fast=True)
[ 11748, 25064, 201, 198, 11748, 256, 74, 3849, 355, 256, 74, 201, 198, 201, 198, 75, 28, 17597, 13, 853, 85, 58, 16, 47715, 201, 198, 201, 198, 8457, 28, 11600, 3419, 201, 198, 1640, 474, 287, 2837, 7, 11925, 7, 75, 8, 2599, 201,...
1.763231
718
# Name     : Gradescope Export Renamer
# Author   : Ryan Carr
# Modified : 05/17/2019
# Purpose  : Opens .yml file in exports folder and renames .pdfs according
#            to student name.

# Ask user for full path to .yml file

# Open yml file and skip first line

# Process file one submission at a time storing filename and student name in
# a dictionary

# Prompt user for permission before rewriting files

# iterate through dictionary renaming files
[ 2, 6530, 220, 220, 220, 220, 1058, 1902, 2367, 66, 3008, 36472, 7152, 2382, 201, 198, 2, 6434, 220, 220, 1058, 6047, 20765, 201, 198, 2, 40499, 1058, 8870, 14, 1558, 14, 23344, 201, 198, 2, 9330, 381, 577, 1058, 8670, 641, 764, 88...
3.34058
138
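The comments above only outline the renamer. Below is a minimal sketch of those steps; the `filename: student name` line format and the rename_exports name are assumptions, since the real Gradescope .yml layout is not shown in this snippet.

import os

def rename_exports():
    path = input('Full path to .yml file: ')
    export_dir = os.path.dirname(path)
    mapping = {}
    with open(path) as f:
        f.readline()  # skip first line
        for line in f:
            # assumed line format: "<filename>: <student name>"
            filename, _, student = line.partition(':')
            if student.strip():
                mapping[filename.strip()] = student.strip()
    # ask permission before rewriting anything
    if input('Rename {} files? [y/N] '.format(len(mapping))).lower() == 'y':
        for filename, student in mapping.items():
            src = os.path.join(export_dir, filename)
            dst = os.path.join(export_dir, student + '.pdf')
            if os.path.exists(src):
                os.rename(src, dst)

rename_exports()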
# Test methods with long descriptive names can omit docstrings
# pylint: disable=missing-docstring

import io
from os import path, remove
import unittest
import tempfile
import shutil
import time
from collections import OrderedDict

import numpy as np

from Orange.data import Table, DiscreteVariable
from Orange.data.io import TabReader
[ 2, 6208, 5050, 351, 890, 35644, 3891, 460, 42848, 2205, 37336, 198, 2, 279, 2645, 600, 25, 15560, 28, 45688, 12, 15390, 8841, 198, 198, 11748, 33245, 198, 6738, 28686, 1330, 3108, 11, 4781, 198, 11748, 555, 715, 395, 198, 11748, 20218...
3.820225
89
import logging
import os
import platform
import re
import time
import traceback

import prettytable
from airtest.core.android.constant import DEFAULT_ADB_PATH

from airrun.common.command_helper import command_execute
from airrun.common.exception import LocalPackageNotFoundException
from airrun.common.helper import deal_with_python_version

logger = logging.getLogger(__name__)
[ 11748, 18931, 198, 11748, 28686, 198, 11748, 3859, 198, 11748, 302, 198, 11748, 640, 198, 11748, 12854, 1891, 198, 198, 11748, 2495, 11487, 198, 6738, 1633, 9288, 13, 7295, 13, 19411, 13, 9979, 415, 1330, 5550, 38865, 62, 2885, 33, 62, ...
3.584906
106
""" # Part of localization phase """ import sys import os import pickle import keras import keras.backend as K from scripts.tools.utils import ModelUtils def simplify_layer_name(layer_name:str): """ simplify layer name 'conv2d_copy_LA' -> conv2d """ if 'copy' in layer_name: layer_name = layer_name.split("_copy_")[0] if 'insert' in layer_name: layer_name = layer_name.split("_insert_")[0] # '_' in str and str doesn't endwiths '_' if "_" in layer_name: last_chr = layer_name.rfind("_") # print(layer_name[last_chr+1],layer_name[last_chr+1].isdigit()) if last_chr == len(layer_name) -1 or layer_name[last_chr+1].isdigit(): layer_name = layer_name[:last_chr] # print("After",layer_name) return layer_name if __name__ == "__main__": save_path = sys.argv[1] current_container = save_path.rstrip("/").split("/")[-1] bug_list_path = os.path.join(save_path, "bug_list.pkl") with open(bug_list_path,"rb") as fr: bug_list = pickle.load(fr) filter_bugs(bug_list,current_container)
[ 37811, 198, 2, 2142, 220, 286, 42842, 7108, 198, 37811, 198, 11748, 25064, 198, 11748, 28686, 198, 11748, 2298, 293, 198, 11748, 41927, 292, 198, 11748, 41927, 292, 13, 1891, 437, 355, 509, 198, 6738, 14750, 13, 31391, 13, 26791, 1330, ...
2.283644
483
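filter_bugs() is called in the __main__ block above but its definition was not kept in this snippet. A hedged sketch of what such a function might do, assuming each bug entry starts with the name of the layer it was found in; the real implementation in the source repository may differ.

def filter_bugs(bug_list, current_container):
    """Assumed sketch: group bugs by simplified layer name and report them."""
    by_layer = {}
    for bug in bug_list:
        # assumption: each entry is (layer_name, description) or similar
        layer = simplify_layer_name(str(bug[0]))
        by_layer.setdefault(layer, []).append(bug)
    for layer, bugs in sorted(by_layer.items()):
        print("{}: {} bug(s) in layer '{}'".format(current_container, len(bugs), layer))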
# -*- coding: utf-8 -*-

# Form implementation generated from reading ui file 'RecToolUI.ui'
#
# Created by: PyQt5 UI code generator 5.9.2
#
# WARNING! All changes made in this file will be lost!

from PyQt5 import QtCore, QtGui, QtWidgets
[ 2, 532, 9, 12, 19617, 25, 3384, 69, 12, 23, 532, 9, 12, 198, 198, 2, 5178, 7822, 7560, 422, 3555, 334, 72, 2393, 705, 6690, 25391, 10080, 13, 9019, 6, 198, 2, 198, 2, 15622, 416, 25, 9485, 48, 83, 20, 12454, 2438, 17301, 642, ...
2.802326
86
from abc import abstractmethod
from typing import Any, Sequence

from common.serializers.serialization import client_req_rep_store_serializer
from plenum.common.request import Request
[ 6738, 450, 66, 1330, 12531, 24396, 198, 6738, 19720, 1330, 4377, 11, 45835, 198, 198, 6738, 2219, 13, 46911, 11341, 13, 46911, 1634, 1330, 5456, 62, 42180, 62, 7856, 62, 8095, 62, 46911, 7509, 198, 6738, 458, 44709, 13, 11321, 13, 259...
4.111111
45
from contextlib import contextmanager
import os

import tornado.httpserver
import tornado.ioloop
import tornado.web
import requests
import tempfile
import shutil
import subprocess
from git import GitCommandError, Repo, Actor
import github
from fnmatch import fnmatch


@contextmanager


if __name__ == "__main__":
    main()
[ 198, 6738, 4732, 8019, 1330, 4732, 37153, 198, 11748, 28686, 198, 11748, 33718, 13, 5450, 18497, 198, 11748, 33718, 13, 1669, 11224, 198, 11748, 33718, 13, 12384, 198, 11748, 7007, 198, 11748, 20218, 7753, 198, 11748, 4423, 346, 198, 1174...
3.651685
89
""" Integration test for API Warning: this test runs against a real database See README.adoc Lint using: black -t py37 -l 100 --fast ooniapi/tests/integ/test_integration.py Test using: pytest-3 -k test_aggregation """ from datetime import datetime, timedelta from hashlib import shake_128 from urllib.parse import urlencode import time from dateutil.parser import parse as parse_date from tests.utils import * import pytest from ooniapi.measurements import FASTPATH_MSM_ID_PREFIX # The flask app is created in tests/conftest.py @pytest.fixture() def fastpath_dup_rid_input(app): """ Access DB directly. Fetch > 1 measurements from fastpath that share the same report_id and input Returns (rid, input, count) """ # Too slow sql = """ SELECT report_id, input from fastpath group by report_id, input HAVING count(*) > 1 LIMIT 1 """ with app.app_context(): for row in app.db_session.execute(sql): return (row[0], row[1]) @pytest.fixture() def fastpath_rid_input(app): """Access DB directly. Get a fresh msmt Returns (rid, input) """ sql = """ SELECT report_id, input, test_start_time FROM fastpath WHERE input IS NOT NULL ORDER BY measurement_start_time DESC LIMIT 1 """ rid, inp, test_start_time = dbquery(app, sql)[0:3] assert rid.strip() assert inp.strip() return (rid, inp, test_start_time) def dbquery(app, sql, **query_params): """Access DB directly, returns row as tuple.""" with app.app_context(): q = app.db_session.execute(sql, query_params) return q.fetchone() # # rate limiting / quotas # # @pytest.mark.skipif(not pytest.proddb, reason="use --proddb to run") # FIXME @pytest.mark.skipif(not pytest.proddb, reason="use --proddb to run") # FIXME @pytest.fixture() @pytest.mark.skipif(not pytest.proddb, reason="use --proddb to run") # FIXME @pytest.mark.skip("SLOW") # # list_files # # @pytest.mark.skip(reason="DROP") @pytest.mark.skip(reason="DROP") @pytest.mark.skip(reason="DROP") @pytest.mark.skip(reason="DROP") @pytest.mark.skip(reason="DROP") # # get_measurement_meta # # @pytest.mark.skipif(not pytest.proddb, reason="use --proddb to run") # https://explorer.ooni.org/measurement/20210622T144545Z_riseupvpn_MM_133384_n1_VJkB5EObudGDpy9Y @pytest.mark.skipif(not pytest.proddb, reason="use --proddb to run") # # list_measurements # # @pytest.mark.skipif(not pytest.proddb, reason="use --proddb to run") # FIXME @pytest.mark.skipif(not pytest.proddb, reason="use --proddb to run") # FIXME # # Test slow list_measurements queries with order_by = None # pytest-3 ooniapi/tests/integ/test_integration.py \ # -k test_list_measurements_slow --durations=40 -s -x # These are some of the parameters exposed by Explorer Search @pytest.mark.parametrize("probe_cc", ("US", None)) @pytest.mark.parametrize("since", ("2021-01-01", None)) @pytest.mark.parametrize("test_name", ("web_connectivity", None)) @pytest.mark.parametrize("anomaly", ("true", None)) @pytest.mark.parametrize("domain", ("twitter.com", None)) @pytest.mark.parametrize("f", ("probe_cc=YT", "probe_asn=AS3352", "test_name=web_connectivity")) @pytest.mark.parametrize("f", ("anomaly=true", "domain=twitter.com")) # This is the hard case. When mixing one filter that applies to the # measurements table and one on the results table, there is no way to do # an order_by that avoids heavy scans @pytest.mark.parametrize("f1", ("probe_cc=YT", "test_name=web_connectivity")) @pytest.mark.parametrize("f2", ("anomaly=true", "domain=twitter.com")) @pytest.mark.skipif(not pytest.proddb, reason="use --proddb to run") @pytest.mark.skip(reason="Broken. 
To be fixed after updating Flask") def today_range(): """Return since/until pair to extract fresh fastpath entries""" since = datetime.utcnow().date() until = since + timedelta(days=1) return since, until @pytest.mark.parametrize("anomaly", (True, False)) @pytest.mark.parametrize("confirmed", (True, False)) @pytest.mark.parametrize("failure", (True, False)) def test_list_measurements_filter_flags_fastpath(anomaly, confirmed, failure, client, log): """Test filtering by anomaly/confirmed/msm_failure using the cartesian product SELECT COUNT(*), anomaly, confirmed, msm_failure AS failure FROM fastpath WHERE measurement_start_time > '2021-07-09' AND measurement_start_time <= '2021-07-10' GROUP BY anomaly, confirmed, failure ORDER BY anomaly, confirmed, failure ASC; ┌─count()─┬─anomaly─┬─confirmed─┬─failure─┐ │ 8796 │ f │ f │ f │ │ 454 │ f │ f │ t │ │ 714 │ t │ f │ f │ │ 13 │ t │ f │ t │ │ 9 │ t │ t │ f │ │ 2 │ t │ t │ t │ └─────────┴─────────┴───────────┴─────────┘ """ p = f"measurements?since=2021-07-09&until=2021-07-10&anomaly={anomaly}" p += f"&confirmed={confirmed}&failure={failure}&limit=100" p = p.lower() log.info("Calling %s", p) response = api(client, p) for r in response["results"]: assert r["anomaly"] == anomaly, r assert r["confirmed"] == confirmed, r assert r["failure"] == failure, r i = anomaly * 4 + confirmed * 2 + failure * 1 thresholds = [100, 100, 0, 0, 100, 13, 9, 2] assert len(response["results"]) == thresholds[i], len(response["results"]) ## get_measurement ## @pytest.mark.skip(reason="broken") @pytest.mark.skipif(not pytest.proddb, reason="use --proddb to run") def test_get_measurement_2(log, client, fastpath_rid_input): """Simulate Explorer behavior Get a measurement from the fastpath table """ # Get a real rid/inp directly from the database rid, inp, test_start_time = fastpath_rid_input # This has collisions with data from the traditional pipeline p = f"measurements?report_id={rid}&input={inp}" log.info("Calling API on %s", p) response = api(client, p) assert response["metadata"]["count"] > 0, jd(response) assert len(response["results"]) == 1, jd(response) pick = response["results"][0] url_substr = "measurement/{}".format(FASTPATH_MSM_ID_PREFIX) assert url_substr in pick["measurement_url"] assert "anomaly" in pick, pick.keys() assert pick["scores"] != {} assert "blocking_general" in pick["scores"] url = pick["measurement_url"] relurl = url[27:] log.info("Calling API on %r", relurl) msm = api(client, relurl) # Assure the correct msmt was received msm = api(client, relurl) for f in ("probe_asn", "probe_cc", "report_id", "input", "test_name"): # (measurement_start_time differs in the timezone letter) assert msm[f] == pick[f], "%r field: %r != %r" % (f, msm[f], pick[f]) @pytest.mark.skipif(not pytest.proddb, reason="use --proddb to run") @pytest.mark.skipif(not pytest.proddb, reason="use --proddb to run") @pytest.mark.skipif(not pytest.proddb, reason="use --proddb to run") @pytest.mark.skipif(not pytest.proddb, reason="use --proddb to run") @pytest.mark.skipif(not pytest.proddb, reason="use --proddb to run") @pytest.mark.skipif(not pytest.proddb, reason="use --proddb to run") @pytest.mark.skipif(not pytest.proddb, reason="use --proddb to run") @pytest.mark.skipif(not pytest.proddb, reason="use --proddb to run") @pytest.mark.skipif(not pytest.proddb, reason="use --proddb to run") ## files_download ## @pytest.mark.skip(reason="legacy") @pytest.mark.skip(reason="legacy") @pytest.mark.skip(reason="legacy") @pytest.mark.skip(reason="legacy")
[ 37811, 198, 34500, 1358, 1332, 329, 7824, 198, 198, 20361, 25, 428, 1332, 4539, 1028, 257, 1103, 6831, 198, 6214, 20832, 11682, 13, 324, 420, 198, 198, 43, 600, 1262, 25, 198, 220, 220, 220, 2042, 532, 83, 12972, 2718, 532, 75, 1802...
2.486174
3,110
# creates a polygonal buffer in geojson format given ...
# xin, yin = centre point
# radius = buffer radius
# npoints = number of points (e.g. 3 for triangle, 6 for hex, Inf. for circle, etc.)

import math
import matplotlib.pyplot as plt

print(buffer(50, 50, 1, 24))
[ 2, 8075, 257, 7514, 14520, 78, 282, 11876, 287, 4903, 13210, 1559, 5794, 1813, 2644, 198, 2, 2124, 259, 11, 88, 259, 796, 7372, 966, 198, 2, 16874, 796, 11876, 16874, 198, 2, 299, 13033, 796, 1271, 286, 2173, 357, 68, 13, 70, 13, ...
3
88
# pylint: disable=no-self-use,invalid-name
import gzip
import numpy
import pytest

from deep_qa.data.embeddings import PretrainedEmbeddings
from deep_qa.data.data_indexer import DataIndexer
from ..common.test_case import DeepQaTestCase

# pylint: disable=protected-access
[ 2, 279, 2645, 600, 25, 15560, 28, 3919, 12, 944, 12, 1904, 11, 259, 12102, 12, 3672, 198, 11748, 308, 13344, 198, 11748, 299, 32152, 198, 11748, 12972, 9288, 198, 198, 6738, 2769, 62, 20402, 13, 7890, 13, 20521, 67, 654, 1330, 37123...
2.98913
92
import rot13


if __name__ == '__main__':
    """ Perform rotational encryption on an input. """

    source_file = 'cc.txt'
    state = 'r'
    alphabet = 'abcdefghijklmnopqrstuvwxyz'
    rotation = 13
    output_file = 'ccenc.txt'
    output_mode = 'w'

    rotation_dict = rot13.assign_and_return_positions(alphabet, rotation)

    # get source
    with open(source_file, state) as f:
        source = f.read()

    print(rotation_dict)

    encoded_source = rot13.apply_substitution(rotation_dict, source)

    print(encoded_source)

    with open(output_file, output_mode) as f:
        f.write(encoded_source)
[ 198, 11748, 5724, 1485, 628, 198, 361, 11593, 3672, 834, 6624, 705, 834, 12417, 834, 10354, 198, 220, 220, 220, 37227, 35006, 5724, 864, 15835, 319, 281, 5128, 13, 198, 220, 220, 220, 220, 198, 220, 220, 220, 37227, 628, 220, 220, 2...
2.432432
259
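The rot13 module imported above is not included in this snippet. A hedged sketch of the two functions the script calls, assuming assign_and_return_positions builds a substitution table and apply_substitution applies it character by character; the real module may differ.

# rot13.py (assumed sketch)

def assign_and_return_positions(alphabet, rotation):
    """Map each letter to the letter `rotation` places later, wrapping around."""
    return {ch: alphabet[(i + rotation) % len(alphabet)]
            for i, ch in enumerate(alphabet)}

def apply_substitution(rotation_dict, source):
    """Substitute every mapped character; leave everything else unchanged."""
    return ''.join(rotation_dict.get(ch, ch) for ch in source)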
#!/usr/bin/python3
'''
/******************************************************************
 *
 * Copyright 2018 Samsung Electronics All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 ******************************************************************/
'''

import os

a = make_tc_list('./', 'tc_list.txt')
print(a)

'''
if info[]
    sun = (row.split(',')[1]).split(':')[1]
    tcn = (row.split(',')[2]).split(':')[1]
    pcn = (row.split(',')[3]).split(':')[1]
    #print('-u '+sun+'-c '+tcn+'-g '+pcn)
if "time" in row:
    #print(row.split("time")[1])
'''
[ 2, 48443, 14629, 14, 8800, 14, 29412, 18, 198, 7061, 6, 198, 14, 17174, 17174, 1174, 198, 9, 198, 9, 15069, 2864, 10397, 27828, 1439, 6923, 33876, 13, 198, 9, 198, 9, 198, 9, 198, 9, 49962, 739, 262, 24843, 13789, 11, 10628, 362, ...
3.100575
348
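make_tc_list() is called above but not defined in this copy. A minimal sketch reverse-engineered from the commented-out fragment (comma-separated rows with colon-delimited fields); the field meanings and the row format are assumptions.

import os

def make_tc_list(exports_dir, list_file):
    """Assumed sketch: collect (suite, testcase, package) rows from list_file."""
    tc_list = []
    with open(os.path.join(exports_dir, list_file)) as f:
        for row in f:
            parts = row.strip().split(',')
            if len(parts) >= 4:
                sun = parts[1].split(':')[1]  # suite name (assumed)
                tcn = parts[2].split(':')[1]  # testcase name (assumed)
                pcn = parts[3].split(':')[1]  # package name (assumed)
                tc_list.append((sun, tcn, pcn))
    return tc_list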
#!/usr/bin/env python
#
# Encrypt sys.stdin
#

import fileinput
import os.path
import logging

from Crypto.Cipher import PKCS1_v1_5
from Crypto.PublicKey import RSA

ROOT = os.path.dirname(__file__)
PRIV_KEY = os.path.join(ROOT, 'rsa_1024_priv.pem')
PUB_KEY = os.path.join(ROOT, 'rsa_1024_pub.pem')


if __name__ == '__main__':
    logging.basicConfig(level=logging.INFO)
    main()
[ 2, 48443, 14629, 14, 8800, 14, 24330, 21015, 198, 2, 198, 2, 14711, 6012, 25064, 13, 19282, 259, 198, 2, 220, 198, 198, 11748, 2393, 15414, 198, 11748, 28686, 13, 6978, 198, 11748, 18931, 198, 198, 6738, 36579, 13, 34, 10803, 1330, ...
2.4125
160
import copy
import hashlib
import importlib
import inspect
import json
import os
import sys

from database import get_database
[ 11748, 4866, 198, 11748, 12234, 8019, 198, 11748, 1330, 8019, 198, 11748, 10104, 198, 11748, 33918, 198, 11748, 28686, 628, 198, 198, 11748, 25064, 198, 198, 6738, 6831, 1330, 651, 62, 48806, 628, 628, 628, 628 ]
3.805556
36
""" Operational Sutras """ from sanskrit_parser.base.sanskrit_base import SanskritImmutableString, SLP1 from decimal import Decimal from copy import deepcopy from sanskrit_parser.generator.paninian_object import PaninianObject import logging logger = logging.getLogger(__name__) # Global Domains # Base class
[ 37811, 198, 18843, 864, 45220, 8847, 198, 198, 37811, 198, 6738, 264, 34738, 799, 62, 48610, 13, 8692, 13, 82, 34738, 799, 62, 8692, 1330, 46178, 24675, 18187, 10100, 11, 12419, 47, 16, 198, 6738, 32465, 1330, 4280, 4402, 198, 6738, 4...
3.336842
95
import pytest

from satellite.vault.transformer import (
    FormDataTransformer,
    TransformerError,
    XMLTransformer,
)

XML_PAYLOAD = b"""<CC>
    <Foo>PREFIX<Bar>TEXT1</Bar>TEXT2<Bar>TEXT3</Bar>TAIL</Foo>
    <Number>4111111111111111</Number>
    <Number>4444333322221111</Number>
    <CVC>123</CVC>
</CC>"""


@pytest.mark.parametrize('payload,expected', [
    ('', ''),
    ('f1=v1', 'f1=transformed_v1'),
    (b'f1=v1', 'f1=transformed_v1'),
    (b'f1=v1&f1=v2', 'f1=transformed_v1&f1=transformed_v2'),
    (b'f1=v1&f2=v2', 'f1=transformed_v1&f2=v2'),
    (b'f1=', 'f1='),
    (b'f1', 'f1='),
])


@pytest.mark.parametrize('expressions', [
    [],
    ['/CC/CVC', '//Number'],
    ['/CC/Foo'],
])
[ 11748, 12972, 9288, 198, 198, 6738, 11210, 13, 85, 1721, 13, 7645, 16354, 1330, 357, 198, 220, 220, 220, 5178, 6601, 8291, 16354, 11, 198, 220, 220, 220, 3602, 16354, 12331, 11, 198, 220, 220, 220, 23735, 8291, 16354, 11, 198, 8, 62...
1.908847
373
############################################################################
# Copyright ESIEE Paris (2018)                                             #
#                                                                          #
# Contributor(s) : Benjamin Perret                                         #
#                                                                          #
# Distributed under the terms of the CECILL-B License.                     #
#                                                                          #
# The full license is in the file LICENSE, distributed with this software. #
############################################################################

import unittest
import numpy as np
import higra as hg

# needed for reliable access to resource files...
import os
import os.path

_my_path = os.path.dirname(os.path.abspath(__file__))
graph_file = os.path.join(_my_path, "..", "resources", "test.graph")


if __name__ == '__main__':
    unittest.main()
[ 29113, 29113, 7804, 4242, 198, 2, 15069, 412, 11584, 6500, 6342, 357, 7908, 8, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, ...
2.001908
524
# **************************************************************************** #
#                                                                              #
#    MovieTitleScraper.py                                                      #
#                                                                              #
#    By: jeudy2552 <jeudy2552@floridapoly.edu>                                 #
#                                                                              #
#    Created: 2019/03/23 13:22:36 by jeudy2552                                 #
#    Updated: 2019/03/23 16:54:37 by jeudy2552                                 #
#                                                                              #
# **************************************************************************** #

from selenium import webdriver
import selenium
from selenium.webdriver.support.ui import Select
import re

targetURL = "https://www.listchallenges.com/the-most-important-movies-since-1975"
buttonPath = '/html/body/form/div[7]/div[1]/div[3]/div[1]/div[1]/div[2]/ul/li[12]/a'

chrome_driver = webdriver.Chrome()
chrome_driver.get(targetURL)
names = []

for i in range(1, 11):
    for j in range(1, 41):
        gridItem = "/html/body/form/div[7]/div[1]/div[3]/div[1]/div[4]/div[1]/div[" + str(j) + "]/div/div[3]"
        try:
            name = chrome_driver.find_element_by_xpath(gridItem).text
            print(name)
            names.append(name)
        except Exception as e:
            print(e)
            names.append("no element found")
    try:
        chrome_driver.find_element_by_xpath(buttonPath).click()
    except:
        pass

with open("titles.txt", "w+") as f:
    f.write('\n'.join(names))
[ 2, 41906, 17174, 46068, 1303, 198, 2, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, ...
1.747573
1,030
############################################################
#   File: utils.py                                         #
#   Created: 2019-11-18 20:50:50                           #
#   Author : wvinzh                                        #
#   Email : wvinzh@qq.com                                  #
#   ------------------------------------------             #
#   Description: utils.py                                  #
#   Copyright@2019 wvinzh, HUST                            #
############################################################

import os
import random
import numpy as np
import torch
import shutil
import logging


def accuracy(output, target, topk=(1,)):
    """Computes the precision@k for the specified values of k"""
    with torch.no_grad():
        maxk = max(topk)
        batch_size = target.size(0)

        _, pred = output.topk(maxk, 1, True, True)
        pred = pred.t()
        correct = pred.eq(target.view(1, -1).expand_as(pred))

        res = []
        for k in topk:
            correct_k = correct[:k].view(-1).float().sum(0, keepdim=True)
            res.append(correct_k.mul_(100.0 / batch_size))
        return res
[ 29113, 14468, 7804, 4242, 198, 2, 220, 220, 9220, 25, 3384, 4487, 13, 9078, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 22...
2.012111
578
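A short usage sketch for accuracy() above (not in the original file): it expects raw class scores of shape (batch, num_classes) plus integer class targets, and returns one percentage tensor per requested k.

import torch

logits = torch.randn(8, 10)           # batch of 8 samples, 10 classes
targets = torch.randint(0, 10, (8,))  # ground-truth class indices
top1, top5 = accuracy(logits, targets, topk=(1, 5))
print(float(top1), float(top5))       # e.g. 12.5 50.0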
import bcrypt as b

from flask import render_template, request, redirect, url_for, session, abort
from sqlalchemy import or_

from app import app
from app.models.users import User
from app.models.speakers import Speaker
from app.models.students import Student

SALT = b'$2b$12$QSEeNz4SOAKE/RUZT4zNHO'


@app.route('/login')


@app.route("/logout/<string:error>")


@app.route('/login', methods=['POST'])


@app.route('/forgotten-password')


@app.route('/register')


@app.route('/users')


@app.route('/users/<int:id>')
[ 11748, 275, 29609, 355, 275, 198, 198, 6738, 42903, 1330, 8543, 62, 28243, 11, 2581, 11, 18941, 11, 19016, 62, 1640, 11, 6246, 11, 15614, 198, 6738, 44161, 282, 26599, 1330, 393, 62, 198, 6738, 598, 1330, 598, 198, 6738, 598, 13, 27...
2.703125
192
from pytest import approx
[ 6738, 12972, 9288, 1330, 5561, 198 ]
4.333333
6
class Settings:
    """ organize our game settings """

    def __init__(self):
        """ initialize our game settings """
        self.screen_width = 800
        self.screen_height = 600
        self.screen_mid_x = (self.screen_width/2)
        self.screen_mid_y = (self.screen_height/2)
        self.direction_list = [1, -1]
        self.bg_color = (113, 143, 30)
        self.screen_rows = self.screen_height / 12
        self.player_life_limit = 3

    def set_resolution(self, x_y: tuple):
        """ change screen dimensions """
        self.screen_width = x_y[0]
        self.screen_height = x_y[1]
[ 198, 4871, 16163, 25, 198, 220, 220, 220, 37227, 16481, 674, 983, 6460, 37227, 628, 220, 220, 220, 825, 11593, 15003, 834, 7, 944, 2599, 198, 220, 220, 220, 220, 220, 220, 220, 37227, 41216, 674, 983, 6460, 37227, 198, 220, 220, 220...
2.289963
269
""" edx_course_team_api Django application initialization. """ from django.apps import AppConfig class EdxCourseTeamApiConfig(AppConfig): """ Configuration for the edx_course_team_api Django application. """ name = 'edx_course_team_api' plugin_app = { 'url_config': { 'cms.djangoapp': { 'namespace': 'edx_course_team_api', 'regex': r'^sn-api/course-team/', }, }, 'settings_config': { 'cms.djangoapp': { 'common': {'relative_path': 'settings.common'}, }, }, }
[ 37811, 198, 276, 87, 62, 17319, 62, 15097, 62, 15042, 37770, 3586, 37588, 13, 198, 37811, 198, 198, 6738, 42625, 14208, 13, 18211, 1330, 2034, 16934, 628, 198, 4871, 1717, 87, 49046, 15592, 32, 14415, 16934, 7, 4677, 16934, 2599, 198, ...
2.023026
304
import hashlib
import json
import requests
from requests.packages.urllib3.exceptions import InsecureRequestWarning
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
from datetime import datetime
import sys
import click
import glob
import os


@click.group()


@cli.command(help="Create Ova for VM to backup")


@cli.command(help="Download and delete Ova from Nutanix to Download Folder")


@cli.command(help="Test config file after editing")


if __name__ == '__main__':
    try:
        with open("nutanix-backup.json", "r") as config_file:
            cfg = json.load(config_file)
    except Exception as e:
        print("Error Message: " + str(e))
        os._exit(1)
    cli()
[ 11748, 12234, 8019, 198, 11748, 33918, 198, 11748, 7007, 198, 6738, 7007, 13, 43789, 13, 333, 297, 571, 18, 13, 1069, 11755, 1330, 554, 22390, 18453, 20361, 198, 8897, 3558, 13, 43789, 13, 333, 297, 571, 18, 13, 40223, 62, 40539, 654,...
2.813765
247
from flask import Flask, render_template, redirect, abort
import os

from flask_pymongo import PyMongo

application = Flask(__name__)
application.config['MONGO_DBNAME'] = 'server2'

with application.app_context():
    mongo = PyMongo(application)
    d = mongo.db.classes


@application.route("/")


@application.route("/class/<course>")


@application.route("/note/<string>")


if __name__ == "__main__":
    # Setting debug to True enables debug output. This line should be
    # removed before deploying a production app.
    application.debug = False
    application.run()
[ 6738, 42903, 1330, 46947, 11, 8543, 62, 28243, 11, 18941, 11, 15614, 198, 11748, 28686, 198, 198, 6738, 42903, 62, 79, 4948, 25162, 1330, 9485, 44, 25162, 198, 198, 31438, 796, 46947, 7, 834, 3672, 834, 8, 198, 31438, 13, 11250, 17816...
3.136612
183
################################################################################ # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. ################################################################################ from abc import ABC, abstractmethod from typing import List, Dict, Any from pyflink.datastream import StreamExecutionEnvironment from pyflink.java_gateway import get_gateway from pyflink.table import Table from pyflink.util.java_utils import to_jarray from pyflink.ml.core.api import Model, Transformer, AlgoOperator, Stage, Estimator from pyflink.ml.core.param import Param, WithParams class JavaWrapper(ABC): """ Wrapper class for a Java object """ class JavaWithParams(WithParams, JavaWrapper): """ Wrapper class for a Java WithParams. """ PYTHON_PARAM_NAME_TO_JAVA_PARM_NAME = { 'distance_measure': 'distanceMeasure', 'features_col': 'featuresCol', 'global_batch_size': 'globalBatchSize', 'handle_invalid': 'handleInvalid', 'input_cols': 'inputCols', 'label_col': 'labelCol', 'learning_rate': 'learningRate', 'max_iter': 'maxIter', 'multi_class': 'multiClass', 'output_cols': 'outputCols', 'prediction_col': 'predictionCol', 'raw_prediction_col': 'rawPredictionCol', 'reg': 'reg', 'seed': 'seed', 'tol': 'tol', 'weight_col': 'weightCol' } class JavaStage(Stage, JavaWithParams, ABC): """ Wrapper class for a Java Stage. """ class JavaAlgoOperator(AlgoOperator, JavaStage, ABC): """ Wrapper class for a Java AlgoOperator. """ class JavaTransformer(Transformer, JavaAlgoOperator, ABC): """ Wrapper class for a Java Transformer. """ class JavaModel(Model, JavaTransformer, ABC): """ Wrapper class for a Java Model. """ @classmethod @classmethod @abstractmethod class JavaEstimator(Estimator, JavaStage, ABC): """ Wrapper class for a Java Estimator. """ @classmethod def _create_model(cls, java_model) -> Model: """ Creates a model from the input Java model reference. """ pass @classmethod def load(cls, env: StreamExecutionEnvironment, path: str): """ Instantiates a new stage instance based on the data read from the given path. """ java_estimator = _to_java_reference(cls._java_estimator_path()).load( env._j_stream_execution_environment, path) instance = cls() instance._java_obj = java_estimator return instance @classmethod @abstractmethod def _new_java_obj(java_class: str, *java_args): """ Returns a new Java object. """ java_obj = _to_java_reference(java_class) return java_obj(*java_args) def _to_java_tables(*inputs: Table): """ Converts Python Tables to Java tables. """ gateway = get_gateway() return to_jarray(gateway.jvm.org.apache.flink.table.api.Table, [t._j_table for t in inputs])
[ 29113, 29113, 14468, 198, 2, 220, 49962, 284, 262, 24843, 10442, 5693, 357, 1921, 37, 8, 739, 530, 198, 2, 220, 393, 517, 18920, 5964, 11704, 13, 220, 4091, 262, 28536, 2393, 198, 2, 220, 9387, 351, 428, 670, 329, 3224, 1321, 198, ...
2.784242
1,358
""" SleekXMPP: The Sleek XMPP Library Copyright (C) 2011 Nathanael C. Fritz This file is part of SleekXMPP. See the file LICENSE for copying permission. """ import logging from sleekxmpp.stanza import StreamFeatures from sleekxmpp.xmlstream import RestartStream, register_stanza_plugin from sleekxmpp.xmlstream.matcher import * from sleekxmpp.xmlstream.handler import * from sleekxmpp.plugins.base import base_plugin from sleekxmpp.features.feature_starttls import stanza log = logging.getLogger(__name__)
[ 37811, 198, 220, 220, 220, 19498, 988, 55, 7378, 47, 25, 383, 19498, 988, 1395, 7378, 47, 10074, 198, 220, 220, 220, 15069, 357, 34, 8, 2813, 220, 32607, 2271, 417, 327, 13, 45954, 198, 220, 220, 220, 770, 2393, 318, 636, 286, 194...
3.04023
174
from django.core.exceptions import ValidationError
from django.utils.translation import gettext_lazy as _

from phonenumber_field.phonenumber import to_python
[ 6738, 42625, 14208, 13, 7295, 13, 1069, 11755, 1330, 3254, 24765, 12331, 198, 6738, 42625, 14208, 13, 26791, 13, 41519, 1330, 651, 5239, 62, 75, 12582, 355, 4808, 198, 198, 6738, 32896, 268, 4494, 62, 3245, 13, 746, 34481, 4494, 1330, ...
3.555556
45
# MIT License
#
# Copyright (c) 2021 [FacuFalcone]
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.


def factorial(number: int):
    """
    Calculate the factorial of a positive integer
    https://en.wikipedia.org/wiki/Factorial

    >>> import math
    >>> all(factorial(i) == math.factorial(i) for i in range(20))
    True
    >>> factorial(0.1)
    Traceback (most recent call last):
        ...
    ValueError: factorial() only accepts integral values
    >>> factorial(-1)
    Traceback (most recent call last):
        ...
    ValueError: factorial() not defined for negative values
    """
    if not isinstance(number, int):
        raise ValueError("factorial() only accepts integral values")
    if number < 0:
        raise ValueError("factorial() not defined for negative values")
    return 1 if number == 0 or number == 1 else number * factorial(number - 1)


if __name__ == "__main__":
    print(factorial(4))
[ 2, 17168, 13789, 198, 2, 220, 198, 2, 15069, 357, 66, 8, 33448, 685, 47522, 84, 41129, 49180, 60, 198, 2, 220, 198, 2, 2448, 3411, 318, 29376, 7520, 11, 1479, 286, 3877, 11, 284, 597, 1048, 16727, 257, 4866, 198, 2, 286, 428, 37...
3.304274
585
import o3seespy as o3  # for testing only
import pytest


@pytest.mark.skip()


@pytest.mark.skip()
[ 11748, 267, 18, 325, 274, 9078, 355, 267, 18, 220, 1303, 329, 4856, 691, 198, 11748, 12972, 9288, 628, 628, 628, 198, 31, 9078, 9288, 13, 4102, 13, 48267, 3419, 628, 198, 31, 9078, 9288, 13, 4102, 13, 48267, 3419, 628 ]
2.560976
41
import math
import time

t1 = time.time()

N = 28124

prime = [2, 3]
b = 3
while True:
    while True:
        b = b + 2
        i = 0
        t = True
        while (prime[i]*prime[i] < b):
            i = i + 1
            if (b % prime[i] == 0):
                t = False
                break
        if t:
            prime.append(b)
            break
    if b > N:
        break

# to find out the sum of the proper divisors

# find the abundant numbers
abundant = []
for i in range(1, N):
    if spd(i) > i:
        abundant.append(i)

# 28124/2 = 14062
#print(abundant[3489], abundant[3490])
#print("time:", time.time()-t1)

total = 0
p = []
for i in range(1, N):
    p.append(True)

l = len(abundant)
for i in range(0, 3490):
    for j in range(i, l):
        n = abundant[i] + abundant[j]
        if n < N:
            p[n-1] = False

for i in range(1, N):
    if p[i-1]:
        total += i

print(total)
print("time:", time.time()-t1)
[ 11748, 10688, 198, 11748, 640, 198, 198, 83, 16, 796, 640, 13, 2435, 3419, 198, 198, 45, 796, 2579, 17464, 198, 198, 35505, 796, 685, 17, 11, 18, 60, 198, 65, 796, 513, 198, 198, 4514, 6407, 25, 628, 220, 220, 220, 220, 198, 220...
1.685
600
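spd() is used above but not defined in this snippet; the comment says it computes the sum of proper divisors. A minimal sketch under that assumption.

import math

def spd(n):
    """Sum of proper divisors of n (assumed implementation)."""
    if n <= 1:
        return 0
    total = 1  # 1 divides every n > 1
    for d in range(2, math.isqrt(n) + 1):
        if n % d == 0:
            total += d
            other = n // d
            if other != d:  # avoid double-counting a square root divisor
                total += other
    return total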
#!/usr/bin/env python
# Copyright (c) 2019  Diamond Key Security, NFP
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
# - Redistributions of source code must retain the above copyright notice,
#   this list of conditions and the following disclaimer.
#
# - Redistributions in binary form must reproduce the above copyright
#   notice, this list of conditions and the following disclaimer in the
#   documentation and/or other materials provided with the distribution.
#
# - Neither the name of the NORDUnet nor the names of its contributors may
#   be used to endorse or promote products derived from this software
#   without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
# PARTICULAR PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT
# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
# TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

import os


class SafeShutdown(object):
    """Class to ensure the HSM shuts down correctly"""
[ 2, 48443, 14629, 14, 8800, 14, 24330, 21015, 198, 2, 15069, 357, 66, 8, 13130, 220, 13566, 7383, 4765, 11, 399, 5837, 198, 2, 1439, 2489, 10395, 13, 198, 2, 198, 2, 2297, 396, 3890, 290, 779, 287, 2723, 290, 13934, 5107, 11, 351, ...
3.561028
467
from pathlib import Path import json import typing as t import argparse import tokenizers.decoders from tokenizers import Tokenizer from tokenizers.pre_tokenizers import Whitespace from tokenizers.trainers import BpeTrainer from tokenizers.models import BPE from tokenizers.decoders import BPEDecoder from seq2seq.data.dictionary import Dictionary from preprocess import make_binary_dataset if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument("--tokenizer-dir", help="Path to directory were tokenizer model is stored and or should be created", type=Path, default=None, ) parser.add_argument("--data-dir", help="Path to directory containing the preprocessed data files", type=Path, default=None, ) parser.add_argument("--output-dir", help="Path to directory were prepared data files should be outputted", type=Path, default=None, ) parser.add_argument("--data-prefix", help="Prefix attached to file name to marke bpe", type=str, default="bpe_" ) parser.add_argument("--preprocess", action='store_true', help="If activated, script preprocesses given data files and creates bpe files", ) parser.add_argument("--postprocess", action='store_true', help="If activated, script takes bpe encoded input and outputs decoded version", ) parser.add_argument("--postprocess-file", help="File to be decoded with bpe model", type=Path, default=None, ) parser.add_argument("--bpe-suffix", type=str, help="Suffix that marks the end of a word in the bpe model", default="<w>" ) parser.add_argument("--vocabsize", type=int, help="Size of vacabulary of bpe model", default=3997 ) # paths to data args = parser.parse_args() if args.tokenizer_dir: tokenizer_dir = args.tokenizer_dir else: tokenizer_dir = Path(__file__).parent if args.data_dir: preprocessed_path = args.data_dir else: preprocessed_path = (tokenizer_dir.parent / "data" / "en-fr" / "preprocessed") if args.output_dir: prepared_path = args.output_dir else: prepared_path = (tokenizer_dir.parent / "data" / "en-fr" / "prepared") # normal data if args.preprocess: tokenizer = None for file in tokenizer_dir.iterdir(): if file.name == "tokenizer.json": tokenizer = load_bpe_tokenizer(str(file)) break if not tokenizer: tokenizer = train_bpe_tokenizer( inputfile_trt=[str(preprocessed_path / "train.en")], inputfile_src=[str(preprocessed_path / "train.fr")], outputpath=tokenizer_dir / "tokenizer.json", vocab_size=args.vocabsize, suffix=args.bpe_suffix, ) dict_files = create_dict( voca_json=(tokenizer_dir / "vocab.json"), dict_paths=[(prepared_path / "dict.en"), (prepared_path / "dict.fr")], file_prefix=args.data_prefix ) dictionary = Dictionary.load(dict_files[0]) for file in list(preprocessed_path.iterdir()): out = preprocessed_path / f"{args.data_prefix}{file.name}" tokenize_bpe(tokenizer=tokenizer, infile=file, outfile=out) out_prepared = prepared_path / out.name make_binary_dataset( input_file=out, output_file=out_prepared, dictionary=dictionary, ) if args.postprocess: if args.postprocess_file: decoder = BPEDecoder(suffix=args.bpe_suffix) decode_file( infile=args.postprocess_file, outfile=args.postprocess_file.parent / f"decoded_{args.postprocess_file.name}.txt", decoder=decoder ) else: raise ValueError("No file to decode was specified")
[ 6738, 3108, 8019, 1330, 10644, 198, 11748, 33918, 198, 11748, 19720, 355, 256, 198, 11748, 1822, 29572, 198, 198, 11748, 11241, 11341, 13, 12501, 375, 364, 198, 6738, 11241, 11341, 1330, 29130, 7509, 198, 6738, 11241, 11341, 13, 3866, 62,...
1.937474
2,399
#!/usr/bin/python """ (C) Copyright 2018-2021 Intel Corporation. SPDX-License-Identifier: BSD-2-Clause-Patent """ import os from command_utils_base import CommandFailure from test_utils_container import TestContainer from pydaos.raw import str_to_c_uuid, DaosContainer, DaosObj, IORequest from ior_test_base import IorTestBase from mdtest_test_base import MdtestBase from data_mover_utils import DcpCommand, DsyncCommand, FsCopy, ContClone from data_mover_utils import DserializeCommand, DdeserializeCommand from data_mover_utils import format_daos_path, uuid_from_obj from os.path import join import uuid import re import ctypes from general_utils import create_string_buffer class DataMoverTestBase(IorTestBase, MdtestBase): # pylint: disable=too-many-ancestors """Base DataMover test class. Sample Use Case: # Create test file run_ior_with_params("DAOS", "/testFile, pool1, cont1, flags="-w -K") # Set dcp as the tool to use self.set_tool("DCP") # Copy from DAOS to POSIX run_datamover( "some test description", "DAOS", "/testFile", pool1, cont1, "POSIX", "/some/posix/path/testFile") # Verify destination file run_ior_with_params("POSIX", "/some/posix/path/testFile", flags="-r -R") :avocado: recursive """ # The valid parameter types for setting params. PARAM_TYPES = ("POSIX", "DAOS_UUID", "DAOS_UNS") # The valid datamover tools that can be used TOOLS = ( "DCP", # mpifileutils dcp "DSYNC", # mpifileutils dsync "DSERIAL", # mpifileutils daos-serialize + daos-deserialize "FS_COPY", # daos filesystem copy "CONT_CLONE" # daos container clone ) def __init__(self, *args, **kwargs): """Initialize a DataMoverTestBase object.""" super().__init__(*args, **kwargs) self.tool = None self.api = None self.daos_cmd = None self.dcp_cmd = None self.dsync_cmd = None self.dserialize_cmd = None self.ddeserialize_cmd = None self.fs_copy_cmd = None self.cont_clone_cmd = None self.ior_processes = None self.mdtest_processes = None self.dcp_processes = None self.dsync_processes = None self.dserialize_processes = None self.ddeserialize_processes = None self.pool = [] self.container = [] self.uuids = [] self.dfuse_hosts = None self.num_run_datamover = 0 # Number of times run_datamover was called self.job_manager = None self.parent = None # Temp directory for serialize/deserialize self.serial_tmp_dir = self.tmp self.preserve_props_path = None # List of local test paths to create and remove self.posix_local_test_paths = [] # List of shared test paths to create and remove self.posix_shared_test_paths = [] # paths to unmount in teardown self.mounted_posix_test_paths = [] # List of daos test paths to keep track of self.daos_test_paths = [] def setUp(self): """Set up each test case.""" # Start the servers and agents super().setUp() self.dfuse_hosts = self.agent_managers[0].hosts # initialize daos_cmd self.daos_cmd = self.get_daos_command() # Get the processes for each explicitly # This is needed because both IorTestBase and MdtestBase # define self.processes self.ior_processes = self.params.get( "np", '/run/ior/client_processes/*') self.mdtest_processes = self.params.get( "np", '/run/mdtest/client_processes/*') self.dcp_processes = self.params.get( "np", "/run/dcp/client_processes/*", 1) self.dsync_processes = self.params.get( "np", "/run/dsync/client_processes/*", 1) self.dserialize_processes = self.params.get( "np", "/run/dserialize/client_processes/*", 1) self.ddeserialize_processes = self.params.get( "np", "/run/ddeserialize/client_processes/*", 1) self.parent = self.params.get("parent", "/run/datamover/*", self.tmp) tool = 
self.params.get("tool", "/run/datamover/*") if tool: self.set_tool(tool) def pre_tear_down(self): """Tear down steps to run before tearDown(). Returns: list: a list of error strings to report at the end of tearDown(). """ # doesn't append to error list because it reports an error if all # processes completed successfully (nothing to stop), but this call is # necessary in the case that mpi processes are ran across multiple nodes # and a timeout occurs. If this happens then cleanup on shared posix # directories causes errors (because an MPI process might still have it open) error_list = [] if self.job_manager: self.job_manager.kill() # cleanup mounted paths if self.mounted_posix_test_paths: path_list = self._get_posix_test_path_list(path_list=self.mounted_posix_test_paths) for item in path_list: # need to remove contents before umount rm_cmd = "rm -rf {}/*".format(item) try: self._execute_command(rm_cmd) except CommandFailure as error: error_list.append("Error removing directory contents: {}".format(error)) umount_cmd = "sudo umount -f {}".format(item) try: self._execute_command(umount_cmd) except CommandFailure as error: error_list.append("Error umounting posix test directory: {}".format(error)) # cleanup local paths if self.posix_local_test_paths: command = "rm -rf {}".format(self._get_posix_test_path_string()) try: self._execute_command(command) except CommandFailure as error: error_list.append("Error removing created directories: {}".format(error)) # cleanup shared paths (only runs on one node in job) if self.posix_shared_test_paths: shared_path_strs = self._get_posix_test_path_string(path=self.posix_shared_test_paths) command = "rm -rf {}".format(shared_path_strs) try: # only call rm on one client since this is cleaning up shared dir self._execute_command(command, hosts=self.hostlist_clients[0:1]) except CommandFailure as error: error_list.append( "Error removing created directories: {}".format(error)) return error_list def set_api(self, api): """Set the api. Args: api (str): the api to use. """ self.api = api def set_tool(self, tool): """Set the copy tool. Converts to upper-case and fails if the tool is not valid. Args: tool (str): the tool to use. Must be in self.TOOLS """ _tool = str(tool).upper() if _tool in self.TOOLS: self.log.info("DataMover tool = %s", _tool) self.tool = _tool else: self.fail("Invalid tool: {}".format(_tool)) def _get_posix_test_path_list(self, path_list=None): """Get a list of quoted posix test path strings. Returns: list: a list of quoted posix test path strings """ if path_list is None: path_list = self.posix_local_test_paths return ["'{}'".format(item) for item in path_list] def _get_posix_test_path_string(self, path=None): """Get a string of all of the quoted posix test path strings. Returns: str: a string of all of the quoted posix test path strings """ return " ".join(self._get_posix_test_path_list(path_list=path)) def new_posix_test_path(self, shared=False, create=True, parent=None, mount_dir=False): """Generate a new, unique posix path. Args: shared (bool): Whether to create a directory shared across nodes or local. Defaults to False. create (bool): Whether to create the directory. Defaults to True. mount_dir (bool): Whether or not posix directory will be manually mounted in tmpfs. parent (str, optional): The parent directory to create the path in. Defaults to self.parent, which has a default of self.tmp. Returns: str: the posix path. 
""" # make dirname unique to datamover test method = self.get_test_info()["method"] dir_name = "{}{}".format(method, len(self.posix_local_test_paths)) if parent: path = join(parent, dir_name) else: path = join(self.parent, dir_name) # Add to the list of posix paths if shared: self.posix_shared_test_paths.append(path) else: self.posix_local_test_paths.append(path) if create: # Create the directory cmd = "mkdir -p '{}'".format(path) self.execute_cmd(cmd) # mount small tmpfs filesystem on posix path, using size required sudo # add mount_dir to mounted list for use when umounting if mount_dir: self.mounted_posix_test_paths.append(path) self.execute_cmd("sudo mount -t tmpfs none '{}' -o size=128M".format(path)) return path def new_daos_test_path(self, create=True, cont=None, parent="/"): """Create a new, unique daos container path. Args: create (bool, optional): Whether to create the directory. Defaults to True. cont (TestContainer, optional): The container to create the path within. This container should have a UNS path in DFUSE. parent (str, optional): The parent directory relative to the container root. Defaults to "/". Returns: str: the path relative to the root of the container. """ dir_name = "daos_test{}".format(len(self.daos_test_paths)) path = join(parent, dir_name) # Add to the list of daos paths self.daos_test_paths.append(path) if create: if not cont or not cont.path: self.fail("Container path required to create directory.") # Create the directory relative to the container path cmd = "mkdir -p '{}'".format(cont.path.value + path) self.execute_cmd(cmd) return path def _validate_param_type(self, param_type): """Validates the param_type. It converts param_types to upper-case and handles shorthand types. Args: param_type (str): The param_type to be validated. Returns: str: A valid param_type """ _type = str(param_type).upper() if _type == "DAOS": return "DAOS_UUID" if _type in self.PARAM_TYPES: return _type self.fail("Invalid param_type: {}".format(_type)) return None def create_pool(self): """Create a TestPool object and adds to self.pool. Returns: TestPool: the created pool """ pool = self.get_pool(connect=False) # Save the pool and uuid self.pool.append(pool) self.uuids.append(str(pool.uuid)) return pool def create_cont(self, pool, use_dfuse_uns=False, dfuse_uns_pool=None, dfuse_uns_cont=None, cont_type=None, oclass=None): # pylint: disable=arguments-differ """Create a TestContainer object. Args: pool (TestPool): pool to create the container in. use_dfuse_uns (bool, optional): whether to create a UNS path in the dfuse mount. Default is False. dfuse_uns_pool (TestPool, optional): pool in the dfuse mount for which to create a UNS path. Default assumes dfuse is running for a specific pool. dfuse_uns_cont (TestContainer, optional): container in the dfuse mount for which to create a UNS path. Default assumes dfuse is running for a specific container. cont_type (str, optional): the container type. Returns: TestContainer: the container object Note about uns path: These are only created within a dfuse mount. The full UNS path will be created as: <dfuse.mount_dir>/[pool_uuid]/[cont_uuid]/<dir_name> dfuse_uns_pool and dfuse_uns_cont should only be supplied when dfuse was not started for a specific pool/container. 
""" container = self.get_container(pool, create=False) if use_dfuse_uns: path = str(self.dfuse.mount_dir.value) if dfuse_uns_pool: path = join(path, dfuse_uns_pool.uuid) if dfuse_uns_cont: path = join(path, dfuse_uns_cont.uuid) path = join(path, "uns{}".format(str(len(self.container)))) container.path.update(path) if cont_type: container.type.update(cont_type) if oclass: container.oclass.update(oclass) # Create container container.create() # Save container and uuid self.container.append(container) self.uuids.append(str(container.uuid)) return container def get_cont(self, pool, cont_uuid): """Get an existing container. Args: pool (TestPool): pool to open the container in. cont_uuid (str): container uuid. Returns: TestContainer: the container object """ # Open the container # Create a TestContainer instance container = TestContainer(pool, daos_command=self.get_daos_command()) # Create the underlying DaosContainer instance container.container = DaosContainer(pool.context) container.container.uuid = str_to_c_uuid(cont_uuid) container.uuid = container.container.get_uuid_str() container.container.poh = pool.pool.handle # Save container and uuid self.container.append(container) self.uuids.append(str(container.uuid)) return container def gen_uuid(self): """Generate a unique uuid. Returns: str: a unique uuid """ new_uuid = str(uuid.uuid4()) while new_uuid in self.uuids: new_uuid = str(uuid.uuid4()) return new_uuid def parse_create_cont_uuid(self, output): """Parse a uuid from some output. Format: Successfully created container (.*-.*-.*-.*-.*) Args: output (str): The string to parse for the uuid. Returns: str: The parsed uuid. """ uuid_search = re.search( r"Successfully created container (.*-.*-.*-.*-.*)", output) if not uuid_search: self.fail("Failed to parse container uuid") return uuid_search.group(1) def dataset_gen(self, cont, num_objs, num_dkeys, num_akeys_single, num_akeys_array, akey_sizes, akey_extents): """Generate a dataset with some number of objects, dkeys, and akeys. Expects the container to be created with the API control method. Args: cont (TestContainer): the container. num_objs (int): number of objects to create in the container. num_dkeys (int): number of dkeys to create per object. num_akeys_single (int): number of DAOS_IOD_SINGLE akeys per dkey. num_akeys_array (int): number of DAOS_IOD_ARRAY akeys per dkey. akey_sizes (list): varying akey sizes to iterate. akey_extents (list): varying number of akey extents to iterate. Returns: list: a list of DaosObj created. 
""" self.log.info("Creating dataset in %s/%s", str(cont.pool.uuid), str(cont.uuid)) cont.open() obj_list = [] for obj_idx in range(num_objs): # Open the obj obj = DaosObj(cont.pool.context, cont.container) obj_list.append(obj) obj.create(rank=obj_idx, objcls=2) obj.open() ioreq = IORequest(cont.pool.context, cont.container, obj) for dkey_idx in range(num_dkeys): dkey = "dkey {}".format(dkey_idx) c_dkey = create_string_buffer(dkey) for akey_idx in range(num_akeys_single): # Round-robin to get the size of data and # arbitrarily use a number 0-9 to fill data akey_size_idx = akey_idx % len(akey_sizes) data_size = akey_sizes[akey_size_idx] data_val = str(akey_idx % 10) data = data_size * data_val akey = "akey single {}".format(akey_idx) c_akey = create_string_buffer(akey) c_data = create_string_buffer(data) c_size = ctypes.c_size_t(ctypes.sizeof(c_data)) ioreq.single_insert(c_dkey, c_akey, c_data, c_size) for akey_idx in range(num_akeys_array): # Round-robin to get the size of data and # the number of extents, and # arbitrarily use a number 0-9 to fill data akey_size_idx = akey_idx % len(akey_sizes) data_size = akey_sizes[akey_size_idx] akey_extent_idx = akey_idx % len(akey_extents) num_extents = akey_extents[akey_extent_idx] akey = "akey array {}".format(akey_idx) c_akey = create_string_buffer(akey) c_data = [] for data_idx in range(num_extents): data_val = str(data_idx % 10) data = data_size * data_val c_data.append([ create_string_buffer(data), data_size]) ioreq.insert_array(c_dkey, c_akey, c_data) obj.close() cont.close() return obj_list # pylint: disable=too-many-locals def dataset_verify(self, obj_list, cont, num_objs, num_dkeys, num_akeys_single, num_akeys_array, akey_sizes, akey_extents): """Verify a dataset generated with dataset_gen. Args: obj_list (list): obj_list returned from dataset_gen. cont (TestContainer): the container. num_objs (int): number of objects created in the container. num_dkeys (int): number of dkeys created per object. num_akeys_single (int): number of DAOS_IOD_SINGLE akeys per dkey. num_akeys_array (int): number of DAOS_IOD_ARRAY akeys per dkey. akey_sizes (list): varying akey sizes to iterate. akey_extents (list): varying number of akey extents to iterate. 
""" self.log.info("Verifying dataset in %s/%s", str(cont.pool.uuid), str(cont.uuid)) cont.open() for obj_idx in range(num_objs): # Open the obj c_oid = obj_list[obj_idx].c_oid obj = DaosObj(cont.pool.context, cont.container, c_oid=c_oid) obj.open() ioreq = IORequest(cont.pool.context, cont.container, obj) for dkey_idx in range(num_dkeys): dkey = "dkey {}".format(dkey_idx) c_dkey = create_string_buffer(dkey) for akey_idx in range(num_akeys_single): # Round-robin to get the size of data and # arbitrarily use a number 0-9 to fill data akey_size_idx = akey_idx % len(akey_sizes) data_size = akey_sizes[akey_size_idx] data_val = str(akey_idx % 10) data = data_size * data_val akey = "akey single {}".format(akey_idx) c_akey = create_string_buffer(akey) c_data = ioreq.single_fetch(c_dkey, c_akey, data_size + 1) actual_data = str(c_data.value.decode()) if actual_data != data: self.log.info("Expected:\n%s\nBut got:\n%s", data[:100] + "...", actual_data[:100] + "...") self.log.info( "For:\nobj: %s.%s\ndkey: %s\nakey: %s", str(obj.c_oid.hi), str(obj.c_oid.lo), dkey, akey) self.fail("Single value verification failed.") for akey_idx in range(num_akeys_array): # Round-robin to get the size of data and # the number of extents, and # arbitrarily use a number 0-9 to fill data akey_size_idx = akey_idx % len(akey_sizes) data_size = akey_sizes[akey_size_idx] akey_extent_idx = akey_idx % len(akey_extents) num_extents = akey_extents[akey_extent_idx] akey = "akey array {}".format(akey_idx) c_akey = create_string_buffer(akey) c_num_extents = ctypes.c_uint(num_extents) c_data_size = ctypes.c_size_t(data_size) actual_data = ioreq.fetch_array(c_dkey, c_akey, c_num_extents, c_data_size) for data_idx in range(num_extents): data_val = str(data_idx % 10) data = data_size * data_val actual_idx = str(actual_data[data_idx].decode()) if data != actual_idx: self.log.info("Expected:\n%s\nBut got:\n%s", data[:100] + "...", actual_idx + "...") self.log.info( "For:\nobj: %s.%s\ndkey: %s\nakey: %s", str(obj.c_oid.hi), str(obj.c_oid.lo), dkey, akey) self.fail("Array verification failed.") obj.close() cont.close() def set_datamover_params(self, src_type=None, src_path=None, src_pool=None, src_cont=None, dst_type=None, dst_path=None, dst_pool=None, dst_cont=None): """Set the params for self.tool. Called by run_datamover if params are passed. Args: src_type (str): how to interpret the src params. Must be in PARAM_TYPES. src_path (str): source cont path or posix path. src_pool (TestPool, optional): the source pool or uuid. src_cont (TestContainer, optional): the source cont or uuid. dst_type (str): how to interpret the dst params. Must be in PARAM_TYPES. dst_path (str): destination cont path or posix path. dst_pool (TestPool, optional): the destination pool or uuid. dst_cont (TestContainer, optional): the destination cont or uuid. 
""" if self.tool == "DCP": self._set_dcp_params(src_type, src_path, src_pool, src_cont, dst_type, dst_path, dst_pool, dst_cont) elif self.tool == "DSYNC": self._set_dsync_params(src_type, src_path, src_pool, src_cont, dst_type, dst_path, dst_pool, dst_cont) elif self.tool == "DSERIAL": assert src_type in (None, "DAOS", "DAOS_UUID") #nosec assert src_path is None #nosec assert dst_type in (None, "DAOS", "DAOS_UUID") #nosec assert dst_path is None #nosec assert dst_cont is None #nosec self._set_dserial_params(src_pool, src_cont, dst_pool) elif self.tool == "FS_COPY": self._set_fs_copy_params(src_type, src_path, src_pool, src_cont, dst_type, dst_path, dst_pool, dst_cont) elif self.tool == "CONT_CLONE": assert src_type in (None, "DAOS", "DAOS_UUID") # nosec assert src_path is None # nosec assert dst_type in (None, "DAOS", "DAOS_UUID") # nosec assert dst_path is None # nosec self._set_cont_clone_params(src_pool, src_cont, dst_pool, dst_cont) else: self.fail("Invalid tool: {}".format(str(self.tool))) def _set_dcp_params(self, src_type=None, src_path=None, src_pool=None, src_cont=None, dst_type=None, dst_path=None, dst_pool=None, dst_cont=None): """Set the params for dcp. This is a wrapper for DcpCommand.set_params. When both src_type and dst_type are DAOS_UNS, a prefix will only work for either the src or the dst, but not both. Args: src_type (str): how to interpret the src params. Must be in PARAM_TYPES. src_path (str): source cont path or posix path. src_pool (TestPool, optional): the source pool or uuid. src_cont (TestContainer, optional): the source cont or uuid. dst_type (str): how to interpret the dst params. Must be in PARAM_TYPES. dst_path (str): destination cont path or posix path. dst_pool (TestPool, optional): the destination pool or uuid. dst_cont (TestContainer, optional): the destination cont or uuid. """ if src_type is not None: src_type = self._validate_param_type(src_type) if dst_type is not None: dst_type = self._validate_param_type(dst_type) if not src_type and (src_path or src_pool or src_cont): self.fail("src params require src_type") if not dst_type and (dst_path or dst_pool or dst_cont): self.fail("dst params require dst_type") # First, initialize a new dcp command self.dcp_cmd = DcpCommand(self.hostlist_clients, self.workdir) self.dcp_cmd.get_params(self) if self.api: self.dcp_cmd.set_params(daos_api=self.api) # Set the source params if src_type == "POSIX": self.dcp_cmd.set_params( src_path=str(src_path)) elif src_type == "DAOS_UUID": self.dcp_cmd.set_params( src_path=format_daos_path(src_pool, src_cont, src_path)) elif src_type == "DAOS_UNS": if src_cont: if src_path == "/": self.dcp_cmd.set_params( src_path=src_cont.path.value) else: self.dcp_cmd.set_params( daos_prefix=src_cont.path.value, src_path=src_cont.path.value + src_path) # Set the destination params if dst_type == "POSIX": self.dcp_cmd.set_params( dst_path=str(dst_path)) elif dst_type == "DAOS_UUID": self.dcp_cmd.set_params( dst_path=format_daos_path(dst_pool, dst_cont, dst_path)) elif dst_type == "DAOS_UNS": if dst_cont: if dst_path == "/": self.dcp_cmd.set_params( dst_path=dst_cont.path.value) else: self.dcp_cmd.set_params( daos_prefix=dst_cont.path.value, dst_path=dst_cont.path.value + dst_path) def _set_dsync_params(self, src_type=None, src_path=None, src_pool=None, src_cont=None, dst_type=None, dst_path=None, dst_pool=None, dst_cont=None): """Set the params for dsync. This is a wrapper for DsyncCommand.set_params. 
        When both src_type and dst_type are DAOS_UNS,
        a prefix will only work for either the src or the dst,
        but not both.

        Args:
            src_type (str): how to interpret the src params.
                Must be in PARAM_TYPES.
            src_path (str): source cont path or posix path.
            src_pool (TestPool, optional): the source pool or uuid.
            src_cont (TestContainer, optional): the source cont or uuid.
            dst_type (str): how to interpret the dst params.
                Must be in PARAM_TYPES.
            dst_path (str): destination cont path or posix path.
            dst_pool (TestPool, optional): the destination pool or uuid.
            dst_cont (TestContainer, optional): the destination cont or uuid.

        """
        # First, initialize a new dsync command
        self.dsync_cmd = DsyncCommand(self.hostlist_clients, self.workdir)
        self.dsync_cmd.get_params(self)
        if self.api:
            self.dsync_cmd.set_params(daos_api=self.api)

        # Set the source params
        if src_type == "POSIX":
            self.dsync_cmd.set_params(
                src_path=str(src_path))
        elif src_type == "DAOS_UUID":
            self.dsync_cmd.set_params(
                src_path=format_daos_path(src_pool, src_cont, src_path))
        elif src_type == "DAOS_UNS":
            if src_cont:
                if src_path == "/":
                    self.dsync_cmd.set_params(
                        src_path=src_cont.path.value)
                else:
                    self.dsync_cmd.set_params(
                        daos_prefix=src_cont.path.value,
                        src_path=src_cont.path.value + src_path)

        # Set the destination params
        if dst_type == "POSIX":
            self.dsync_cmd.set_params(
                dst_path=str(dst_path))
        elif dst_type == "DAOS_UUID":
            self.dsync_cmd.set_params(
                dst_path=format_daos_path(dst_pool, dst_cont, dst_path))
        elif dst_type == "DAOS_UNS":
            if dst_cont:
                if dst_path == "/":
                    self.dsync_cmd.set_params(
                        dst_path=dst_cont.path.value)
                else:
                    self.dsync_cmd.set_params(
                        daos_prefix=dst_cont.path.value,
                        dst_path=dst_cont.path.value + dst_path)

    def _set_fs_copy_params(self,
                            src_type=None, src_path=None,
                            src_pool=None, src_cont=None,
                            dst_type=None, dst_path=None,
                            dst_pool=None, dst_cont=None):
        """Set the params for fs copy.

        daos fs copy does not support a "prefix" on UNS paths,
        so the param type for DAOS_UNS must have the path "/".

        Args:
            src_type (str): how to interpret the src params.
                Must be in PARAM_TYPES.
            src_path (str): source cont path or posix path.
            src_pool (TestPool, optional): the source pool or uuid.
            src_cont (TestContainer, optional): the source cont or uuid.
            dst_type (str): how to interpret the dst params.
                Must be in PARAM_TYPES.
            dst_path (str): destination cont path or posix path.
            dst_pool (TestPool, optional): the destination pool or uuid.
            dst_cont (TestContainer, optional): the destination cont or uuid.
""" if src_type is not None: src_type = self._validate_param_type(src_type) if dst_type is not None: dst_type = self._validate_param_type(dst_type) if not src_type and (src_path or src_pool or src_cont): self.fail("src params require src_type") if not dst_type and (dst_path or dst_pool or dst_cont): self.fail("dst params require dst_type") # First, initialize a new fs copy command self.fs_copy_cmd = FsCopy(self.daos_cmd, self.log) # set preserve-props path if it was used in test case if self.preserve_props_path: self.fs_copy_cmd.set_fs_copy_params(preserve_props=self.preserve_props_path) # Set the source params if src_type == "POSIX": self.fs_copy_cmd.set_fs_copy_params( src=str(src_path)) elif src_type == "DAOS_UUID": self.fs_copy_cmd.set_fs_copy_params( src=format_daos_path(src_pool, src_cont, src_path)) elif src_type == "DAOS_UNS": path = "" if src_cont: if src_path == "/": path = str(src_cont.path) else: self.fail("daos fs copy does not support a prefix") self.fs_copy_cmd.set_fs_copy_params( src=path) # Set the destination params if dst_type == "POSIX": self.fs_copy_cmd.set_fs_copy_params( dst=str(dst_path)) elif dst_type == "DAOS_UUID": self.fs_copy_cmd.set_fs_copy_params( dst=format_daos_path(dst_pool, dst_cont, dst_path)) elif dst_type == "DAOS_UNS": path = "" if dst_cont: if dst_path == "/": path = str(dst_cont.path) else: self.fail("daos fs copy does not support a prefix") self.fs_copy_cmd.set_fs_copy_params( dst=path) def _set_cont_clone_params(self, src_pool=None, src_cont=None, dst_pool=None, dst_cont=None): """Set the params for daos cont clone. This only supports DAOS -> DAOS copies. Args: src_pool (TestPool, optional): the source pool or uuid. src_cont (TestContainer, optional): the source cont or uuid. dst_pool (TestPool, optional): the destination pool or uuid. dst_cont (TestContainer, optional): the destination cont or uuid. """ # First, initialize a new cont copy command self.cont_clone_cmd = ContClone(self.daos_cmd, self.log) # Set the source params if src_pool or src_cont: self.cont_clone_cmd.set_cont_clone_params( src=format_daos_path(src_pool, src_cont)) # Set the destination params if dst_pool or dst_cont: self.cont_clone_cmd.set_cont_clone_params( dst=format_daos_path(dst_pool, dst_cont)) def _set_dserial_params(self, src_pool=None, src_cont=None, dst_pool=None): """Set the params for daos-serialize and daos-deserialize. This uses a temporary POSIX path as the intermediate step between serializing and deserializing. Args: src_pool (TestPool, optional): the source pool or uuid. src_cont (TestContainer, optional): the source cont or uuid. dst_pool (TestPool, optional): the destination pool or uuid. """ # First initialize new commands self.dserialize_cmd = DserializeCommand(self.hostlist_clients, self.workdir) self.ddeserialize_cmd = DdeserializeCommand(self.hostlist_clients, self.workdir) # Get an intermediate path for HDF5 file(s) tmp_path = self.new_posix_test_path(create=False, parent=self.serial_tmp_dir) # Set the source params for dserialize if src_pool or src_cont: self.dserialize_cmd.set_params( src_path=format_daos_path(src_pool, src_cont), output_path=tmp_path) # Set the destination params for ddeserialize if dst_pool: self.ddeserialize_cmd.set_params( src_path=tmp_path, pool=uuid_from_obj(dst_pool)) def set_ior_params(self, param_type, path, pool=None, cont=None, path_suffix=None, flags=None, display=True): """Set the ior params. Args: param_type (str): how to interpret the params. path (str): cont path or posix path. 
pool (TestPool, optional): the pool object cont (TestContainer, optional): the cont or uuid. path_suffix (str, optional): suffix to append to the path. E.g. path="/some/path", path_suffix="testFile" flags (str, optional): ior_cmd flags to set display (bool, optional): print updated params. Defaults to True. """ param_type = self._validate_param_type(param_type) # Reset params self.ior_cmd.api.update(None) self.ior_cmd.test_file.update(None) self.ior_cmd.dfs_pool.update(None) self.ior_cmd.dfs_cont.update(None) self.ior_cmd.dfs_group.update(None) if flags: self.ior_cmd.flags.update(flags, "flags" if display else None) display_api = "api" if display else None display_test_file = "test_file" if display else None # Allow cont to be either the container or the uuid cont_uuid = uuid_from_obj(cont) # Optionally append suffix if path_suffix: if path_suffix[0] == "/": path_suffix = path_suffix[1:] path = join(path, path_suffix) if param_type == "POSIX": self.ior_cmd.api.update("POSIX", display_api) self.ior_cmd.test_file.update(path, display_test_file) elif param_type in ("DAOS_UUID", "DAOS_UNS"): self.ior_cmd.api.update("DFS", display_api) self.ior_cmd.test_file.update(path, display_test_file) if pool and cont_uuid: self.ior_cmd.set_daos_params(self.server_group, pool, cont_uuid) elif pool: self.ior_cmd.set_daos_params(self.server_group, pool, None) def run_ior_with_params(self, param_type, path, pool=None, cont=None, path_suffix=None, flags=None, display=True, display_space=False): """Set the ior params and run ior. Args: param_type: see set_ior_params path: see set_ior_params pool: see set_ior_params cont: see set_ior_params path_suffix: see set_ior_params flags: see set_ior_params display (bool, optional): print updated params. Defaults to True. display_space (bool, optional): whether to display the pool space. Defaults to False. """ self.set_ior_params(param_type, path, pool, cont, path_suffix, flags, display) self.run_ior(self.get_ior_job_manager_command(), self.ior_processes, display_space=(display_space and bool(pool)), pool=pool) def set_mdtest_params(self, param_type, path, pool=None, cont=None, flags=None, display=True): """Set the mdtest params. Args: param_type (str): how to interpret the params. path (str): cont path or posix path. pool (TestPool, optional): the pool object. cont (TestContainer, optional): the cont or uuid. flags (str, optional): mdtest_cmd flags to set display (bool, optional): print updated params. Defaults to True. 
""" param_type = self._validate_param_type(param_type) # Reset params self.mdtest_cmd.api.update(None) self.mdtest_cmd.test_dir.update(None) self.mdtest_cmd.dfs_pool_uuid.update(None) self.mdtest_cmd.dfs_cont.update(None) self.mdtest_cmd.dfs_group.update(None) if flags: self.mdtest_cmd.flags.update(flags, "flags" if display else None) display_api = "api" if display else None display_test_dir = "test_dir" if display else None # Allow cont to be either the container or the uuid cont_uuid = uuid_from_obj(cont) if param_type == "POSIX": self.mdtest_cmd.api.update("POSIX", display_api) self.mdtest_cmd.test_dir.update(path, display_test_dir) elif param_type in ("DAOS_UUID", "DAOS_UNS"): self.mdtest_cmd.api.update("DFS", display_api) self.mdtest_cmd.test_dir.update(path, display_test_dir) if pool and cont_uuid: self.mdtest_cmd.set_daos_params(self.server_group, pool, cont_uuid) elif pool: self.mdtest_cmd.set_daos_params(self.server_group, pool, None) def run_mdtest_with_params(self, param_type, path, pool=None, cont=None, flags=None, display=True): """Set the mdtest params and run mdtest. Args: param_type: see set_ior_params path: see set_mdtest_params pool: see set_mdtest_params cont: see set_mdtest_params flags see set_mdtest_params display (bool, optional): print updated params. Defaults to True. """ self.set_mdtest_params(param_type, path, pool, cont, flags, display) self.run_mdtest(self.get_mdtest_job_manager_command(self.manager), self.mdtest_processes, display_space=(bool(pool)), pool=pool) def run_diff(self, src, dst, deref=False): """Run linux diff command. Args: src (str): the source path dst (str): the destination path deref (bool, optional): Whether to dereference symlinks. Defaults to False. """ deref_str = "" if not deref: deref_str = "--no-dereference" cmd = "diff -r {} '{}' '{}'".format( deref_str, src, dst) self.execute_cmd(cmd) # pylint: disable=too-many-arguments def run_datamover(self, test_desc=None, src_type=None, src_path=None, src_pool=None, src_cont=None, dst_type=None, dst_path=None, dst_pool=None, dst_cont=None, expected_rc=0, expected_output=None, expected_err=None, processes=None): """Run the corresponding command specified by self.tool. Calls set_datamover_params if and only if any are passed in. Args: test_desc (str, optional): description to print before running src_type: see set_datamover_params src_path: see set_datamover_params src_pool: see set_datamover_params src_cont: see set_datamover_params dst_type: see set_datamover_params dst_path: see set_datamover_params dst_cont: see set_datamover_params expected_rc (int, optional): rc expected to be returned expected_output (list, optional): substrings expected in stdout expected_err (list, optional): substrings expected in stderr processes (int, optional): number of mpi processes. 
                defaults to self.dcp_processes

        Returns:
            The result "run" object

        """
        self.num_run_datamover += 1
        self.log.info("run_datamover called %s times",
                      str(self.num_run_datamover))

        # Set the params if and only if any were passed in
        have_src_params = (src_type or src_path or src_pool or src_cont)
        have_dst_params = (dst_type or dst_path or dst_pool or dst_cont)
        if have_src_params or have_dst_params:
            self.set_datamover_params(
                src_type, src_path, src_pool, src_cont,
                dst_type, dst_path, dst_pool, dst_cont)

        # Default expected_output and expected_err to empty lists
        if not expected_output:
            expected_output = []
        if not expected_err:
            expected_err = []

        # Convert singular value to list
        if not isinstance(expected_output, list):
            expected_output = [expected_output]
        if not isinstance(expected_err, list):
            expected_err = [expected_err]

        if test_desc is not None:
            self.log.info("Running %s: %s", self.tool, test_desc)

        try:
            if self.tool == "DCP":
                if not processes:
                    processes = self.dcp_processes
                # If we expect an rc other than 0, don't fail
                self.dcp_cmd.exit_status_exception = (expected_rc == 0)
                result = self.dcp_cmd.run(processes, self.job_manager)
            elif self.tool == "DSYNC":
                if not processes:
                    processes = self.dsync_processes
                # If we expect an rc other than 0, don't fail
                self.dsync_cmd.exit_status_exception = (expected_rc == 0)
                result = self.dsync_cmd.run(processes, self.job_manager)
            elif self.tool == "DSERIAL":
                if processes:
                    processes1 = processes2 = processes
                else:
                    processes1 = self.dserialize_processes
                    processes2 = self.ddeserialize_processes
                result = self.dserialize_cmd.run(processes1, self.job_manager)
                result = self.ddeserialize_cmd.run(processes2, self.job_manager)
            elif self.tool == "FS_COPY":
                result = self.fs_copy_cmd.run()
            elif self.tool == "CONT_CLONE":
                result = self.cont_clone_cmd.run()
            else:
                self.fail("Invalid tool: {}".format(str(self.tool)))
        except CommandFailure as error:
            self.log.error("%s command failed: %s", str(self.tool), str(error))
            self.fail("Test was expected to pass but it failed: {}\n".format(
                test_desc))

        # Check the return code
        actual_rc = result.exit_status
        if actual_rc != expected_rc:
            self.fail("Expected (rc={}) but got (rc={}): {}\n".format(
                expected_rc, actual_rc, test_desc))

        # Check for expected output
        for s in expected_output:
            if s not in result.stdout_text:
                self.fail("stdout expected {}: {}".format(s, test_desc))
        for s in expected_err:
            if s not in result.stderr_text:
                self.fail("stderr expected {}: {}".format(s, test_desc))

        return result

    def run_dm_activities_with_ior(self, tool, create_dataset=False, pool=None,
                                   cont=None):
        """Generic method to perform various datamover activities using ior.

        Args:
            tool (str): specify the tool name to be used
            create_dataset (bool): whether to create an initial set of data
                using ior. Defaults to False.
            pool (TestPool): Pool object. Defaults to None
            cont (TestContainer): Container object.
Defaults to None """ # Set the tool to use self.set_tool(tool) if create_dataset: # create initial datasets if not pool: pool = self.create_pool() cont = self.create_cont(pool, oclass=self.ior_cmd.dfs_oclass.value) # update and run ior on container 1 self.run_ior_with_params( "DAOS", self.ior_cmd.test_file.value, pool, cont) else: if not pool: pool = self.pool[0] if not cont: cont = self.container[-1] # create cont2 cont2 = self.create_cont(pool, oclass=self.ior_cmd.dfs_oclass.value) # perform various datamover activities if tool == 'CONT_CLONE': read_back_cont = self.gen_uuid() self.run_datamover( self.test_id + " (cont to cont2)", "DAOS", None, pool, cont, "DAOS", None, pool, read_back_cont) read_back_pool = pool elif tool == 'DSERIAL': # Create pool2 pool2 = self.get_pool() # Use dfuse as a shared intermediate for serialize + deserialize dfuse_cont = self.create_cont(pool, oclass=self.ior_cmd.dfs_oclass.value) self.start_dfuse(self.dfuse_hosts, pool, dfuse_cont) self.serial_tmp_dir = self.dfuse.mount_dir.value # Serialize/Deserialize container 1 to a new cont2 in pool2 result = self.run_datamover( self.test_id + " (cont->HDF5->cont2)", "DAOS_UUID", None, pool, cont, "DAOS_UUID", None, pool2, None) # Get the destination cont2 uuid read_back_cont = self.parse_create_cont_uuid(result.stdout_text) read_back_pool = pool2 elif tool in ['FS_COPY', 'DCP']: # copy from daos cont to cont2 self.run_datamover( self.test_id + " (cont to cont2)", "DAOS", "/", pool, cont, "DAOS", "/", pool, cont2) else: self.fail("Invalid tool: {}".format(tool)) # move data from daos to posix FS and vice versa if tool in ['FS_COPY', 'DCP']: posix_path = self.new_posix_test_path(shared=True) # copy from daos cont2 to posix file system self.run_datamover( self.test_id + " (cont2 to posix)", "DAOS", "/", pool, cont2, "POSIX", posix_path) # create cont3 cont3 = self.create_cont(pool, oclass=self.ior_cmd.dfs_oclass.value) # copy from posix file system to daos cont3 self.run_datamover( self.test_id + " (posix to cont3)", "POSIX", posix_path, None, None, "DAOS", "/", pool, cont3) read_back_cont = cont3 read_back_pool = pool # the result is that a NEW directory is created in the destination if tool == 'FS_COPY': daos_path = "/" + os.path.basename(posix_path) + self.ior_cmd.test_file.value else: daos_path = self.ior_cmd.test_file.value # update ior params, read back and verify data from cont3 self.run_ior_with_params( "DAOS", daos_path, read_back_pool, read_back_cont, flags="-r -R -F -k")
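

# Hypothetical usage sketch (not part of the original source). It only uses
# methods defined above (set_tool, new_posix_test_path, run_datamover,
# run_ior_with_params); `utils` is assumed to be an instance of this test
# class with the pool and container already created by the test case.
def copy_cont_to_posix_example(utils, pool, cont):
    utils.set_tool("DCP")
    posix_path = utils.new_posix_test_path(shared=True)
    utils.run_datamover(
        utils.test_id + " (cont to posix)",
        "DAOS_UUID", "/", pool, cont,
        "POSIX", posix_path)
    # Read the copied data back through ior to confirm the transfer.
    utils.run_ior_with_params("POSIX", posix_path, flags="-r -R")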
[ 2, 48443, 14629, 14, 8800, 14, 29412, 198, 37811, 198, 7, 34, 8, 15069, 2864, 12, 1238, 2481, 8180, 10501, 13, 198, 198, 4303, 36227, 12, 34156, 12, 33234, 7483, 25, 347, 10305, 12, 17, 12, 2601, 682, 12, 12130, 298, 198, 37811, 1...
1.981724
26,045
#!/usr/bin/env python3
"""
Name: jranderson
Date: 03.17.19
Purpose: SwissProt to FASTA
"""

import argparse
import os
import re
import sys
from Bio import SeqIO


# --------------------------------------------------------
def get_args():
    """get command-line arguments"""
    parser = argparse.ArgumentParser(
        description='Filter Swissprot file for keywords, taxa',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)

    parser.add_argument(
        'unifile', help='Uniprot file', metavar='FILE')

    parser.add_argument(
        '-s', '--skip', help='Skip taxa', nargs='+',
        metavar='STR', type=str, default='')

    parser.add_argument(
        '-k', '--keyword', help='Take on keyword', metavar='STR',
        required=True, default=None)

    parser.add_argument(
        '-o', '--output', help='Output filename',
        metavar='FILE', default='out.fa')

    return parser.parse_args()


# --------------------------------------------------------
def warn(msg):
    """Print a message to STDERR"""
    print(msg, file=sys.stderr)


# --------------------------------------------------------
def die(msg='Something bad happened'):
    """warn() and exit with error"""
    warn(msg)
    sys.exit(1)


# --------------------------------------------------------
def main():
    """Let's get ready to code!"""
    args = get_args()
    unifile = args.unifile
    skip_taxa = list(map(lambda x: x.lower(), sorted(args.skip)))
    keyword = args.keyword.lower()
    output = args.output

    if not os.path.isfile(unifile):
        die('"{}" is not a file'.format(unifile))

    match_list = []
    rec_ctr = 0
    took_ctr = 0

    print('Processing "{}"'.format(unifile))
    with open(unifile, 'r') as uni_fh:
        for unirecord in SeqIO.parse(uni_fh, 'swiss'):
            kw_list = list(map(lambda x: x.lower(),
                               sorted(unirecord.annotations['keywords'])))
            taxonomy = list(map(lambda x: x.lower(),
                                sorted(unirecord.annotations['taxonomy'])))
            rec_ctr += 1
            for kw in kw_list:
                if re.match(keyword, kw):
                    if not set(skip_taxa).intersection(set(taxonomy)):
                        took_ctr += 1
                        match_list.append(unirecord)
                        # Stop after the first matching keyword so a record
                        # is never written more than once.
                        break

    with open(output, 'w') as out_fh:
        SeqIO.write(match_list, out_fh, 'fasta')

    num_skipped = rec_ctr - took_ctr
    print('Done, skipped {} and took {}. See output in "{}".'.format(
        num_skipped, took_ctr, output))


# --------------------------------------------------------
if __name__ == '__main__':
    main()
[ 2, 48443, 14629, 14, 8800, 14, 24330, 21015, 18, 198, 37811, 198, 5376, 25, 474, 25192, 882, 198, 10430, 25, 7643, 13, 1558, 13, 1129, 198, 30026, 3455, 25, 14780, 19703, 284, 376, 1921, 5603, 220, 198, 37811, 198, 198, 11748, 1822, ...
2.442029
1,104
import os

INPUT_FILE = os.path.join(os.path.dirname(os.path.realpath(__file__)), "input.txt")


def get_input(path):
    # Hypothetical helper (the original definition is missing from this
    # snippet): yields each line of the input file without its newline.
    with open(path) as fh:
        for line in fh:
            yield line.rstrip('\n')


if __name__ == '__main__':
    for row in get_input(INPUT_FILE):
        print(row)
[ 11748, 28686, 198, 198, 1268, 30076, 62, 25664, 796, 28686, 13, 6978, 13, 22179, 7, 418, 13, 6978, 13, 15908, 3672, 7, 418, 13, 6978, 13, 5305, 6978, 7, 834, 7753, 834, 36911, 366, 15414, 13, 14116, 4943, 198, 198, 361, 11593, 3672,...
2.278481
79
from django.db import models from django.contrib.auth.models import User
[ 6738, 42625, 14208, 13, 9945, 1330, 4981, 198, 6738, 42625, 14208, 13, 3642, 822, 13, 18439, 13, 27530, 1330, 11787, 628 ]
3.52381
21
"""Generate test data for users.""" import factory from tdpservice.stts.test.factories import STTFactory class BaseUserFactory(factory.django.DjangoModelFactory): """Generate test data for users.""" class Meta: """Hardcoded metata data for users.""" model = "users.User" django_get_or_create = ("username",) id = factory.Faker("uuid4") username = factory.Sequence(lambda n: "testuser%d" % n) password = "test_password" # Static password so we can login. email = factory.Faker("email") first_name = factory.Faker("first_name") last_name = factory.Faker("last_name") is_active = True is_staff = False is_superuser = False stt = factory.SubFactory(STTFactory) login_gov_uuid = factory.Faker("uuid4") deactivated = False @classmethod @factory.post_generation def groups(self, create, extracted, **kwargs): """Add groups to user instance.""" if not create: return if extracted: for group in extracted: self.groups.add(group) class UserFactory(BaseUserFactory): """General purpose user factory used through out most tests.""" stt = factory.SubFactory(STTFactory) class STTUserFactory(BaseUserFactory): """User factory for use in STT tests.""" # To prevent an error that happens when calling the `populate_stt` command. # The stt factory and the command were competing over the right to set the stt. # Our solution was to not set the STT specifically for the STT tests that # were calling the `populate_stt` command. stt = None class AdminSTTUserFactory(STTUserFactory): """Generate an admin user who has no stt assigned.""" is_staff = True is_superuser = True class AdminUserFactory(UserFactory): """Generate Admin User.""" is_staff = True is_superuser = True class StaffUserFactory(UserFactory): """Generate Staff User.""" is_staff = True class InactiveUserFactory(UserFactory): """Generate inactive user, from Django's context.""" is_active = False class DeactivatedUserFactory(UserFactory): """Generate user with account deemed `inactive`.""" deactivated = True
[ 37811, 8645, 378, 1332, 1366, 329, 2985, 526, 15931, 198, 198, 11748, 8860, 198, 198, 6738, 41560, 862, 712, 501, 13, 301, 912, 13, 9288, 13, 22584, 1749, 1330, 3563, 10234, 9548, 628, 198, 4871, 7308, 12982, 22810, 7, 69, 9548, 13, ...
2.835669
785
# Copyright (c) 2012 - 2015 Lars Hupfeldt Nielsen, Hupfeldt IT # All rights reserved. This work is under a BSD license, see LICENSE.TXT. import sys import time import subprocess from jenkinsflow.test.cfg import ApiType from jenkinsflow.test.framework import api_select from jenkinsflow.test.framework.logger import log, logt if __name__ == '__main__': job_name = sys.argv[4] with open(job_name, 'a+') as log_file: _abort(log_file, sys.argv[1], ApiType[sys.argv[2]], sys.argv[3], job_name, int(sys.argv[5])) def abort(api, job_name, sleep_time): """Call this script as a subprocess""" if api.api_type == ApiType.MOCK: return ff = __file__.replace('.pyc', '.py') args = [sys.executable, ff, api.file_name, api.api_type.name, api.func_name.replace('test_', ''), job_name, str(sleep_time)] with open(job_name, 'w') as log_file: logt(log_file, "Invoking abort subprocess.", args) subprocess.Popen(args)
[ 2, 15069, 357, 66, 8, 2321, 532, 1853, 31239, 367, 929, 16265, 83, 31154, 11, 367, 929, 16265, 83, 7283, 198, 2, 1439, 2489, 10395, 13, 770, 670, 318, 739, 257, 347, 10305, 5964, 11, 766, 38559, 24290, 13, 51, 25010, 13, 198, 198,...
2.498701
385
"""Parquet file format functions""" import os import pandas from typing import Dict, List from tempfile import mkstemp import target_snowflake.flattening as flattening def create_copy_sql(table_name: str, stage_name: str, s3_key: str, file_format_name: str, columns: List): """Generate a Parquet compatible snowflake COPY INTO command""" return "COPY INTO {} ({}) " \ "FROM (SELECT {} FROM '@{}/{}') " \ "FILE_FORMAT = (format_name='{}')".format( table_name, ', '.join([c['name'] for c in columns]), ', '.join(["{}($1:{}) {}".format(c['trans'], c['json_element_name'], c['name']) for i, c in enumerate(columns)]), stage_name, s3_key, file_format_name) def create_merge_sql(table_name: str, stage_name: str, s3_key: str, file_format_name: str, columns: List, pk_merge_condition: str) -> str: """Generate a Parquet compatible snowflake MERGE INTO command""" return "MERGE INTO {} t USING (" \ "SELECT {} " \ "FROM '@{}/{}' " \ "(FILE_FORMAT => '{}')) s " \ "ON {} " \ "WHEN MATCHED THEN UPDATE SET {} " \ "WHEN NOT MATCHED THEN " \ "INSERT ({}) " \ "VALUES ({})".format( table_name, ', '.join(["{}($1:{}) {}".format(c['trans'], c['json_element_name'], c['name']) for i, c in enumerate(columns)]), stage_name, s3_key, file_format_name, pk_merge_condition, ', '.join(['{0}=s.{0}'.format(c['name']) for c in columns]), ', '.join([c['name'] for c in columns]), ', '.join(['s.{}'.format(c['name']) for c in columns])) def records_to_dataframe(records: Dict, schema: Dict, data_flattening_max_level: int = 0) -> pandas.DataFrame: """ Transforms a list of record messages into pandas dataframe with flattened records Args: records: List of dictionaries that represents a batch of singer record messages data_flattening_max_level: Max level of auto flattening if a record message has nested objects. (Default: 0) Returns: Pandas dataframe """ flattened_records = [] for record in records.values(): flatten_record = flattening.flatten_record(record, schema, max_level=data_flattening_max_level) flattened_records.append(flatten_record) return pandas.DataFrame(data=flattened_records) def records_to_file(records: Dict, schema: Dict, suffix: str = 'parquet', prefix: str = 'batch_', compression: bool = False, dest_dir: str = None, data_flattening_max_level: int = 0): """ Transforms a list of dictionaries with records messages to a parquet file Args: records: List of dictionaries that represents a batch of singer record messages schema: JSONSchema of the records suffix: Generated filename suffix prefix: Generated filename prefix compression: Gzip compression enabled or not (Default: False) dest_dir: Directory where the parquet file will be generated. (Default: OS specificy temp directory) data_flattening_max_level: Max level of auto flattening if a record message has nested objects. (Default: 0) Returns: Absolute path of the generated parquet file """ if dest_dir: os.makedirs(dest_dir, exist_ok=True) if compression: file_suffix = f'.{suffix}.gz' parquet_compression='gzip' else: file_suffix = f'.{suffix}' parquet_compression = None filename = mkstemp(suffix=file_suffix, prefix=prefix, dir=dest_dir)[1] dataframe = records_to_dataframe(records, schema, data_flattening_max_level) dataframe.to_parquet(filename, compression=parquet_compression) return filename
[ 37811, 10044, 21108, 2393, 5794, 5499, 37811, 198, 11748, 28686, 198, 11748, 19798, 292, 198, 198, 6738, 19720, 1330, 360, 713, 11, 7343, 198, 6738, 20218, 7753, 1330, 33480, 927, 79, 198, 198, 11748, 2496, 62, 82, 2197, 47597, 13, 2704...
2.098345
2,054
def is_palindrome(x):
    """Return True when the integer x reads the same forwards and backwards."""
    # Negative numbers are never palindromes; for the rest, compare x against
    # its decimal digits reversed.
    if x >= 0:
        return int(str(x)[::-1]) == x
    return False
[ 198, 220, 220, 220, 220, 220, 220, 220, 1303, 611, 2124, 1875, 657, 1058, 198, 220, 220, 220, 220, 220, 220, 220, 1303, 220, 220, 220, 220, 374, 796, 493, 7, 2536, 7, 87, 38381, 3712, 12, 16, 12962, 198, 220, 220, 220, 220, 220,...
1.435583
163
# gdraw.py """ Copyright 2021 Cesar O. Aguilar Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ import matplotlib.pyplot as plt from math import cos, sin, pi import copy #%% Graph class class Graph: """ A simple graph class. Attributes ---------- vertices : dictionary the keys are the vertices and the values are the positions where each vertex is to be drawn, each position is a two-element list of (x, y) coordinates of the vertex position, the keys can be strings, integers, or anything that has a string representation edges : list or dictionary the edges of the graph, if edges is a list then edges is a list of 2-element lists representing an edge in the graph, if edges is a dictionary then each key is a vertex and the corresponding value is a list of the neighbors of the vertex, in other words an adjacency list label_positions : dictionary the keys are the vertices and the values are 2-element lists which is the offset (dx, dy) of where the vertex label should be placed relative to the position (x, y) of the vertex graph_label : dictionary the keys are strings representing a label that is to be placed at an (xx, yy) position determined by the corresponding value, these are used to place a label like 'G' or 'C_4' somewhere in the graph, or any other desired text label_style : string determines the format of the vertex labels, the default is 'v_i' which causes the _i to be replaced by the number used to name a vertex, if 'raw' then the exact objects used as the vertices will be used, so long as the objects have a string representation node_colors : dictionary a key is a vertex and a value is a valid Python color to use for coloring the vertex """ def __init__(self, vertices=None, edges=None): """ Parameters ---------- vertices : dictionary the keys are the vertices and the values are the positions where each vertex is to be drawn, each position is a two-element list of the (x,y) coordinates of the vertex position, the keys can be strings, integers, or anything that has a string representation. 
        edges : list or dictionary
            the edges of the graph, if E is a list then E is a list of
            edges where each edge is a 2-element list, if E is a
            dictionary then each key is a vertex and the corresponding
            value is a list of the neighbors of the vertex, in other
            words an adjacency list
        """
        self.vertices = vertices
        self.edges = edges
        self.label_positions = None
        self.graph_label = None
        self.label_style = 'v_i'
        self.node_colors = None

    def _set_vertex_xpts(self):
        """Creates list of x-coordinates of vertex positions."""
        self.x_pts = []
        for x_val, _ in self.vertices.values():
            self.x_pts.append(x_val)

    def _set_vertex_ypts(self):
        """Creates list of y-coordinates of vertex positions."""
        self.y_pts = []
        for _, y_val in self.vertices.values():
            self.y_pts.append(y_val)

    def shift_vertices(self, dx, dy):
        """
        Shift all vertices horizontally by dx and vertically by dy.

        Parameters
        ----------
        dx : float
            all vertices will be shifted by dx
        dy : float
            all vertices will be shifted by dy
        """
        for vertex in self.vertices:
            self.vertices[vertex][0] += dx
            self.vertices[vertex][1] += dy

    def add_edge(self, u, v):
        """
        Add edge {u, v}.

        Parameters
        ----------
        u : int, str
        v : int, str
        """
        if u in self.vertices and v in self.vertices:
            if isinstance(self.edges, list):
                self.edges.append([u, v])
            else:  # then neighbors_list
                if u in self.edges:
                    self.edges[u].append(v)
                else:
                    self.edges[u] = [v]
            return None
        raise TypeError(f"Both u = {u} and v = {v} must already be vertices.")

    def add_edges(self, edges):
        """
        Add multiple edges to graph.

        Parameters
        ----------
        edges : list
            a list of lists where each sublist is a 2-element list
            forming an edge
        """
        if isinstance(edges, list):
            for e in edges:
                # Unpack each 2-element sublist into the (u, v) arguments.
                self.add_edge(*e)
            return None
        raise TypeError("Argument must be a list of lists.")

    def add_vertex(self, v, pos, w=None):
        """
        Adds vertex v at position pos. If w is the label for an existing
        vertex then the location where v is added is relative to w.

        Parameters
        ----------
        v : mixed, usually str or int
            a vertex in the graph
        pos : list
            a 2D-point of the position of the new vertex
        w : mixed, usually str or int
            an existing vertex of the graph
        """
        if v in self.vertices:
            raise Exception(f"Vertex v = {v} is already in the graph.")
        w_x = w_y = 0
        if w is not None:
            if w not in self.vertices:
                raise Exception(f"Vertex w = {w} is not in the graph.")
            w_x, w_y = self.vertices[w]
        self.vertices[v] = [pos[0] + w_x, pos[1] + w_y]

    def radd_vertex(self, v, R, theta, w):
        """
        Adds vertex v at distance R and angle theta relative to existing
        vertex w. The parameters R and theta are polar coordinates.
        If w has position (x, y) then v is added at Euclidean position
        (x+R*cos(theta), y+R*sin(theta)). The angle theta should be
        given in degrees not radians.

        Parameters
        ----------
        v : mixed, usually str or int
            the new vertex to be added
        R : float
            distance from vertex
        theta : float
            angle measured CCW from horizontal line through w
        w : mixed, usually str or int
            existing vertex in graph
        """
        if v in self.vertices:
            raise Exception(f"Vertex v = {v} is already in the graph.")
        if w not in self.vertices:
            raise Exception(f"Vertex w = {w} is not in the graph.")
        w_x, w_y = self.vertices[w]
        angle = theta*pi/180
        self.vertices[v] = [R*cos(angle) + w_x, R*sin(angle) + w_y]

    def copy(self):
        """
        Returns a deep copy of the graph.
""" G_new = Graph() G_new.vertices = copy.deepcopy( self.vertices ) G_new.edges = copy.deepcopy( self.edges ) G_new.label_positions = copy.deepcopy( self.label_positions ) G_new.graph_label = copy.deepcopy( self.graph_label ) return G_new #%% RadGraph class RadGraph(Graph): """A graph whose vertices are placed on a circle.""" def __init__(self, n, R, phi=None, dr=0.5, vertex_labels=True): """ Parameters ---------- n : int number of vertices R : float radius of circle phi : float, optional degrees to shift all vertices. The default is None. dr : float, optional distance from vertex to place label. The default is 0.5. vertex_labels : boolean, optional if True then labels are created automatically. The default is True. Returns ------- None. """ dt = (360/n)*(pi/180) t0 = 0.5*dt if phi is None else phi*pi/180 super().__init__() self.vertices = {} self.label_positions = {} for v in range(n): a, b = cos(t0+v*dt), sin(t0+v*dt) x, y = R*a, R*b self.add_vertex(v+1, [x, y] ) if vertex_labels: self.label_positions[v+1] = [dr*a, dr*b] #%% GraphDrawer class class GraphDrawer: """ A Graph drawer. """ def __init__(self, fig_size, font_size=25, marker_size=20): """ Paramters --------- fig_size : tuple width and height of the figure in inches font_size : int font size used in vertex labels marker_size : int the marker size used for the vertices """ self.fig_size = fig_size self.font_size = font_size self.marker_size = marker_size self.graphs = [] self.fig = None self.ax = None def _load(self, graphs): """ Parameters ---------- graphs : list list of Graph objects Returns ------- None. """ for g in graphs: if isinstance(g, Graph): self.graphs.append(g) return None def draw(self, *args): """Draw graphs.""" self._load(args) self.fig = plt.figure(figsize=self.fig_size) self.ax = self.fig.gca() for g in self.graphs: self.plot_graph(g) self._center_axis() return self def save(self, file_name, dpi=150): """ Saves figure to the file file_name. When file_name contains an extension, for example, 'mygraph.png' then a .png file is generated. If no extension is included then two files are generated with .eps and .png extensions using file_name as the base name of the file. Parameters ---------- file_name : str Name of file when saving with extension. dpi : int, optional Dots per square inch. The default is 100. Returns ------- None. """ if '.' 
in file_name: self.fig.savefig(file_name, dpi=dpi, bbox_inches='tight',pad_inches=0,transparent=True) else: self.fig.savefig(f"{file_name}.eps",dpi=dpi, bbox_inches='tight',pad_inches=0,transparent=True) self.fig.savefig(f"{file_name}.png",dpi=dpi, bbox_inches='tight',pad_inches=0,transparent=True) return None def make_edges(self, G): """Create edges connecting adjacent vertices.""" if isinstance(G.edges, list): for e in G.edges: x = [ G.vertices[e[0]][0] , G.vertices[e[1]][0] ] y = [ G.vertices[e[0]][1] , G.vertices[e[1]][1] ] self.ax.plot(x, y, '.-k', ms=self.marker_size) elif isinstance(G.edges, dict): # using adjacency lists for v, Nv in G.edges.items(): for u in Nv: x = [G.vertices[v][0], G.vertices[u][0]] y = [G.vertices[v][1], G.vertices[u][1]] self.ax.plot(x, y, '-k', ms=self.marker_size) else: # invalid type raise TypeError("The edge structure must be a list or a dictionary.") return None def add_vertex_labels(self, G): """Add labels to the vertices.""" for v, pos in G.label_positions.items(): x = G.vertices[v][0] + pos[0] y = G.vertices[v][1] + pos[1] # Use latex to render label if G.label_style == 'raw': s = r'${}$'.format(v) else: s = r'${}$'.format('v_{' + str(v) + '}') self.ax.text(x, y, s, fontsize=self.font_size, horizontalalignment='center', verticalalignment='center') return None def _center_axis(self): """ Makes the axis of a plot with center of axis at (0,0) instead of the default box-style axis in matplotlib Returns ------- None. """ self.ax.spines['right'].set_color('none') self.ax.spines['top'].set_color('none') self.ax.xaxis.set_ticks_position('bottom') self.ax.spines['bottom'].set_position(('data',0)) # set position of x spine to x=0 self.ax.yaxis.set_ticks_position('left') self.ax.spines['left'].set_position(('data',0)) # set position of y spine to y=0 for t in self.ax.xaxis.get_major_ticks(): t.label.set_fontsize(self.font_size) t.label.set_color('black') for t in self.ax.yaxis.get_major_ticks(): t.label.set_fontsize(self.font_size) t.label.set_color('black') self.ax.axis('equal') self.ax.axis('off') return None
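

# Hypothetical usage sketch (not part of the original file): draws a triangle.
# GraphDrawer.draw() relies on a plot_graph() method that is not defined in
# this file, so this demo wires the figure up manually using only the methods
# that do appear above (make_edges, add_vertex_labels, save).
if __name__ == '__main__':
    g = Graph(vertices={1: [0, 0], 2: [2, 0], 3: [1, 1.5]},
              edges=[[1, 2], [2, 3], [1, 3]])
    g.label_positions = {1: [-0.3, -0.3], 2: [0.3, -0.3], 3: [0.0, 0.4]}
    drawer = GraphDrawer(fig_size=(4, 4))
    drawer.fig = plt.figure(figsize=drawer.fig_size)
    drawer.ax = drawer.fig.gca()
    drawer.make_edges(g)
    drawer.add_vertex_labels(g)
    drawer.save('triangle.png')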
[ 2, 308, 19334, 13, 9078, 198, 37811, 198, 15269, 33448, 327, 18964, 440, 13, 33118, 1794, 198, 198, 5990, 3411, 318, 29376, 7520, 11, 1479, 286, 3877, 11, 284, 597, 1048, 220, 198, 672, 7339, 257, 4866, 286, 428, 3788, 290, 3917, 22...
1.987249
7,607
'''
Exercise 6:
    Rewrite the program that prompts the user for a list of numbers
        and prints out the maximum and minimum of the numbers at the end
        when the user enters “done”.
    Write the program to store the numbers the user enters in a list
        and use the max() and min() functions to compute
        the maximum and minimum numbers after the loop completes.

----Example:
Enter a number: 6
Enter a number: 2
Enter a number: 9
Enter a number: 3
Enter a number: 5
Enter a number: done
Maximum: 9.0
Minimum: 2.0
'''

lst = []
while True:
    number = input('Enter a number: ')
    if number == 'done':
        break
    try:
        lst.append(float(number))
    except ValueError:
        print('Invalid input')

# Guard against calling max()/min() on an empty list.
if lst:
    print('Maximum:', max(lst))
    print('Minimum:', min(lst))
else:
    print('No valid numbers were entered')
[ 7061, 6, 198, 3109, 23697, 718, 25, 198, 220, 220, 220, 16140, 6525, 262, 1430, 326, 36454, 262, 2836, 329, 257, 1351, 286, 3146, 59, 198, 220, 220, 220, 220, 220, 220, 220, 290, 20842, 503, 262, 5415, 290, 5288, 286, 262, 3146, 3...
2.333333
438
# PLY based Lexer class, based on pycparser by Eli Bendersky. # # Copyright (c) 2012, Eli Bendersky # All rights reserved. # # Redistribution and use in source and binary forms, with or without modification, # are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright notice, this # list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # * Neither the name of Eli Bendersky nor the names of its contributors may # be used to endorse or promote products derived from this software without # specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND # ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE # GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) # HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT # OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. import re import sys import os.path # Try to load the ply module, if not, then assume it is in the third_party # directory. try: # Disable lint check which fails to find the ply module. # pylint: disable=F0401 from ply.lex import TOKEN except ImportError: module_path, module_name = os.path.split(__file__) third_party = os.path.join( module_path, os.pardir, os.pardir, os.pardir, os.pardir, 'third_party') sys.path.append(third_party) # pylint: disable=F0401 from ply.lex import TOKEN
[ 2, 9297, 56, 1912, 17210, 263, 1398, 11, 1912, 319, 12972, 13155, 28198, 416, 25204, 347, 7338, 2584, 13, 198, 2, 198, 2, 15069, 357, 66, 8, 2321, 11, 25204, 347, 7338, 2584, 198, 2, 1439, 2489, 10395, 13, 198, 2, 198, 2, 2297, ...
3.361022
626
from dagster import ModeDefinition, PresetDefinition, pipeline
from dagster.seven.temp_dir import get_system_temp_directory

from hacker_news.ops.download_items import build_comments, build_stories, download_items
from hacker_news.ops.id_range_for_time import id_range_for_time
from hacker_news.resources import RESOURCES_LOCAL, RESOURCES_PROD, RESOURCES_STAGING
from hacker_news.resources.hn_resource import hn_api_subsample_client, hn_snapshot_client
from hacker_news.resources.partition_bounds import partition_bounds

MODE_TEST = ModeDefinition(
    name="test_local_data",
    description="This mode queries snapshotted HN data and does all writes locally.",
    resource_defs=dict(
        {"partition_bounds": partition_bounds, "hn_client": hn_snapshot_client},
        **RESOURCES_LOCAL,
    ),
)

MODE_STAGING = ModeDefinition(
    name="staging_live_data",
    description=(
        "This mode queries live HN data and writes to a staging S3 bucket. "
        "Intended for use in the staging environment."
    ),
    resource_defs=dict(
        **{
            "partition_bounds": partition_bounds,
            "hn_client": hn_api_subsample_client.configured({"sample_rate": 10}),
        },
        **RESOURCES_STAGING,
    ),
)

MODE_PROD = ModeDefinition(
    name="prod",
    description=(
        "This mode queries live HN data and writes to a prod S3 bucket. "
        "Intended for use in production."
    ),
    resource_defs=dict(
        **{
            "partition_bounds": partition_bounds,
            "hn_client": hn_api_subsample_client.configured({"sample_rate": 10}),
        },
        **RESOURCES_PROD,
    ),
)

DEFAULT_PARTITION_RESOURCE_CONFIG = {
    "partition_bounds": {"config": {"start": "2020-12-30 00:00:00", "end": "2020-12-30 01:00:00"}},
}

PRESET_TEST = PresetDefinition(
    name="test_local_data",
    run_config={
        "resources": dict(
            parquet_io_manager={"config": {"base_path": get_system_temp_directory()}},
            warehouse_io_manager={"config": {"base_path": get_system_temp_directory()}},
            **DEFAULT_PARTITION_RESOURCE_CONFIG,
        ),
    },
    mode="test_local_data",
)


@pipeline(
    mode_defs=[
        MODE_TEST,
        MODE_STAGING,
        MODE_PROD,
    ],
    tags={
        "dagster-k8s/config": {
            "container_config": {
                "resources": {
                    "requests": {"cpu": "500m", "memory": "2Gi"},
                }
            },
        }
    },
    preset_defs=[PRESET_TEST],
)
def download_pipeline():
    """
    #### Owners
    schrockn@elementl.com, cat@elementl.com

    #### About
    Downloads all items from the HN API for a given day, splits the items into
    stories and comment types using Spark, and uploads filtered items to the
    corresponding stories or comments Snowflake table
    """
    items = download_items(id_range_for_time())
    build_comments(items)
    build_stories(items)
[ 6738, 48924, 1706, 1330, 10363, 36621, 11, 1763, 316, 36621, 11, 11523, 198, 6738, 48924, 1706, 13, 26548, 13, 29510, 62, 15908, 1330, 651, 62, 10057, 62, 29510, 62, 34945, 198, 6738, 23385, 62, 10827, 13, 2840, 13, 15002, 62, 23814, ...
2.354684
1,249
# 371. Sum of Two Integers # ttungl@gmail.com # Calculate the sum of two integers a and b, but you are not allowed to use the operator + and -.
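
# A minimal sketch of one standard approach (not the original author's code):
# iterate XOR (the sum without carries) and AND<<1 (the carries), masking to
# 32 bits so Python's unbounded integers behave like fixed-width integers.
def get_sum(a, b):
    MASK = 0xFFFFFFFF
    while b != 0:
        a, b = (a ^ b) & MASK, ((a & b) << 1) & MASK
    # Re-interpret as a signed 32-bit value if the sign bit ended up set.
    return a if a <= 0x7FFFFFFF else ~(a ^ MASK)

# Examples: get_sum(1, 2) == 3, get_sum(-2, 3) == 1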
[ 2, 47343, 13, 5060, 286, 4930, 15995, 364, 198, 2, 256, 83, 2150, 75, 31, 14816, 13, 785, 198, 2, 27131, 378, 262, 2160, 286, 734, 37014, 257, 290, 275, 11, 475, 345, 389, 407, 3142, 284, 779, 262, 10088, 1343, 290, 532, 13, 198...
2.736842
57
general = Channel("general-helpdesk", ["Owner", "Tutor", "Student"]) announcements = Channel("announcements", ["Owner", "Tutor"]) secret = Channel("foobar", ["Owner"]) print(general.get_name()) # #general-helpdesk print(secret.get_name()) # #foobar russell = Owner("Russell") clifton = Tutor("Clifton") aeron = Student("Aeron") kenghwee = User("Keng Hwee") print(russell.get_role()) # Russell is the owner! print(clifton.get_role()) # Clifton is a Tutor print(aeron.get_role()) # Aeron is a Student print(kenghwee.get_role()) # Keng Hwee has no role print() print(russell.join(general)) # Russell joins #general-helpdesk print(russell.join(general)) # Russell has already joined #general-helpdesk print(clifton.join(general)) # Clifton joins #general-helpdesk print(russell.mute(aeron, None, None)) # Russell muted Aeron indefinitely. Reason: None print(aeron.join(general)) # Aeron is muted! # therefore cannot join print(russell.unmute(aeron)) # Russell unmuted Aeron! print(aeron.join(general)) # Aeron joins #general-helpdesk print(kenghwee.join(general)) # Keng Hwee has no permission to join #general-helpdesk print(general.get_members()) # ['Aeron', 'Clifton', 'Russell'] print() print(russell.join(announcements)) # Russell joins #announcements print(clifton.join(announcements)) # Clifton joins #announcements print(aeron.join(announcements)) # Aeron has no permission to join #announcements print(kenghwee.join(announcements)) # Keng Hwee has no permission to join #announcements print(announcements.get_members()) # ['Clifton', 'Russell'] print() print(russell.join(secret)) # Russell joins #foobar print(clifton.join(secret)) # Clifton has no permission to join #foobar print(aeron.join(secret)) # Aeron has no permission to join #foobar print(kenghwee.join(secret)) # Keng Hwee has no permission to join #foobar print(secret.get_members()) # ['Russell'] print() print(russell.get_channels()) # ['announcements', 'foobar', 'general-helpdesk'] print(clifton.get_channels()) # ['announcements', 'general-helpdesk'] print(aeron.get_channels()) # ['general-helpdesk'] print(kenghwee.get_channels()) # [] print() print(russell.message(announcements, "Tutorial is canceled!")) # #announcements --- Russell: Tutorial is canceled! print(clifton.message(general, "Hooray!")) # #general-helpdesk --- Clifton: Hooray! print(aeron.message(announcements, "Tutorial is canceled!")) # Aeron has not joined #announcements print(kenghwee.message(announcements, "Tutorial is canceled!")) # Keng Hwee has not joined #announcements print(russell.message(secret, "I am alone!")) # #foobar --- Russell: I am alone! print(clifton.message(secret, "WHAT")) # Clifton has not joined #foobar print() print(russell.mute(kenghwee, None, "Testing")) # Russell muted Keng Hwee indefinitely. Reason: Testing print(clifton.mute(russell, 10, "Revenge")) # Cannot mute a fellow tutor print(kenghwee.mute(russell, 100, "Revenge")) # Keng Hwee is not allowed to send messages! # because he's muted print(kenghwee.message(announcements, "Tutorial is canceled!")) # Keng Hwee has not joined #announcements print(clifton.unmute(kenghwee)) # Clifton unmuted Keng Hwee! print(kenghwee.mute(russell, 100, "Revenge")) # Keng Hwee doesn't have a permission to mute another user print(aeron.mute(clifton, 3, None)) # Aeron doesn't have a permission to mute another user print(clifton.mute(aeron, 5, "Why would you try to mute me?")) # Clifton muted Aeron for 5 minutes. Reason: Why would you try to mute me? 
print(kenghwee.mute(kenghwee, 10, "No idea")) # Cannot mute oneself print(russell.mute(russell, 10, "Same here")) # Cannot mute oneself print(russell.mute(aeron, 3, "Spam")) # Aeron is muted! display_hall_of_mute() ''' HALL OF MUTE #general-helpdesk: ['Aeron'] #announcements: [] #foobar: [] ''' print(aeron.message(secret, "Hello")) # Aeron has not joined #foobar print(aeron.message(general, "Yoooo")) # Aeron is not allowed to send messages in #general-helpdesk print(aeron.message(announcements, "Test")) # Aeron has not joined #announcements print(clifton.mute(russell, None, None)) # Cannot mute a fellow tutor print(russell.mute(clifton, None, "Muting a fellow tutor is a can")) # Russell muted Clifton indefinitely. Reason: Muting a fellow tutor is a can display_hall_of_mute() ''' HALL OF MUTE #general-helpdesk: ['Aeron', 'Clifton'] #announcements: ['Clifton'] #foobar: [] ''' print(clifton.mute(aeron, 3, "Spam?")) # Clifton is not allowed to send messages! print(russell.unmute(clifton)) # Russell unmuted Clifton! display_hall_of_mute() ''' HALL OF MUTE #general-helpdesk: ['Aeron'] #announcements: [] #foobar: [] ''' print(clifton.mute(aeron, 3, "Spam?")) # Aeron is muted! print(aeron.message(general, "Yoooo")) # Aeron is not allowed to send messages in #general-helpdesk # since he's muted print(aeron.mute(kenghwee, 2, "Lol")) # Aeron is not allowed to send messages! # again, because he's still muted print(aeron.unmute(kenghwee)) # Keng Hwee is not muted :) print(clifton.unmute(aeron)) # Clifton unmuted Aeron! display_hall_of_mute() ''' HALL OF MUTE #general-helpdesk: [] #announcements: [] #foobar: [] ''' print(kenghwee.mute(russell, 10, None)) # Keng Hwee doesn't have a permission to mute another user print(kenghwee.message(general, "Hi guys I'm unmuted")) # Keng Hwee has not joined #general-helpdesk print(russell.message(general, "Hello")) # #general-helpdesk --- Russell: Hello print(clifton.message(general, "Hello!")) # #general-helpdesk --- Clifton: Hello! print(aeron.message(general, "I'm so happy!")) # #general-helpdesk --- Aeron: I'm so happy! print(aeron.message(announcements, "Test")) # Aeron has not joined #announcements print(kenghwee.join(general)) # Keng Hwee has no permission to join #general-helpdesk print(russell.unmute(kenghwee)) # Keng Hwee is not muted :) print(russell.unmute(russell)) # Russell is not muted :)
[ 201, 198, 24622, 796, 11102, 7203, 24622, 12, 16794, 8906, 74, 1600, 14631, 42419, 1600, 366, 51, 38409, 1600, 366, 38778, 8973, 8, 201, 198, 1236, 8652, 902, 796, 11102, 7203, 1236, 8652, 902, 1600, 14631, 42419, 1600, 366, 51, 38409, ...
2.112828
3,430
# Copyright 2022 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""train the PDarts model"""
try:
    from moxing.framework import file
    print("import moxing success")
except ModuleNotFoundError as e:
    print(f'not modelarts env, error={e}')
import os
import time
import logging
import argparse
import numpy as np

from mindspore import context
import mindspore.nn as nn
from mindspore.common.tensor import Tensor
from mindspore.train.loss_scale_manager import FixedLossScaleManager
from mindspore.train.callback._time_monitor import TimeMonitor
from mindspore.train.callback._loss_monitor import LossMonitor
from mindspore.train import Model
from mindspore.train.serialization import load_checkpoint, load_param_into_net
from mindspore.communication import init
from mindspore.communication.management import get_rank
from mindspore.train.model import ParallelMode

import src.my_utils as my_utils
import src.genotypes as genotypes
from src.loss import SoftmaxCrossEntropyLoss
from src.model import NetworkCIFAR as Network
from src.dataset import create_cifar10_dataset
from src.call_backs import Val_Callback, Set_Attr_CallBack

parser = argparse.ArgumentParser("cifar")
parser.add_argument('--device_target', type=str, default="Ascend", choices=['Ascend', 'GPU'],
                    help='device where the code will be implemented (default: Ascend)')
parser.add_argument('--local_data_root', default='/cache/',
                    help='a directory used for transfer data between local path and OBS path')
parser.add_argument('--data_url', type=str, default="cifar-10-binary", help='the training data path')
parser.add_argument('--train_url', type=str, default="", help='the path to save training outputs')
parser.add_argument('--batch_size', type=int, default=128, help='batch size')
parser.add_argument('--load_weight', type=str, default='', help='load ckpt file path')
parser.add_argument('--no_top', type=str, default='True', help='whether contains the top fc layer weights')
parser.add_argument('--learning_rate', type=float, default=0.025, help='init learning rate')
parser.add_argument('--momentum', type=float, default=0.9, help='momentum')
parser.add_argument('--weight_decay', type=float, default=3e-4, help='weight decay')
parser.add_argument('--epochs', type=int, default=600, help='num of training epochs')
parser.add_argument('--init_channels', type=int, default=36, help='num of init channels')
parser.add_argument('--layers', type=int, default=20, help='total number of layers')
parser.add_argument('--auxiliary', action='store_true', default=True, help='use auxiliary tower')
parser.add_argument('--auxiliary_weight', type=float, default=0.4, help='weight for auxiliary loss')
parser.add_argument('--drop_path_prob', type=float, default=0.3, help='drop path probability')
parser.add_argument('--arch', type=str, default='PDARTS', help='which architecture to use')
parser.add_argument('--amp_level', type=str, default='O3', help='')
parser.add_argument('--optimizer', type=str,
default='Momentum', help='SGD or Momentum') parser.add_argument('--cutout_length', default=16, help='use cutout') args = parser.parse_args() CIFAR_CLASSES = 10 if __name__ == '__main__': start_time = time.time() main() end_time = time.time() duration = end_time - start_time logging.info('Total time: %ds.', duration)
[ 2, 15069, 33160, 43208, 21852, 1766, 1539, 12052, 198, 2, 198, 2, 49962, 739, 262, 24843, 13789, 11, 10628, 362, 13, 15, 357, 1169, 366, 34156, 15341, 198, 2, 345, 743, 407, 779, 428, 2393, 2845, 287, 11846, 351, 262, 13789, 13, 198...
2.852227
1,482
import komand
from .schema import DetectEncodingInput, DetectEncodingOutput, Input, Output, Component

# Custom imports below
import chardet
import base64
[ 11748, 479, 296, 392, 198, 6738, 764, 15952, 2611, 1330, 35874, 27195, 7656, 20560, 11, 35874, 27195, 7656, 26410, 11, 23412, 11, 25235, 11, 35100, 198, 2, 8562, 17944, 2174, 198, 11748, 442, 446, 316, 198, 11748, 2779, 2414, 628 ]
3.875
40
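The DetectEncoding record above stops at its imports; the action body is not part of the sample. A minimal sketch, assuming the usual pairing of base64 transport with chardet detection — the function name and return shape below are hypothetical, not the plugin's actual schema:

import base64

import chardet


def detect_encoding(b64_data):
    # Decode the transported payload, then let chardet guess its encoding.
    raw = base64.b64decode(b64_data)
    result = chardet.detect(raw)  # dict with 'encoding', 'confidence', 'language'
    return {"encoding": result.get("encoding"), "confidence": result.get("confidence")}


# Example: a UTF-8 string shipped as base64.
encoded = base64.b64encode("héllo wörld".encode("utf-8")).decode("ascii")
print(detect_encoding(encoded))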
import os

path_files = 'Primos'
csv_file = os.path.join(path_files, 'Informacion.csv')

incremento = 1000 * 1000
inicio = ultimo_calculo()
final = inicio * 100
[ 11748, 28686, 198, 6978, 62, 16624, 796, 705, 23828, 418, 6, 198, 40664, 62, 7753, 796, 28686, 13, 6978, 13, 22179, 7, 6978, 62, 16624, 11, 705, 818, 687, 49443, 13, 40664, 11537, 628, 198, 198, 24988, 434, 78, 796, 8576, 1635, 8576...
2.612903
62
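The record calls ultimo_calculo() without defining it. Purely as an assumption, it reads like a checkpoint lookup against Informacion.csv; a hypothetical sketch of such a helper:

import csv
import os


def ultimo_calculo(csv_file=os.path.join('Primos', 'Informacion.csv'), default=1):
    # Hypothetical behavior: return the last bound recorded in the CSV
    # checkpoint, or a starting default when no checkpoint exists yet.
    if not os.path.exists(csv_file):
        return default
    with open(csv_file, newline='') as fh:
        rows = [row for row in csv.reader(fh) if row]
    return int(rows[-1][0]) if rows else default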
import http.client, urllib.parse

conn = http.client.HTTPConnection('api.positionstack.com')

params = urllib.parse.urlencode({
    'access_key': '0d2a35d484a0c57e8d1da1d92620249c',
    'query': 'Copacabana',
    'region': 'Rio de Janeiro',
    'limit': 1,
    })

conn.request('GET', '/v1/forward?{}'.format(params))

res = conn.getresponse()
data = res.read()

print(data.decode('utf-8'))
[ 11748, 2638, 13, 16366, 11, 2956, 297, 571, 13, 29572, 198, 198, 37043, 796, 2638, 13, 16366, 13, 40717, 32048, 10786, 15042, 13, 9150, 25558, 13, 785, 11537, 198, 198, 37266, 796, 2956, 297, 571, 13, 29572, 13, 6371, 268, 8189, 15090...
2.335329
167
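The snippet prints the raw response body; a short follow-up showing how those bytes would be parsed with the standard json module. The response shape used here is an assumption based on positionstack's documented schema, not output captured from the API:

import json

# A canned body in positionstack's documented shape (assumed, for illustration).
data = (b'{"data": [{"label": "Copacabana, Rio de Janeiro, Brazil",'
        b' "latitude": -22.97, "longitude": -43.19}]}')

payload = json.loads(data.decode('utf-8'))
for hit in payload.get('data', []):
    print(hit.get('label'), hit.get('latitude'), hit.get('longitude'))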
import re import gzip import logging from dipper.sources.OMIMSource import OMIMSource from dipper.models.Model import Model from dipper.models.assoc.OrthologyAssoc import OrthologyAssoc from dipper.models.Genotype import Genotype from dipper.models.GenomicFeature import Feature, makeChromID, makeChromLabel from dipper.models.Reference import Reference LOG = logging.getLogger(__name__) class NCBIGene(OMIMSource): """ This is the processing module for the National Center for Biotechnology Information. It includes parsers for the gene_info (gene names, symbols, ids, equivalent ids), gene history (alt ids), and gene2pubmed publication references about a gene. This creates Genes as classes, when they are properly typed as such. For those entries where it is an 'unknown significance', it is added simply as an instance of a sequence feature. It will add equivalentClasses for a subset of external identifiers, including: ENSEMBL, HGMD, MGI, ZFIN, and gene product links for HPRD. They are additionally located to their Chromosomal band (until we process actual genomic coords in a separate file). We process the genes from the filtered taxa, starting with those configured by default (human, mouse, fish). This can be overridden in the calling script to include additional taxa, if desired. The gene ids in test_ids.yaml will be used to subset the data when testing. All entries in the gene_history file are added as deprecated classes, and linked to the current gene id, with "replaced_by" relationships. Since we do not know much about the specific link in the gene2pubmed; we simply create a "mentions" relationship. """ # NCBIFTP = ftp://ftp.ncbi.nih.gov/gene/DATA/ (n.b. ftp v.s http below) files = { 'gene_info': { 'file': 'gene_info.gz', 'url': 'http://ftp.ncbi.nih.gov/gene/DATA/gene_info.gz', 'columns': [ 'tax_id', 'GeneID', 'Symbol', 'LocusTag', 'Synonyms', 'dbXrefs', 'chromosome', 'map_location', 'description', 'type_of_gene', 'Symbol_from_nomenclature_authority', 'Full_name_from_nomenclature_authority', 'Nomenclature_status', 'Other_designations', 'Modification_date', 'Feature_type', ] }, 'gene_history': { 'file': 'gene_history.gz', 'url': 'http://ftp.ncbi.nih.gov/gene/DATA/gene_history.gz', 'columns': [ 'tax_id', 'GeneID', 'Discontinued_GeneID', 'Discontinued_Symbol', 'Discontinue_Date', ] }, 'gene2pubmed': { 'file': 'gene2pubmed.gz', 'url': 'http://ftp.ncbi.nih.gov/gene/DATA/gene2pubmed.gz', 'columns': [ 'tax_id', 'GeneID', 'PubMed_ID', ] }, 'gene_group': { 'file': 'gene_group.gz', 'url': 'http://ftp.ncbi.nih.gov/gene/DATA/gene_group.gz', 'columns': [ 'tax_id', 'GeneID', 'relationship', 'Other_tax_id', 'Other_GeneID', ] } } resources = { 'clique_leader': '../../resources/clique_leader.yaml' } informal_species = { 'NCBITaxon:9913': 'cattle', 'NCBITaxon:9031': 'chicken', 'NCBITaxon:9823': 'pig', 'NCBITaxon:9940': 'sheep', 'NCBITaxon:9796': 'horse', 'NCBITaxon:8022': 'rainbow_trout', } def _get_gene_info(self, limit): """ Currently loops through the gene_info file and creates the genes as classes, typed with SO. It will add their label, any alternate labels as synonyms, alternate ids as equivlaent classes. HPRDs get added as protein products. The chromosome and chr band get added as blank node regions, and the gene is faldo:located on the chr band. 
:param limit: :return: """ src_key = 'gene_info' if self.test_mode: graph = self.testgraph else: graph = self.graph geno = Genotype(graph) model = Model(graph) # not unzipping the file LOG.info("Processing 'Gene Info' records") line_counter = 0 gene_info = '/'.join((self.rawdir, self.files[src_key]['file'])) LOG.info("FILE: %s", gene_info) LOG.info('Add taxa and genome classes for those in our filter') band_regex = re.compile(r'[0-9A-Z]+[pq](\d+)?(\.\d+)?$') for tax_num in self.tax_ids: tax_curie = ':'.join(('NCBITaxon', tax_num)) # tax label can get added elsewhere geno.addGenome(tax_curie, tax_num) # label added elsewhere model.addClassToGraph(tax_curie, None) col = self.files[src_key]['columns'] LOG.info('Begin reading & parsing') with gzip.open(gene_info, 'rb') as tsv: row = tsv.readline().decode().strip().split('\t') row[0] = row[0][1:] # strip comment char if not self.check_fileheader(col, row): pass for line in tsv: line = line.strip() line_counter += 1 if line[0] == '#': # skip comments continue row = line.decode().strip().split('\t') # ##set filter=None in init if you don't want to have a filter # if self.id_filter is not None: # if ((self.id_filter == 'taxids' and \ # (tax_num not in self.tax_ids)) # or (self.id_filter == 'geneids' and \ # (int(gene_num) not in self.gene_ids))): # continue # #### end filter tax_num = row[col.index('tax_id')] gene_num = row[col.index('GeneID')] symbol = row[col.index('Symbol')] # = row[col.index('LocusTag')] synonyms = row[col.index('Synonyms')].strip() dbxrefs = row[col.index('dbXrefs')].strip() chrom = row[col.index('chromosome')].strip() map_loc = row[col.index('map_location')].strip() desc = row[col.index('description')] gtype = row[col.index('type_of_gene')].strip() # = row[col.index('Symbol_from_nomenclature_authority')] name = row[col.index('Full_name_from_nomenclature_authority')] # = row[col.index('Nomenclature_status')] other_designations = row[col.index('Other_designations')].strip() # = row[col.index('Modification_date')} # = row[col.index('Feature_type')] if self.test_mode and int(gene_num) not in self.gene_ids: continue if not self.test_mode and tax_num not in self.tax_ids: continue tax_curie = ':'.join(('NCBITaxon', tax_num)) gene_id = ':'.join(('NCBIGene', gene_num)) gene_type_id = self.resolve(gtype) if symbol == 'NEWENTRY': label = None else: label = symbol # sequence feature, not a gene if gene_type_id == self.globaltt['sequence_feature']: self.class_or_indiv[gene_id] = 'I' else: self.class_or_indiv[gene_id] = 'C' if not self.test_mode and limit is not None and line_counter > limit: continue if self.class_or_indiv[gene_id] == 'C': model.addClassToGraph(gene_id, label, gene_type_id, desc) # NCBI will be the default leader (for non mods), # so we will not add the leader designation here. else: model.addIndividualToGraph(gene_id, label, gene_type_id, desc) # in this case, they aren't genes. 
# so we want someone else to be the leader if name != '-': model.addSynonym(gene_id, name) if synonyms != '-': for syn in synonyms.split('|'): syn = syn.strip() # unknown curies may occur here if syn[:12] == 'AnimalQTLdb:' and \ tax_curie in self.informal_species: syn = self.informal_species[tax_curie] + 'QTL:' + syn[12:] LOG.info('AnimalQTLdb: CHANGED to: %s', syn) model.addSynonym( gene_id, syn, model.globaltt['has_related_synonym']) if other_designations != '-': for syn in other_designations.split('|'): model.addSynonym( gene_id, syn.strip(), model.globaltt['has_related_synonym']) if dbxrefs != '-': self._add_gene_equivalencies(dbxrefs, gene_id, tax_curie) # edge cases of id | symbol | chr | map_loc: # 263 AMD1P2 X|Y with Xq28 and Yq12 # 438 ASMT X|Y with Xp22.3 or Yp11.3 # in PAR # no idea why there's two bands listed - possibly 2 assemblies # 419 ART3 4 with 4q21.1|4p15.1-p14 # 28227 PPP2R3B X|Y Xp22.33; Yp11.3 # in PAR # this is of "unknown" type == susceptibility # 619538 OMS 10|19|3 10q26.3;19q13.42-q13.43;3p25.3 # unlocated scaffold # 101928066 LOC101928066 1|Un -\ # mouse --> 2C3 # 11435 Chrna1 2 2 C3|2 43.76 cM # mouse --> 11B1.1 # 11548 Adra1b 11 11 B1.1|11 25.81 cM # 11717 Ampd3 7 7 57.85 cM|7 E2-E3 # mouse # 14421 B4galnt1 10 10 D3|10 74.5 cM # mouse # 323212 wu:fb92e12 19|20 - # fish # 323368 ints10 6|18 - # fish # 323666 wu:fc06e02 11|23 - # fish # feel that the chr placement can't be trusted in this table # when there is > 1 listed # with the exception of human X|Y, # we will only take those that align to one chr # FIXME remove the chr mapping below # when we pull in the genomic coords if chrom != '-' and chrom != '': if re.search(r'\|', chrom) and chrom not in ['X|Y', 'X; Y']: # means that there's uncertainty in the mapping. # so skip it # TODO we'll need to figure out how to deal with # >1 loc mapping LOG.info( '%s is non-uniquely mapped to %s. Skipping for now.', gene_id, chrom) continue # X|Y Xp22.33;Yp11.3 # if(not re.match( # r'(\d+|(MT)|[XY]|(Un)$',str(chr).strip())): # print('odd chr=',str(chr)) if chrom == 'X; Y': chrom = 'X|Y' # rewrite the PAR regions for processing # do this in a loop to allow PAR regions like X|Y for chromosome in re.split(r'\|', chrom): # assume that the chromosome label is added elsewhere geno.addChromosomeClass(chromosome, tax_curie, None) mychrom = makeChromID(chromosome, tax_num, 'CHR') # temporarily use taxnum for the disambiguating label mychrom_syn = makeChromLabel(chromosome, tax_num) model.addSynonym(mychrom, mychrom_syn) band_match = re.match(band_regex, map_loc) if band_match is not None and len(band_match.groups()) > 0: # if tax_num != '9606': # continue # this matches the regular kind of chrs, # so make that kind of band # not sure why this matches? 
# chrX|Y or 10090chr12|Un" # TODO we probably need a different regex # per organism # the maploc_id already has the numeric chromosome # in it, strip it first bid = re.sub(r'^' + chromosome, '', map_loc) # the generic location (no coordinates) maploc_id = makeChromID(chromosome + bid, tax_num, 'CHR') # print(map_loc,'-->',bid,'-->',maploc_id) # Assume it's type will be added elsewhere band = Feature(graph, maploc_id, None, None) band.addFeatureToGraph() # add the band as the containing feature graph.addTriple( gene_id, self.globaltt['is subsequence of'], maploc_id) else: # TODO handle these cases: examples are: # 15q11-q22,Xp21.2-p11.23,15q22-qter,10q11.1-q24, # 12p13.3-p13.2|12p13-p12,1p13.3|1p21.3-p13.1, # 12cen-q21,22q13.3|22q13.3 LOG.debug( 'not regular band pattern for %s: %s', gene_id, map_loc) # add the gene as a subsequence of the chromosome graph.addTriple( gene_id, self.globaltt['is subsequence of'], mychrom) geno.addTaxon(tax_curie, gene_id) def _add_gene_equivalencies(self, dbxrefs, gene_id, taxon): """ Add equivalentClass and sameAs relationships Uses external resource map located in /resources/clique_leader.yaml to determine if an NCBITaxon ID space is a clique leader """ clique_map = self.open_and_parse_yaml(self.resources['clique_leader']) if self.test_mode: graph = self.testgraph else: graph = self.graph model = Model(graph) filter_out = ['Vega', 'IMGT/GENE-DB', 'Araport', ''] # deal with the dbxrefs # MIM:614444|HGNC:HGNC:16851|Ensembl:ENSG00000136828|HPRD:11479|Vega:OTTHUMG00000020696 for dbxref in dbxrefs.strip().split('|'): prefix = ':'.join(dbxref.split(':')[:-1]).strip() # restore nonterminal ':' if prefix in self.localtt: prefix = self.localtt[prefix] # skip some of these for now based on curie prefix if prefix in filter_out: continue if prefix == 'AnimalQTLdb' and taxon in self.informal_species: prefix = self.informal_species[taxon] + 'QTL' dbxref_curie = ':'.join((prefix, dbxref.split(':')[-1])) if dbxref_curie is not None: if prefix == 'HPRD': # proteins are not == genes. model.addTriple( gene_id, self.globaltt['has gene product'], dbxref_curie) continue if prefix == 'ENSEMBL': model.addXref(gene_id, dbxref_curie) if prefix == 'OMIM': omim_num = dbxref_curie[5:] if omim_num in self.omim_replaced: repl = self.omim_replaced[omim_num] for omim in repl: if omim in self.omim_type and \ self.omim_type[omim] == self.globaltt['gene']: dbxref_curie = 'OMIM:' + omim model.addXref(gene_id, dbxref_curie) omim_num = omim # last wins elif omim_num in self.omim_type and\ self.omim_type[omim_num] == self.globaltt['gene']: model.addXref(gene_id, dbxref_curie) else: continue # no equivilance between ncbigene and omin-nongene # designate clique leaders # (perhaps premature as this ingest can't know what else exists) try: if self.class_or_indiv.get(gene_id) == 'C': model.addEquivalentClass(gene_id, dbxref_curie) if taxon in clique_map: if clique_map[taxon] == prefix: model.makeLeader(dbxref_curie) elif clique_map[taxon] == gene_id.split(':')[0]: model.makeLeader(gene_id) else: model.addSameIndividual(gene_id, dbxref_curie) except AssertionError as err: LOG.warning("Error parsing %s: %s", gene_id, err) def _get_gene_history(self, limit): """ Loops through the gene_history file and adds the old gene ids as deprecated classes, where the new gene id is the replacement for it. The old gene symbol is added as a synonym to the gene. 
:param limit: :return: """ src_key = 'gene_history' if self.test_mode: graph = self.testgraph else: graph = self.graph model = Model(graph) LOG.info("Processing Gene records") line_counter = 0 myfile = '/'.join((self.rawdir, self.files[src_key]['file'])) LOG.info("FILE: %s", myfile) col = self.files[src_key]['columns'] with gzip.open(myfile, 'rb') as tsv: row = tsv.readline().decode().strip().split('\t') row[0] = row[0][1:] # strip comment if not self.check_fileheader(col, row): pass for line in tsv: # skip comments row = line.decode().strip().split('\t') if row[0][0] == '#': continue tax_num = row[col.index('tax_id')].strip() gene_num = row[col.index('GeneID')].strip() discontinued_num = row[col.index('Discontinued_GeneID')].strip() discontinued_symbol = row[col.index('Discontinued_Symbol')].strip() # discontinued_date = row[col.index('Discontinue_Date')] # set filter=None in init if you don't want to have a filter # if self.id_filter is not None: # if ((self.id_filter == 'taxids' and \ # (int(tax_num) not in self.tax_ids)) # or (self.id_filter == 'geneids' and \ # (int(gene_num) not in self.gene_ids))): # continue # end filter if gene_num == '-' or discontinued_num == '-': continue if self.test_mode and gene_num not in self.gene_ids: continue if not self.test_mode and tax_num not in self.tax_ids: continue line_counter += 1 gene_id = ':'.join(('NCBIGene', gene_num)) discontinued_gene_id = ':'.join(('NCBIGene', discontinued_num)) # add the two genes if self.class_or_indiv.get(gene_id) == 'C': model.addClassToGraph(gene_id, None) model.addClassToGraph(discontinued_gene_id, discontinued_symbol) # add the new gene id to replace the old gene id model.addDeprecatedClass(discontinued_gene_id, [gene_id]) else: model.addIndividualToGraph(gene_id, None) model.addIndividualToGraph( discontinued_gene_id, discontinued_symbol) model.addDeprecatedIndividual(discontinued_gene_id, [gene_id]) # also add the old symbol as a synonym of the new gene model.addSynonym(gene_id, discontinued_symbol) if not self.test_mode and (limit is not None and line_counter > limit): break def _get_gene2pubmed(self, limit): """ Loops through the gene2pubmed file and adds a simple triple to say that a given publication is_about a gene. Publications are added as NamedIndividuals. These are filtered on the taxon. 
:param limit: :return: """ src_key = 'gene2pubmed' if self.test_mode: graph = self.testgraph else: graph = self.graph model = Model(graph) LOG.info("Processing Gene records") line_counter = 0 myfile = '/'.join((self.rawdir, self.files[src_key]['file'])) LOG.info("FILE: %s", myfile) assoc_counter = 0 col = self.files[src_key]['columns'] with gzip.open(myfile, 'rb') as tsv: row = tsv.readline().decode().strip().split('\t') row[0] = row[0][1:] # strip comment if not self.check_fileheader(col, row): pass for line in tsv: line_counter += 1 # skip comments row = line.decode().strip().split('\t') if row[0][0] == '#': continue tax_num = row[col.index('tax_id')].strip() gene_num = row[col.index('GeneID')].strip() pubmed_num = row[col.index('PubMed_ID')].strip() # ## set id_filter=None in init if you don't want to have a filter # if self.id_filter is not None: # if ((self.id_filter == 'taxids' and \ # (int(tax_num) not in self.tax_ids)) # or (self.id_filter == 'geneids' and \ # (int(gene_num) not in self.gene_ids))): # continue # #### end filter if self.test_mode and int(gene_num) not in self.gene_ids: continue if not self.test_mode and tax_num not in self.tax_ids: continue if gene_num == '-' or pubmed_num == '-': continue gene_id = ':'.join(('NCBIGene', gene_num)) pubmed_id = ':'.join(('PMID', pubmed_num)) if self.class_or_indiv.get(gene_id) == 'C': model.addClassToGraph(gene_id, None) else: model.addIndividualToGraph(gene_id, None) # add the publication as a NamedIndividual # add type publication model.addIndividualToGraph(pubmed_id, None, None) reference = Reference( graph, pubmed_id, self.globaltt['journal article']) reference.addRefToGraph() graph.addTriple( pubmed_id, self.globaltt['is_about'], gene_id) assoc_counter += 1 if not self.test_mode and limit is not None and line_counter > limit: break LOG.info("Processed %d pub-gene associations", assoc_counter) def add_orthologs_by_gene_group(self, graph, gene_ids): """ This will get orthologies between human and other vertebrate genomes based on the gene_group annotation pipeline from NCBI. More information 9can be learned here: http://www.ncbi.nlm.nih.gov/news/03-13-2014-gene-provides-orthologs-regions/ The method for associations is described in [PMCID:3882889](http://www.ncbi.nlm.nih.gov/pmc/articles/PMC3882889/) == [PMID:24063302](http://www.ncbi.nlm.nih.gov/pubmed/24063302/). Because these are only between human and vertebrate genomes, they will certainly miss out on very distant orthologies, and should not be considered complete. We do not run this within the NCBI parser itself; rather it is a convenience function for others parsers to call. :param graph: :param gene_ids: Gene ids to fetch the orthology :return: """ src_key = 'gene_group' LOG.info("getting gene groups") src_file = '/'.join((self.rawdir, self.files[src_key]['file'])) found_counter = 0 # because many of the orthologous groups are grouped by human gene, # we need to do this by generating two-way hash # group_id => orthologs # ortholog id => group # this will be the fastest approach, though not memory-efficient. 
geno = Genotype(graph) model = Model(graph) group_to_orthology = {} gene_to_group = {} gene_to_taxon = {} col = self.files[src_key]['columns'] with gzip.open(src_file, 'rb') as tsv: row = tsv.readline().decode().strip().split('\t') row[0] = row[0][1:] # strip octothorp if not self.check_fileheader(col, row): pass for row in tsv: row = row.decode().strip().split('\t') tax_a = row[col.index('tax_id')] gene_a = row[col.index('GeneID')] rel = row[col.index('relationship')] tax_b = row[col.index('Other_tax_id')] gene_b = row[col.index('Other_GeneID')] if rel != 'Ortholog': continue if gene_a not in group_to_orthology: group_to_orthology[gene_a] = set() group_to_orthology[gene_a].add(gene_b) if gene_b not in gene_to_group: gene_to_group[gene_b] = set() gene_to_group[gene_b].add(gene_a) gene_to_taxon[gene_a] = tax_a gene_to_taxon[gene_b] = tax_b # also add the group lead as a member of the group group_to_orthology[gene_a].add(gene_a) # end loop through gene_group file LOG.debug("Finished hashing gene groups") LOG.debug("Making orthology associations") for gid in gene_ids: gene_num = re.sub(r'NCBIGene:', '', gid) group_nums = gene_to_group.get(gene_num) if group_nums is not None: for group_num in group_nums: orthologs = group_to_orthology.get(group_num) if orthologs is not None: for orth in orthologs: oid = 'NCBIGene:' + str(orth) model.addClassToGraph(oid, None, self.globaltt['gene']) otaxid = 'NCBITaxon:' + str(gene_to_taxon[orth]) geno.addTaxon(otaxid, oid) assoc = OrthologyAssoc(graph, self.name, gid, oid) assoc.add_source('PMID:24063302') assoc.add_association_to_graph() # todo get gene label for orthologs - # this could get expensive found_counter += 1 # finish loop through annotated genes LOG.info( "Made %d orthology relationships for %d genes", found_counter, len(gene_ids))
[ 11748, 302, 198, 11748, 308, 13344, 198, 11748, 18931, 198, 198, 6738, 2566, 2848, 13, 82, 2203, 13, 2662, 3955, 7416, 1330, 32468, 3955, 7416, 198, 6738, 2566, 2848, 13, 27530, 13, 17633, 1330, 9104, 198, 6738, 2566, 2848, 13, 27530, ...
1.828639
15,622
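The band-matching logic in _get_gene_info above hinges on band_regex; a standalone check of that pattern against a few map locations mentioned in the record's comments (the inputs are illustrative, not rows from the actual gene_info file):

import re

# Copied from _get_gene_info: matches a regular cytogenetic band like 'Xq28'.
band_regex = re.compile(r'[0-9A-Z]+[pq](\d+)?(\.\d+)?$')

for map_loc in ['Xq28', '4q21.1', '15q11-q22', '10|19|3']:
    match = re.match(band_regex, map_loc)
    print(map_loc, '->', 'band' if match else 'no regular band pattern')
# Only 'Xq28' and '4q21.1' match; ranged or multi-chromosome locations fall
# through to the debug branch, as the source code's comments describe.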
#!/usr/bin/env python
import numpy as np


world = World()
[ 2, 48443, 7220, 14, 8800, 14, 24330, 21015, 198, 11748, 299, 32152, 355, 45941, 628, 628, 198, 6894, 796, 2159, 3419, 198 ]
2.818182
22
#!/usr/bin/env python """ _XMLParser_ Read the raw XML output from the cmsRun executable. """ from __future__ import division, print_function import logging import re from WMCore.Algorithms.ParseXMLFile import coroutine, xmlFileToNode from WMCore.DataStructs.Run import Run from WMCore.FwkJobReport import Report def reportBuilder(nodeStruct, report, target): """ _reportBuilder_ Driver for coroutine pipe for building reports from the Node structure. """ for node in nodeStruct.children: target.send((report, node)) @coroutine def reportDispatcher(targets): """ _reportDispatcher_ Top level routine for dispatching the parts of the job report to the handlers. """ while True: report, node = (yield) if node.name != "FrameworkJobReport": print("Not Handling: ", node.name) # TODO: throw continue for subnode in node.children: if subnode.name == "File": targets['File'].send((report, subnode)) elif subnode.name == "InputFile": targets['InputFile'].send((report, subnode)) elif subnode.name == "AnalysisFile": targets['AnalysisFile'].send((report, subnode)) elif subnode.name == "PerformanceReport": targets['PerformanceReport'].send((report, subnode)) elif subnode.name == "FrameworkError": targets['FrameworkError'].send((report, subnode)) elif subnode.name == "SkippedFile": targets['SkippedFile'].send((report, subnode)) elif subnode.name == "FallbackAttempt": targets['FallbackAttempt'].send((report, subnode)) elif subnode.name == "SkippedEvent": targets['SkippedEvent'].send((report, subnode)) else: setattr(report.report.parameters, subnode.name, subnode.text) @coroutine def fileHandler(targets): """ _fileHandler_ coroutine to create files and handle sub data in the appropriate dispatchers """ while True: report, node = (yield) moduleName = None moduleNode = [x for x in node.children if x.name == "ModuleLabel"][0] moduleName = moduleNode.text fileRef = report.addOutputFile(moduleName) fileAttrs = {} for subnode in node.children: if subnode.name == "Inputs": targets['Inputs'].send((fileRef, subnode)) elif subnode.name == "Runs": targets['Runs'].send((fileRef, subnode)) elif subnode.name == "Branches": targets['Branches'].send((fileRef, subnode)) else: fileAttrs[subnode.name] = subnode.text Report.addAttributesToFile(fileRef, lfn=fileAttrs["LFN"], pfn=fileAttrs["PFN"], catalog=fileAttrs["Catalog"], module_label=fileAttrs["ModuleLabel"], guid=fileAttrs["GUID"], output_module_class=fileAttrs["OutputModuleClass"], events=int(fileAttrs["TotalEvents"]), branch_hash=fileAttrs["BranchHash"]) @coroutine def inputFileHandler(targets): """ _inputFileHandler_ coroutine to create input files in the report and dispatch sub data down the pipeline """ while True: report, node = (yield) moduleName = None moduleNode = [x for x in node.children if x.name == "ModuleLabel"][0] moduleName = moduleNode.text fileRef = report.addInputFile(moduleName) fileAttrs = {} for subnode in node.children: if subnode.name == "Runs": targets['Runs'].send((fileRef, subnode)) elif subnode.name == "Branches": targets['Branches'].send((fileRef, subnode)) else: fileAttrs[subnode.name] = subnode.text Report.addAttributesToFile(fileRef, lfn=fileAttrs["LFN"], pfn=fileAttrs["PFN"], catalog=fileAttrs["Catalog"], module_label=fileAttrs["ModuleLabel"], guid=fileAttrs["GUID"], input_type=fileAttrs["InputType"], input_source_class=fileAttrs["InputSourceClass"], events=int(fileAttrs["EventsRead"])) @coroutine def analysisFileHandler(targets): """ _analysisFileHandler_ handle analysis file entries in the report """ while True: report, node = (yield) filename = 
None attrs = {} for subnode in node.children: if subnode.name == "FileName": filename = subnode.text else: attrs[subnode.name] = subnode.attrs.get('Value', None) report.addAnalysisFile(filename, **attrs) @coroutine def errorHandler(): """ _errorHandler_ Handle FrameworkError reports. """ while True: report, node = (yield) excepcode = node.attrs.get("ExitStatus", 8001) exceptype = node.attrs.get("Type", "CMSException") # There should be atmost one step in the report at this point in time. if len(report.listSteps()) == 0: report.addError("unknownStep", excepcode, exceptype, node.text) else: report.addError(report.listSteps()[0], excepcode, exceptype, node.text) @coroutine @coroutine @coroutine @coroutine def runHandler(): """ _runHandler_ Sink to add run information to a file. Given the following XML: <Runs> <Run ID="122023"> <LumiSection NEvents="100" ID="215"/> <LumiSection NEvents="100" ID="216"/> </Run> <Run ID="122024"> <LumiSection ID="1"/> <LumiSection ID="2"/> </Run> </Runs> Create a WMCore.DataStructs.Run object for each run and call the addRunInfoToFile() function to add the run information to the file section. """ while True: fileSection, node = (yield) for subnode in node.children: if subnode.name == "Run": runId = subnode.attrs.get("ID", None) if runId is None: continue lumis = [] for lumi in subnode.children: if "ID" in lumi.attrs: lumiNumber = int(lumi.attrs['ID']) nEvents = lumi.attrs.get("NEvents", None) if nEvents is not None: try: nEvents = int(nEvents) except ValueError: nEvents = None lumis.append((lumiNumber, nEvents)) runInfo = Run(runNumber=runId) runInfo.extendLumis(lumis) Report.addRunInfoToFile(fileSection, runInfo) @coroutine def branchHandler(): """ _branchHandler_ Sink to pack branch information into a file. Given the following XML: <Branches> <Branch>Branch Name 1</Branch> <Branch>Branch Name 2</Branch> </Branches> Create a list containing all the branch names as use the addBranchNamesToFile method to add them to the fileSection. Nulled out, we dont need these anyways... """ while True: fileSection, node = (yield) pass # branches = [ subnode.text for subnode in node.children # if subnode.name == "Branch" ] # Report.addBranchNamesToFile(fileSection, branches) @coroutine def inputAssocHandler(): """ _inputAssocHandler_ Sink to handle output:input association information. Given the following XML: <Input> <LFN>/path/to/some/lfn.root</LFN> <PFN>/some/pfn/info/path/to/some/lfn.root</PFN> </Input> Extract the LFN and call the addInputToFile() function to associate input to output in the FWJR. 
""" while True: fileSection, node = (yield) for inputnode in node.children: data = {} for subnode in inputnode.children: data.__setitem__(subnode.name, subnode.text) Report.addInputToFile(fileSection, data["LFN"], data['PFN']) @coroutine def perfRepHandler(targets): """ _perfRepHandler_ handle performance report subsections """ while True: report, node = (yield) perfRep = report.report.performance perfRep.section_("summaries") perfRep.section_("cpu") perfRep.section_("memory") perfRep.section_("storage") for subnode in node.children: metric = subnode.attrs.get('Metric', None) if metric == "Timing": targets['CPU'].send((perfRep.cpu, subnode)) elif metric == "SystemMemory" or metric == "ApplicationMemory": targets['Memory'].send((perfRep.memory, subnode)) elif metric == "StorageStatistics": targets['Storage'].send((perfRep.storage, subnode)) else: targets['PerformanceSummary'].send((perfRep.summaries, subnode)) @coroutine def perfSummaryHandler(): """ _perfSummaryHandler_ Sink to handle performance summaries """ while True: report, node = (yield) summary = node.attrs.get('Metric', None) if summary is None: continue # Add performance section if it doesn't exist if not hasattr(report, summary): report.section_(summary) summRep = getattr(report, summary) for subnode in node.children: setattr(summRep, subnode.attrs['Name'], subnode.attrs['Value']) @coroutine def perfCPUHandler(): """ _perfCPUHandler_ sink that packs CPU reports into the job report """ while True: report, node = (yield) for subnode in node.children: setattr(report, subnode.attrs['Name'], subnode.attrs['Value']) @coroutine def perfMemHandler(): """ _perfMemHandler_ Pack memory performance reports into the report """ # Make a list of performance info we actually want goodStatistics = ['PeakValueRss', 'PeakValueVsize', 'LargestRssEvent-h-PSS'] while True: report, node = (yield) for prop in node.children: if prop.attrs['Name'] in goodStatistics: if prop.attrs['Name'] == 'LargestRssEvent-h-PSS': # need to remove - chars from name as it buggers up downtstream code setattr(report, 'PeakValuePss', prop.attrs['Value']) else: setattr(report, prop.attrs['Name'], prop.attrs['Value']) @coroutine def perfStoreHandler(): """ _perfStoreHandler_ Handle the information from the Storage report """ # Make a list of performance info we actually want goodStatistics = ['Timing-([a-z]{4})-read(v?)-totalMegabytes', 'Timing-([a-z]{4})-write(v?)-totalMegabytes', 'Timing-([a-z]{4})-read(v?)-totalMsecs', 'Timing-([a-z]{4})-read(v?)-numOperations', 'Timing-([a-z]{4})-write(v?)-numOperations', 'Timing-([a-z]{4})-read(v?)-maxMsecs', 'Timing-tstoragefile-readActual-numOperations', 'Timing-tstoragefile-read-numOperations', 'Timing-tstoragefile-readViaCache-numSuccessfulOperations', 'Timing-tstoragefile-read-numOperations', 'Timing-tstoragefile-read-totalMsecs', 'Timing-tstoragefile-write-totalMsecs', ] while True: report, node = (yield) logging.debug("Preparing to parse storage statistics") storageValues = {} for prop in node.children: name = prop.attrs['Name'] for statName in goodStatistics: if checkRegEx(statName, name): storageValues[name] = float(prop.attrs['Value']) # setattr(report, name, prop.attrs['Value']) writeMethod = None readMethod = None # Figure out read method for key in storageValues.keys(): if checkRegEx('Timing-([a-z]{4})-read(v?)-numOperations', key): if storageValues[key] != 0.0: # This is the reader readMethod = key.split('-')[1] break # Figure out the write method for key in storageValues.keys(): if 
checkRegEx('Timing-([a-z]{4})-write(v?)-numOperations', key): if storageValues[key] != 0.0: # This is the reader writeMethod = key.split('-')[1] break # Then assemble the information # Calculate the values logging.debug("ReadMethod: %s", readMethod) logging.debug("WriteMethod: %s", writeMethod) try: readTotalMB = storageValues.get("Timing-%s-read-totalMegabytes" % readMethod, 0) \ + storageValues.get("Timing-%s-readv-totalMegabytes" % readMethod, 0) readMSecs = storageValues.get("Timing-%s-read-totalMsecs" % readMethod, 0) \ + storageValues.get("Timing-%s-readv-totalMsecs" % readMethod, 0) totalReads = storageValues.get("Timing-%s-read-numOperations" % readMethod, 0) \ + storageValues.get("Timing-%s-readv-numOperations" % readMethod, 0) readMaxMSec = max(storageValues.get("Timing-%s-read-maxMsecs" % readMethod, 0), storageValues.get("Timing-%s-readv-maxMsecs" % readMethod, 0)) readPercOps = storageValues.get("Timing-tstoragefile-readActual-numOperations", 0) / \ storageValues.get("Timing-tstoragefile-read-numOperations", 0) readCachOps = storageValues.get("Timing-tstoragefile-readViaCache-numSuccessfulOperations", 0) / \ storageValues.get("Timing-tstoragefile-read-numOperations", 0) readTotalT = storageValues.get("Timing-tstoragefile-read-totalMSecs", 0) / 1000 readNOps = storageValues.get("Timing-tstoragefile-read-numOperations", 0) writeTime = storageValues.get("Timing-tstoragefile-write-totalMsecs", 0) / 1000 writeTotMB = storageValues.get("Timing-%s-write-totalMegabytes" % writeMethod, 0) \ + storageValues.get("Timing-%s-writev-totalMegabytes" % writeMethod, 0) if readMSecs > 0: readMBSec = readTotalMB / readMSecs * 1000 else: readMBSec = 0 if totalReads > 0: readAveragekB = 1024 * readTotalMB / totalReads else: readAveragekB = 0 # Attach them to the report setattr(report, 'readTotalMB', readTotalMB) setattr(report, 'readMBSec', readMBSec) setattr(report, 'readAveragekB', readAveragekB) setattr(report, 'readMaxMSec', readMaxMSec) setattr(report, 'readPercentageOps', readPercOps) setattr(report, 'readTotalSecs', readTotalT) setattr(report, 'readNumOps', readNOps) setattr(report, 'writeTotalSecs', writeTime) setattr(report, 'writeTotalMB', writeTotMB) setattr(report, 'readCachePercentageOps', readCachOps) except ZeroDivisionError: logging.error("Tried to divide by zero doing storage statistics report parsing.") logging.error("Either you aren't reading and writing data, or you aren't reporting it.") logging.error("Not adding any storage performance info to report.") def xmlToJobReport(reportInstance, xmlFile): """ _xmlToJobReport_ parse the XML file and insert the information into the Report instance provided """ # read XML, build node structure node = xmlFileToNode(xmlFile) # // # // Set up coroutine pipeline # // fileDispatchers = { "Runs": runHandler(), "Branches": branchHandler(), "Inputs": inputAssocHandler(), } perfRepDispatchers = { "PerformanceSummary": perfSummaryHandler(), "CPU": perfCPUHandler(), "Memory": perfMemHandler(), "Storage": perfStoreHandler(), } dispatchers = { "File": fileHandler(fileDispatchers), "InputFile": inputFileHandler(fileDispatchers), "PerformanceReport": perfRepHandler(perfRepDispatchers), "AnalysisFile": analysisFileHandler(fileDispatchers), "FrameworkError": errorHandler(), "SkippedFile": skippedFileHandler(), "FallbackAttempt": fallbackAttemptHandler(), "SkippedEvent": skippedEventHandler(), } # // # // Feed pipeline with node structure and report result instance # // reportBuilder( node, reportInstance, reportDispatcher(dispatchers) ) return 
childrenMatching = lambda node, nname: [x for x in node.children if x.name == nname]
[ 2, 48443, 14629, 14, 8800, 14, 24330, 21015, 198, 37811, 198, 62, 55, 5805, 46677, 62, 198, 198, 5569, 262, 8246, 23735, 5072, 422, 262, 269, 907, 10987, 28883, 13, 198, 37811, 198, 6738, 11593, 37443, 834, 1330, 7297, 11, 3601, 62, ...
2.134033
8,192
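The report parser above builds its pipeline from generator coroutines primed by the coroutine decorator imported from WMCore.Algorithms.ParseXMLFile. A minimal sketch of that priming pattern, assuming the conventional implementation (the actual WMCore version may differ):

def coroutine(func):
    """Prime a generator-based coroutine so it is ready to .send() into."""
    def start(*args, **kwargs):
        gen = func(*args, **kwargs)
        next(gen)  # advance to the first yield so send() works immediately
        return gen
    return start


@coroutine
def printer():
    while True:
        item = (yield)
        print('got:', item)


sink = printer()
sink.send('hello')  # no explicit next() needed; the decorator primed it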
"""iRODS backend API for SODAR Django apps""" import logging import math import pytz import random import re import string from irods.api_number import api_number from irods.collection import iRODSCollection from irods.column import Criterion from irods.exception import CollectionDoesNotExist, CAT_NO_ROWS_FOUND from irods.message import TicketAdminRequest, iRODSMessage from irods.models import Collection, DataObject from irods.query import SpecificQuery from irods.session import iRODSSession from irods.ticket import Ticket from django.conf import settings from django.urls import reverse from django.utils import timezone from django.utils.http import urlencode from django.utils.text import slugify logger = logging.getLogger(__name__) # Local constants ACCEPTED_PATH_TYPES = [ 'Assay', 'LandingZone', 'Project', 'Investigation', 'Study', ] NAME_LIKE_OVERHEAD = 23 # Magic number for query overhead for name filtering NAME_LIKE_MAX_LEN = 2200 # Magic number for maximum length of name filters ENV_INT_PARAMS = [ 'irods_encryption_key_size', 'irods_encryption_num_hash_rounds', 'irods_encryption_salt_size', ] class IrodsAPI: """iRODS API to be used by Django apps""" #: iRODS session or None if not initialized irods = None class IrodsQueryException(Exception): """iRODS query exception""" # Internal functions ------------------------------------------------------- @classmethod def _get_datetime(cls, naive_dt): """ Return a printable datetime in the system timezone from a naive datetime object. """ dt = naive_dt.replace(tzinfo=pytz.timezone('GMT')) dt = dt.astimezone(timezone.get_default_timezone()) return dt.strftime('%Y-%m-%d %H:%M') @classmethod def _get_query_alias(cls): """Return a random iCAT SQL query alias""" return 'sodar_query_{}'.format( ''.join( random.SystemRandom().choice( string.ascii_lowercase + string.ascii_uppercase ) for _ in range(16) ) ) @classmethod def _sanitize_coll_path(cls, path): """ Return sanitized version of iRODS collection path. :param path: iRODS collection path (string) :return: String """ if path: if path[0] != '/': path = '/' + path if path[-1] == '/': path = path[:-1] return path def _send_request(self, api_id, *args): """ Temporary function for sending a raw API request using python-irodsclient. :param *args: Arguments for the request body :return: Response :raise: Exception if iRODS is not initialized """ if not self.irods: raise Exception('iRODS session not initialized') msg_body = TicketAdminRequest(*args) msg = iRODSMessage( 'RODS_API_REQ', msg=msg_body, int_info=api_number[api_id] ) with self.irods.pool.get_connection() as conn: conn.send(msg) response = conn.recv() return response # Helpers ------------------------------------------------------------------ @classmethod def get_sub_path(cls, obj, landing_zone=False, include_parent=True): """ Get the collection path for a study or assay under the sample data collection. 
:param obj: Study or Assay object :param landing_zone: Return dir for landing zone if True (bool) :param include_parent: Include parent dir if True (bool) :return: String :raise: TypeError if obj type is not correct :raise: NotImplementedError if get_display_name() is not found in obj """ ret = '' obj_class = obj.__class__.__name__ if obj_class not in ['Assay', 'Study']: raise TypeError('Object of type "{}" not supported') if landing_zone and not hasattr(obj, 'get_display_name'): raise NotImplementedError( 'Function get_display_name() not implemented' ) # If assay, add study first if obj_class == 'Assay' and include_parent: ret += _get_path(obj.study) + '/' ret += _get_path(obj) return ret @classmethod def get_path(cls, obj): """ Return the iRODS path for for a SODAR database object. :param obj: Django model object :return: String :raise: TypeError if obj is not of supported type :raise: ValueError if project is not found """ obj_class = obj.__class__.__name__ if obj_class not in ACCEPTED_PATH_TYPES: raise TypeError( 'Object of type "{}" not supported! Accepted models: {}'.format( obj_class, ', '.join(ACCEPTED_PATH_TYPES) ) ) if obj_class == 'Project': project = obj else: project = obj.get_project() if not project: raise ValueError('Project not found for given object') # Base path (project) rp = settings.IRODS_ROOT_PATH path = '/{zone}/projects/{root_prefix}{uuid_prefix}/{uuid}'.format( root_prefix=rp + '/' if rp else '', zone=settings.IRODS_ZONE, uuid_prefix=str(project.sodar_uuid)[:2], uuid=project.sodar_uuid, ) # Project if obj_class == 'Project': return path # Investigation (sample data root) elif obj_class == 'Investigation': path += '/{sample_dir}'.format( sample_dir=settings.IRODS_SAMPLE_COLL ) # Study (in sample data) elif obj_class == 'Study': path += '/{sample_dir}/{study}'.format( sample_dir=settings.IRODS_SAMPLE_COLL, study=cls.get_sub_path(obj), ) # Assay (in sample data) elif obj_class == 'Assay': path += '/{sample_dir}/{study_assay}'.format( sample_dir=settings.IRODS_SAMPLE_COLL, study_assay=cls.get_sub_path(obj), ) # LandingZone elif obj_class == 'LandingZone': path += ( '/{zone_coll}/{user}/{study_assay}/{zone_title}' '{zone_config}'.format( zone_coll=settings.IRODS_LANDING_ZONE_COLL, user=obj.user.username, study_assay=cls.get_sub_path(obj.assay, landing_zone=True), zone_title=obj.title, zone_config='_' + obj.configuration if obj.configuration else '', ) ) return path @classmethod def get_sample_path(cls, project): """ Return the iRODS path for project sample data. :param project: Project object :return: String :raise: ValueError if "project" is not a valid Project object """ if project.__class__.__name__ != 'Project': raise ValueError('Argument "project" is not a Project object') return cls.get_path(project) + '/' + settings.IRODS_SAMPLE_COLL @classmethod def get_root_path(cls): """Return the root path for SODAR data""" root_prefix = ( '/' + settings.IRODS_ROOT_PATH if settings.IRODS_ROOT_PATH else '' ) return '/{}{}'.format(settings.IRODS_ZONE, root_prefix) @classmethod def get_projects_path(cls): """Return the SODAR projects collection path""" return cls.get_root_path() + '/projects' @classmethod def get_uuid_from_path(cls, path, obj_type): """ Return project, study or assay UUID from iRODS path or None if not found. 
:param path: Full iRODS path (string) :param obj_type: Type of object ("project", "study" or "assay") :return: String or None :raise: ValueError if obj_type is not accepted """ root_prefix = ( settings.IRODS_ROOT_PATH + '/' if settings.IRODS_ROOT_PATH else '' ) path_regex = { 'project': '/{}/'.format(settings.IRODS_ZONE) + root_prefix + 'projects/[a-zA-Z0-9]{2}/(.+?)(?:/|$)', 'study': '/study_(.+?)(?:/|$)', 'assay': '/assay_(.+?)(?:/|$)', } obj_type = obj_type.lower() if obj_type not in path_regex.keys(): raise ValueError( 'Invalid argument for obj_type "{}"'.format(obj_type) ) s = re.search(path_regex[obj_type], cls._sanitize_coll_path(path)) if s: return s.group(1) # TODO: Add tests @classmethod def get_url( cls, view, project=None, path='', md5=False, colls=False, method='GET', absolute=False, request=None, ): """ Get the list or stats URL for an iRODS path. :param view: View of the URL ("stats" or "list") :param path: Full iRODS path (string) :param project: Project object or None :param md5: Include MD5 or not for a list view (boolean, default=False) :param colls: Include collections in list (boolean, default=False) :param method: Method for the function (string) :param absolute: Whether or not an absolute URI is required (boolean) :param request: Request object (required for building an absolute URI) :return: String :raise: ValueError if the view or method param is invalid """ if view not in ['list', 'stats']: raise ValueError('Invalid type "{}" for view'.format(view)) if method not in ['GET', 'POST']: raise ValueError('Invalid method "{}"'.format(method)) url_kwargs = {'project': str(project.sodar_uuid)} if project else None rev_url = reverse('irodsbackend:{}'.format(view), kwargs=url_kwargs) if method == 'GET': query_string = {'path': cls._sanitize_coll_path(path)} if view == 'list': query_string['md5'] = int(md5) query_string['colls'] = int(colls) rev_url += '?' + urlencode(query_string) if absolute and request: return request.build_absolute_uri(rev_url) return rev_url # iRODS Operations --------------------------------------------------------- def get_session(self): """ Return the iRODS session object for direct API access. :return: iRODSSession object (already initialized) """ return self.irods def get_info(self): """ Return iRODS server info. :return: Dict :raise: NetworkException if iRODS is unreachable :raise: CAT_INVALID_AUTHENTICATION if iRODS authentication is invalid :raise: irods_backend.IrodsQueryException for iRODS query errors """ return { 'server_ok': True, 'server_status': 'Available', 'server_host': self.irods.host, 'server_port': self.irods.port, 'server_zone': self.irods.zone, 'server_version': '.'.join( str(x) for x in self.irods.pool.get_connection().server_version ), } def get_object_stats(self, path): """ Return file count and total file size for all files within a path. 
:param path: Full path to iRODS collection :return: Dict """ try: coll = self.irods.collections.get(self._sanitize_coll_path(path)) except CollectionDoesNotExist: raise FileNotFoundError('iRODS collection not found') ret = {'file_count': 0, 'total_size': 0} sql = ( 'SELECT COUNT(data_id) as file_count, ' 'SUM(data_size) as total_size ' 'FROM (SELECT data_id, data_size FROM r_data_main ' 'JOIN r_coll_main USING (coll_id) ' 'WHERE (coll_name = \'{coll_path}\' ' 'OR coll_name LIKE \'{coll_path}/%\') ' 'AND data_name NOT LIKE \'%.md5\' ' 'GROUP BY data_id, data_size) AS sub_query'.format( coll_path=coll.path ) ) # logger.debug('Object stats query = "{}"'.format(sql)) query = self.get_query(sql) try: result = next(query.get_results()) ret['file_count'] = int(result[0]) if result[0] else 0 ret['total_size'] = int(result[1]) if result[1] else 0 except CAT_NO_ROWS_FOUND: pass except Exception as ex: logger.error( 'iRODS exception in get_object_stats(): {}; SQL = "{}"'.format( ex.__class__.__name__, sql ) ) finally: query.remove() return ret def collection_exists(self, path): """ Return True/False depending if the collection defined in path exists :param path: Full path to iRODS collection :return: Boolean """ return self.irods.collections.exists(self._sanitize_coll_path(path)) @classmethod def get_colls_recursively(cls, coll): """ Return all subcollections for a coll efficiently (without multiple queries). :param coll: Collection object :return: List """ query = coll.manager.sess.query(Collection).filter( Criterion('like', Collection.parent_name, coll.path + '%') ) return [iRODSCollection(coll.manager, row) for row in query] def get_objs_recursively(self, coll, md5=False, name_like=None, limit=None): """ Return objects below a coll recursively. Replacement for the non-scalable walk() function in the API. Also gets around the query length limitation in iRODS. :param coll: Collection object :param md5: if True, return .md5 files, otherwise anything but them :param name_like: Filtering of file names (string or list of strings) :param limit: Limit search to n rows (int) :return: List """ ret = [] md5_filter = 'LIKE' if md5 else 'NOT LIKE' path_lookup = [] q_count = 1 # HACK: Long queries cause a crash with iRODS so we have to split them if name_like and isinstance(name_like, list) and len(name_like) > 1: f_len = sum([len(x) + NAME_LIKE_OVERHEAD for x in name_like]) q_count = math.ceil(f_len / NAME_LIKE_MAX_LEN) q_len = math.ceil(len(name_like) / q_count) q_idx = 0 for i in range(q_count): _do_query(name_like[q_idx : q_idx + q_len]) q_idx = q_idx + q_len else: # Single query _do_query(name_like) return sorted(ret, key=lambda x: x['path']) def get_objects( self, path, check_md5=False, include_colls=False, name_like=None, limit=None, ): """ Return an iRODS object list. 
:param path: Full path to iRODS collection :param check_md5: Whether to add md5 checksum file info (bool) :param include_colls: Include collections (bool) :param name_like: Filtering of file names (string or list of strings) :param limit: Limit search to n rows (int) :return: Dict :raise: FileNotFoundError if collection is not found """ try: coll = self.irods.collections.get(self._sanitize_coll_path(path)) except CollectionDoesNotExist: raise FileNotFoundError('iRODS collection not found') if name_like: if not isinstance(name_like, list): name_like = [name_like] name_like = [n.replace('_', '\_') for n in name_like] # noqa ret = {'irods_data': []} md5_paths = None data_objs = self.get_objs_recursively( coll, name_like=name_like, limit=limit ) if data_objs and check_md5: md5_paths = [ o['path'] for o in self.get_objs_recursively( coll, md5=True, name_like=name_like ) ] for o in data_objs: if check_md5: if o['path'] + '.md5' in md5_paths: o['md5_file'] = True else: o['md5_file'] = False ret['irods_data'].append(o) # Add collections if enabled # TODO: Combine into a single query? if include_colls: colls = self.get_colls_recursively(coll) for c in colls: ret['irods_data'].append( { 'name': c.name, 'type': 'coll', 'path': c.path, } ) ret['irods_data'] = sorted( ret['irods_data'], key=lambda x: x['path'] ) return ret def get_query(self, sql, columns=None, register=True): """ Return a SpecificQuery object with a standard query alias. If registered, should be removed with remove() after use. :param sql: SQL (string) :param columns: List of columns to return (optional) :param register: Register query before returning (bool, default=True) :return: SpecificQuery """ query = SpecificQuery(self.irods, sql, self._get_query_alias(), columns) if register: query.register() return query # TODO: Fork python-irodsclient and implement ticket functionality there def issue_ticket(self, mode, path, ticket_str=None, expiry_date=None): """ Issue ticket for a specific iRODS collection. :param mode: "read" or "write" :param path: iRODS path for creating the ticket :param ticket_str: String to use as the ticket :param expiry_date: Expiry date (DateTime object, optional) :return: irods client Ticket object """ ticket = Ticket(self.irods, ticket=ticket_str) ticket.issue(mode, self._sanitize_coll_path(path)) # Remove default file writing limitation self._send_request( 'TICKET_ADMIN_AN', 'mod', ticket._ticket, 'write-file', '0' ) # Set expiration if expiry_date: exp_str = expiry_date.strftime('%Y-%m-%d.%H:%M:%S') self._send_request( 'TICKET_ADMIN_AN', 'mod', ticket._ticket, 'expire', exp_str ) return ticket def delete_ticket(self, ticket_str): """ Delete ticket. :param ticket_str: String """ try: self._send_request('TICKET_ADMIN_AN', 'delete', ticket_str) except Exception: raise Exception('Failed to delete iRODS ticket %s' % ticket_str)
[ 37811, 72, 49, 3727, 50, 30203, 7824, 329, 311, 3727, 1503, 37770, 6725, 37811, 198, 198, 11748, 18931, 198, 11748, 10688, 198, 11748, 12972, 22877, 198, 11748, 4738, 198, 11748, 302, 198, 11748, 4731, 198, 198, 6738, 1312, 305, 9310, 1...
2.120844
9,053
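get_uuid_from_path() in the record extracts UUIDs with per-type regexes; a standalone demo of the 'study' pattern, with a made-up zone and UUID (both assumptions, not real SODAR paths):

import re

# The 'study' pattern from IrodsAPI.get_uuid_from_path.
study_regex = r'/study_(.+?)(?:/|$)'
path = ('/sodarZone/projects/aa/aaaa-bbbb/sample_data/'
        'study_11111111-2222-3333-4444-555555555555/assay_x')
match = re.search(study_regex, path)
print(match.group(1) if match else None)
# -> 11111111-2222-3333-4444-555555555555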
""" Happy numbers solution, code eval. https://www.codeeval.com/open_challenges/39/ A happy number is defined by the following process. Starting with any positive integer, replace the number by the sum of the squares of its digits, and repeat the process until the number equals 1 (where it will stay), or it loops endlessly in a cycle which does not include 1. Those numbers for which this process ends in 1 are happy numbers, while those that do not end in 1 are unhappy numbers. INPUT SAMPLE: The first argument is the pathname to a file which contains test data, one test case per line. Each line contains a positive integer. E.g. 1 7 22 OUTPUT SAMPLE: If the number is a happy number, print out 1. If not, print out 0. E.g 1 1 0 For the curious, here's why 7 is a happy number: 7->49->97->130->10->1. Here's why 22 is NOT a happy number: 22->8->64->52->29->85->89->145->42->20->4->16->37->58->89 ... """ import sys if __name__ == '__main__': input_file = sys.argv[1] main(input_file)
[ 37811, 198, 25082, 3146, 4610, 11, 2438, 5418, 13, 198, 5450, 1378, 2503, 13, 8189, 18206, 13, 785, 14, 9654, 62, 36747, 34120, 14, 2670, 14, 198, 32, 3772, 1271, 318, 5447, 416, 262, 1708, 1429, 13, 17962, 351, 597, 3967, 18253, 11...
3.298361
305
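The record's main() is elided, so the per-number check is not shown. A minimal sketch of the happy-number test the docstring describes, assuming one integer per input line:

def is_happy(n):
    # Follow the digit-square chain; a repeated value means we are in a
    # cycle that never reaches 1.
    seen = set()
    while n != 1 and n not in seen:
        seen.add(n)
        n = sum(int(d) ** 2 for d in str(n))
    return n == 1


for n in (1, 7, 22):
    print(1 if is_happy(n) else 0)  # -> 1, 1, 0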
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making BK-BASE 蓝鲸基础平台 available.

Copyright (C) 2021 THL A29 Limited, a Tencent company. All rights reserved.

BK-BASE 蓝鲸基础平台 is licensed under the MIT License.

License for BK-BASE 蓝鲸基础平台:
--------------------------------------------------------------------

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial
portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
"""
from dataflow.flow.tasklog.yarn_api_client.hadoop_conf import get_nodemanager_endpoint, get_nodemanager_webapp_endpoint
from dataflow.shared.log import flow_logger as logger

from . import yarn_api_client
[ 2, 532, 9, 12, 19617, 25, 3384, 69, 12, 23, 532, 9, 12, 198, 37811, 198, 24893, 1087, 318, 10607, 284, 1104, 262, 1280, 2723, 2055, 416, 1642, 347, 42, 12, 33, 11159, 5525, 241, 251, 165, 110, 116, 161, 253, 118, 163, 94, 222, ...
3.434783
460
""" 42. Trapping Rain Water Example: Input: [0,1,0,2,1,0,1,3,2,1,2,1] Output: 6 """ #Descending Stack # Greedy # @param {integer[]} height # @return {integer}
[ 37811, 201, 198, 3682, 13, 4759, 2105, 10301, 5638, 201, 198, 16281, 25, 201, 198, 201, 198, 20560, 25, 685, 15, 11, 16, 11, 15, 11, 17, 11, 16, 11, 15, 11, 16, 11, 18, 11, 17, 11, 16, 11, 17, 11, 16, 60, 201, 198, 26410, ...
1.865385
104
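The record above carries only the problem statement plus the hints '# Descending Stack' and '# Greedy'; no solution body is included. A sketch of the descending-stack approach those hints point at (my implementation, not the record's):

def trap(height):
    # Keep a stack of indices with non-increasing heights; when a taller bar
    # arrives, pop the basin floor and add the water bounded on both sides.
    stack, water = [], 0
    for i, h in enumerate(height):
        while stack and h > height[stack[-1]]:
            bottom = height[stack.pop()]
            if not stack:
                break
            width = i - stack[-1] - 1
            bounded = min(h, height[stack[-1]]) - bottom
            water += width * bounded
        stack.append(i)
    return water


print(trap([0, 1, 0, 2, 1, 0, 1, 3, 2, 1, 2, 1]))  # -> 6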
#!/usr/bin/env python

from load import ROOT as R
from gna.env import env
from gna.parameters.printer import print_parameters
from gna.converters import convert

ns = env.globalns

names = ['one', 'two', 'three', 'four', 'five']
for i, name in enumerate(names):
    ns.defparameter(name, central=(100 - i * 20), relsigma=0.1)

vnames = convert(names, 'stdvector')
vp = R.VarDiff(vnames, 'diff', ns=ns)
v = ns['diff'].get()
v.setLabel('a-b-c-...')
diff = ns['diff']
print_parameters(ns, labels=True)

print('Change one input at a time:')
for i, name in enumerate(names, 2):
    ns[name].set(i)
    print_parameters(ns)
print()

print('Subtract from 100')
vp = R.VarDiff(vnames, 'diff100', 100., ns=ns)
v = ns['diff100'].get()
v.setLabel('100-a-b-c-...')
print_parameters(ns, labels=True)
[ 2, 48443, 14629, 14, 8800, 14, 24330, 21015, 198, 198, 6738, 3440, 1330, 15107, 2394, 355, 371, 198, 6738, 308, 2616, 13, 24330, 1330, 17365, 198, 6738, 308, 2616, 13, 17143, 7307, 13, 1050, 3849, 1330, 3601, 62, 17143, 7307, 198, 673...
2.487578
322
from app.pipelines.base import Pipeline, PipelineException  # isort:skip

from app.pipelines.image_classification import ImageClassificationPipeline
[ 6738, 598, 13, 79, 541, 20655, 13, 8692, 1330, 37709, 11, 37709, 16922, 220, 1303, 318, 419, 25, 48267, 198, 198, 6738, 598, 13, 79, 541, 20655, 13, 9060, 62, 4871, 2649, 1330, 7412, 9487, 2649, 47, 541, 4470, 198 ]
3.725
40
import json
import uuid
from dataclasses import dataclass, field
from typing import Any, Dict, List, Optional

import jinja2

HTML_TEMPLATE = jinja2.Template(
    """
<!DOCTYPE html>
<html>
  <head>
    <link rel="stylesheet" href="{{ base_url }}/higlass@{{ higlass_version }}/dist/hglib.css">
  </head>
  <body>
    <div id="{{ output_div }}"></div>
    <script type="module">
      async function loadScript(src) {
        return new Promise(resolve => {
          const script = document.createElement('script');
          script.onload = resolve;
          script.src = src;
          script.async = false;
          document.head.appendChild(script);
        });
      }

      async function loadHiglass() {
        // need to manually load higlass; disable requirejs
        // https://github.com/DanielHreben/requirejs-toggle
        window.__requirejsToggleBackup = {
          define: window.define,
          require: window.require,
          requirejs: window.requirejs,
        };
        for (const field of Object.keys(window.__requirejsToggleBackup)) {
          window[field] = undefined;
        }

        let sources = [{% for plugin_url in plugin_urls %}"{{ plugin_url }}",{% endfor %}];

        if (!window.hglib){
          sources = sources.concat([
            "{{ base_url }}/react@{{ react_version }}/umd/react.production.min.js",
            "{{ base_url }}/react-dom@{{ react_version }}/umd/react-dom.production.min.js",
            "{{ base_url }}/pixi.js@{{ pixijs_version }}/dist/browser/pixi.min.js",
            "{{ base_url }}/higlass@{{ higlass_version }}/dist/hglib.js",
          ]);
        }

        for (const src of sources) await loadScript(src);

        // restore requirejs after scripts have loaded
        Object.assign(window, window.__requirejsToggleBackup);
        delete window.__requirejsToggleBackup;

        return window.hglib;
      };

      var el = document.getElementById('{{ output_div }}');
      var spec = JSON.parse({{ spec }});

      loadHiglass().then(hglib => {
        hglib.viewer(el, spec);
      })
    </script>
  </body>
</html>
"""
)


@dataclass
html_renderer = HTMLRenderer()

renderers = RendererRegistry()
renderers.register("default", html_renderer)
renderers.register("html", html_renderer)
renderers.register("colab", html_renderer)
renderers.register("kaggle", html_renderer)
renderers.register("zeppelin", html_renderer)
renderers.enable("default")
[ 11748, 33918, 198, 11748, 334, 27112, 198, 6738, 4818, 330, 28958, 1330, 4818, 330, 31172, 11, 2214, 198, 6738, 19720, 1330, 4377, 11, 360, 713, 11, 7343, 11, 32233, 198, 198, 11748, 474, 259, 6592, 17, 198, 198, 28656, 62, 51, 3620, ...
2.341603
1,048
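A minimal render of the record's HTML_TEMPLATE (this continues from the record and reuses its HTML_TEMPLATE object). The version numbers and view-config spec are placeholder assumptions, and the double JSON encoding is my reading of the template's JSON.parse({{ spec }}) expression, which expects a JS string literal:

import json

html = HTML_TEMPLATE.render(
    base_url="https://esm.sh",
    higlass_version="1.12",
    react_version="17",
    pixijs_version="6",
    output_div="hg-demo",
    plugin_urls=[],
    spec=json.dumps(json.dumps({"views": []})),  # string literal for JSON.parse
)
print(html[:80])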
# -*- coding: utf-8 -*-
"""Module to evaluate corrupting gains for BDA simulations."""

from __future__ import (print_function, absolute_import)
import numpy
import time
import os


def allan_deviation(data, dt, tau):
    """
    Evaluate the Allan deviation of a time series.

    References:
        https://en.wikipedia.org/wiki/Allan_variance

    Args:
        data (array_like): Array of time series data.
        dt (float): Sample spacing of the time series data.
        tau (float): Interval at which to calculate the allan deviation.

    Returns:
        sm: Allan deviation
        sme: error on the allan deviation
        n: number of pairs in the Allan computation
    """
    data = numpy.asarray(data)
    num_points = data.shape[0]
    m = int(tau / dt)  # Number of samples in length tau
    data = data[:num_points - (num_points % m)]  # Resize to a multiple of m
    # Reshape into blocks of length m and take the average of each block.
    data = data.reshape((-1, m))
    data_mean = numpy.mean(data, axis=1)
    data_diff = numpy.diff(data_mean)
    n = data_diff.shape[0]
    a_dev = numpy.sqrt((0.5 / n) * (numpy.sum(data_diff**2)))
    a_dev_err = a_dev / numpy.sqrt(n)
    return a_dev, a_dev_err, n


def fractional_brownian_motion(n, hurst):
    """Generate Fractional brownian motion noise.

    http://www.maths.uq.edu.au/~kroese/ps/MCSpatial.pdf

    Args:
        n (int): Length of the time series.
        hurst (float): Hurst parameter.

    Returns:
        Time series array.
    """
    a = 2.0 * hurst
    r = numpy.empty(n + 1) * numpy.NaN
    r[0] = 1.0
    k = numpy.arange(1, n + 1)
    r[1:] = 0.5 * ((k + 1)**a - 2.0 * k**a + (k - 1)**a)
    r = numpy.append(r, r[-2:0:-1])  # first row of circulant matrix
    l = numpy.real(numpy.fft.fft(r)) / (2 * n)
    w = numpy.fft.fft(numpy.sqrt(l) * (numpy.random.randn(2 * n) +
                                       1.0j * numpy.random.randn(2 * n)))
    w = n**(-hurst) * numpy.cumsum(numpy.real(w[:n+1]))  # Rescale
    return w[:n]


if __name__ == '__main__':
    test_unblocked()
[ 2, 532, 9, 12, 19617, 25, 3384, 69, 12, 23, 532, 9, 12, 198, 37811, 26796, 284, 13446, 10622, 278, 8810, 329, 347, 5631, 27785, 526, 15931, 198, 198, 6738, 11593, 37443, 834, 1330, 357, 4798, 62, 8818, 11, 4112, 62, 11748, 8, 198,...
2.224359
936
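A quick usage sketch for allan_deviation() from the record (it reuses that function, so run it alongside the record's code); the noise series, sample spacing, and tau are arbitrary demo values:

import numpy

rng = numpy.random.default_rng(42)
series = rng.standard_normal(10000)  # white Gaussian noise
a_dev, a_dev_err, n_pairs = allan_deviation(series, dt=1.0, tau=10.0)
print('allan deviation: %.4f +/- %.4f over %d pairs' % (a_dev, a_dev_err, n_pairs))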