| content (string, length 0–1.05M) | origin (2 classes) | type (2 classes) |
|---|---|---|
#!/usr/bin/env python
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import rdoinfo
import sh
import shutil
import sys
if len(sys.argv) > 1:
UC_RELEASE = sys.argv[1]
else:
UC_RELEASE = 'wallaby-uc'
def update_puppet_uc():
if os.path.exists(os.path.join(".", "modules")):
shutil.rmtree("./modules")
info = rdoinfo.parse_info_file('rdo.yml')
puppet_info = []
for package in info['packages']:
if package['name'].startswith('puppet'):
puppet_info.append([package['name'], package['upstream']])
for package in puppet_info:
url = package[1]
if 'openstack' in url: # Do not bump OpenStack modules
continue
module = package[0]
gitpath = os.path.join("modules", module)
sh.git.clone(url, gitpath)
git = sh.git.bake(_cwd=gitpath, _tty_out=False)
try:
rev_list = str(git('rev-list', '--tags', '--max-count=1')).strip()
tag = str(git.describe('--tags', rev_list)).strip()
with open('upper-constraints.txt', 'a') as fp:
fp.write("%s===%s\n" % (module, tag))
except Exception:
continue
shutil.rmtree(gitpath)
update_uc = sh.Command('./update-uc.py')
update_uc(UC_RELEASE)
if __name__ == '__main__':
update_puppet_uc()
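# Note (illustrative only): the loop above appends one "<module>===<tag>" pin per
# non-OpenStack puppet module to upper-constraints.txt, for example:
#
#     puppet-stdlib===5.2.0
#     puppet-concat===6.4.0
#
# The module names and tags shown here are made up for illustration.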
|
nilq/baby-python
|
python
|
import os
import csv
import json
import torch
import pickle
import random
import warnings
import numpy as np
from functools import reduce
from typing import Dict, List, Tuple, Set, Any
__all__ = [
'to_one_hot',
'seq_len_to_mask',
'ignore_waring',
'make_seed',
'load_pkl',
'save_pkl',
'ensure_dir',
'load_csv',
'load_jsonld',
'jsonld2csv',
'csv2jsonld',
]
Path = str
def to_one_hot(x, length):
batch_size = x.size(0)
x_one_hot = torch.zeros(batch_size, length).to(x.device)
for i in range(batch_size):
x_one_hot[i, x[i]] = 1.0
return x_one_hot
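# A minimal usage sketch for to_one_hot (assuming a CPU LongTensor of class indices):
#
#     >>> labels = torch.tensor([0, 2, 1])
#     >>> to_one_hot(labels, length=4)
#     tensor([[1., 0., 0., 0.],
#             [0., 0., 1., 0.],
#             [0., 1., 0., 0.]])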
def model_summary(model):
"""
得到模型的总参数量
:params model: Pytorch 模型
:return tuple: 包含总参数量,可训练参数量,不可训练参数量
"""
train = []
nontrain = []
def layer_summary(module):
def count_size(sizes):
return reduce(lambda x, y: x * y, sizes)
for p in module.parameters(recurse=False):
if p.requires_grad:
train.append(count_size(p.shape))
else:
nontrain.append(count_size(p.shape))
for subm in module.children():
layer_summary(subm)
layer_summary(model)
total_train = sum(train)
total_nontrain = sum(nontrain)
total = total_train + total_nontrain
strings = []
strings.append('Total params: {:,}'.format(total))
strings.append('Trainable params: {:,}'.format(total_train))
strings.append('Non-trainable params: {:,}'.format(total_nontrain))
max_len = len(max(strings, key=len))
bar = '-' * (max_len + 3)
strings = [bar] + strings + [bar]
print('\n'.join(strings))
return total, total_train, total_nontrain
def seq_len_to_mask(seq_len, max_len=None):
"""
将一个表示sequence length的一维数组转换为二维的mask,不包含的位置为0。
转变 1-d seq_len到2-d mask.
.. code-block::
>>> seq_len = torch.arange(2, 16)
>>> mask = seq_len_to_mask(seq_len)
>>> print(mask.size())
torch.Size([14, 15])
>>> seq_len = np.arange(2, 16)
>>> mask = seq_len_to_mask(seq_len)
>>> print(mask.shape)
(14, 15)
>>> seq_len = torch.arange(2, 16)
>>> mask = seq_len_to_mask(seq_len, max_len=100)
>>>print(mask.size())
torch.Size([14, 100])
:param np.ndarray,torch.LongTensor seq_len: shape将是(B,)
:param int max_len: 将长度pad到这个长度。默认(None)使用的是seq_len中最长的长度。但在nn.DataParallel的场景下可能不同卡的seq_len会有
区别,所以需要传入一个max_len使得mask的长度是pad到该长度。
:return: np.ndarray, torch.Tensor 。shape将是(B, max_length), 元素类似为bool或torch.uint8
"""
if isinstance(seq_len, np.ndarray):
assert len(np.shape(seq_len)) == 1, f"seq_len can only have one dimension, got {len(np.shape(seq_len))}."
max_len = int(max_len) if max_len else int(seq_len.max())
broad_cast_seq_len = np.tile(np.arange(max_len), (len(seq_len), 1))
mask = broad_cast_seq_len < seq_len.reshape(-1, 1)
elif isinstance(seq_len, torch.Tensor):
        assert seq_len.dim() == 1, f"seq_len can only have one dimension, got {seq_len.dim()}."
batch_size = seq_len.size(0)
max_len = int(max_len) if max_len else seq_len.max().long()
broad_cast_seq_len = torch.arange(max_len).expand(batch_size, -1).to(seq_len)
mask = broad_cast_seq_len.lt(seq_len.unsqueeze(1))
else:
raise TypeError("Only support 1-d numpy.ndarray or 1-d torch.Tensor.")
return mask
def ignore_waring():
warnings.filterwarnings("ignore")
def make_seed(num: int = 1) -> None:
random.seed(num)
np.random.seed(num)
torch.manual_seed(num)
torch.cuda.manual_seed(num)
torch.cuda.manual_seed_all(num)
def load_pkl(fp: str, obj_name: str = 'data', verbose: bool = True) -> Any:
if verbose:
print(f'load {obj_name} in {fp}')
with open(fp, 'rb') as f:
data = pickle.load(f)
return data
def save_pkl(fp: Path, obj, obj_name: str = 'data', verbose: bool = True) -> None:
if verbose:
print(f'save {obj_name} in {fp}')
with open(fp, 'wb') as f:
pickle.dump(obj, f)
def ensure_dir(d: str, verbose: bool = True) -> None:
    '''
    Check whether the directory exists and create it if it does not.
    :param d: directory
    :param verbose: whether to print logging
    :return: None
    '''
if not os.path.exists(d):
if verbose:
print("Directory '{}' do not exist; creating...".format(d))
os.makedirs(d)
def load_csv(fp: str) -> List:
print(f'load {fp}')
with open(fp, encoding='utf-8') as f:
reader = csv.DictReader(f)
return list(reader)
def load_jsonld(fp: str) -> List:
print(f'load {fp}')
datas = []
with open(fp, encoding='utf-8') as f:
for l in f:
line = json.loads(l)
data = list(line.values())
datas.append(data)
return datas
def jsonld2csv(fp: str, verbose: bool = True) -> str:
    '''
    Read a jsonld file and save it as a csv file with the same name in the same location.
    :param fp: path to the jsonld file
    :param verbose: whether to print logging
    :return: path to the csv file
    '''
data = []
root, ext = os.path.splitext(fp)
fp_new = root + '.csv'
if verbose:
print(f'read jsonld file in: {fp}')
with open(fp, encoding='utf-8') as f:
for l in f:
line = json.loads(l)
data.append(line)
if verbose:
print('saving...')
with open(fp_new, 'w', encoding='utf-8') as f:
fieldnames = data[0].keys()
writer = csv.DictWriter(f, fieldnames=fieldnames, dialect='excel')
writer.writeheader()
writer.writerows(data)
if verbose:
print(f'saved csv file in: {fp_new}')
return fp_new
def csv2jsonld(fp: str, verbose: bool = True) -> str:
    '''
    Read a csv file and save it as a jsonld file with the same name in the same location.
    :param fp: path to the csv file
    :param verbose: whether to print logging
    :return: path to the jsonld file
    '''
data = []
root, ext = os.path.splitext(fp)
fp_new = root + '.jsonld'
if verbose:
print(f'read csv file in: {fp}')
with open(fp, encoding='utf-8') as f:
        reader = csv.DictReader(f, fieldnames=None, dialect='excel')
        for line in reader:
            data.append(line)
if verbose:
print('saving...')
with open(fp_new, 'w', encoding='utf-8') as f:
f.write(os.linesep.join([json.dumps(l, ensure_ascii=False) for l in data]))
if verbose:
print(f'saved jsonld file in: {fp_new}')
return fp_new
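# A minimal round-trip sketch (the file name is hypothetical): each line of a .jsonld
# file holds one JSON object, and the CSV header comes from the keys of the first object.
#
#     >>> csv_path = jsonld2csv('relations.jsonld')   # writes relations.csv
#     >>> back = csv2jsonld(csv_path)                 # writes relations.jsonld again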
if __name__ == '__main__':
pass
|
nilq/baby-python
|
python
|
import os
import boto3
AMI = os.environ["AMI"]
INSTANCE_TYPE = os.environ["INSTANCE_TYPE"]
KEY_NAME = os.environ["KEY_NAME"]
SUBNET_ID = os.environ["SUBNET_ID"]
REGION = os.environ["REGION"]
INSTANCE_PROFILE = os.environ["INSTANCE_PROFILE"]
ec2 = boto3.client("ec2", region_name=REGION)
def create_instance(event, files_to_download):
"""
    Using the event variables and the (possibly trimmed) list of files to download, create the EC2 instance that performs the downloading.
    """
    # convert to a comma-separated string with double quotes so each entry is treated as a string
files_to_download = ",".join(map('"{0}"'.format, files_to_download))
vars = {
"FTP_HOST": event["ftp_url"],
"FTP_PATH": event["ftp_path"],
"FTP_USERNAME": event["username"],
"FTP_PASSWORD": event["password"],
"FTP_AUTH_KEY": event["auth_key"],
"S3_BUCKET_NAME": event["s3_bucket"],
"PRODUCTS_TABLE": event["product_table"],
"files_to_download": files_to_download,
"s3_path": event["s3_path"],
}
print(vars)
init_script = """#!/bin/bash
/bin/echo "**************************"
/bin/echo "* Running FTP to S3. *"
/bin/echo "**************************"
/bin/pwd
/bin/whoami
export S3_BUCKET_NAME={S3_BUCKET_NAME}
export PRODUCTS_TABLE={PRODUCTS_TABLE}
export FTP_HOST={FTP_HOST}
export FTP_PATH={FTP_PATH}
export FTP_USERNAME={FTP_USERNAME}
export FTP_PASSWORD={FTP_PASSWORD}
/bin/echo python3 /home/ec2-user/ftp_to_s3.py {s3_path} {files_to_download}
PYTHONUSERBASE=/home/ec2-user/.local python3.8 /home/ec2-user/ftp_to_s3.py {s3_path} {files_to_download}
shutdown now -h""".format(
**vars
)
instance = ec2.run_instances(
ImageId=AMI,
InstanceType=INSTANCE_TYPE,
KeyName=KEY_NAME,
SubnetId=SUBNET_ID,
MaxCount=1,
MinCount=1,
InstanceInitiatedShutdownBehavior="terminate",
UserData=init_script,
IamInstanceProfile={"Arn": INSTANCE_PROFILE},
BlockDeviceMappings=[{"DeviceName": "/dev/xvda", "Ebs": {"VolumeSize": 50}}],
)
instance_id = instance["Instances"][0]["InstanceId"]
print("***New Instance! {0}***".format(instance_id))
print("Instance downloading these files: {0}".format(files_to_download))
return instance_id
def lambda_handler(event, context):
# variables sent from scheduler.py
print(event, context)
# calculate files to download total size
files_list = event["files_to_download"]
total_size = 0
size_limit = 30212254720 # set to 30GBish
files_to_download = []
for obj in files_list:
total_size += int(obj["size"])
if total_size < size_limit:
files_to_download.append(obj)
else:
create_instance(event, files_to_download)
files_to_download = [obj]
total_size = int(obj["size"])
# files_to_download.append(obj)
create_instance(event, files_to_download)
print("Finished.")
|
nilq/baby-python
|
python
|
# coding: utf-8
"""Pytest fixtures and utilities for testing algorithms."""
import gym
import torch
import torch.nn as nn
import torch.distributions as distrib
import pytest
from irl.algo.value_methods import TensorQValues
class ProbPolicy(nn.Module):
"""A simple test probabilistic policy."""
def __init__(
self, dim_in: int, dim_out: int, continuous: bool = False, critic: bool = False
) -> None:
"""Initialize probabilistic policy."""
super().__init__()
self.lin = nn.Linear(dim_in, dim_out)
self.critic = nn.Linear(dim_in, 1) if critic else None
self.continuous = continuous
def forward(self, obs: torch.Tensor) -> distrib.Distribution:
"""Forward pass."""
h = self.lin(obs)
if self.continuous:
probs = distrib.Normal(h, 1.0)
else:
probs = distrib.Categorical(logits=h)
if self.critic is not None:
return probs, self.critic(obs)
else:
return probs
def new_with_critic(self) -> "ProbPolicy":
"""Return a similar probabilistic policy with a critic."""
return ProbPolicy(
dim_in=self.lin.in_features,
dim_out=self.lin.out_features,
continuous=self.continuous,
critic=True,
)
@pytest.fixture
def prob_policy(env_factory) -> nn.Module:
"""Create a ProbPolicy relevant for the environment."""
env = env_factory()
dim_in, = env.observation_space.shape
continuous = isinstance(env.action_space, gym.spaces.Box)
if continuous:
dim_out, = env.action_space.shape
else:
dim_out = env.action_space.n
return ProbPolicy(dim_in, dim_out, continuous)
class DQN(nn.Module):
"""A simple test deep Q network."""
def __init__(self, dim_in: int, dim_out: int) -> None:
"""Initialize a deep Q network."""
super().__init__()
self.lin = nn.Linear(dim_in, dim_out)
def forward(self, obs: torch.Tensor) -> distrib.Distribution:
"""Forward pass."""
return TensorQValues(self.lin(obs))
@pytest.fixture
def dqn(env_factory) -> nn.Module:
"""Createa a DQN relevant for the environment."""
env = env_factory()
if isinstance(env.action_space, gym.spaces.Box):
        pytest.skip("DQN is not suited for continuous environments.")
dim_in, = env.observation_space.shape
dim_out = env.action_space.n
return DQN(dim_in, dim_out)
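# A hedged sketch of how these fixtures might be used in a test; it assumes the
# `env_factory` fixture referenced above exists elsewhere in the test suite and that
# the environment follows the classic gym reset() API.
#
# def test_prob_policy_returns_distribution(prob_policy, env_factory):
#     env = env_factory()
#     obs = torch.as_tensor(env.reset(), dtype=torch.float32)
#     dist = prob_policy(obs)
#     assert isinstance(dist, distrib.Distribution)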
|
nilq/baby-python
|
python
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# SPDX-License-Identifier: Apache-2.0
# SPDX-FileCopyrightText: © 2021 Massachusetts Institute of Technology.
# SPDX-FileCopyrightText: © 2021 Lee McCuller <mcculler@mit.edu>
# NOTICE: authors should document their contributions concisely in NOTICE
# with details inline in source files, comments, and docstrings.
"""
"""
from wavestate.model import optics
from wavestate.model import base
def system1064(SHG=True):
obj_sys = base.SimulationObject()
freqs = base.SimulationObject()
obj_sys["frequencies/"] = freqs
freqs["Nd1064/"] = optics.OpticalFrequency()
freqs["Nd1064/wavelength[m]"] = 1064e-9
if SHG:
freqs["Nd1064/order"] = 2
else:
freqs["Nd1064/order"] = 1
aliases1064 = freqs["aliases_1064/"] = optics.OpticalFrequencyAliases()
aliases1064["to"] = {"Nd1064": 1}
aliases1064["names"] = ["1064", 1064, "1064nm", 1064e-9]
aliases532 = freqs["aliases_532/"] = optics.OpticalFrequencyAliases()
aliases532["to"] = {"Nd1064": 2}
aliases532["names"] = ["532", 532, "532nm", 532e-9]
return obj_sys
def system1550(SHG=True):
obj_sys = base.SimulationObject()
freqs = base.SimulationObject()
obj_sys["frequencies/"] = freqs
freqs["1550/"] = optics.OpticalFrequency()
freqs["1550/wavelength[m]"] = 1550e-9
if SHG:
freqs["1550/order"] = 2
else:
freqs["1550/order"] = 1
aliases1550 = freqs["aliases_1550/"] = optics.OpticalFrequencyAliases()
aliases1550["to"] = {"1550": 1}
aliases1550["names"] = ["1550", 1550, "1550nm", 1550e-9]
aliases775 = freqs["aliases_775/"] = optics.OpticalFrequencyAliases()
aliases775["to"] = {"1550": 2}
aliases775["names"] = ["775", 775, "775nm", 775e-9]
return obj_sys
|
nilq/baby-python
|
python
|
from __future__ import print_function, division, absolute_import
from distributed.compatibility import (
gzip_compress, gzip_decompress, finalize)
def test_gzip():
b = b'Hello, world!'
c = gzip_compress(b)
d = gzip_decompress(c)
assert b == d
def test_finalize():
class C(object):
pass
l = []
def cb(value):
l.append(value)
o = C()
finalize(o, cb, 1)
assert not l
del o
assert l.pop() == 1
o = C()
fin = finalize(o, cb, 2)
assert fin.alive
fin()
assert not fin.alive
assert l.pop() == 2
del o
assert not l
|
nilq/baby-python
|
python
|
# ActivitySim
# See full license in LICENSE.txt.
import logging
import pandas as pd
from activitysim.core import tracing
from activitysim.core import config
from activitysim.core import pipeline
from activitysim.core import inject
from activitysim.core.util import assign_in_place
from activitysim.abm.models.trip_purpose import run_trip_purpose
from activitysim.abm.models.trip_destination import run_trip_destination
from activitysim.abm.models.util.trip import flag_failed_trip_leg_mates
from activitysim.abm.models.util.trip import cleanup_failed_trips
logger = logging.getLogger(__name__)
def run_trip_purpose_and_destination(
trips_df,
tours_merged_df,
chunk_size,
trace_hh_id,
trace_label):
assert not trips_df.empty
choices = run_trip_purpose(
trips_df,
chunk_size=chunk_size,
trace_hh_id=trace_hh_id,
trace_label=tracing.extend_trace_label(trace_label, 'purpose')
)
trips_df['purpose'] = choices
trips_df, save_sample_df = run_trip_destination(
trips_df,
tours_merged_df,
chunk_size, trace_hh_id,
trace_label=tracing.extend_trace_label(trace_label, 'destination'))
return trips_df, save_sample_df
@inject.step()
def trip_purpose_and_destination(
trips,
tours_merged,
chunk_size,
trace_hh_id):
trace_label = "trip_purpose_and_destination"
model_settings = config.read_model_settings('trip_purpose_and_destination.yaml')
# for consistency, read sample_table_name setting from trip_destination settings file
trip_destination_model_settings = config.read_model_settings('trip_destination.yaml')
sample_table_name = trip_destination_model_settings.get('DEST_CHOICE_SAMPLE_TABLE_NAME')
want_sample_table = config.setting('want_dest_choice_sample_tables') and sample_table_name is not None
MAX_ITERATIONS = model_settings.get('MAX_ITERATIONS', 5)
trips_df = trips.to_frame()
tours_merged_df = tours_merged.to_frame()
if trips_df.empty:
logger.info("%s - no trips. Nothing to do." % trace_label)
return
# FIXME could allow MAX_ITERATIONS=0 to allow for cleanup-only run
# in which case, we would need to drop bad trips, WITHOUT failing bad_trip leg_mates
assert (MAX_ITERATIONS > 0)
# if trip_destination has been run before, keep only failed trips (and leg_mates) to retry
if 'destination' in trips_df:
if 'failed' not in trips_df.columns:
# trip_destination model cleaned up any failed trips
logger.info("%s - no failed column from prior model run." % trace_label)
return
elif not trips_df.failed.any():
# 'failed' column but no failed trips from prior run of trip_destination
logger.info("%s - no failed trips from prior model run." % trace_label)
trips_df.drop(columns='failed', inplace=True)
pipeline.replace_table("trips", trips_df)
return
else:
logger.info("trip_destination has already been run. Rerunning failed trips")
flag_failed_trip_leg_mates(trips_df, 'failed')
trips_df = trips_df[trips_df.failed]
tours_merged_df = tours_merged_df[tours_merged_df.index.isin(trips_df.tour_id)]
logger.info("Rerunning %s failed trips and leg-mates" % trips_df.shape[0])
# drop any previously saved samples of failed trips
if want_sample_table and pipeline.is_table(sample_table_name):
logger.info("Dropping any previously saved samples of failed trips")
save_sample_df = pipeline.get_table(sample_table_name)
save_sample_df.drop(trips_df.index, level='trip_id', inplace=True)
pipeline.replace_table(sample_table_name, save_sample_df)
del save_sample_df
processed_trips = []
save_samples = []
i = 0
TRIP_RESULT_COLUMNS = ['purpose', 'destination', 'origin', 'failed']
while True:
i += 1
for c in TRIP_RESULT_COLUMNS:
if c in trips_df:
del trips_df[c]
trips_df, save_sample_df = run_trip_purpose_and_destination(
trips_df,
tours_merged_df,
chunk_size=chunk_size,
trace_hh_id=trace_hh_id,
trace_label=tracing.extend_trace_label(trace_label, "i%s" % i))
        # if testing, make sure at least one trip fails
if config.setting('testing_fail_trip_destination', False) \
and (i == 1) and not trips_df.failed.any():
fail_o = trips_df[trips_df.trip_num < trips_df.trip_count].origin.max()
trips_df.failed = (trips_df.origin == fail_o) & \
(trips_df.trip_num < trips_df.trip_count)
num_failed_trips = trips_df.failed.sum()
# if there were no failed trips, we are done
if num_failed_trips == 0:
processed_trips.append(trips_df[TRIP_RESULT_COLUMNS])
if save_sample_df is not None:
save_samples.append(save_sample_df)
break
logger.warning("%s %s failed trips in iteration %s" % (trace_label, num_failed_trips, i))
file_name = "%s_i%s_failed_trips" % (trace_label, i)
logger.info("writing failed trips to %s" % file_name)
tracing.write_csv(trips_df[trips_df.failed], file_name=file_name, transpose=False)
# if max iterations reached, add remaining trips to processed_trips and give up
# note that we do this BEFORE failing leg_mates so resulting trip legs are complete
if i >= MAX_ITERATIONS:
logger.warning("%s too many iterations %s" % (trace_label, i))
processed_trips.append(trips_df[TRIP_RESULT_COLUMNS])
if save_sample_df is not None:
save_sample_df.drop(trips_df[trips_df.failed].index, level='trip_id', inplace=True)
save_samples.append(save_sample_df)
break
# otherwise, if any trips failed, then their leg-mates trips must also fail
flag_failed_trip_leg_mates(trips_df, 'failed')
# add the good trips to processed_trips
processed_trips.append(trips_df[~trips_df.failed][TRIP_RESULT_COLUMNS])
# and keep the failed ones to retry
trips_df = trips_df[trips_df.failed]
tours_merged_df = tours_merged_df[tours_merged_df.index.isin(trips_df.tour_id)]
# add trip samples of processed_trips to processed_samples
if save_sample_df is not None:
# drop failed trip samples
save_sample_df.drop(trips_df.index, level='trip_id', inplace=True)
save_samples.append(save_sample_df)
# - assign result columns to trips
processed_trips = pd.concat(processed_trips)
if len(save_samples) > 0:
save_sample_df = pd.concat(save_samples)
logger.info("adding %s samples to %s" % (len(save_sample_df), sample_table_name))
pipeline.extend_table(sample_table_name, save_sample_df)
logger.info("%s %s failed trips after %s iterations" %
(trace_label, processed_trips.failed.sum(), i))
trips_df = trips.to_frame()
assign_in_place(trips_df, processed_trips)
trips_df = cleanup_failed_trips(trips_df)
pipeline.replace_table("trips", trips_df)
    # check to make sure we wrote sample file if requested
if want_sample_table and len(trips_df) > 0:
assert pipeline.is_table(sample_table_name)
        # since we have saved samples for all successful trips,
        # once we discard failed trips, we should have samples for all trips
save_sample_df = pipeline.get_table(sample_table_name)
        # expect samples only for intermediate trip destinations
assert \
len(save_sample_df.index.get_level_values(0).unique()) == \
len(trips_df[trips_df.trip_num < trips_df.trip_count])
del save_sample_df
if trace_hh_id:
tracing.trace_df(trips_df,
label=trace_label,
slicer='trip_id',
index_label='trip_id',
warn_if_empty=True)
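# The loop above follows a generic "retry only the failed rows" pattern. A hedged,
# self-contained sketch of that pattern (all names below are illustrative, not part
# of ActivitySim):
#
# def retry_failed(df, run_model, max_iterations=5):
#     processed = []
#     for i in range(1, max_iterations + 1):
#         df = run_model(df)                   # must refresh a boolean 'failed' column
#         processed.append(df[~df.failed])
#         df = df[df.failed]
#         if df.empty:
#             break
#     return pd.concat(processed + [df])       # leftover failures kept for cleanup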
|
nilq/baby-python
|
python
|
# pytest file that runs the things in shell-sessions/
import codecs
import os
import pathlib
import re
import shutil
import sys
import time
import pytest
import asdac.__main__
sessions_dir = pathlib.Path(__file__).absolute().parent / 'shell-sessions'
@pytest.fixture
def shell_session_environment(tmp_path):
os.chdir(str(tmp_path))
for file in (sessions_dir / 'files').iterdir():
shutil.copy(str(file), '.')
with open('bom.asda', 'wb') as file:
file.write(codecs.BOM_UTF8 + b'print("Hello")\n')
with open('bombom.asda', 'wb') as file:
file.write(codecs.BOM_UTF8 + codecs.BOM_UTF8 + b'print("Hello")\n')
def create_test_func(path):
@pytest.mark.slow
def test_func(shell_session_environment, monkeypatch, capsys):
with path.open('r', encoding='utf-8') as file:
session = file.read().replace(r'<\uFEFF>', '\uFEFF')
for command, output in re.findall(r'^\$ (.*)\n([^\$]*)', session,
flags=re.MULTILINE):
program, *args = command.split()
expected_output = output.rstrip()
if expected_output:
expected_output += '\n'
if program == '#':
actual_output = ''
elif program == 'touch':
time.sleep(0.05) # sometimes fails without this
[path_string] = args
pathlib.Path(path_string).touch()
actual_output = ''
elif program == 'asdac':
monkeypatch.setattr(sys, 'argv', ['asdac'] + args)
try:
asdac.__main__.main()
except SystemExit as e:
if isinstance(e.code, str):
print(e.code, file=sys.stderr)
output, errors = capsys.readouterr()
assert not output
actual_output = errors.replace(os.sep, '/')
else:
raise RuntimeError("unknown program: " + program)
assert expected_output == actual_output
# magic is fun
test_func.__name__ = test_func.__qualname__ = 'test_%s_session' % path.stem
globals()[test_func.__name__] = test_func
for path in sessions_dir.iterdir():
if path.suffix == '.txt':
create_test_func(path)
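# For reference, a hedged sketch of what a shell-sessions/*.txt file is expected to look
# like, based on the regex above: each "$ command" line is followed by that command's output.
#
#     $ asdac bom.asda
#     <compiler output would appear here>
#     $ touch bom.asda
#     $ # comment lines produce no output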
|
nilq/baby-python
|
python
|
#!/usr/bin/env python
import asyncio
import websockets
isExit = False
async def client(uri):
global isExit
async with websockets.connect(uri) as websocket:
while True:
content = await websocket.recv()
print(content)
await asyncio.sleep(0.1)
if isExit:
break
try:
asyncio.get_event_loop().run_until_complete(client('ws://localhost:8765'))
except KeyboardInterrupt as e:
print("KeyboardInterrupt")
isExit = True
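# A hedged sketch of a matching server for local testing; it is not part of the client
# above and uses the same legacy event-loop style.
#
# async def handler(websocket, path):
#     while True:
#         await websocket.send("hello")
#         await asyncio.sleep(1)
#
# asyncio.get_event_loop().run_until_complete(websockets.serve(handler, "localhost", 8765))
# asyncio.get_event_loop().run_forever()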
|
nilq/baby-python
|
python
|
##
# This software was developed and / or modified by Raytheon Company,
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
#
# U.S. EXPORT CONTROLLED TECHNICAL DATA
# This software product contains export-restricted data whose
# export/transfer/disclosure is restricted by U.S. law. Dissemination
# to non-U.S. persons whether in the United States or abroad requires
# an export license or other authorization.
#
# Contractor Name: Raytheon Company
# Contractor Address: 6825 Pine Street, Suite 340
# Mail Stop B8
# Omaha, NE 68106
# 402.291.0100
#
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
# further licensing information.
##
#
# SOFTWARE HISTORY
#
# Date Ticket# Engineer Description
# ------------ ---------- ----------- --------------------------
# 02/20/2016 6110 tgurney Extract and refactor from AvnFPS
# MosData.py
#
#
##
# This is a base file that is not intended to be overridden.
##
from com.raytheon.uf.edex.aviation.aag import AAGData
import time
import AvnLib
import TafGen
UNLIMITED = 99999
TO_KT = 3600.0/1852.0
FILL_VALUE = -9999.0
class AAGTafGen(TafGen.TafGen):
def __init__(self, allFcst):
self.model = 'gfslamp'
self.ident = allFcst['ident']['str']
self.fcst = allFcst['group']
self.startTimes = [t['time']['from'] for t in self.fcst]
self.endTimes = [t['time']['to'] for t in self.fcst]
self.grpTaf = TafGen.Config(self.ident, 'gfslamp').grpTaf()
self.fltCat = TafGen.Config(self.ident, 'gfslamp').fltCat()
self.tafTime = time.time()
self.tafDuration = 24
nBeg, nEnd = self.getTafPrd(self.tafDuration)
self.projData = [
TafGen.LampProjection(self.ident, self.grpTaf, self.fltCat,
dat, self.tafTime).getData()
for dat in self.fcst[nBeg:nEnd]
]
self.subStartTimes = self.startTimes[nBeg:nEnd]
self.subEndTimes = self.endTimes[nBeg:nEnd]
class _PointDataView:
def __init__(self, java_PointDataView):
self.__javaPdv = java_PointDataView
self.__keys = []
keyset = self.__javaPdv.getContainer().getParameters()
itr = keyset.iterator()
while itr.hasNext():
self.__keys.append(str(itr.next()))
def __getitem__(self, key):
result = None
strValType = self.getType(key)
if strValType == 'FLOAT':
result = self.__javaPdv.getFloat(key)
elif strValType == 'STRING':
result = self.__javaPdv.getString(key)
elif strValType == 'INT':
result = self.__javaPdv.getInt(key)
elif strValType == 'LONG':
result = self.__javaPdv.getLong(key)
return result
def getType(self, key):
val = self.__javaPdv.getType(key)
if val:
val = str(val)
return val
def has_key(self, key):
return self.__keys.__contains__(key)
def keys(self):
return self.__keys
def __contains__(self, key):
return self.has_key(key)
###############################################################################
def accumulate(iterable):
s = 0
for v in iterable:
s += v
yield s
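# e.g. list(accumulate([10, 20, 30])) -> [10, 30, 60]; used below to turn per-category
# ceiling probabilities into cumulative probabilities.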
###############################################################################
def _getCigProbs(v,element,numCategories):
try:
probs = [v[element+str(x)] for x in xrange(1,numCategories+1)]
return [min(x,100) for x in accumulate(probs)]
except KeyError:
return [0]*(numCategories-1)+[100]
def _getVisProbs(v,element,numCategories):
try:
return [v[element+str(x)] for x in xrange(1,numCategories+1)]+[100]
except KeyError:
return [0]*(numCategories-1)+[100]
###############################################################################
class _GfsLampData:
VSBY_VALUE = {
8: 0.25,
9: 0.5,
10: 1.5,
11: 2.5,
5: 4.0,
6: 6,
7: 10.0
}
CIG_VALUE = {
1: 100,
2: 300,
3: 700,
8: 1500,
9: 2500,
5: 5000,
6: 10000,
7: 25000
}
COVER_STR = {
0: 'SKC',
13: 'FEW',
11: 'SCT',
12: 'BKN',
8: 'OVC'
}
numData = 25 # 25 forecast hours
PRECIP_TYPE = {
13: 'SHPL',
11: 'FZDZ',
12: 'FZRA',
23: 'SHSN',
21: 'DZSN',
22: 'SN',
33: 'SHRA',
31: 'DZ',
32: 'RA'
}
OBV_TYPE = {
1: 'BL',
2: 'HZ',
3: 'FG',
4: '',
5: 'BR'
}
CIG_COVT = {1:1, 2:2, 3:3, 8:4, 9:5, 5:6, 6:7, 7:8}
VIS_COVT = {8:1, 9:2, 10:3, 11:4, 5:5, 6:6, 7:7}
def cigBestCat(self,t):
try:
return '%3.0f' % self.CIG_COVT[int(t+0.1)]
except:
return ''
def visBestCat(self,t):
try:
return '%3.0f' % self.VIS_COVT[int(t+0.1)]
except:
return ''
def makeObv(self, v):
t = int(v['obVis_bestCat'])
s = self.OBV_TYPE.get(t, '')
if s:
return {'str': s}
else:
return None
def makePcp(self, v, vsby, pdc, n, fcstHrList):
d = {}
p = v['POP_hour']
if p != FILL_VALUE:
d['pop'] = int(p)
p = int(v['POP_hour_bestCat'])
if p != FILL_VALUE:
d['pcat'] = p
        # tstm has overlapped 2-hour forecasts in the first five hours, then every 2 hours
if n < self.numData - 1:
p = _PointDataView(pdc.readRandom(fcstHrList[n+1]))['ltg2hr']
if p == FILL_VALUE and n < self.numData-2:
try:
p = _PointDataView(pdc.readRandom(fcstHrList[n+2]))['ltg2hr']
except:
p = FILL_VALUE
else:
p = FILL_VALUE
if p != FILL_VALUE:
d['pot'] = int(p)
if n < self.numData-1:
p = int(_PointDataView(pdc.readRandom(fcstHrList[n+1]))['ltg_bestCat'])
if p == FILL_VALUE and n < self.numData-2:
try:
#p = int(v['ltg_bestCat'][recno,n+2])
p = int(_PointDataView(pdc.readRandom(fcstHrList[n+2]))['ltg_bestCat'])
except:
p = FILL_VALUE
else:
p = FILL_VALUE
if p != FILL_VALUE:
d['tcat'] = p
ptype = int(v['precipType'])
#if ptype is missing, it's rain
if ptype == FILL_VALUE:
ptype = 3 # rain
pchar = int(v['POP_bestCat'])
if pchar == FILL_VALUE:
pchar = 2
intensity = ''
if ptype == 2 or pchar == 1: # SN or DZ
if vsby:
if vsby < 0.245:
intensity = '+'
elif vsby > 0.50:
intensity = '-'
else:
intensity = '-'
pcp = self.PRECIP_TYPE[ptype * 10 + pchar]
d.update({'str': intensity + pcp, 'int': intensity})
return d
def makeSky(self, ceiling_bestCat, clouds_bestCat):
cig = self.CIG_VALUE.get(int(ceiling_bestCat), None)
cover = int(clouds_bestCat)
#if sky cover is not BKN or OVC, set cig to unlimited.
if cover in [0, 13, 11]:
cig = UNLIMITED
if cig is not None:
if cover == 0:
d = {'str': 'SKC', 'cover': 0, 'cig': UNLIMITED}
elif cover in self.COVER_STR.keys():
if cig != UNLIMITED:
d = {
'str': '%s%03d' % (self.COVER_STR[cover], cig/100),
'cover': cover,
'cig': cig
}
else:
d = {'str': '%s%03d' % (self.COVER_STR[cover], 250),
'cover': cover,
'cig': cig
}
else:
return None
return d
else:
return None
def makeWind(self, v, noToKt):
d = {}
gg = 0
dd = int(v['windDir'])
if dd != FILL_VALUE:
dd = 10* ((dd + 5) // 10)
if dd == 0:
dd = 360
d['dd'] = dd
if 'windSpeedInflated' in v:
ff = float(v['windSpeedInflated'])
fillValue = FILL_VALUE
else:
ff = float(v['windSpeed'])
fillValue = FILL_VALUE
if ff != fillValue:
if noToKt:
d['ff'] = int(ff + 0.5)
else:
d['ff'] = int(ff * TO_KT + 0.5)
if d['ff'] == 0:
d['dd'] = 0
if 'MaxWindSpeed' in v:
gg = int(v['MaxWindSpeed'] * TO_KT + 0.5)
if 'dd' in d and 'ff' in d and 9998 > gg > 0:
d['gg'] = int(gg)
d['str'] = '%03d%02dG%02dKT' % (d['dd'], d['ff'], d['gg'])
else:
if 'dd' in d and 'ff' in d:
d['str'] = '%03d%02dKT' % (d['dd'], d['ff'])
else:
d['str'] = '??????KT'
return d
def makeVsby(self, var):
# returns mid point of category range
tmp = self.VSBY_VALUE.get(int(var), None)
if tmp:
return AvnLib.fixTafVsby(tmp)
else:
return None
def makeData(self, pdc, ident):
self.numData = min(self.numData, pdc.getCurrentSz())
self.issuetime = pdc.readRandom(0).getDataTime(False).getRefTime().getTime() / 1000
fcstHrList = range(pdc.getCurrentSz())
fcstHrList.sort()
self._validTimeList = []
for f in fcstHrList:
self._validTimeList.append(self.issuetime + (f * 3600))
d = {'itime': {'value': self.issuetime,
'str': time.strftime('%d%H%MZ', time.gmtime(self.issuetime))},
'ident': {'str': ident}}
d['group'] = [self.makePeriod(pdc, n, fcstHrList) for n in range(self.numData)]
return d
def makePeriod(self, pdc, n, fcstHrList):
v = _PointDataView(pdc.readRandom(fcstHrList[n]))
try:
f, t = self._validTimeList[n:n+2]
except ValueError:
# LAMP only has 25 projections, so need to consider running out of pairs
f, t = self._validTimeList[n], self._validTimeList[n]+3600
g = {'time': {'from': f, 'to': t}}
d = self.makeWind(v, 0)
if d:
g['wind'] = d
d = self.makeVsby(v['vis_bestCat'])
if d:
g['vsby'] = d
vsby = d['value']
else:
vsby = None
d = self.makeVsby(v['cvis_bestCat'])
if d:
g['cvsby'] = d
cvsby = d['value']
else:
cvsby = None
if v['POP_hour']*100 > 40:
vsby = cvsby
d = self.makePcp(v, vsby, pdc, n, fcstHrList)
if d:
g['pcp'] = d
d = self.makeObv(v)
if d:
g['obv'] = d
        # cobv is the same as obv until 'FG' and 'BR' is switched based on visibility
g['cobv'] = d
d = self.makeSky(v['ceiling_bestCat'], v['clouds_bestCat'])
if d:
g['sky'] = d
try:
d = self.makeSky(v['c_ceiling_bestCat'], v['clouds_bestCat'])
if d:
g['csky'] = d
except:
pass
# fix visibility and obstruction to vision
if 'vsby' in g and 'obv' in g and g['obv']['str'] in ['BR', 'FG']:
vsby = g['vsby']['value']
if vsby > 6.1:
g['vsby'] = {'str': '6SM', 'value': 6.0}
if vsby < 0.6:
g['obv']['str'] = 'FG'
elif vsby <= 6.1:
g['obv']['str'] = 'BR'
# fix conditional visibility and obstruction to vision
if 'cvsby' in g and 'obv' in g and g['obv']['str'] in ['BR', 'FG']:
vsby = g['cvsby']['value']
if vsby > 6.1:
g['cvsby'] = {'str': '6SM', 'value': 6.0}
if vsby < 0.6:
g['cobv']['str'] = 'FG'
elif vsby <= 6.1:
g['cobv']['str'] = 'BR'
#
# include the probabilities
# Look ahead for the 6hr QPF POP
#
g['pop6hr'] = -1
try:
for i in range(n, 25):
if _PointDataView(pdc.readRandom(fcstHrList[i]))['PQPF_6hr'] < 100:
g['pop6hr'] = _PointDataView(pdc.readRandom(fcstHrList[i]))['PQPF_6hr']
break
except KeyError:
pass
# Probability of ceiling categories including best category
g['cprob'] = _getCigProbs(v,'ceiling_cat',8)
g['ccprob'] = _getCigProbs(v,'c_ceiling_cat',8)
try:
g['cig_bestCat'] = int(self.cigBestCat(v['ceiling_bestCat']))
            g['ccig_bestCat'] = int(self.cigBestCat(v['c_ceiling_bestCat']))
except ValueError:
pass
# Probability of visibility categories including best category
g['vprob'] = _getVisProbs(v,'vis_cat',6)
g['cvprob'] = _getVisProbs(v,'cvis_cat',6)
try:
g['vis_bestCat'] = int(self.visBestCat(v['vis_bestCat']))
g['cvis_bestCat'] = int(self.visBestCat(v['cvis_bestCat']))
except ValueError:
pass
return g
def tafPartToAAGData(tafPart, fcstType):
aagData = None
if fcstType in tafPart:
aagData = AAGData()
pcpObv = ""
if 'time' in tafPart[fcstType]:
aagData.setTimeFromSeconds(tafPart[fcstType]['time']['from'])
aagData.setTimeToSeconds(tafPart[fcstType]['time']['to'])
if 'vsby' in tafPart[fcstType]:
aagData.setVisibility(tafPart[fcstType]['vsby']['str'])
if 'wind' in tafPart[fcstType]:
aagData.setWind(tafPart[fcstType]['wind']['str'])
if 'sky' in tafPart[fcstType]:
aagData.setSky(tafPart[fcstType]['sky']['str'])
if 'pcp' in tafPart[fcstType]:
pcpObv += tafPart[fcstType]['pcp']['str']
if 'obv' in tafPart[fcstType]:
if pcpObv != "":
pcpObv += " "
pcpObv += tafPart[fcstType]['obv']['str']
if pcpObv != "":
aagData.setWeather(pcpObv)
return aagData
def getAAGData(siteID, pdc):
data = _GfsLampData().makeData(pdc, siteID)
tafParts = AAGTafGen(data).formNewDic(False)
aagDatas = []
for tafPart in tafParts:
# If not one of "prev","ocnl" then it is junk, ignore it
for fcstType in ('ocnl', 'prev'):
aagData = tafPartToAAGData(tafPart, fcstType)
if aagData:
if fcstType == 'ocnl':
aagData.setForecastType(tafPart['ocnl']['type'])
elif fcstType == 'prev':
aagData.setForecastType('FM')
aagDatas.append(aagData)
return aagDatas
|
nilq/baby-python
|
python
|
from actors.actions.action import Action
class DelayedAction(Action):
def __init__(self, action, delay_remaining=1):
self.action = action
self.delay_remaining = delay_remaining
def on(self, actor, tile, root):
delay_remaining = self.delay_remaining - 1
return root, (DelayedAction(self.action, delay_remaining) if delay_remaining > 0 else self.action)
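# A hedged usage sketch: delaying some other Action by a number of turns.
# `SomeAction`, `actor`, `tile` and `root` are placeholders, not names from this project.
#
#     >>> delayed = DelayedAction(SomeAction(), delay_remaining=2)
#     >>> root, nxt = delayed.on(actor, tile, root)   # still delayed, one turn left
#     >>> root, nxt = nxt.on(actor, tile, root)       # now returns the wrapped SomeAction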
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2019 Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG),
# acting on behalf of its Max Planck Institute for Intelligent Systems and the
# Max Planck Institute for Biological Cybernetics. All rights reserved.
#
# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is holder of all proprietary rights
# on this computer program. You can only use this computer program if you have closed a license agreement
# with MPG or you get the right to use the computer program from someone who is authorized to grant you that right.
# Any use of the computer program without a valid license is prohibited and liable to prosecution.
# Contact: ps-license@tuebingen.mpg.de
#
#
# If you use this code in a research publication please consider citing the following:
#
# Expressive Body Capture: 3D Hands, Face, and Body from a Single Image <https://arxiv.org/abs/1904.05866>
#
# Code Developed by: Nima Ghorbani <https://www.linkedin.com/in/nghorbani/>
# 2018.11.07
import cv2
import numpy as np
import os
from PIL import Image
fontColors = {'red': (255, 0, 0),
'green': (0, 255, 0),
'yellow': (255, 255, 0),
'blue': (0, 255, 255),
'orange': (255, 165, 0),
'black': (0, 0, 0),
'grey': (169, 169, 169),
'white': (255, 255, 255),
}
def crop_to_bounding_box(image, offset_height, offset_width, target_height, target_width):
cropped = image[offset_height:offset_height + target_height, offset_width:offset_width + target_width, :]
return cropped
def pad_to_bounding_box(image, offset_height, offset_width, target_height, target_width):
height, width, depth = image.shape
after_padding_width = target_width - offset_width - width
after_padding_height = target_height - offset_height - height
# Do not pad on the depth dimensions.
paddings = ((offset_height, after_padding_height), (offset_width, after_padding_width), (0, 0))
padded = np.pad(image, paddings, 'constant')
return padded
def resize_image_with_crop_or_pad(image, target_height, target_width):
# crop to ratio, center
height, width, c = image.shape
width_diff = target_width - width
offset_crop_width = max(-width_diff // 2, 0)
offset_pad_width = max(width_diff // 2, 0)
height_diff = target_height - height
offset_crop_height = max(-height_diff // 2, 0)
offset_pad_height = max(height_diff // 2, 0)
# Maybe crop if needed.
# print('image shape', image.shape)
cropped = crop_to_bounding_box(image, offset_crop_height, offset_crop_width,
min(target_height, height),
min(target_width, width))
# print('after cropp', cropped.shape)
# Maybe pad if needed.
resized = pad_to_bounding_box(cropped, offset_pad_height, offset_pad_width,
target_height, target_width)
# print('after pad', resized.shape)
return resized[:target_height, :target_width, :]
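# A quick sanity-check sketch for resize_image_with_crop_or_pad (synthetic data only):
#
#     >>> img = np.zeros((100, 300, 3), dtype=np.uint8)
#     >>> resize_image_with_crop_or_pad(img, 224, 224).shape   # wide image: crop width, pad height
#     (224, 224, 3)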
def cropout_openpose(pil_image,pose, want_image=True, crop_margin=0.08):
im_orig = cv2.cvtColor(np.array(pil_image), cv2.COLOR_RGB2BGR)
im_height, im_width = im_orig.shape[0], im_orig.shape[1]
pose = pose[pose[:, 2] > 0.0]
x_min, x_max = pose[:, 0].min(), pose[:, 0].max()
y_min, y_max = pose[:, 1].min(), pose[:, 1].max()
margin_h = crop_margin * im_height
margin_w = crop_margin * im_width
offset_height = int(max((y_min - margin_h), 0))
target_height = int(min((y_max + margin_h), im_height)) - offset_height
offset_width = int(max((x_min - margin_w), 0))
target_width = int(min((x_max + margin_w), im_width)) - offset_width
crop_info = {'crop_boundary':
{'offset_height':offset_height,
'target_height':target_height,
'offset_width':offset_width,
'target_width':target_width}}
if want_image:
crop_info['cropped_image'] = crop_to_bounding_box(im_orig, offset_height, offset_width, target_height, target_width)
return crop_info
def put_text_in_image(images, text, color ='white', position=None):
'''
:param images: 4D array of images
:param text: list of text to be printed in each image
:param color: the color or colors of each text
    :return: numpy array of the images with the text rendered on them
'''
import cv2
if not isinstance(text, list): text = [text]
if not isinstance(color, list): color = [color for _ in range(images.shape[0])]
if images.ndim == 3: images = images.reshape(1,images.shape[0],images.shape[1],3)
images_out = []
for imIdx in range(images.shape[0]):
img = images[imIdx].astype(np.uint8)
font = cv2.FONT_HERSHEY_SIMPLEX
if position is None:position = (10, img.shape[1])
fontScale = 1.
lineType = 2
fontColor = fontColors[color[imIdx]]
cv2.putText(img, text[imIdx],
position,
font,
fontScale,
fontColor,
lineType)
images_out.append(img)
return np.array(images_out)
def read_prep_image(im_fname, avoid_distortion=True):
    '''
    If max(height, width) is larger than 224, downscale so that the larger dimension becomes 224
    (this also shrinks the smaller dimension); when avoid_distortion is False, resize directly to 224x224.
    In the end crop and pad the whole image to get to 224x224.
    :param im_fname: image file path or an already-loaded numpy array
    :return: uint8 image array cropped/padded to 224x224
    '''
import cv2
if isinstance(im_fname, np.ndarray):
image_data = im_fname
else:
image_data = cv2.imread(im_fname, 3)
# height, width = image_reader.read_image_dims(sess, image_data)
# image_data = image_reader.decode_jpeg(sess, image_data)
# print(image_data.min(), image_data.max(), image_data.shape)
# import matplotlib.pyplot as plt
# plt.imshow(image_data[:,:,::-1].astype(np.uint8))
# plt.show()
# height, width = image_data.shape[0], image_data.shape[1]
# if min(height, width) > 224:
# print(image_data.shape)
# rt = 224. / min(height, width)
# image_data = cv2.resize(image_data, (int(rt * width), int(rt * height)), interpolation=cv2.INTER_AREA)
# print('>>resized to>>',image_data.shape)
height, width = image_data.shape[0], image_data.shape[1]
if avoid_distortion:
if max(height, width) > 224:
# print(image_data.shape)
rt = 224. / max(height, width)
image_data = cv2.resize(image_data, (int(rt * width), int(rt * height)), interpolation=cv2.INTER_AREA)
# print('>>resized to>>',image_data.shape)
else:
from skimage.transform import resize
image_data = resize(image_data, (224, 224), mode='constant', anti_aliasing=False, preserve_range=True)
# print(image_data.min(), image_data.max(), image_data.shape)
# import matplotlib.pyplot as plt
# plt.imshow(image_data[:,:,::-1].astype(np.uint8))
# plt.show()
image_data = resize_image_with_crop_or_pad(image_data, 224, 224)
# print(image_data.min(), image_data.max(), image_data.shape)
# import matplotlib.pyplot as plt
# plt.imshow(image_data[:, :, ::-1].astype(np.uint8))
# plt.show()
#return image_data.astype(np.float32)
return image_data.astype(np.uint8)
def save_images(images, out_dir, im_names = None):
from homogenus.tools.omni_tools import id_generator
if images.ndim == 3: images = images.reshape(1,images.shape[0],images.shape[1],3)
from PIL import Image
if im_names is None:
im_names = ['%s.jpg'%id_generator(4) for i in range(images.shape[0])]
for imIdx in range(images.shape[0]):
result = Image.fromarray(images[imIdx].astype(np.uint8))
result.save(os.path.join(out_dir, im_names[imIdx]))
return True
|
nilq/baby-python
|
python
|
#!/usr/bin/env python
"""
Script to calculate the mean and std
Usage:
./scripts/cal_deepfashion_ds_meanstd.py
"""
import os.path
import sys
cur_path = os.path.realpath(__file__)
cur_dir = os.path.dirname(cur_path)
parent_dir = cur_dir[:cur_dir.rfind(os.path.sep)]
sys.path.insert(0, parent_dir)
# --------------------------------------------
from utils.datasets import DeepFashionDataset
from utils.preprocessing import StandardScaler
from torchvision.transforms import ToTensor
from torchvision.transforms import Compose
from torchvision.transforms import Resize
from torchvision.transforms import Normalize
from torch.utils.data import DataLoader
import torch
from tqdm import tqdm
if __name__ == "__main__":
deep_fashion_root_dir = "./deepfashion_data"
trans = Compose([
Resize((224, 224)),
ToTensor(),
# Normalize([0.7464, 0.7155, 0.7043], [0.2606, 0.2716, 0.2744]), # For check against
])
train_ds = DeepFashionDataset(
deep_fashion_root_dir, 'train', transform=trans)
loader = DataLoader(train_ds, batch_size=200, num_workers=2)
scalar = StandardScaler()
for imgs, _ in tqdm(loader):
scalar.partial_fit(imgs)
print("--------------------")
print(scalar._mean)
print(scalar._var)
print(scalar._std)
print("--------------------")
|
nilq/baby-python
|
python
|
'''
This function returns the first longest word from the input string
'''
def LongestWord(sen):
    max_len = 0
    longest_word = ""
    st = ""
    # replace every non-alphanumeric character with a space
    for c in sen:
        if c.isalnum():
            st = st + c
        else:
            st = st + " "
    words = st.split(" ")
    for word in words:
        if len(word) > max_len:
            max_len = len(word)
            longest_word = word
    return longest_word
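# Example: LongestWord("fun&!! time") returns "time"; on a tie, the first longest word wins.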
print(LongestWord(input("Please enter a string: \n")))
|
nilq/baby-python
|
python
|
from django.urls import NoReverseMatch, reverse
from django.utils.html import format_html
from django.utils.translation import ugettext as _
from django.utils.translation import ugettext_noop
from django.views import View
from memoized import memoized
from dimagi.utils.parsing import string_to_utc_datetime
from dimagi.utils.web import json_response
from corehq import toggles
from corehq.apps.reports.analytics.esaccessors import get_paged_forms_by_type
from corehq.apps.reports.datatables import DataTablesColumn, DataTablesHeader
from corehq.apps.reports.display import xmlns_to_name
from corehq.apps.reports.standard.deployments import DeploymentsReport
from corehq.apps.reports.standard.forms.filters import SubmissionTypeFilter
from corehq.apps.users.util import cached_user_id_to_username
from corehq.const import SERVER_DATETIME_FORMAT
from corehq.form_processor.reprocess import ReprocessingError
from corehq.util import cmp
from corehq.util.timezones.conversions import ServerTime
def _compare_submissions(x, y):
# these are backwards because we want most recent to come first
return cmp(y.received_on, x.received_on)
class SubmissionErrorReport(DeploymentsReport):
name = ugettext_noop("Raw Forms, Errors & Duplicates")
slug = "submit_errors"
ajax_pagination = True
asynchronous = False
base_template = 'reports/standard/submission_error_report.html'
fields = ['corehq.apps.reports.standard.forms.filters.SubmissionTypeFilter']
@property
@memoized
def headers(self):
headers = DataTablesHeader(DataTablesColumn(_("View Form"), sortable=False),
DataTablesColumn(_("Username"), prop_name="username"),
DataTablesColumn(_("Submit Time"), prop_name="received_on"),
DataTablesColumn(_("Form Type"), sortable=False),
DataTablesColumn(_("Error Type"), sortable=False),
DataTablesColumn(_("Error Message"), sortable=False))
if self.support_toggle_enabled:
headers.add_column(DataTablesColumn(_("Re-process Form")))
headers.custom_sort = [[2, "desc"]]
return headers
_submitfilter = None
@property
def submitfilter(self):
if self._submitfilter is None:
self._submitfilter = SubmissionTypeFilter.get_filter_toggle(self.request)
return self._submitfilter
@property
def sort_params(self):
sort_col_idx = int(self.request.GET['iSortCol_0'])
col = self.headers.header[sort_col_idx]
sort_prop = hasattr(col, "prop_name") and col.prop_name
desc = self.request.GET.get('sSortDir_0') == 'desc'
return sort_prop, desc
@property
@memoized
def paged_result(self):
doc_types = [filter_.doc_type for filter_ in [filter_ for filter_ in self.submitfilter if filter_.show]]
sort_col, desc = self.sort_params
return get_paged_forms_by_type(
self.domain,
doc_types,
sort_col=sort_col,
desc=desc,
start=self.pagination.start,
size=self.pagination.count,
)
@property
def shared_pagination_GET_params(self):
shared_params = super(SubmissionErrorReport, self).shared_pagination_GET_params
shared_params.append(dict(
name=SubmissionTypeFilter.slug,
value=[f.type for f in self.submitfilter if f.show]
))
return shared_params
@property
def total_records(self):
return self.paged_result.total
@property
def support_toggle_enabled(self):
return toggles.SUPPORT.enabled_for_request(self.request)
    def _make_reprocess_button(self, xform_dict):
if not xform_dict['doc_type'] == 'XFormError':
return ''
return '''
<button
class="btn btn-default reprocess-error"
data-form-id={}>
Re-process Form
</button>
'''.format(xform_dict['_id'])
@property
def rows(self):
EMPTY_ERROR = _("No Error")
EMPTY_USER = _("No User")
EMPTY_FORM = _("Unknown Form")
def _to_row(xform_dict):
def _fmt_url(doc_id):
if xform_dict['doc_type'] in [
"XFormInstance",
"XFormArchived",
"XFormError",
"XFormDeprecated"]:
view_name = 'render_form_data'
else:
view_name = 'download_form'
try:
return format_html(
"<a class='ajax_dialog' href='{url}'>{text}</a>",
url=reverse(view_name, args=[self.domain, doc_id]),
text=_("View Form")
)
except NoReverseMatch:
return 'unable to view form'
def _fmt_date(somedate):
time = ServerTime(somedate).user_time(self.timezone).done()
return time.strftime(SERVER_DATETIME_FORMAT)
if xform_dict['form'].get('meta'):
form_name = xmlns_to_name(
self.domain,
xform_dict.get('xmlns'),
app_id=xform_dict.get('app_id'),
)
form_username = xform_dict['form']['meta'].get('username', EMPTY_USER)
else:
form_name = EMPTY_FORM
form_username = EMPTY_USER
error_type = SubmissionTypeFilter.display_name_by_doc_type(xform_dict['doc_type'])
if xform_dict['doc_type'] == "XFormArchived":
archive_operations = [operation for operation in xform_dict.get('history')
if operation.get('operation') == 'archive']
if archive_operations:
error_type = _("{username} {archived_form} on {date}").format(
username=cached_user_id_to_username(archive_operations[-1].get('user')) or "",
archived_form=SubmissionTypeFilter.display_name_by_doc_type(xform_dict['doc_type']),
date=_fmt_date(string_to_utc_datetime(archive_operations[-1].get('date'))),
)
return [
_fmt_url(xform_dict['_id']),
form_username,
_fmt_date(string_to_utc_datetime(xform_dict['received_on'])),
form_name,
error_type,
xform_dict.get('problem', EMPTY_ERROR),
                self._make_reprocess_button(xform_dict) if self.support_toggle_enabled else '',
]
return [_to_row(xform_dict) for xform_dict in self.paged_result.hits]
class ReprocessXFormErrorView(View):
urlname = 'reprocess_xform_errors'
http_method_names = ['post']
def post(self, request, domain):
from corehq.form_processor.reprocess import reprocess_xform_error_by_id
form_id = request.POST['form_id']
if not form_id:
return json_response({
'success': False,
'failure_reason': 'Missing "form_id"'
})
try:
reprocess_xform_error_by_id(form_id, domain=domain)
except ReprocessingError as e:
return json_response({
'success': False,
'failure_reason': str(e),
})
else:
return json_response({
'success': True,
})
|
nilq/baby-python
|
python
|
"""Base class for module overlays."""
from pytype import datatypes
from pytype.abstract import abstract
class Overlay(abstract.Module):
"""A layer between pytype and a module's pytd definition.
An overlay pretends to be a module, but provides members that generate extra
typing information that cannot be expressed in a pytd file. For example,
collections.namedtuple is a factory method that generates class definitions
at runtime. An overlay is needed for Pytype to generate these classes.
An Overlay will typically import its underlying module in its __init__, e.g.
by calling ctx.loader.import_name(). Due to this, Overlays should only be used
when their underlying module is imported by the Python script being analyzed!
A subclass of Overlay should have an __init__ with the signature:
def __init__(self, ctx)
Attributes:
real_module: An abstract.Module wrapping the AST for the underlying module.
"""
def __init__(self, ctx, name, member_map, ast):
"""Initialize the overlay.
Args:
ctx: Instance of context.Context.
name: A string containing the name of the underlying module.
member_map: Dict of str to abstract.BaseValues that provide type
information not available in the underlying module.
      ast: A pytd.TypeDeclUnit containing the AST for the underlying module.
Used to access type information for members of the module that are not
explicitly provided by the overlay.
"""
super().__init__(ctx, name, member_map, ast)
self.real_module = ctx.convert.constant_to_value(
ast, subst=datatypes.AliasingDict(), node=ctx.root_node)
def _convert_member(self, member, subst=None):
val = member(self.ctx)
val.module = self.name
return val.to_variable(self.ctx.root_node)
def get_module(self, name):
"""Returns the abstract.Module for the given name."""
if name in self._member_map:
return self
else:
return self.real_module
def items(self):
items = super().items()
items += [(name, item) for name, item in self.real_module.items()
if name not in self._member_map]
return items
def build(name, builder):
"""Wrapper to turn (name, ctx) -> val method signatures into (ctx) -> val."""
return lambda ctx: builder(name, ctx)
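# A hedged sketch of how `build` is typically used when assembling a member_map for an
# Overlay subclass; `namedtuple` and `NamedTupleBuilder` here are illustrative names only.
#
# member_map = {
#     "namedtuple": build("namedtuple", NamedTupleBuilder),
# }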
|
nilq/baby-python
|
python
|
import cplex
import numpy as np
names = ["x11", "x12", "x13", "x14",
"x21", "x22", "x23", "x24",
"x31", "x32", "x33", "x34",
"y11", "y12", "y13", "y14",
"y21", "y22", "y23", "y24",
"y31", "y32", "y33", "y34"]
T = np.array([[3.0, 2.0, 2.0, 1.0],
[4.0, 3.0, 3.0, 2.0],
[5.0, 5.0, 4.0, 2.0]])
Q = np.array([50.0, 30.0, 20.0])
N = np.array([5.0, 8.0, 10.0])
D = np.array([700.0, 1500.0, 700.0, 1500.0])
C = np.array([[1000.0, 1100.0, 1200.0, 1500.0],
[800.0, 900.0, 1000.0, 1000.0],
[600.0, 800.0, 800.0, 900.0]])
P = np.array([40.0, 50.0, 45.0, 70.0])
# Example with deep branching:
# T = np.array([[3.0, 2.0, 2.0, 7.0],
# [4.0, 3.0, 1.0, 2.0],
# [7.0, 2.0, 4.0, 2.0]])
# Q = np.array([52.0, 29.0, 13.0])
# N = np.array([5.0, 8.0, 10.0])
# D = np.array([530.0, 1720.0, 780.0, 1530.0])
#
# C = np.array([[1000.0, 1100.0, 1200.0, 1500.0],
# [800.0, 900.0, 1000.0, 1000.0],
# [600.0, 800.0, 800.0, 900.0]])
# P = np.array([42.0, 39.0, 45.0, 69.0])
Z = np.array([Q * T[:, i] * P[i] for i in range(4)]).T
objective = list(np.array([Z, -T * C]).flatten())
lower_bounds = [0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0]
upper_bounds = [cplex.infinity, cplex.infinity, cplex.infinity, cplex.infinity,
cplex.infinity, cplex.infinity, cplex.infinity, cplex.infinity,
cplex.infinity, cplex.infinity, cplex.infinity, cplex.infinity,
cplex.infinity, cplex.infinity, cplex.infinity, cplex.infinity,
cplex.infinity, cplex.infinity, cplex.infinity, cplex.infinity,
cplex.infinity, cplex.infinity, cplex.infinity, cplex.infinity]
constraint_names = ["n1", "n2", "n3",
"d1", "d2", "d3", "d4",
"xy11", "xy12", "xy13", "xy14",
"xy21", "xy22", "xy23", "xy24",
"xy31", "xy32", "xy33", "xy34"]
constraint_n1 = [["y11", "y12", "y13", "y14"], [1.0, 1.0, 1.0, 1.0]]
constraint_n2 = [["y21", "y22", "y23", "y24"], [1.0, 1.0, 1.0, 1.0]]
constraint_n3 = [["y31", "y32", "y33", "y34"], [1.0, 1.0, 1.0, 1.0]]
constraint_d1 = [["x11", "x21", "x31"], [Q[0] * T[0][0], Q[1] * T[1][0], Q[2] * T[2][0]]]
constraint_d2 = [["x12", "x22", "x32"], [Q[0] * T[0][1], Q[1] * T[1][1], Q[2] * T[2][1]]]
constraint_d3 = [["x13", "x23", "x33"], [Q[0] * T[0][2], Q[1] * T[1][2], Q[2] * T[2][2]]]
constraint_d4 = [["x14", "x24", "x34"], [Q[0] * T[0][3], Q[1] * T[1][3], Q[2] * T[2][3]]]
constraint_xy11 = [["x11", "y11"], [1.0, -1.0]]
constraint_xy12 = [["x12", "y12"], [1.0, -1.0]]
constraint_xy13 = [["x13", "y13"], [1.0, -1.0]]
constraint_xy14 = [["x14", "y14"], [1.0, -1.0]]
constraint_xy21 = [["x21", "y21"], [1.0, -1.0]]
constraint_xy22 = [["x22", "y22"], [1.0, -1.0]]
constraint_xy23 = [["x23", "y23"], [1.0, -1.0]]
constraint_xy24 = [["x24", "y24"], [1.0, -1.0]]
constraint_xy31 = [["x31", "y31"], [1.0, -1.0]]
constraint_xy32 = [["x32", "y32"], [1.0, -1.0]]
constraint_xy33 = [["x33", "y33"], [1.0, -1.0]]
constraint_xy34 = [["x34", "y34"], [1.0, -1.0]]
constraints = [constraint_n1, constraint_n2, constraint_n3,
constraint_d1, constraint_d2, constraint_d3, constraint_d4,
constraint_xy11, constraint_xy12, constraint_xy13, constraint_xy14,
constraint_xy21, constraint_xy22, constraint_xy23, constraint_xy24,
constraint_xy31, constraint_xy32, constraint_xy33, constraint_xy34]
rhs = [N[0], N[1], N[2], # for constraint_n#
D[0], D[1], D[2], D[3], # for constraint_d#
0.0, 0.0, 0.0, 0.0, # for constraint_xy##
0.0, 0.0, 0.0, 0.0, # for constraint_xy##
0.0, 0.0, 0.0, 0.0 # for constraint_xy##
]
constraint_senses = ["L", "L", "L",
"L", "L", "L", "L",
"L", "L", "L", "L",
"L", "L", "L", "L",
"L", "L", "L", "L"]
|
nilq/baby-python
|
python
|
"""Main entry point for the pixelation tool."""
import sys
from .constants import SUCCESS
from .core import PixelArt
from .parser import build_parser, parse_args
def main() -> int:
"""Parses the command line arguments and runs the tool."""
arg_parser = build_parser()
args = parse_args(arg_parser)
pixel_art = PixelArt(args['filename'],
args['granularity'],
args['ncolors'],
args['nbits'],
args['color_space'],
args['verbose'])
pixelated_image = pixel_art.pixelate()
pixelated_image.show()
if args['save']:
if args['verbose']:
print("Saving to " + args['filename'].split(".")[0] + "_pixelated.png ...")
pixelated_image.save(args['filename'].split(".")[0] + "_pixelated.png")
if args['verbose']:
print('Done')
return SUCCESS
if __name__ == "__main__":
sys.exit(main())
|
nilq/baby-python
|
python
|
"""Classes for defining instructions."""
from __future__ import absolute_import
from . import camel_case
from .types import ValueType
from .operands import Operand
from .formats import InstructionFormat
try:
from typing import Union, Sequence, List, Tuple, Any, TYPE_CHECKING # noqa
from typing import Dict # noqa
if TYPE_CHECKING:
from .ast import Expr, Apply, Var, Def, VarAtomMap # noqa
from .typevar import TypeVar # noqa
from .ti import TypeConstraint # noqa
from .xform import XForm, Rtl
# List of operands for ins/outs:
OpList = Union[Sequence[Operand], Operand]
ConstrList = Union[Sequence[TypeConstraint], TypeConstraint]
MaybeBoundInst = Union['Instruction', 'BoundInstruction']
InstructionSemantics = Sequence[XForm]
SemDefCase = Union[Rtl, Tuple[Rtl, Sequence[TypeConstraint]], XForm]
except ImportError:
pass
class InstructionGroup(object):
"""
Every instruction must belong to exactly one instruction group. A given
target architecture can support instructions from multiple groups, and it
does not necessarily support all instructions in a group.
New instructions are automatically added to the currently open instruction
group.
"""
# The currently open instruction group.
_current = None # type: InstructionGroup
def open(self):
# type: () -> None
"""
Open this instruction group such that future new instructions are
added to this group.
"""
assert InstructionGroup._current is None, (
"Can't open {} since {} is already open"
.format(self, InstructionGroup._current))
InstructionGroup._current = self
def close(self):
# type: () -> None
"""
Close this instruction group. This function should be called before
opening another instruction group.
"""
        assert InstructionGroup._current is self, (
            "Can't close {}, the open instruction group is {}"
            .format(self, InstructionGroup._current))
InstructionGroup._current = None
def __init__(self, name, doc):
# type: (str, str) -> None
self.name = name
self.__doc__ = doc
self.instructions = [] # type: List[Instruction]
self.open()
@staticmethod
def append(inst):
# type: (Instruction) -> None
assert InstructionGroup._current, \
"Open an instruction group before defining instructions."
InstructionGroup._current.instructions.append(inst)
class Instruction(object):
"""
The operands to the instruction are specified as two tuples: ``ins`` and
``outs``. Since the Python singleton tuple syntax is a bit awkward, it is
allowed to specify a singleton as just the operand itself, i.e., `ins=x`
and `ins=(x,)` are both allowed and mean the same thing.
:param name: Instruction mnemonic, also becomes opcode name.
:param doc: Documentation string.
:param ins: Tuple of input operands. This can be a mix of SSA value
operands and other operand kinds.
:param outs: Tuple of output operands. The output operands must be SSA
values or `variable_args`.
:param constraints: Tuple of instruction-specific TypeConstraints.
:param is_terminator: This is a terminator instruction.
:param is_branch: This is a branch instruction.
:param is_indirect_branch: This is an indirect branch instruction.
:param is_call: This is a call instruction.
:param is_return: This is a return instruction.
:param is_ghost: This is a ghost instruction, which has no encoding and no
other register allocation constraints.
:param can_trap: This instruction can trap.
:param can_load: This instruction can load from memory.
:param can_store: This instruction can store to memory.
:param other_side_effects: Instruction has other side effects.
"""
# Boolean instruction attributes that can be passed as keyword arguments to
# the constructor. Map attribute name to doc comment for generated Rust
# code.
ATTRIBS = {
'is_terminator': 'True for instructions that terminate the EBB.',
'is_branch': 'True for all branch or jump instructions.',
'is_indirect_branch':
'True for all indirect branch or jump instructions.',
'is_call': 'Is this a call instruction?',
'is_return': 'Is this a return instruction?',
'is_ghost': 'Is this a ghost instruction?',
'can_load': 'Can this instruction read from memory?',
'can_store': 'Can this instruction write to memory?',
'can_trap': 'Can this instruction cause a trap?',
'other_side_effects':
'Does this instruction have other side effects besides can_*',
'writes_cpu_flags': 'Does this instruction write to CPU flags?',
}
def __init__(self, name, doc, ins=(), outs=(), constraints=(), **kwargs):
# type: (str, str, OpList, OpList, ConstrList, **Any) -> None
self.name = name
self.camel_name = camel_case(name)
self.__doc__ = doc
self.ins = self._to_operand_tuple(ins)
self.outs = self._to_operand_tuple(outs)
self.constraints = self._to_constraint_tuple(constraints)
self.format = InstructionFormat.lookup(self.ins, self.outs)
self.semantics = None # type: InstructionSemantics
# Opcode number, assigned by gen_instr.py.
self.number = None # type: int
# Indexes into `self.outs` for value results.
# Other results are `variable_args`.
self.value_results = tuple(
i for i, o in enumerate(self.outs) if o.is_value())
# Indexes into `self.ins` for value operands.
self.value_opnums = tuple(
i for i, o in enumerate(self.ins) if o.is_value())
# Indexes into `self.ins` for non-value operands.
self.imm_opnums = tuple(
i for i, o in enumerate(self.ins) if o.is_immediate())
self._verify_polymorphic()
for attr in kwargs:
if attr not in Instruction.ATTRIBS:
raise AssertionError(
"unknown instruction attribute '" + attr + "'")
for attr in Instruction.ATTRIBS:
            setattr(self, attr, bool(kwargs.get(attr, False)))
# Infer the 'writes_cpu_flags' field value.
if 'writes_cpu_flags' not in kwargs:
self.writes_cpu_flags = any(
out.is_cpu_flags() for out in self.outs)
InstructionGroup.append(self)
def __str__(self):
# type: () -> str
prefix = ', '.join(o.name for o in self.outs)
if prefix:
prefix = prefix + ' = '
suffix = ', '.join(o.name for o in self.ins)
return '{}{} {}'.format(prefix, self.name, suffix)
def snake_name(self):
# type: () -> str
"""
Get the snake_case name of this instruction.
Keywords in Rust and Python are altered by appending a '_'
"""
if self.name == 'return':
return 'return_'
else:
return self.name
def blurb(self):
# type: () -> str
"""Get the first line of the doc comment"""
for line in self.__doc__.split('\n'):
line = line.strip()
if line:
return line
return ""
def _verify_polymorphic(self):
# type: () -> None
"""
Check if this instruction is polymorphic, and verify its use of type
variables.
"""
poly_ins = [
i for i in self.value_opnums
if self.ins[i].typevar.free_typevar()]
poly_outs = [
i for i, o in enumerate(self.outs)
if o.is_value() and o.typevar.free_typevar()]
self.is_polymorphic = len(poly_ins) > 0 or len(poly_outs) > 0
if not self.is_polymorphic:
return
# Prefer to use the typevar_operand to infer the controlling typevar.
self.use_typevar_operand = False
typevar_error = None
tv_op = self.format.typevar_operand
if tv_op is not None and tv_op < len(self.value_opnums):
try:
opnum = self.value_opnums[tv_op]
tv = self.ins[opnum].typevar
if tv is tv.free_typevar() or tv.singleton_type() is not None:
self.other_typevars = self._verify_ctrl_typevar(tv)
self.ctrl_typevar = tv
self.use_typevar_operand = True
except RuntimeError as e:
typevar_error = e
if not self.use_typevar_operand:
# The typevar_operand argument doesn't work. Can we infer from the
# first result instead?
if len(self.outs) == 0:
if typevar_error:
raise typevar_error
else:
raise RuntimeError(
"typevar_operand must be a free type variable")
tv = self.outs[0].typevar
if tv is not tv.free_typevar():
raise RuntimeError("first result must be a free type variable")
self.other_typevars = self._verify_ctrl_typevar(tv)
self.ctrl_typevar = tv
def _verify_ctrl_typevar(self, ctrl_typevar):
# type: (TypeVar) -> List[TypeVar]
"""
Verify that the use of TypeVars is consistent with `ctrl_typevar` as
the controlling type variable.
        All polymorphic inputs must either be derived from `ctrl_typevar` or be
independent free type variables only used once.
All polymorphic results must be derived from `ctrl_typevar`.
Return list of other type variables used, or raise an error.
"""
other_tvs = [] # type: List[TypeVar]
# Check value inputs.
for opnum in self.value_opnums:
typ = self.ins[opnum].typevar
tv = typ.free_typevar()
            # Non-polymorphic or derived from ctrl_typevar is OK.
if tv is None or tv is ctrl_typevar:
continue
# No other derived typevars allowed.
if typ is not tv:
raise RuntimeError(
"{}: type variable {} must be derived from {}"
.format(self.ins[opnum], typ.name, ctrl_typevar))
# Other free type variables can only be used once each.
if tv in other_tvs:
raise RuntimeError(
"type variable {} can't be used more than once"
.format(tv.name))
other_tvs.append(tv)
# Check outputs.
for result in self.outs:
if not result.is_value():
continue
typ = result.typevar
tv = typ.free_typevar()
# Non-polymorphic or derived from ctrl_typevar is OK.
if tv is None or tv is ctrl_typevar:
continue
raise RuntimeError(
"type variable in output not derived from ctrl_typevar")
return other_tvs
def all_typevars(self):
# type: () -> List[TypeVar]
"""
Get a list of all type variables in the instruction.
"""
if self.is_polymorphic:
return [self.ctrl_typevar] + self.other_typevars
else:
return []
@staticmethod
def _to_operand_tuple(x):
# type: (Union[Sequence[Operand], Operand]) -> Tuple[Operand, ...]
# Allow a single Operand instance instead of the awkward singleton
# tuple syntax.
if isinstance(x, Operand):
y = (x,) # type: Tuple[Operand, ...]
else:
y = tuple(x)
for op in y:
assert isinstance(op, Operand)
return y
@staticmethod
def _to_constraint_tuple(x):
# type: (ConstrList) -> Tuple[TypeConstraint, ...]
"""
Allow a single TypeConstraint instance instead of the awkward singleton
tuple syntax.
"""
# import placed here to avoid circular dependency
from .ti import TypeConstraint # noqa
if isinstance(x, TypeConstraint):
y = (x,) # type: Tuple[TypeConstraint, ...]
else:
y = tuple(x)
for op in y:
assert isinstance(op, TypeConstraint)
return y
def bind(self, *args):
# type: (*ValueType) -> BoundInstruction
"""
Bind a polymorphic instruction to a concrete list of type variable
values.
"""
assert self.is_polymorphic
return BoundInstruction(self, args)
def __getattr__(self, name):
# type: (str) -> BoundInstruction
"""
Bind a polymorphic instruction to a single type variable with dot
syntax:
>>> iadd.i32
"""
assert name != 'any', 'Wildcard not allowed for ctrl_typevar'
return self.bind(ValueType.by_name(name))
def fully_bound(self):
# type: () -> Tuple[Instruction, Tuple[ValueType, ...]]
"""
Verify that all typevars have been bound, and return a
`(inst, typevars)` pair.
This version in `Instruction` itself allows non-polymorphic
instructions to duck-type as `BoundInstruction`\\s.
"""
assert not self.is_polymorphic, self
return (self, ())
def __call__(self, *args):
# type: (*Expr) -> Apply
"""
Create an `ast.Apply` AST node representing the application of this
instruction to the arguments.
"""
from .ast import Apply # noqa
return Apply(self, args)
def set_semantics(self, src, *dsts):
# type: (Union[Def, Apply], *SemDefCase) -> None
"""Set our semantics."""
from semantics import verify_semantics
from .xform import XForm, Rtl
sem = [] # type: List[XForm]
for dst in dsts:
if isinstance(dst, Rtl):
sem.append(XForm(Rtl(src).copy({}), dst))
elif isinstance(dst, XForm):
sem.append(XForm(
dst.src.copy({}),
dst.dst.copy({}),
dst.constraints))
else:
assert isinstance(dst, tuple)
sem.append(XForm(Rtl(src).copy({}), dst[0],
constraints=dst[1]))
verify_semantics(self, Rtl(src), sem)
self.semantics = sem
class BoundInstruction(object):
"""
A polymorphic `Instruction` bound to concrete type variables.
"""
def __init__(self, inst, typevars):
# type: (Instruction, Tuple[ValueType, ...]) -> None
self.inst = inst
self.typevars = typevars
assert len(typevars) <= 1 + len(inst.other_typevars)
def __str__(self):
# type: () -> str
return '.'.join([self.inst.name, ] + list(map(str, self.typevars)))
def bind(self, *args):
# type: (*ValueType) -> BoundInstruction
"""
Bind additional typevars.
"""
return BoundInstruction(self.inst, self.typevars + args)
def __getattr__(self, name):
# type: (str) -> BoundInstruction
"""
        Bind an additional typevar with dot syntax:
>>> uext.i32.i8
"""
if name == 'any':
# This is a wild card bind represented as a None type variable.
return self.bind(None)
return self.bind(ValueType.by_name(name))
def fully_bound(self):
# type: () -> Tuple[Instruction, Tuple[ValueType, ...]]
"""
Verify that all typevars have been bound, and return a
`(inst, typevars)` pair.
"""
if len(self.typevars) < 1 + len(self.inst.other_typevars):
unb = ', '.join(
str(tv) for tv in
self.inst.other_typevars[len(self.typevars) - 1:])
raise AssertionError("Unbound typevar {} in {}".format(unb, self))
assert len(self.typevars) == 1 + len(self.inst.other_typevars)
return (self.inst, self.typevars)
def __call__(self, *args):
# type: (*Expr) -> Apply
"""
Create an `ast.Apply` AST node representing the application of this
instruction to the arguments.
"""
from .ast import Apply # noqa
return Apply(self, args)
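# A minimal usage sketch (names below are hypothetical; real operands, type
# variables and instruction groups live in the ISA definition files, not in
# this module):
#   GRP = InstructionGroup('example', 'Example instructions')
#   x = Operand('x', SOME_TYPEVAR)
#   y = Operand('y', SOME_TYPEVAR)
#   a = Operand('a', SOME_TYPEVAR)
#   iadd_ex = Instruction('iadd_ex', 'Wrapping integer add.', ins=(x, y), outs=a)
#   GRP.close()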
|
nilq/baby-python
|
python
|
#!/usr/bin/env python3
"""
Abstract base class for data Readers.
"""
import sys
sys.path.append('.')
from logger.utils import formats
################################################################################
class Reader:
"""
Base class Reader about which we know nothing else. By default the
output format is Unknown unless overridden.
"""
def __init__(self, output_format=formats.Unknown):
self.output_format(output_format)
############################
def output_format(self, new_format=None):
"""
Return our output format or set a new output format
"""
if new_format is not None:
if not formats.is_format(new_format):
raise TypeError('Argument "%s" is not a known format type' % new_format)
self.out_format = new_format
return self.out_format
############################
def read(self):
"""
read() should return None when there are no more records.
"""
raise NotImplementedError('Class %s (subclass of Reader) is missing '
'implementation of read() method.'
% self.__class__.__name__)
################################################################################
class StorageReader(Reader):
"""
A StorageReader is something like a file, where we can, in theory,
seek and rewind, or retrieve a range of records.
"""
def __init__(self, output_format=formats.Unknown):
super().__init__(output_format=output_format)
pass
# Behavior is intended to mimic file seek() behavior but with
# respect to records: 'offset' means number of records, and origin
# is either 'start', 'current' or 'end'.
def seek(self, offset=0, origin='current'):
raise NotImplementedError('Class %s (subclass of StorageReader) is missing '
'implementation of seek() method.'
% self.__class__.__name__)
############################
def read_range(self, start=None, stop=None):
"""
Read a range of records beginning with record number start, and ending
*before* record number stop.
"""
raise NotImplementedError('Class %s (subclass of StorageReader) is missing '
'implementation of read_range() method.'
% self.__class__.__name__)
################################################################################
class TimestampedReader(StorageReader):
"""
A TimestampedReader is a special case of a StorageReader where we
can seek and retrieve a range specified by timestamps.
"""
def __init__(self, output_format=formats.Unknown):
super().__init__(output_format=output_format)
pass
# Behavior is intended to mimic file seek() behavior but with
# respect to timestamps: 'offset' means number of milliseconds, and
# origin is either 'start', 'current' or 'end'.
def seek_time(self, offset=0, origin='current'):
raise NotImplementedError('Class %s (subclass of TimestampedReader) is missing '
'implementation of seek_time() method.'
% self.__class__.__name__)
# Read a range of records beginning with timestamp start
# milliseconds, and ending *before* timestamp stop milliseconds.
def read_time_range(self, start=None, stop=None):
raise NotImplementedError('Class %s (subclass of TimestampedReader) is missing '
                                  'implementation of read_time_range() method.'
% self.__class__.__name__)
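################################################################################
# A minimal illustrative subclass (a sketch for demonstration only, not part of
# the original reader hierarchy); it follows the read()-returns-None-at-end
# contract described above.
class ListReader(Reader):
    """
    Reader that serves records from an in-memory list; mainly useful for tests.
    """
    def __init__(self, records=None, output_format=formats.Unknown):
        super().__init__(output_format=output_format)
        self.records = list(records or [])

    ############################
    def read(self):
        """Return the next record, or None when the list is exhausted."""
        return self.records.pop(0) if self.records else None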
|
nilq/baby-python
|
python
|
import os
import shutil
from codecs import open
from os import path
from setuptools import setup, Command
here = path.abspath(path.dirname(__file__))
name = 'sqe'
version = '0.1.0'
class CleanCommand(Command):
description = "custom clean command that forcefully removes dist and build directories"
user_options = []
def initialize_options(self):
self.cwd = None
def finalize_options(self):
self.cwd = os.getcwd()
def run(self):
if path.exists(path.join(here, 'build')):
shutil.rmtree(path.join(here, 'build'))
if path.exists(path.join(here, 'dist')):
shutil.rmtree(path.join(here, 'dist'))
if path.exists(path.join(here, name.replace('-', '_') + '.egg-info')):
shutil.rmtree(path.join(here, name.replace('-', '_') + '.egg-info'))
# Get the long description from the README file
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
with open(path.join(here, 'requirements.txt'), encoding='utf-8') as f:
data = f.readlines()
requires = data
setup(
name=name,
version=version, # Required
    description='SonarQube Exporter',
long_description=long_description,
long_description_content_type='text/markdown',
url='https://github.com/nthienan/sonarqube-exporter',
author='An Nguyen',
author_email='nthienan.it@gmail.com',
license='MIT',
classifiers=[ # Optional
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Topic :: Software Development :: DevOps',
"License :: OSI Approved :: MIT License",
# Specify the Python versions you support here. In particular, ensure
# that you indicate whether you support Python 2, Python 3 or both.
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.6',
],
keywords='sonarqube-exporter, sonarqube, exporter, sqe, devops',
# You can just specify package directories manually here if your project is
# simple. Or you can use find_packages().
#
# Alternatively, if you just want to distribute a single Python file, use
# the `py_modules` argument instead as follows, which will expect a file
# called `my_module.py` to exist:
#
# py_modules=["my_module"],
#
packages=['sqe'],
package_dir={'sqe': 'src'},
# scripts=['src/jae'],
# This field lists other packages that your project depends on to run.
# Any package you put here will be installed by pip when your project is
# installed, so they must be valid existing projects.
#
# For an analysis of "install_requires" vs pip's requirements files see:
# https://packaging.python.org/en/latest/requirements.html
install_requires=requires,
# To provide executable scripts, use entry points in preference to the
# "scripts" keyword. Entry points provide cross-platform support and allow
# `pip` to create the appropriate form of executable for the target
# platform.
#
# For example, the following would provide a command called `sample` which
# executes the function `main` from this package when invoked:
# entry_points={ # Optional
# 'console_scripts': [
# 'sample=sample:main',
# ],
# },
entry_points={
'console_scripts': [
'sqe=sqe.app:main'
],
},
project_urls={
'Bug Reports': 'https://github.com/nthienan/sonarqube-exporter/issues',
'Source': 'https://github.com/nthienan/sonarqube-exporter',
},
cmdclass={
'clean': CleanCommand
},
)
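# Typical workflow with this setup.py (standard setuptools commands; the custom
# 'clean' command registered above removes the build/, dist/ and egg-info
# directories):
#   python setup.py sdist bdist_wheel
#   python setup.py clean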
|
nilq/baby-python
|
python
|
from datetime import date, datetime, timedelta
from django.test import TestCase
from projects.models import Project, Invoice
from inspectors.models import Inspector
class ProjectModelTest(TestCase):
def setUp(self):
date_1 = date.today()
date_2 = date_1 + timedelta(days=10)
p1 = Project.objects.create(
prudent_number = '103.219',
penndot_number = 'E01993',
name = 'CP2',
inspector = [Inspector.objects.create(
id=2,
first_name = "bard",
last_name = "Test",
office = "King of Prussia",
classification = "TCI-2",
address ="123 kitty lane",
location = "Pittsburgh, PA",
work_radius = 95,
email = "bardtest@prudenteng.com",
phone_number = "3022993322"
            )],
office = 'King of Prussia',
start_date = date_1,
end_date = date_2,
st_hours = 300,
ot_hours = 25,
payroll_budget = 132000,
other_cost_budget = 10000,
)
p2 = Project.objects.create(
prudent_number = '103.111',
penndot_number = 'E01994',
name = 'Septa bridge over mars',
inspector = [Inspector.objects.create(
id=1,
first_name = "Mark",
last_name = "Test",
office = "King of Prussia",
classification = "TCI-2",
address ="123 kitty lane",
location = "Pittsburgh, PA",
work_radius = 95,
email = "marktest@prudenteng.com",
phone_number = "3022993322",
            )],
office = 'King of Prussia',
start_date = date_1,
end_date = date_2,
st_hours = 300,
ot_hours = 25,
payroll_budget = 132000,
other_cost_budget = 10000,
)
i1 = Invoice.objects.create(
project = p1,
estimate_num = 1,
start_date = date_1,
end_date = date_1 + timedelta(days=3),
payroll= 11260.90,
other_cost = 505.11,
st_hours = 13,
ot_hours = 16,
invoice_num = '19961',
)
i2 = Invoice.objects.create(
project = p1,
estimate_num = 2,
start_date = date_1,
end_date = date_1 + timedelta(days=-3),
payroll = 32134.90,
other_cost = 505.11,
st_hours = 13,
ot_hours = 16,
invoice_num = '19962',
)
i3 = Invoice.objects.create(
project = p2,
estimate_num = 2,
start_date = date_1,
end_date = date_1 + timedelta(days=-3),
payroll = 11260.90,
other_cost = 505.11,
st_hours = 13,
ot_hours = 16,
invoice_num = '19963',
)
def test_project_creation(self):
        ''' Project manager accounts for both projects created'''
project_count = Project.objects.count()
self.assertEqual(project_count, 2)
def test_invoice_creation(self):
''' Invoice manager accounts for all 3 invoices created'''
invoice_count = Invoice.objects.count()
self.assertEqual(invoice_count, 3)
def test_budget_totals(self):
'''sum of payroll for associated invoices'''
p1 = Project.objects.last()
self.assertEqual(p1.payroll_to_date, (11260.90 + 32134.90))
self.assertEqual(p1.other_cost_to_date, 1010.22)
# def test_total_other_cost(self):
# '''sum of other cost in all associated invoices'''
# p1 = Project.objects.last()
# self.assertEqual(p1)
def test_last_invoiced(self):
''' Returns end date of most recent invoice in mm/dd/yyyy format'''
p1 = Project.objects.last()
self.assertEqual(p1.last_invoiced, datetime.strftime(date.today() + timedelta(days=3),"%m/%d/%Y"))
def test_burn_rate_calc(self):
'''Calculates months remaining at current rate'''
p1 = Project.objects.last()
self.assertEqual(p1.is_almost_finished(), 4)
# Make sure its recalculated after a new invoice
Invoice.objects.create(
project = p1,
estimate_num = 3,
start_date = date.today(),
end_date = date.today() + timedelta(days=-15),
payroll = 25000.90,
other_cost = 505.11,
st_hours = 13,
ot_hours = 16,
invoice_num = '19964',
)
self.assertEqual(p1.is_almost_finished(), 3)
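# These tests run under Django's test runner, e.g.:
#   python manage.py test projects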
|
nilq/baby-python
|
python
|
# coding: utf-8
"""
vloadbalancer
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from ncloud_vloadbalancer.api_client import ApiClient
class V2Api(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def add_target(self, add_target_request, **kwargs): # noqa: E501
"""add_target # noqa: E501
        Add a target  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.add_target(add_target_request, async=True)
>>> result = thread.get()
:param async bool
:param AddTargetRequest add_target_request: addTargetRequest (required)
:return: AddTargetResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.add_target_with_http_info(add_target_request, **kwargs) # noqa: E501
else:
(data) = self.add_target_with_http_info(add_target_request, **kwargs) # noqa: E501
return data
def add_target_with_http_info(self, add_target_request, **kwargs): # noqa: E501
"""add_target # noqa: E501
        Add a target  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.add_target_with_http_info(add_target_request, async=True)
>>> result = thread.get()
:param async bool
:param AddTargetRequest add_target_request: addTargetRequest (required)
:return: AddTargetResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['add_target_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_target" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'add_target_request' is set
if ('add_target_request' not in params or
params['add_target_request'] is None):
raise ValueError("Missing the required parameter `add_target_request` when calling `add_target`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'add_target_request' in params:
body_params = params['add_target_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/addTarget', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='AddTargetResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def change_load_balancer_instance_configuration(self, change_load_balancer_instance_configuration_request, **kwargs): # noqa: E501
"""change_load_balancer_instance_configuration # noqa: E501
        Change load balancer instance configuration  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.change_load_balancer_instance_configuration(change_load_balancer_instance_configuration_request, async=True)
>>> result = thread.get()
:param async bool
:param ChangeLoadBalancerInstanceConfigurationRequest change_load_balancer_instance_configuration_request: changeLoadBalancerInstanceConfigurationRequest (required)
:return: ChangeLoadBalancerInstanceConfigurationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.change_load_balancer_instance_configuration_with_http_info(change_load_balancer_instance_configuration_request, **kwargs) # noqa: E501
else:
(data) = self.change_load_balancer_instance_configuration_with_http_info(change_load_balancer_instance_configuration_request, **kwargs) # noqa: E501
return data
def change_load_balancer_instance_configuration_with_http_info(self, change_load_balancer_instance_configuration_request, **kwargs): # noqa: E501
"""change_load_balancer_instance_configuration # noqa: E501
        Change load balancer instance configuration  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.change_load_balancer_instance_configuration_with_http_info(change_load_balancer_instance_configuration_request, async=True)
>>> result = thread.get()
:param async bool
:param ChangeLoadBalancerInstanceConfigurationRequest change_load_balancer_instance_configuration_request: changeLoadBalancerInstanceConfigurationRequest (required)
:return: ChangeLoadBalancerInstanceConfigurationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['change_load_balancer_instance_configuration_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method change_load_balancer_instance_configuration" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'change_load_balancer_instance_configuration_request' is set
if ('change_load_balancer_instance_configuration_request' not in params or
params['change_load_balancer_instance_configuration_request'] is None):
raise ValueError("Missing the required parameter `change_load_balancer_instance_configuration_request` when calling `change_load_balancer_instance_configuration`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'change_load_balancer_instance_configuration_request' in params:
body_params = params['change_load_balancer_instance_configuration_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/changeLoadBalancerInstanceConfiguration', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ChangeLoadBalancerInstanceConfigurationResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def change_load_balancer_listener_configuration(self, change_load_balancer_listener_configuration_request, **kwargs): # noqa: E501
"""change_load_balancer_listener_configuration # noqa: E501
        Change load balancer listener configuration  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.change_load_balancer_listener_configuration(change_load_balancer_listener_configuration_request, async=True)
>>> result = thread.get()
:param async bool
:param ChangeLoadBalancerListenerConfigurationRequest change_load_balancer_listener_configuration_request: changeLoadBalancerListenerConfigurationRequest (required)
:return: ChangeLoadBalancerListenerConfigurationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.change_load_balancer_listener_configuration_with_http_info(change_load_balancer_listener_configuration_request, **kwargs) # noqa: E501
else:
(data) = self.change_load_balancer_listener_configuration_with_http_info(change_load_balancer_listener_configuration_request, **kwargs) # noqa: E501
return data
def change_load_balancer_listener_configuration_with_http_info(self, change_load_balancer_listener_configuration_request, **kwargs): # noqa: E501
"""change_load_balancer_listener_configuration # noqa: E501
        Change load balancer listener configuration  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.change_load_balancer_listener_configuration_with_http_info(change_load_balancer_listener_configuration_request, async=True)
>>> result = thread.get()
:param async bool
:param ChangeLoadBalancerListenerConfigurationRequest change_load_balancer_listener_configuration_request: changeLoadBalancerListenerConfigurationRequest (required)
:return: ChangeLoadBalancerListenerConfigurationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['change_load_balancer_listener_configuration_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method change_load_balancer_listener_configuration" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'change_load_balancer_listener_configuration_request' is set
if ('change_load_balancer_listener_configuration_request' not in params or
params['change_load_balancer_listener_configuration_request'] is None):
raise ValueError("Missing the required parameter `change_load_balancer_listener_configuration_request` when calling `change_load_balancer_listener_configuration`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'change_load_balancer_listener_configuration_request' in params:
body_params = params['change_load_balancer_listener_configuration_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/changeLoadBalancerListenerConfiguration', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ChangeLoadBalancerListenerConfigurationResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def change_target_group_configuration(self, change_target_group_configuration_request, **kwargs): # noqa: E501
"""change_target_group_configuration # noqa: E501
        Change target group configuration  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.change_target_group_configuration(change_target_group_configuration_request, async=True)
>>> result = thread.get()
:param async bool
:param ChangeTargetGroupConfigurationRequest change_target_group_configuration_request: changeTargetGroupConfigurationRequest (required)
:return: ChangeTargetGroupConfigurationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.change_target_group_configuration_with_http_info(change_target_group_configuration_request, **kwargs) # noqa: E501
else:
(data) = self.change_target_group_configuration_with_http_info(change_target_group_configuration_request, **kwargs) # noqa: E501
return data
def change_target_group_configuration_with_http_info(self, change_target_group_configuration_request, **kwargs): # noqa: E501
"""change_target_group_configuration # noqa: E501
        Change target group configuration  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.change_target_group_configuration_with_http_info(change_target_group_configuration_request, async=True)
>>> result = thread.get()
:param async bool
:param ChangeTargetGroupConfigurationRequest change_target_group_configuration_request: changeTargetGroupConfigurationRequest (required)
:return: ChangeTargetGroupConfigurationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['change_target_group_configuration_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method change_target_group_configuration" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'change_target_group_configuration_request' is set
if ('change_target_group_configuration_request' not in params or
params['change_target_group_configuration_request'] is None):
raise ValueError("Missing the required parameter `change_target_group_configuration_request` when calling `change_target_group_configuration`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'change_target_group_configuration_request' in params:
body_params = params['change_target_group_configuration_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/changeTargetGroupConfiguration', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ChangeTargetGroupConfigurationResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def change_target_group_health_check_configuration(self, change_target_group_health_check_configuration_request, **kwargs): # noqa: E501
"""change_target_group_health_check_configuration # noqa: E501
        Change target group health check configuration  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.change_target_group_health_check_configuration(change_target_group_health_check_configuration_request, async=True)
>>> result = thread.get()
:param async bool
:param ChangeTargetGroupHealthCheckConfigurationRequest change_target_group_health_check_configuration_request: changeTargetGroupHealthCheckConfigurationRequest (required)
:return: ChangeTargetGroupHealthCheckConfigurationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.change_target_group_health_check_configuration_with_http_info(change_target_group_health_check_configuration_request, **kwargs) # noqa: E501
else:
(data) = self.change_target_group_health_check_configuration_with_http_info(change_target_group_health_check_configuration_request, **kwargs) # noqa: E501
return data
def change_target_group_health_check_configuration_with_http_info(self, change_target_group_health_check_configuration_request, **kwargs): # noqa: E501
"""change_target_group_health_check_configuration # noqa: E501
        Change target group health check configuration  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.change_target_group_health_check_configuration_with_http_info(change_target_group_health_check_configuration_request, async=True)
>>> result = thread.get()
:param async bool
:param ChangeTargetGroupHealthCheckConfigurationRequest change_target_group_health_check_configuration_request: changeTargetGroupHealthCheckConfigurationRequest (required)
:return: ChangeTargetGroupHealthCheckConfigurationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['change_target_group_health_check_configuration_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method change_target_group_health_check_configuration" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'change_target_group_health_check_configuration_request' is set
if ('change_target_group_health_check_configuration_request' not in params or
params['change_target_group_health_check_configuration_request'] is None):
raise ValueError("Missing the required parameter `change_target_group_health_check_configuration_request` when calling `change_target_group_health_check_configuration`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'change_target_group_health_check_configuration_request' in params:
body_params = params['change_target_group_health_check_configuration_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/changeTargetGroupHealthCheckConfiguration', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ChangeTargetGroupHealthCheckConfigurationResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_load_balancer_instance(self, create_load_balancer_instance_request, **kwargs): # noqa: E501
"""create_load_balancer_instance # noqa: E501
        Create a load balancer instance  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_load_balancer_instance(create_load_balancer_instance_request, async=True)
>>> result = thread.get()
:param async bool
:param CreateLoadBalancerInstanceRequest create_load_balancer_instance_request: createLoadBalancerInstanceRequest (required)
:return: CreateLoadBalancerInstanceResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.create_load_balancer_instance_with_http_info(create_load_balancer_instance_request, **kwargs) # noqa: E501
else:
(data) = self.create_load_balancer_instance_with_http_info(create_load_balancer_instance_request, **kwargs) # noqa: E501
return data
def create_load_balancer_instance_with_http_info(self, create_load_balancer_instance_request, **kwargs): # noqa: E501
"""create_load_balancer_instance # noqa: E501
        Create a load balancer instance  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_load_balancer_instance_with_http_info(create_load_balancer_instance_request, async=True)
>>> result = thread.get()
:param async bool
:param CreateLoadBalancerInstanceRequest create_load_balancer_instance_request: createLoadBalancerInstanceRequest (required)
:return: CreateLoadBalancerInstanceResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['create_load_balancer_instance_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_load_balancer_instance" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'create_load_balancer_instance_request' is set
if ('create_load_balancer_instance_request' not in params or
params['create_load_balancer_instance_request'] is None):
raise ValueError("Missing the required parameter `create_load_balancer_instance_request` when calling `create_load_balancer_instance`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'create_load_balancer_instance_request' in params:
body_params = params['create_load_balancer_instance_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/createLoadBalancerInstance', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CreateLoadBalancerInstanceResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_load_balancer_listener(self, create_load_balancer_listener_request, **kwargs): # noqa: E501
"""create_load_balancer_listener # noqa: E501
        Create a load balancer listener  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_load_balancer_listener(create_load_balancer_listener_request, async=True)
>>> result = thread.get()
:param async bool
:param CreateLoadBalancerListenerRequest create_load_balancer_listener_request: createLoadBalancerListenerRequest (required)
:return: CreateLoadBalancerListenerResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.create_load_balancer_listener_with_http_info(create_load_balancer_listener_request, **kwargs) # noqa: E501
else:
(data) = self.create_load_balancer_listener_with_http_info(create_load_balancer_listener_request, **kwargs) # noqa: E501
return data
def create_load_balancer_listener_with_http_info(self, create_load_balancer_listener_request, **kwargs): # noqa: E501
"""create_load_balancer_listener # noqa: E501
        Create a load balancer listener  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_load_balancer_listener_with_http_info(create_load_balancer_listener_request, async=True)
>>> result = thread.get()
:param async bool
:param CreateLoadBalancerListenerRequest create_load_balancer_listener_request: createLoadBalancerListenerRequest (required)
:return: CreateLoadBalancerListenerResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['create_load_balancer_listener_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_load_balancer_listener" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'create_load_balancer_listener_request' is set
if ('create_load_balancer_listener_request' not in params or
params['create_load_balancer_listener_request'] is None):
raise ValueError("Missing the required parameter `create_load_balancer_listener_request` when calling `create_load_balancer_listener`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'create_load_balancer_listener_request' in params:
body_params = params['create_load_balancer_listener_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/createLoadBalancerListener', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CreateLoadBalancerListenerResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_target_group(self, create_target_group_request, **kwargs): # noqa: E501
"""create_target_group # noqa: E501
        Create a target group  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_target_group(create_target_group_request, async=True)
>>> result = thread.get()
:param async bool
:param CreateTargetGroupRequest create_target_group_request: createTargetGroupRequest (required)
:return: CreateTargetGroupResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.create_target_group_with_http_info(create_target_group_request, **kwargs) # noqa: E501
else:
(data) = self.create_target_group_with_http_info(create_target_group_request, **kwargs) # noqa: E501
return data
def create_target_group_with_http_info(self, create_target_group_request, **kwargs): # noqa: E501
"""create_target_group # noqa: E501
        Create a target group  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_target_group_with_http_info(create_target_group_request, async=True)
>>> result = thread.get()
:param async bool
:param CreateTargetGroupRequest create_target_group_request: createTargetGroupRequest (required)
:return: CreateTargetGroupResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['create_target_group_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_target_group" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'create_target_group_request' is set
if ('create_target_group_request' not in params or
params['create_target_group_request'] is None):
raise ValueError("Missing the required parameter `create_target_group_request` when calling `create_target_group`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'create_target_group_request' in params:
body_params = params['create_target_group_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/createTargetGroup', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CreateTargetGroupResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_load_balancer_instances(self, delete_load_balancer_instances_request, **kwargs): # noqa: E501
"""delete_load_balancer_instances # noqa: E501
        Delete load balancer instances  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_load_balancer_instances(delete_load_balancer_instances_request, async=True)
>>> result = thread.get()
:param async bool
:param DeleteLoadBalancerInstancesRequest delete_load_balancer_instances_request: deleteLoadBalancerInstancesRequest (required)
:return: DeleteLoadBalancerInstancesResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.delete_load_balancer_instances_with_http_info(delete_load_balancer_instances_request, **kwargs) # noqa: E501
else:
(data) = self.delete_load_balancer_instances_with_http_info(delete_load_balancer_instances_request, **kwargs) # noqa: E501
return data
def delete_load_balancer_instances_with_http_info(self, delete_load_balancer_instances_request, **kwargs): # noqa: E501
"""delete_load_balancer_instances # noqa: E501
        Delete load balancer instances  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_load_balancer_instances_with_http_info(delete_load_balancer_instances_request, async=True)
>>> result = thread.get()
:param async bool
:param DeleteLoadBalancerInstancesRequest delete_load_balancer_instances_request: deleteLoadBalancerInstancesRequest (required)
:return: DeleteLoadBalancerInstancesResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['delete_load_balancer_instances_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_load_balancer_instances" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'delete_load_balancer_instances_request' is set
if ('delete_load_balancer_instances_request' not in params or
params['delete_load_balancer_instances_request'] is None):
raise ValueError("Missing the required parameter `delete_load_balancer_instances_request` when calling `delete_load_balancer_instances`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'delete_load_balancer_instances_request' in params:
body_params = params['delete_load_balancer_instances_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/deleteLoadBalancerInstances', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeleteLoadBalancerInstancesResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_load_balancer_listeners(self, delete_load_balancer_listeners_request, **kwargs): # noqa: E501
"""delete_load_balancer_listeners # noqa: E501
        Delete load balancer listeners  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_load_balancer_listeners(delete_load_balancer_listeners_request, async=True)
>>> result = thread.get()
:param async bool
:param DeleteLoadBalancerListenersRequest delete_load_balancer_listeners_request: deleteLoadBalancerListenersRequest (required)
:return: DeleteLoadBalancerListenersResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.delete_load_balancer_listeners_with_http_info(delete_load_balancer_listeners_request, **kwargs) # noqa: E501
else:
(data) = self.delete_load_balancer_listeners_with_http_info(delete_load_balancer_listeners_request, **kwargs) # noqa: E501
return data
def delete_load_balancer_listeners_with_http_info(self, delete_load_balancer_listeners_request, **kwargs): # noqa: E501
"""delete_load_balancer_listeners # noqa: E501
        Delete load balancer listeners  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_load_balancer_listeners_with_http_info(delete_load_balancer_listeners_request, async=True)
>>> result = thread.get()
:param async bool
:param DeleteLoadBalancerListenersRequest delete_load_balancer_listeners_request: deleteLoadBalancerListenersRequest (required)
:return: DeleteLoadBalancerListenersResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['delete_load_balancer_listeners_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_load_balancer_listeners" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'delete_load_balancer_listeners_request' is set
if ('delete_load_balancer_listeners_request' not in params or
params['delete_load_balancer_listeners_request'] is None):
raise ValueError("Missing the required parameter `delete_load_balancer_listeners_request` when calling `delete_load_balancer_listeners`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'delete_load_balancer_listeners_request' in params:
body_params = params['delete_load_balancer_listeners_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/deleteLoadBalancerListeners', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeleteLoadBalancerListenersResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_target_groups(self, delete_target_groups_request, **kwargs): # noqa: E501
"""delete_target_groups # noqa: E501
        Delete target groups  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_target_groups(delete_target_groups_request, async=True)
>>> result = thread.get()
:param async bool
:param DeleteTargetGroupsRequest delete_target_groups_request: deleteTargetGroupsRequest (required)
:return: DeleteTargetGroupsResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.delete_target_groups_with_http_info(delete_target_groups_request, **kwargs) # noqa: E501
else:
(data) = self.delete_target_groups_with_http_info(delete_target_groups_request, **kwargs) # noqa: E501
return data
def delete_target_groups_with_http_info(self, delete_target_groups_request, **kwargs): # noqa: E501
"""delete_target_groups # noqa: E501
        Delete target groups  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_target_groups_with_http_info(delete_target_groups_request, async=True)
>>> result = thread.get()
:param async bool
:param DeleteTargetGroupsRequest delete_target_groups_request: deleteTargetGroupsRequest (required)
:return: DeleteTargetGroupsResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['delete_target_groups_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_target_groups" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'delete_target_groups_request' is set
if ('delete_target_groups_request' not in params or
params['delete_target_groups_request'] is None):
raise ValueError("Missing the required parameter `delete_target_groups_request` when calling `delete_target_groups`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'delete_target_groups_request' in params:
body_params = params['delete_target_groups_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/deleteTargetGroups', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeleteTargetGroupsResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_load_balancer_instance_detail(self, get_load_balancer_instance_detail_request, **kwargs): # noqa: E501
"""get_load_balancer_instance_detail # noqa: E501
        Get load balancer instance detail  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_load_balancer_instance_detail(get_load_balancer_instance_detail_request, async=True)
>>> result = thread.get()
:param async bool
:param GetLoadBalancerInstanceDetailRequest get_load_balancer_instance_detail_request: getLoadBalancerInstanceDetailRequest (required)
:return: GetLoadBalancerInstanceDetailResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_load_balancer_instance_detail_with_http_info(get_load_balancer_instance_detail_request, **kwargs) # noqa: E501
else:
(data) = self.get_load_balancer_instance_detail_with_http_info(get_load_balancer_instance_detail_request, **kwargs) # noqa: E501
return data
def get_load_balancer_instance_detail_with_http_info(self, get_load_balancer_instance_detail_request, **kwargs): # noqa: E501
"""get_load_balancer_instance_detail # noqa: E501
        Get load balancer instance detail  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_load_balancer_instance_detail_with_http_info(get_load_balancer_instance_detail_request, async=True)
>>> result = thread.get()
:param async bool
:param GetLoadBalancerInstanceDetailRequest get_load_balancer_instance_detail_request: getLoadBalancerInstanceDetailRequest (required)
:return: GetLoadBalancerInstanceDetailResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['get_load_balancer_instance_detail_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_load_balancer_instance_detail" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'get_load_balancer_instance_detail_request' is set
if ('get_load_balancer_instance_detail_request' not in params or
params['get_load_balancer_instance_detail_request'] is None):
raise ValueError("Missing the required parameter `get_load_balancer_instance_detail_request` when calling `get_load_balancer_instance_detail`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'get_load_balancer_instance_detail_request' in params:
body_params = params['get_load_balancer_instance_detail_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/getLoadBalancerInstanceDetail', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GetLoadBalancerInstanceDetailResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_load_balancer_instance_list(self, get_load_balancer_instance_list_request, **kwargs): # noqa: E501
"""get_load_balancer_instance_list # noqa: E501
        Get load balancer instance list  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_load_balancer_instance_list(get_load_balancer_instance_list_request, async=True)
>>> result = thread.get()
:param async bool
:param GetLoadBalancerInstanceListRequest get_load_balancer_instance_list_request: getLoadBalancerInstanceListRequest (required)
:return: GetLoadBalancerInstanceListResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_load_balancer_instance_list_with_http_info(get_load_balancer_instance_list_request, **kwargs) # noqa: E501
else:
(data) = self.get_load_balancer_instance_list_with_http_info(get_load_balancer_instance_list_request, **kwargs) # noqa: E501
return data
def get_load_balancer_instance_list_with_http_info(self, get_load_balancer_instance_list_request, **kwargs): # noqa: E501
"""get_load_balancer_instance_list # noqa: E501
        Get load balancer instance list  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_load_balancer_instance_list_with_http_info(get_load_balancer_instance_list_request, async=True)
>>> result = thread.get()
:param async bool
:param GetLoadBalancerInstanceListRequest get_load_balancer_instance_list_request: getLoadBalancerInstanceListRequest (required)
:return: GetLoadBalancerInstanceListResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['get_load_balancer_instance_list_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_load_balancer_instance_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'get_load_balancer_instance_list_request' is set
if ('get_load_balancer_instance_list_request' not in params or
params['get_load_balancer_instance_list_request'] is None):
raise ValueError("Missing the required parameter `get_load_balancer_instance_list_request` when calling `get_load_balancer_instance_list`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'get_load_balancer_instance_list_request' in params:
body_params = params['get_load_balancer_instance_list_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/getLoadBalancerInstanceList', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GetLoadBalancerInstanceListResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_load_balancer_listener_list(self, get_load_balancer_listener_list_request, **kwargs): # noqa: E501
"""get_load_balancer_listener_list # noqa: E501
        Get load balancer listener list  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_load_balancer_listener_list(get_load_balancer_listener_list_request, async=True)
>>> result = thread.get()
:param async bool
:param GetLoadBalancerListenerListRequest get_load_balancer_listener_list_request: getLoadBalancerListenerListRequest (required)
:return: GetLoadBalancerListenerListResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_load_balancer_listener_list_with_http_info(get_load_balancer_listener_list_request, **kwargs) # noqa: E501
else:
(data) = self.get_load_balancer_listener_list_with_http_info(get_load_balancer_listener_list_request, **kwargs) # noqa: E501
return data
def get_load_balancer_listener_list_with_http_info(self, get_load_balancer_listener_list_request, **kwargs): # noqa: E501
"""get_load_balancer_listener_list # noqa: E501
        Get load balancer listener list  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_load_balancer_listener_list_with_http_info(get_load_balancer_listener_list_request, async=True)
>>> result = thread.get()
:param async bool
:param GetLoadBalancerListenerListRequest get_load_balancer_listener_list_request: getLoadBalancerListenerListRequest (required)
:return: GetLoadBalancerListenerListResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['get_load_balancer_listener_list_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_load_balancer_listener_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'get_load_balancer_listener_list_request' is set
if ('get_load_balancer_listener_list_request' not in params or
params['get_load_balancer_listener_list_request'] is None):
raise ValueError("Missing the required parameter `get_load_balancer_listener_list_request` when calling `get_load_balancer_listener_list`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'get_load_balancer_listener_list_request' in params:
body_params = params['get_load_balancer_listener_list_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/getLoadBalancerListenerList', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GetLoadBalancerListenerListResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_load_balancer_rule_list(self, get_load_balancer_rule_list_request, **kwargs): # noqa: E501
"""get_load_balancer_rule_list # noqa: E501
        Get load balancer rule list  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_load_balancer_rule_list(get_load_balancer_rule_list_request, async=True)
>>> result = thread.get()
:param async bool
:param GetLoadBalancerRuleListRequest get_load_balancer_rule_list_request: getLoadBalancerRuleListRequest (required)
:return: GetLoadBalancerRuleListResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_load_balancer_rule_list_with_http_info(get_load_balancer_rule_list_request, **kwargs) # noqa: E501
else:
(data) = self.get_load_balancer_rule_list_with_http_info(get_load_balancer_rule_list_request, **kwargs) # noqa: E501
return data
def get_load_balancer_rule_list_with_http_info(self, get_load_balancer_rule_list_request, **kwargs): # noqa: E501
"""get_load_balancer_rule_list # noqa: E501
        Get load balancer rule list  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_load_balancer_rule_list_with_http_info(get_load_balancer_rule_list_request, async=True)
>>> result = thread.get()
:param async bool
:param GetLoadBalancerRuleListRequest get_load_balancer_rule_list_request: getLoadBalancerRuleListRequest (required)
:return: GetLoadBalancerRuleListResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['get_load_balancer_rule_list_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_load_balancer_rule_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'get_load_balancer_rule_list_request' is set
if ('get_load_balancer_rule_list_request' not in params or
params['get_load_balancer_rule_list_request'] is None):
raise ValueError("Missing the required parameter `get_load_balancer_rule_list_request` when calling `get_load_balancer_rule_list`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'get_load_balancer_rule_list_request' in params:
body_params = params['get_load_balancer_rule_list_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/getLoadBalancerRuleList', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GetLoadBalancerRuleListResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_target_group_detail(self, get_target_group_detail_request, **kwargs): # noqa: E501
"""get_target_group_detail # noqa: E501
        Get target group detail  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_target_group_detail(get_target_group_detail_request, async=True)
>>> result = thread.get()
:param async bool
:param GetTargetGroupDetailRequest get_target_group_detail_request: getTargetGroupDetailRequest (required)
:return: GetTargetGroupDetailResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_target_group_detail_with_http_info(get_target_group_detail_request, **kwargs) # noqa: E501
else:
(data) = self.get_target_group_detail_with_http_info(get_target_group_detail_request, **kwargs) # noqa: E501
return data
def get_target_group_detail_with_http_info(self, get_target_group_detail_request, **kwargs): # noqa: E501
"""get_target_group_detail # noqa: E501
        Get target group detail  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_target_group_detail_with_http_info(get_target_group_detail_request, async=True)
>>> result = thread.get()
:param async bool
:param GetTargetGroupDetailRequest get_target_group_detail_request: getTargetGroupDetailRequest (required)
:return: GetTargetGroupDetailResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['get_target_group_detail_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_target_group_detail" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'get_target_group_detail_request' is set
if ('get_target_group_detail_request' not in params or
params['get_target_group_detail_request'] is None):
raise ValueError("Missing the required parameter `get_target_group_detail_request` when calling `get_target_group_detail`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'get_target_group_detail_request' in params:
body_params = params['get_target_group_detail_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/getTargetGroupDetail', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GetTargetGroupDetailResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_target_group_list(self, get_target_group_list_request, **kwargs): # noqa: E501
"""get_target_group_list # noqa: E501
        Get target group list  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_target_group_list(get_target_group_list_request, async=True)
>>> result = thread.get()
:param async bool
:param GetTargetGroupListRequest get_target_group_list_request: getTargetGroupListRequest (required)
:return: GetTargetGroupListResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_target_group_list_with_http_info(get_target_group_list_request, **kwargs) # noqa: E501
else:
(data) = self.get_target_group_list_with_http_info(get_target_group_list_request, **kwargs) # noqa: E501
return data
def get_target_group_list_with_http_info(self, get_target_group_list_request, **kwargs): # noqa: E501
"""get_target_group_list # noqa: E501
        Get target group list  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_target_group_list_with_http_info(get_target_group_list_request, async=True)
>>> result = thread.get()
:param async bool
:param GetTargetGroupListRequest get_target_group_list_request: getTargetGroupListRequest (required)
:return: GetTargetGroupListResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['get_target_group_list_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_target_group_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'get_target_group_list_request' is set
if ('get_target_group_list_request' not in params or
params['get_target_group_list_request'] is None):
raise ValueError("Missing the required parameter `get_target_group_list_request` when calling `get_target_group_list`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'get_target_group_list_request' in params:
body_params = params['get_target_group_list_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/getTargetGroupList', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GetTargetGroupListResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_target_list(self, get_target_list_request, **kwargs): # noqa: E501
"""get_target_list # noqa: E501
        Get target list  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_target_list(get_target_list_request, async=True)
>>> result = thread.get()
:param async bool
:param GetTargetListRequest get_target_list_request: getTargetListRequest (required)
:return: GetTargetListResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_target_list_with_http_info(get_target_list_request, **kwargs) # noqa: E501
else:
(data) = self.get_target_list_with_http_info(get_target_list_request, **kwargs) # noqa: E501
return data
def get_target_list_with_http_info(self, get_target_list_request, **kwargs): # noqa: E501
"""get_target_list # noqa: E501
        Get target list  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_target_list_with_http_info(get_target_list_request, async=True)
>>> result = thread.get()
:param async bool
:param GetTargetListRequest get_target_list_request: getTargetListRequest (required)
:return: GetTargetListResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['get_target_list_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_target_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'get_target_list_request' is set
if ('get_target_list_request' not in params or
params['get_target_list_request'] is None):
raise ValueError("Missing the required parameter `get_target_list_request` when calling `get_target_list`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'get_target_list_request' in params:
body_params = params['get_target_list_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/getTargetList', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GetTargetListResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def remove_target(self, remove_target_request, **kwargs): # noqa: E501
"""remove_target # noqa: E501
        Remove target  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.remove_target(remove_target_request, async=True)
>>> result = thread.get()
:param async bool
:param RemoveTargetRequest remove_target_request: removeTargetRequest (required)
:return: RemoveTargetResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.remove_target_with_http_info(remove_target_request, **kwargs) # noqa: E501
else:
(data) = self.remove_target_with_http_info(remove_target_request, **kwargs) # noqa: E501
return data
def remove_target_with_http_info(self, remove_target_request, **kwargs): # noqa: E501
"""remove_target # noqa: E501
        Remove target  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.remove_target_with_http_info(remove_target_request, async=True)
>>> result = thread.get()
:param async bool
:param RemoveTargetRequest remove_target_request: removeTargetRequest (required)
:return: RemoveTargetResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['remove_target_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method remove_target" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'remove_target_request' is set
if ('remove_target_request' not in params or
params['remove_target_request'] is None):
raise ValueError("Missing the required parameter `remove_target_request` when calling `remove_target`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'remove_target_request' in params:
body_params = params['remove_target_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/removeTarget', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='RemoveTargetResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def set_load_balancer_description(self, set_load_balancer_description_request, **kwargs): # noqa: E501
"""set_load_balancer_description # noqa: E501
        Set load balancer description  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.set_load_balancer_description(set_load_balancer_description_request, async=True)
>>> result = thread.get()
:param async bool
:param SetLoadBalancerDescriptionRequest set_load_balancer_description_request: setLoadBalancerDescriptionRequest (required)
:return: SetLoadBalancerDescriptionResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.set_load_balancer_description_with_http_info(set_load_balancer_description_request, **kwargs) # noqa: E501
else:
(data) = self.set_load_balancer_description_with_http_info(set_load_balancer_description_request, **kwargs) # noqa: E501
return data
def set_load_balancer_description_with_http_info(self, set_load_balancer_description_request, **kwargs): # noqa: E501
"""set_load_balancer_description # noqa: E501
        Set load balancer description  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.set_load_balancer_description_with_http_info(set_load_balancer_description_request, async=True)
>>> result = thread.get()
:param async bool
:param SetLoadBalancerDescriptionRequest set_load_balancer_description_request: setLoadBalancerDescriptionRequest (required)
:return: SetLoadBalancerDescriptionResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['set_load_balancer_description_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method set_load_balancer_description" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'set_load_balancer_description_request' is set
if ('set_load_balancer_description_request' not in params or
params['set_load_balancer_description_request'] is None):
raise ValueError("Missing the required parameter `set_load_balancer_description_request` when calling `set_load_balancer_description`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'set_load_balancer_description_request' in params:
body_params = params['set_load_balancer_description_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/setLoadBalancerDescription', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SetLoadBalancerDescriptionResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def set_load_balancer_instance_subnet(self, set_load_balancer_instance_subnet_request, **kwargs): # noqa: E501
"""set_load_balancer_instance_subnet # noqa: E501
        Set load balancer instance subnet  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.set_load_balancer_instance_subnet(set_load_balancer_instance_subnet_request, async=True)
>>> result = thread.get()
:param async bool
:param SetLoadBalancerInstanceSubnetRequest set_load_balancer_instance_subnet_request: setLoadBalancerInstanceSubnetRequest (required)
:return: SetLoadBalancerInstanceSubnetResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.set_load_balancer_instance_subnet_with_http_info(set_load_balancer_instance_subnet_request, **kwargs) # noqa: E501
else:
(data) = self.set_load_balancer_instance_subnet_with_http_info(set_load_balancer_instance_subnet_request, **kwargs) # noqa: E501
return data
def set_load_balancer_instance_subnet_with_http_info(self, set_load_balancer_instance_subnet_request, **kwargs): # noqa: E501
"""set_load_balancer_instance_subnet # noqa: E501
        Set load balancer instance subnet  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.set_load_balancer_instance_subnet_with_http_info(set_load_balancer_instance_subnet_request, async=True)
>>> result = thread.get()
:param async bool
:param SetLoadBalancerInstanceSubnetRequest set_load_balancer_instance_subnet_request: setLoadBalancerInstanceSubnetRequest (required)
:return: SetLoadBalancerInstanceSubnetResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['set_load_balancer_instance_subnet_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method set_load_balancer_instance_subnet" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'set_load_balancer_instance_subnet_request' is set
if ('set_load_balancer_instance_subnet_request' not in params or
params['set_load_balancer_instance_subnet_request'] is None):
raise ValueError("Missing the required parameter `set_load_balancer_instance_subnet_request` when calling `set_load_balancer_instance_subnet`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'set_load_balancer_instance_subnet_request' in params:
body_params = params['set_load_balancer_instance_subnet_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/setLoadBalancerInstanceSubnet', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SetLoadBalancerInstanceSubnetResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def set_target(self, set_target_request, **kwargs): # noqa: E501
"""set_target # noqa: E501
        Set target  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.set_target(set_target_request, async=True)
>>> result = thread.get()
:param async bool
:param SetTargetRequest set_target_request: setTargetRequest (required)
:return: SetTargetResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.set_target_with_http_info(set_target_request, **kwargs) # noqa: E501
else:
(data) = self.set_target_with_http_info(set_target_request, **kwargs) # noqa: E501
return data
def set_target_with_http_info(self, set_target_request, **kwargs): # noqa: E501
"""set_target # noqa: E501
        Set target  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.set_target_with_http_info(set_target_request, async=True)
>>> result = thread.get()
:param async bool
:param SetTargetRequest set_target_request: setTargetRequest (required)
:return: SetTargetResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['set_target_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method set_target" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'set_target_request' is set
if ('set_target_request' not in params or
params['set_target_request'] is None):
raise ValueError("Missing the required parameter `set_target_request` when calling `set_target`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'set_target_request' in params:
body_params = params['set_target_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/setTarget', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SetTargetResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def set_target_group_description(self, set_target_group_description_request, **kwargs): # noqa: E501
"""set_target_group_description # noqa: E501
        Set target group description  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.set_target_group_description(set_target_group_description_request, async=True)
>>> result = thread.get()
:param async bool
:param SetTargetGroupDescriptionRequest set_target_group_description_request: setTargetGroupDescriptionRequest (required)
:return: SetTargetGroupDescriptionResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.set_target_group_description_with_http_info(set_target_group_description_request, **kwargs) # noqa: E501
else:
(data) = self.set_target_group_description_with_http_info(set_target_group_description_request, **kwargs) # noqa: E501
return data
def set_target_group_description_with_http_info(self, set_target_group_description_request, **kwargs): # noqa: E501
"""set_target_group_description # noqa: E501
        Set target group description  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.set_target_group_description_with_http_info(set_target_group_description_request, async=True)
>>> result = thread.get()
:param async bool
:param SetTargetGroupDescriptionRequest set_target_group_description_request: setTargetGroupDescriptionRequest (required)
:return: SetTargetGroupDescriptionResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['set_target_group_description_request'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method set_target_group_description" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'set_target_group_description_request' is set
if ('set_target_group_description_request' not in params or
params['set_target_group_description_request'] is None):
raise ValueError("Missing the required parameter `set_target_group_description_request` when calling `set_target_group_description`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
query_params.append(('responseFormatType', 'json')) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'set_target_group_description_request' in params:
body_params = params['set_target_group_description_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['x-ncp-iam'] # noqa: E501
return self.api_client.call_api(
'/setTargetGroupDescription', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SetTargetGroupDescriptionResponse', # noqa: E501
auth_settings=auth_settings,
_async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
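# --- Usage sketch (editor's addition, not part of the generated client) -----
# A minimal sketch, based only on the docstrings above, of how the generated
# methods are typically driven. "api" is assumed to be an instance of the API
# class defined in this module, constructed elsewhere around an authenticated
# ApiClient, and "list_request" a populated GetLoadBalancerInstanceListRequest.
def example_list_load_balancers(api, list_request):
    # Synchronous call: returns the deserialized
    # GetLoadBalancerInstanceListResponse directly.
    response = api.get_load_balancer_instance_list(list_request)
    # Asynchronous call: the docstrings say "pass async=True", but `async` is a
    # reserved word from Python 3.7 onwards, so it has to be supplied through
    # dict unpacking; the call then returns a thread whose .get() blocks until
    # the HTTP request finishes.
    thread = api.get_load_balancer_instance_list(list_request, **{"async": True})
    async_response = thread.get()
    return response, async_response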
from selenium import webdriver
import logging
import os
logging.getLogger().setLevel(logging.INFO)
def lambda_handler(event, context):
logging.info("python-selenium-chromium-on-lambda started")
chrome_options = webdriver.ChromeOptions()
prefs = {"download.default_directory": "/tmp", "safebrowsing.enabled": True}
chrome_options.add_experimental_option("prefs", prefs)
chrome_options.add_argument("--disable-extensions")
chrome_options.add_argument("--disable-gpu")
chrome_options.add_argument("--no-sandbox")
chrome_options.add_argument("--single-process")
chrome_options.add_argument("--headless")
chrome_options.add_argument("--incognito")
chrome_options.add_argument("--disable-popup-blocking")
chrome_options.add_argument("--disable-translate")
chrome_options.add_argument("--start-maximized")
chrome_options.add_argument("--window-size=1600,1024")
chrome_options.add_argument("--disable-impl-side-painting")
chrome_options.add_argument("--disable-dev-shm-usage")
driver = webdriver.Chrome(chrome_options=chrome_options)
driver.implicitly_wait(60)
open_google(driver)
take_screenshot(driver, "screenshot_google.png")
    # quit() (rather than close()) also shuts down the chromedriver process,
    # which would otherwise linger in the Lambda sandbox.
    driver.quit()
return
def open_google(driver):
logging.info("opening google...")
url = "https://google.com"
driver.get(url)
def take_screenshot(driver, filename):
    logging.info(f"taking screenshot {filename}...")
    # The Lambda filesystem is read-only outside /tmp, so write the screenshot
    # there instead of the current working directory.
    path = os.path.join("/tmp", filename)
    driver.save_screenshot(path)
    logging.info(f"screenshot location: {path}")
#
# PySNMP MIB module WHISP-BOX-MIBV2-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/WHISP-BOX-MIBV2-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 21:29:30 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
OctetString, Integer, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "OctetString", "Integer", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
SingleValueConstraint, ConstraintsUnion, ValueRangeConstraint, ConstraintsIntersection, ValueSizeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ConstraintsUnion", "ValueRangeConstraint", "ConstraintsIntersection", "ValueSizeConstraint")
ModuleCompliance, ObjectGroup, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "ObjectGroup", "NotificationGroup")
TimeTicks, iso, NotificationType, Gauge32, IpAddress, Unsigned32, Integer32, ObjectIdentity, Counter64, Counter32, MibIdentifier, Bits, ModuleIdentity, MibScalar, MibTable, MibTableRow, MibTableColumn = mibBuilder.importSymbols("SNMPv2-SMI", "TimeTicks", "iso", "NotificationType", "Gauge32", "IpAddress", "Unsigned32", "Integer32", "ObjectIdentity", "Counter64", "Counter32", "MibIdentifier", "Bits", "ModuleIdentity", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn")
MacAddress, TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "MacAddress", "TextualConvention", "DisplayString")
whispBox, whispModules = mibBuilder.importSymbols("WHISP-GLOBAL-REG-MIB", "whispBox", "whispModules")
EventString, WhispMACAddress, WhispLUID = mibBuilder.importSymbols("WHISP-TCV2-MIB", "EventString", "WhispMACAddress", "WhispLUID")
whispBoxLevelMibModule = ModuleIdentity((1, 3, 6, 1, 4, 1, 161, 19, 1, 1, 8))
if mibBuilder.loadTexts: whispBoxLevelMibModule.setLastUpdated('200304150000Z')
if mibBuilder.loadTexts: whispBoxLevelMibModule.setOrganization('Cambium Networks')
whispBoxStatus = MibIdentifier((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1))
whispBoxConfig = MibIdentifier((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2))
whispBoxControls = MibIdentifier((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 3))
whispBoxEventLog = MibIdentifier((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 5))
whispBoxConf = MibIdentifier((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 6))
whispBoxGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 6, 1))
whispBoxBridgeVar = MibIdentifier((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 7))
whispBoxCPVar = MibIdentifier((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9))
whispBoxEvent = MibIdentifier((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 12))
whispBoxDHCPClientEvent = MibIdentifier((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 12, 1))
whispBoxDNS = MibIdentifier((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 13))
whispBoxRFPhysical = MibIdentifier((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 15))
whispBoxRFConfig = MibIdentifier((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 16))
whispBoxSoftwareVer = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 1), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: whispBoxSoftwareVer.setStatus('current')
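# --- Manager-side query sketch (editor's addition) ---------------------------
# A hedged example of reading whispBoxSoftwareVer from a device with pysnmp's
# high-level API. It is kept commented out so it never executes when this MIB
# module is loaded by the MIB builder; the host address and community string
# are placeholders, and the example assumes this compiled module is on the
# pysnmp MIB search path.
#
# from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
#                           ContextData, ObjectType, ObjectIdentity, getCmd)
#
# errorIndication, errorStatus, errorIndex, varBinds = next(getCmd(
#     SnmpEngine(),
#     CommunityData('public'),                    # placeholder community string
#     UdpTransportTarget(('192.0.2.1', 161)),     # placeholder device address
#     ContextData(),
#     ObjectType(ObjectIdentity('WHISP-BOX-MIBV2-MIB', 'whispBoxSoftwareVer', 0))))
# if not errorIndication and not errorStatus:
#     print(varBinds[0])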
whispBoxFPGAVer = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: whispBoxFPGAVer.setStatus('current')
whispBoxEsn = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 3), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: whispBoxEsn.setStatus('current')
whispBoxBoot = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 4), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: whispBoxBoot.setStatus('current')
boxTemperature = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 5), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: boxTemperature.setStatus('obsolete')
boxDeviceType = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 6), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: boxDeviceType.setStatus('current')
boxDeviceTypeID = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 7), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: boxDeviceTypeID.setStatus('current')
boxEncryption = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 8), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: boxEncryption.setStatus('current')
etherLinkStatus = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 9), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: etherLinkStatus.setStatus('current')
boxFrequency = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 10), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: boxFrequency.setStatus('current')
platformVer = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: platformVer.setStatus('current')
platformType = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 12), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: platformType.setStatus('current')
dhcpLanIp = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 13), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dhcpLanIp.setStatus('current')
dhcpLanSubnetMask = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 14), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dhcpLanSubnetMask.setStatus('current')
dhcpLanGateway = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 15), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dhcpLanGateway.setStatus('current')
dhcpRfPublicIp = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 16), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dhcpRfPublicIp.setStatus('current')
dhcpRfPublicSubnetMask = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 17), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dhcpRfPublicSubnetMask.setStatus('current')
dhcpRfPublicGateway = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 18), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dhcpRfPublicGateway.setStatus('current')
lanDhcpStatus = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 19), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: lanDhcpStatus.setStatus('current')
rfPublicDhcpStatus = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 20), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfPublicDhcpStatus.setStatus('current')
inSyncCount = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 21), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: inSyncCount.setStatus('current')
outSyncCount = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 22), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: outSyncCount.setStatus('current')
pllOutLockCount = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 23), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: pllOutLockCount.setStatus('current')
txCalFailure = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 24), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: txCalFailure.setStatus('current')
swVersion = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 25), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swVersion.setStatus('current')
pldVersion = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 26), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: pldVersion.setStatus('current')
platformInfo = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 27), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: platformInfo.setStatus('current')
antPolarization = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 28), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: antPolarization.setStatus('current')
packetOverloadCounter = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 29), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: packetOverloadCounter.setStatus('current')
whispBoxP11Personality = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 30), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: whispBoxP11Personality.setStatus('current')
whispBoxP11FPGAType = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 31), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: whispBoxP11FPGAType.setStatus('current')
whispBoxP11BstrapFPGAVer = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 32), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: whispBoxP11BstrapFPGAVer.setStatus('current')
numDFSDetections = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 33), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: numDFSDetections.setStatus('current')
rxOverrunPkts = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 34), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rxOverrunPkts.setStatus('current')
boxTemperatureC = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 35), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: boxTemperatureC.setStatus('current')
boxTemperatureF = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 36), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: boxTemperatureF.setStatus('current')
bridgeCbFecStatbin = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 37), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bridgeCbFecStatbin.setStatus('current')
bridgeCbFecStatbout = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 38), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bridgeCbFecStatbout.setStatus('current')
bridgeCbFecStatbtoss = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 39), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bridgeCbFecStatbtoss.setStatus('current')
bridgeCbFecStatbtosscap = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 40), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bridgeCbFecStatbtosscap.setStatus('current')
bridgeCbFecStatuin = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 41), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bridgeCbFecStatuin.setStatus('current')
bridgeCbFecStatuout = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 42), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bridgeCbFecStatuout.setStatus('current')
bridgeCbFecStatutoss = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 43), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bridgeCbFecStatutoss.setStatus('current')
bridgeCbFecStatutosscap = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 44), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bridgeCbFecStatutosscap.setStatus('current')
bridgeCbRFStatbin = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 45), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bridgeCbRFStatbin.setStatus('current')
bridgeCbRFStatbout = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 46), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bridgeCbRFStatbout.setStatus('current')
bridgeCbRFStatbtoss = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 47), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bridgeCbRFStatbtoss.setStatus('current')
bridgeCbRFStatbtosscap = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 48), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bridgeCbRFStatbtosscap.setStatus('current')
bridgeCbRFStatuin = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 49), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bridgeCbRFStatuin.setStatus('current')
bridgeCbRFStatuout = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 50), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bridgeCbRFStatuout.setStatus('current')
bridgeCbRFStatutoss = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 51), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bridgeCbRFStatutoss.setStatus('current')
bridgeCbRFStatutosscap = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 52), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bridgeCbRFStatutosscap.setStatus('current')
bridgeCbErrStatNI1QSend = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 53), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bridgeCbErrStatNI1QSend.setStatus('current')
bridgeCbErrStatNI2QSend = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 54), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bridgeCbErrStatNI2QSend.setStatus('current')
bridgeCbErrStatBridgeFull = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 55), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bridgeCbErrStatBridgeFull.setStatus('current')
bridgeCbErrStatSendMsg = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 56), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bridgeCbErrStatSendMsg.setStatus('current')
bridgeCbErrStatAPFecQSend = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 57), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bridgeCbErrStatAPFecQSend.setStatus('current')
bridgeCbErrStatApRfQSend = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 58), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bridgeCbErrStatApRfQSend.setStatus('current')
rfStatXmtUDataCnt = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 59), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfStatXmtUDataCnt.setStatus('current')
rfStatXmtBDataCnt = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 60), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfStatXmtBDataCnt.setStatus('current')
rfStatRcvUDataCnt = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 61), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfStatRcvUDataCnt.setStatus('current')
rfStatRcvBDataCnt = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 62), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfStatRcvBDataCnt.setStatus('current')
rfStatXmtCntlCnt = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 63), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfStatXmtCntlCnt.setStatus('current')
rfStatRcvCntlCnt = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 64), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfStatRcvCntlCnt.setStatus('current')
rfStatInSyncCount = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 65), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfStatInSyncCount.setStatus('current')
rfStatOutSyncCount = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 66), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfStatOutSyncCount.setStatus('current')
rfStatOverrunCount = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 67), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfStatOverrunCount.setStatus('current')
rfStatUnderrunCount = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 68), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfStatUnderrunCount.setStatus('current')
rfStatRcvCorruptDataCount = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 69), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfStatRcvCorruptDataCount.setStatus('current')
rfStatBadBcastCtlCnt = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 70), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfStatBadBcastCtlCnt.setStatus('current')
rfStatPLLOutOfLockCnt = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 71), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfStatPLLOutOfLockCnt.setStatus('current')
rfStatBeaconVerMismatchCnt = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 72), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfStatBeaconVerMismatchCnt.setStatus('current')
rfStatBadFreqBcnRcvCnt = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 73), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfStatBadFreqBcnRcvCnt.setStatus('current')
rfStatnonLiteBcnRcvCnt = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 74), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfStatnonLiteBcnRcvCnt.setStatus('current')
rfStatUnsupFeatBcnRcvCnt = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 75), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfStatUnsupFeatBcnRcvCnt.setStatus('current')
rfStatUnkwnFeatBcnRcvCnt = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 76), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfStatUnkwnFeatBcnRcvCnt.setStatus('current')
rfStatTxCalFailCnt = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 77), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfStatTxCalFailCnt.setStatus('current')
rfStatBadInSyncIDRcv = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 78), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfStatBadInSyncIDRcv.setStatus('current')
rfStatTempOutOfRange = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 79), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfStatTempOutOfRange.setStatus('current')
rfStatRSSIOutOfRange = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 80), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfStatRSSIOutOfRange.setStatus('current')
rfStatRangeCapEnf = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 81), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfStatRangeCapEnf.setStatus('current')
rfStatRcvLTStart = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 82), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfStatRcvLTStart.setStatus('current')
rfStatRcvLTStartHS = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 83), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfStatRcvLTStartHS.setStatus('current')
rfStatRcvLTResult = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 84), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfStatRcvLTResult.setStatus('current')
rfStatXmtLTResult = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 85), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfStatXmtLTResult.setStatus('current')
whispFeatureKeyOrigin = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 86), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: whispFeatureKeyOrigin.setStatus('current')
radioMSN = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 87), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: radioMSN.setStatus('current')
updateStatus = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 88), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: updateStatus.setStatus('current')
syslogStatTxSuccesses = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 89), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: syslogStatTxSuccesses.setStatus('current')
syslogStatDropped = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 90), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: syslogStatDropped.setStatus('current')
fecStatLinkLost = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 91), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fecStatLinkLost.setStatus('current')
fecStatLinkDetected = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 92), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fecStatLinkDetected.setStatus('current')
natDhcpStatus = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 93), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: natDhcpStatus.setStatus('current')
fecInDiscardsCount = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 94), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fecInDiscardsCount.setStatus('current')
fecInErrorsCount = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 95), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fecInErrorsCount.setStatus('current')
fecOutDiscardsCount = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 96), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fecOutDiscardsCount.setStatus('current')
fecOutErrorsCount = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 97), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fecOutErrorsCount.setStatus('current')
rfInDiscardsCount = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 98), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfInDiscardsCount.setStatus('current')
rfInErrorsCount = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 99), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfInErrorsCount.setStatus('current')
rfOutDiscardsCount = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 100), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfOutDiscardsCount.setStatus('current')
rfOutErrorsCount = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 101), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfOutErrorsCount.setStatus('current')
fecInDiscardsOverloadCount = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 102), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fecInDiscardsOverloadCount.setStatus('current')
fecOutDiscardsOverloadCount = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 103), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fecOutDiscardsOverloadCount.setStatus('current')
rfInDiscardsOverloadCount = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 104), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfInDiscardsOverloadCount.setStatus('current')
rfOutDiscardsOverloadCount = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 105), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfOutDiscardsOverloadCount.setStatus('current')
fpgaCompileInfo = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 106), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fpgaCompileInfo.setStatus('current')
fpgaBuildDate = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 107), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fpgaBuildDate.setStatus('current')
aggregateBandwidthCap = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 108), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: aggregateBandwidthCap.setStatus('current')
calibrationStatusBool = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 109), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("notCalibrated", 0), ("calibrated", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: calibrationStatusBool.setStatus('current')
calibrationStatusBox = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 110), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: calibrationStatusBox.setStatus('current')
radioEngKeyed = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 111), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: radioEngKeyed.setStatus('current')
bridgeCbFecStatfloods = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 112), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bridgeCbFecStatfloods.setStatus('current')
bridgeCbRFStatfloods = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 113), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bridgeCbRFStatfloods.setStatus('current')
agcGainRxCH1 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 114), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agcGainRxCH1.setStatus('current')
agcGainRxCH2 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 115), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agcGainRxCH2.setStatus('current')
antType = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 116), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("integrated", 0), ("external", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: antType.setStatus('current')
rfStatRcvCorruptControlCount = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 117), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfStatRcvCorruptControlCount.setStatus('current')
rfStatXmtMDataCnt = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 217), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfStatXmtMDataCnt.setStatus('current')
rfStatRcvMDataCnt = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 1, 218), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfStatRcvMDataCnt.setStatus('current')
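# Read-write configuration scalars under OID branch (1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2);
# the read-only status/statistics scalars above live under branch (..., 3, 3, 1).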
linkNegoSpeed = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 1), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: linkNegoSpeed.setStatus('obsolete')
colorCode = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 254))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: colorCode.setStatus('current')
displayOnlyAccess = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 3), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: displayOnlyAccess.setStatus('obsolete')
fullAccess = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 4), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fullAccess.setStatus('current')
webAutoUpdate = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 5), Integer32()).setUnits('Seconds').setMaxAccess("readwrite")
if mibBuilder.loadTexts: webAutoUpdate.setStatus('current')
pass1Status = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 6), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: pass1Status.setStatus('current')
pass2Status = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 7), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: pass2Status.setStatus('current')
bridgeEntryTimeout = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(25, 1440))).setUnits('minutes').setMaxAccess("readwrite")
if mibBuilder.loadTexts: bridgeEntryTimeout.setStatus('current')
snmpMibPerm = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("readWrite", 0), ("readOnly", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: snmpMibPerm.setStatus('current')
bhTimingMode = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 0))).clone(namedValues=NamedValues(("timingMaster", 1), ("timeingSlave", 0)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: bhTimingMode.setStatus('current')
bhModulation = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("tenMbitsPerSecond", 0), ("twentyMbitsPerSecond", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: bhModulation.setStatus('obsolete')
powerControl = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("low", 0), ("normal", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: powerControl.setStatus('current')
extFilterDelay = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 13), Integer32()).setUnits('nanoseconds').setMaxAccess("readwrite")
if mibBuilder.loadTexts: extFilterDelay.setStatus('current')
antennaGain = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 14), Integer32()).setUnits('dBi').setMaxAccess("readwrite")
if mibBuilder.loadTexts: antennaGain.setStatus('current')
eirp = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 15), Integer32()).setUnits('dBm').setMaxAccess("readwrite")
if mibBuilder.loadTexts: eirp.setStatus('obsolete')
dynamicLearning = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dynamicLearning.setStatus('current')
managementVID = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 17), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: managementVID.setStatus('current')
agingTimeout = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 18), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agingTimeout.setStatus('current')
frameType = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 19), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("allframes", 0), ("taggedonly", 1), ("untaggedonly", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: frameType.setStatus('current')
addVlanMember = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 20), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: addVlanMember.setStatus('current')
removeVlanMember = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 21), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: removeVlanMember.setStatus('current')
scheduling = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 22), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1))).clone(namedValues=NamedValues(("hardware", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: scheduling.setStatus('current')
transmitterOP = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 23), Integer32()).setUnits('dBm').setMaxAccess("readwrite")
if mibBuilder.loadTexts: transmitterOP.setStatus('current')
bridgeEnable = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 24), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("enable", 0), ("disable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: bridgeEnable.setStatus('current')
fecEnable = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 25), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 0))).clone(namedValues=NamedValues(("enable", 1), ("disable", 0)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fecEnable.setStatus('current')
trapIP1 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 26), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: trapIP1.setStatus('obsolete')
trapIP2 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 27), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: trapIP2.setStatus('obsolete')
trapIP3 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 28), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: trapIP3.setStatus('obsolete')
trapIP4 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 29), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: trapIP4.setStatus('obsolete')
trapIP5 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 30), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: trapIP5.setStatus('obsolete')
trapIP6 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 31), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: trapIP6.setStatus('obsolete')
trapIP7 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 32), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: trapIP7.setStatus('obsolete')
trapIP8 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 33), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: trapIP8.setStatus('obsolete')
trapIP9 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 34), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: trapIP9.setStatus('obsolete')
trapIP10 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 35), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: trapIP10.setStatus('obsolete')
commStringRWrite = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 36), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: commStringRWrite.setStatus('current')
subnetMask = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 37), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: subnetMask.setStatus('current')
mngtIP = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 38), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mngtIP.setStatus('current')
allowVIDAccess = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 39), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("enable", 0), ("disable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: allowVIDAccess.setStatus('current')
setDefaultPlug = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 40), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: setDefaultPlug.setStatus('current')
hwsCompatibility = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 41), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("enable", 0), ("disable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwsCompatibility.setStatus('obsolete')
gpsInput = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 42), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("generateSyncSignal", 0), ("syncToReceivedSignalTimingPort", 1), ("syncToReceivedSignalPowerPort", 2), ("syncToiGPS", 3), ("autoSync", 4), ("autoSyncFreeRun", 5)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: gpsInput.setStatus('current')
ism = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 43), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ism.setStatus('current')
hiPriority = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 44), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hiPriority.setStatus('obsolete')
userName = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 45), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: userName.setStatus('current')
userPassword = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 46), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: userPassword.setStatus('current')
userAccessLevel = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 47), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: userAccessLevel.setStatus('current')
deleteUser = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 48), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: deleteUser.setStatus('current')
twoXRate = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 49), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: twoXRate.setStatus('obsolete')
lanDhcpState = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 50), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: lanDhcpState.setStatus('current')
sessionTimeout = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 51), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sessionTimeout.setStatus('current')
vlanMemberSource = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 52), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("configured", 0), ("active", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vlanMemberSource.setStatus('current')
addCustomFreqList = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 53), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: addCustomFreqList.setStatus('current')
removeCustomFreqList = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 54), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: removeCustomFreqList.setStatus('current')
allowColocation = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 55), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: allowColocation.setStatus('obsolete')
changeUsrPwd = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 56), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: changeUsrPwd.setStatus('current')
mngtIP2 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 57), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mngtIP2.setStatus('current')
subnetMask2 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 58), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: subnetMask2.setStatus('current')
mngtIP3 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 59), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mngtIP3.setStatus('current')
subnetMask3 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 60), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: subnetMask3.setStatus('current')
mngtIP4 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 61), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mngtIP4.setStatus('current')
subnetMask4 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 62), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: subnetMask4.setStatus('current')
mngtIP5 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 63), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mngtIP5.setStatus('current')
subnetMask5 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 64), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: subnetMask5.setStatus('current')
mngtIP6 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 65), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mngtIP6.setStatus('current')
subnetMask6 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 66), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: subnetMask6.setStatus('current')
mngtIP7 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 67), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mngtIP7.setStatus('current')
subnetMask7 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 68), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: subnetMask7.setStatus('current')
mngtIP8 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 69), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mngtIP8.setStatus('current')
subnetMask8 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 70), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: subnetMask8.setStatus('current')
mngtIP9 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 71), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mngtIP9.setStatus('current')
subnetMask9 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 72), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: subnetMask9.setStatus('current')
mngtIP10 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 73), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mngtIP10.setStatus('current')
subnetMask10 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 74), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: subnetMask10.setStatus('current')
bhvlanEnable = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 75), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: bhvlanEnable.setStatus('current')
lldpBroadcastEnable = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 76), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: lldpBroadcastEnable.setStatus('current')
regionCode = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 77), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 6, 5, 3, 8, 10, 11, 7, 9, 2, 1, 4))).clone(namedValues=NamedValues(("none", 0), ("australia", 6), ("brazil", 5), ("canada", 3), ("india", 8), ("indonesia", 10), ("ireland", 11), ("russia", 7), ("spain", 9), ("us", 2), ("other", 1), ("europe", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: regionCode.setStatus('deprecated')
russiaRegion = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 78), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("category1", 1), ("category2", 2), ("category3", 3), ("category4", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: russiaRegion.setStatus('deprecated')
commStringROnly = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 79), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: commStringROnly.setStatus('current')
ethernetLinkSpeed = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 80), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 4, 8, 3, 5, 7, 12, 13, 15, 63))).clone(namedValues=NamedValues(("forced10H", 1), ("forced10F", 2), ("forced100H", 4), ("forced100F", 8), ("auto10F-10H", 3), ("auto100H-10H", 5), ("auto100H-10F-10H", 7), ("auto100F-100H", 12), ("auto100F-100H-10H", 13), ("auto100F-100H-10F-10H", 15), ("auto1000F-100F-100H-10F-10H", 63)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ethernetLinkSpeed.setStatus('current')
cyclicPrefix = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 81), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("one-quarter", 0), ("one-eighth", 1), ("one-sixteenth", 2), ("one-quarter-one-eighth", 3), ("one-quarter-one-sixteenth", 4), ("one-eighth-one-sixteenth", 5), ("one-quarter-one-eighth-one-sixteenth", 6)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cyclicPrefix.setStatus('current')
numberCustomFreq = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 82), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: numberCustomFreq.setStatus('current')
channelBandwidth = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 83), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: channelBandwidth.setStatus('current')
setDefaults = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 84), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("noChangeOrUndoFactoryDefaults", 0), ("setToFactoryDefaults", 1), ("factoryDefaultsSet-AwaitingReboot", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: setDefaults.setStatus('current')
radioRateAdapt = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 85), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 4, 1, 5, 2, 3, 7, 8, 9))).clone(namedValues=NamedValues(("onex", 0), ("onexmimo", 4), ("onextwox", 1), ("onextwoxmimo", 5), ("onextwoxthreex", 2), ("onextwoxthreexfourx", 3), ("onextwoxfourx", 7), ("onextwoxfourxsixx", 8), ("onextwoxfourxsixxeightx", 9)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: radioRateAdapt.setStatus('current')
siteInfoViewable = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 86), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 0))).clone(namedValues=NamedValues(("enable", 1), ("disable", 0)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: siteInfoViewable.setStatus('current')
largeVCQ = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 87), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 0))).clone(namedValues=NamedValues(("enable", 1), ("disable", 0)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: largeVCQ.setStatus('current')
latitude = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 88), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: latitude.setStatus('current')
longitude = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 89), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: longitude.setStatus('current')
height = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 90), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-2147483647, 2147483647))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: height.setStatus('current')
bandwidth = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 91), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 3, 5, 8, 9, 10, 11))).clone(namedValues=NamedValues(("bandwidth5mhz", 1), ("bandwidth10mhz", 3), ("bandwidth20mhz", 5), ("bandwidth5-10mhz", 8), ("bandwidth5-20mhz", 9), ("bandwidth10-20mhz", 10), ("bandwidth5-10-20mhz", 11)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: bandwidth.setStatus('current')
dataScramblingMethod = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 92), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("legacyDataScrambling", 0), ("r10DataScrambling", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dataScramblingMethod.setStatus('obsolete')
portVID = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 93), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portVID.setStatus('current')
radioRateAdaptUL = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 94), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 4, 1, 5, 2, 3, 7, 8, 9))).clone(namedValues=NamedValues(("onex", 0), ("onexmimo", 4), ("onextwox", 1), ("onextwoxmimo", 5), ("onextwoxthreex", 2), ("onextwoxthreexfourx", 3), ("onextwoxfourx", 7), ("onextwoxfourxsixx", 8), ("onextwoxfourxsixxeightx", 9)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: radioRateAdaptUL.setStatus('current')
providerVID = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 95), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: providerVID.setStatus('current')
mac1VIDMapAddr = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 96), MacAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mac1VIDMapAddr.setStatus('current')
mac1VIDMapVid = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 97), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mac1VIDMapVid.setStatus('current')
mac2VIDMapAddr = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 98), MacAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mac2VIDMapAddr.setStatus('current')
mac2VIDMapVid = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 99), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mac2VIDMapVid.setStatus('current')
mac3VIDMapAddr = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 100), MacAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mac3VIDMapAddr.setStatus('current')
mac3VIDMapVid = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 101), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mac3VIDMapVid.setStatus('current')
mac4VIDMapAddr = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 102), MacAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mac4VIDMapAddr.setStatus('current')
mac4VIDMapVid = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 103), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mac4VIDMapVid.setStatus('current')
mac5VIDMapAddr = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 104), MacAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mac5VIDMapAddr.setStatus('current')
mac5VIDMapVid = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 105), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mac5VIDMapVid.setStatus('current')
mac6VIDMapAddr = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 106), MacAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mac6VIDMapAddr.setStatus('current')
mac6VIDMapVid = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 107), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mac6VIDMapVid.setStatus('current')
mac7VIDMapAddr = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 108), MacAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mac7VIDMapAddr.setStatus('current')
mac7VIDMapVid = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 109), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mac7VIDMapVid.setStatus('current')
mac8VIDMapAddr = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 110), MacAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mac8VIDMapAddr.setStatus('current')
mac8VIDMapVid = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 111), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mac8VIDMapVid.setStatus('current')
mac9VIDMapAddr = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 112), MacAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mac9VIDMapAddr.setStatus('current')
mac9VIDMapVid = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 113), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mac9VIDMapVid.setStatus('current')
mac10VIDMapAddr = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 114), MacAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mac10VIDMapAddr.setStatus('current')
mac10VIDMapVid = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 115), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mac10VIDMapVid.setStatus('current')
vlanPortType = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 116), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("q", 0), ("qinq", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vlanPortType.setStatus('current')
vlanAcceptQinQFrames = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 117), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vlanAcceptQinQFrames.setStatus('current')
whispWebUserAccessMode = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 118), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("local", 0), ("remote", 1), ("remotethenlocal", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: whispWebUserAccessMode.setStatus('current')
usrAccountEnableAccounting = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 119), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("disable", 0), ("deviceAccess", 1), ("dataUsage", 2), ("all", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: usrAccountEnableAccounting.setStatus('current')
allowRejectThenLocal = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 120), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("doNotAllowLocalAuthifAAAReject", 0), ("allowLocalAuthIfAAAReject", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: allowRejectThenLocal.setStatus('current')
snrCalculation = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 121), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 0))).clone(namedValues=NamedValues(("enable", 1), ("disable", 0)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: snrCalculation.setStatus('deprecated')
priorityPrecedence = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 122), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("eight021pThenDiffServ", 0), ("diffservThenEight021p", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: priorityPrecedence.setStatus('current')
installationColorCode = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 123), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: installationColorCode.setStatus('current')
apSmMode = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 124), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 0))).clone(namedValues=NamedValues(("sm", 1), ("ap", 0)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: apSmMode.setStatus('current')
pppoeFilter = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 125), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("filterOff", 0), ("filterOn", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: pppoeFilter.setStatus('current')
smbFilter = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 126), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("filterOff", 0), ("filterOn", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: smbFilter.setStatus('current')
snmpFilter = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 127), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("filterOff", 0), ("filterOn", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: snmpFilter.setStatus('current')
userP1Filter = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 128), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("filterOff", 0), ("filterOn", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: userP1Filter.setStatus('current')
userP2Filter = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 129), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("filterOff", 0), ("filterOn", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: userP2Filter.setStatus('current')
userP3Filter = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 130), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("filterOff", 0), ("filterOn", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: userP3Filter.setStatus('current')
allOtherIpFilter = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 131), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("filterOff", 0), ("filterOn", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: allOtherIpFilter.setStatus('current')
allIpv4Filter = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 132), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("filterOff", 0), ("filterOn", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: allIpv4Filter.setStatus('current')
arpFilter = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 133), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("filterOff", 0), ("filterOn", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: arpFilter.setStatus('current')
allOthersFilter = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 134), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("filterOff", 0), ("filterOn", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: allOthersFilter.setStatus('current')
userDefinedPort1 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 135), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: userDefinedPort1.setStatus('current')
port1TCPFilter = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 136), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("filterOff", 0), ("filterOn", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: port1TCPFilter.setStatus('current')
port1UDPFilter = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 137), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("filterOff", 0), ("filterOn", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: port1UDPFilter.setStatus('current')
userDefinedPort2 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 138), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: userDefinedPort2.setStatus('current')
port2TCPFilter = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 139), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("filterOff", 0), ("filterOn", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: port2TCPFilter.setStatus('current')
port2UDPFilter = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 140), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("filterOff", 0), ("filterOn", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: port2UDPFilter.setStatus('current')
userDefinedPort3 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 141), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: userDefinedPort3.setStatus('current')
port3TCPFilter = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 142), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("filterOff", 0), ("filterOn", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: port3TCPFilter.setStatus('current')
port3UDPFilter = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 143), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("filterOff", 0), ("filterOn", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: port3UDPFilter.setStatus('current')
bootpcFilter = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 144), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("filterOff", 0), ("filterOn", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: bootpcFilter.setStatus('current')
bootpsFilter = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 145), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("filterOff", 0), ("filterOn", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: bootpsFilter.setStatus('current')
ip4MultFilter = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 146), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("filterOff", 0), ("filterOn", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ip4MultFilter.setStatus('current')
packetFilterDirection = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 147), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("upstream", 1), ("downstream", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: packetFilterDirection.setStatus('current')
encryptionConfig = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 148), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("des", 0), ("aes", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: encryptionConfig.setStatus('current')
pppoeCtlPriority = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 149), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("normal", 0), ("high", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: pppoeCtlPriority.setStatus('current')
ftpPort = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 150), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ftpPort.setStatus('current')
httpPort = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 151), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: httpPort.setStatus('current')
snmpPort = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 153), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: snmpPort.setStatus('current')
snmpTrapPort = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 154), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: snmpTrapPort.setStatus('current')
syslogDomainNameAppend = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 156), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disableDomain", 0), ("appendDomain", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: syslogDomainNameAppend.setStatus('current')
syslogServerAddr = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 157), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: syslogServerAddr.setStatus('current')
syslogServerPort = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 158), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: syslogServerPort.setStatus('current')
syslogMinLevel = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 159), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("fatal", 0), ("alert", 1), ("critical", 2), ("error", 3), ("warning", 4), ("notice", 5), ("info", 6), ("debug", 7)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: syslogMinLevel.setStatus('current')
lan1DhcpRelease = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 201), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1))).clone(namedValues=NamedValues(("releaseIP", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: lan1DhcpRelease.setStatus('current')
lan1DhcpRenew = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 202), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1))).clone(namedValues=NamedValues(("renewIP", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: lan1DhcpRenew.setStatus('current')
lan3DhcpRelease = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 203), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1))).clone(namedValues=NamedValues(("releaseIP", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: lan3DhcpRelease.setStatus('current')
lan3DhcpRenew = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 204), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1))).clone(namedValues=NamedValues(("renewIP", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: lan3DhcpRenew.setStatus('current')
natDhcpRelease = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 205), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1))).clone(namedValues=NamedValues(("releaseIP", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: natDhcpRelease.setStatus('current')
natDhcpRenew = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 206), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1))).clone(namedValues=NamedValues(("renewIP", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: natDhcpRenew.setStatus('current')
region = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 207), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 6, 3, 2, 5, 4, 7))).clone(namedValues=NamedValues(("none", 0), ("otherRegulatory", 1), ("asia", 6), ("europe", 3), ("northAmerica", 2), ("oceania", 5), ("southAmerica", 4), ("africa", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: region.setStatus('current')
regionAsia = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 208), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 2, 3, 4, 5, 6, 7, 8))).clone(namedValues=NamedValues(("none", 0), ("india", 2), ("indonesia", 3), ("russiacategory1", 4), ("russiacategory2", 5), ("russiacategory3", 6), ("russiacategory4", 7), ("vietnam", 8)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: regionAsia.setStatus('current')
regionEurope = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 209), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14))).clone(namedValues=NamedValues(("none", 0), ("spain", 2), ("ireland", 3), ("denmark", 4), ("finland", 5), ("germany", 6), ("greece", 7), ("iceland", 8), ("liechtenstein", 9), ("norway", 10), ("portugal", 11), ("switzerland", 12), ("serbia", 13), ("unitedkingdom", 14)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: regionEurope.setStatus('current')
regionNorthAmerica = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 210), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 3, 2, 4))).clone(namedValues=NamedValues(("none", 0), ("canada", 3), ("unitedStates", 2), ("mexico", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: regionNorthAmerica.setStatus('current')
regionOceania = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 211), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 2))).clone(namedValues=NamedValues(("none", 0), ("australia", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: regionOceania.setStatus('current')
regionSouthAmerica = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 212), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 2))).clone(namedValues=NamedValues(("none", 0), ("brazil", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: regionSouthAmerica.setStatus('current')
regionOtherRegulatory = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 213), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("none", 0), ("other", 1), ("otherFCC", 2), ("otherETSI", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: regionOtherRegulatory.setStatus('current')
interleave = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 214), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("default", 0), ("disabled", 1), ("enabled", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: interleave.setStatus('current')
receiveQualityDebug = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 215), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 0))).clone(namedValues=NamedValues(("enable", 1), ("disable", 0)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: receiveQualityDebug.setStatus('current')
apType = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 216), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("standardAP", 0), ("remoteAP", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: apType.setStatus('current')
regionAfrica = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 217), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 2))).clone(namedValues=NamedValues(("none", 0), ("algeria", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: regionAfrica.setStatus('current')
addCustomFreqMimo = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 218), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: addCustomFreqMimo.setStatus('current')
removeCustomFreqMimo = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 219), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: removeCustomFreqMimo.setStatus('current')
timedSpectrumAnalysisDurationBox = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 220), Integer32().subtype(subtypeSpec=ValueRangeConstraint(10, 1000))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: timedSpectrumAnalysisDurationBox.setStatus('current')
spectrumAnalysisActionBox = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 2, 221), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("stopSpectrumAnalysis", 0), ("startTimedSpectrumAnalysis", 1), ("startContinuousSpectrumAnalysis", 2), ("idleNoSpectrumAnalysis", 3), ("idleCompleteSpectrumAnalysis", 4), ("inProgressTimedSpectrumAnalysis", 5), ("inProgressContinuousSpectrumAnalysis", 6), ("notReady", 7)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: spectrumAnalysisActionBox.setStatus('current')
saveFlash = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 3, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("doNotSaveToFlash", 0), ("saveToFlash", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: saveFlash.setStatus('obsolete')
reboot = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 3, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("finishedReboot", 0), ("reboot", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: reboot.setStatus('current')
clearEventLog = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 3, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("notClear", 0), ("clear", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: clearEventLog.setStatus('current')
rebootIfRequired = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 3, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("rebootNotRequired", 0), ("rebootRequired", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rebootIfRequired.setStatus('current')
clearBERStats = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 3, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("doNotClearBERStats", 0), ("clearBERStats", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: clearBERStats.setStatus('current')
updateDevice = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 3, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: updateDevice.setStatus('current')
whispBoxEvntLog = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 5, 1), EventString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: whispBoxEvntLog.setStatus('current')
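# Conformance section: the ObjectGroup/NotificationGroup definitions below
# collect the managed objects declared above into MIB conformance groups.
# Generated-code detail: whispBoxAttributesGroup has so many members that its
# object list is applied in chunks inside a loop, using append=True on
# pysnmp >= 4.4.2 so earlier chunks are not overwritten.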
whispBoxAttributesGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 6, 1, 1))
for _whispBoxAttributesGroup_obj in [[("WHISP-BOX-MIBV2-MIB", "vlanAcceptQinQFrames"), ("WHISP-BOX-MIBV2-MIB", "providerVID"), ("WHISP-BOX-MIBV2-MIB", "mac1VIDMapAddr"), ("WHISP-BOX-MIBV2-MIB", "mac1VIDMapVid"), ("WHISP-BOX-MIBV2-MIB", "mac2VIDMapAddr"), ("WHISP-BOX-MIBV2-MIB", "mac2VIDMapVid"), ("WHISP-BOX-MIBV2-MIB", "mac3VIDMapAddr"), ("WHISP-BOX-MIBV2-MIB", "mac3VIDMapVid"), ("WHISP-BOX-MIBV2-MIB", "mac4VIDMapAddr"), ("WHISP-BOX-MIBV2-MIB", "mac4VIDMapVid"), ("WHISP-BOX-MIBV2-MIB", "mac5VIDMapAddr"), ("WHISP-BOX-MIBV2-MIB", "mac5VIDMapVid"), ("WHISP-BOX-MIBV2-MIB", "mac6VIDMapAddr"), ("WHISP-BOX-MIBV2-MIB", "mac6VIDMapVid"), ("WHISP-BOX-MIBV2-MIB", "mac7VIDMapAddr"), ("WHISP-BOX-MIBV2-MIB", "mac7VIDMapVid"), ("WHISP-BOX-MIBV2-MIB", "mac8VIDMapAddr"), ("WHISP-BOX-MIBV2-MIB", "mac8VIDMapVid"), ("WHISP-BOX-MIBV2-MIB", "mac9VIDMapAddr"), ("WHISP-BOX-MIBV2-MIB", "mac9VIDMapVid"), ("WHISP-BOX-MIBV2-MIB", "mac10VIDMapAddr"), ("WHISP-BOX-MIBV2-MIB", "mac10VIDMapVid"), ("WHISP-BOX-MIBV2-MIB", "vlanPortType"), ("WHISP-BOX-MIBV2-MIB", "portVID"), ("WHISP-BOX-MIBV2-MIB", "timedSpectrumAnalysisDurationBox"), ("WHISP-BOX-MIBV2-MIB", "spectrumAnalysisActionBox"), ("WHISP-BOX-MIBV2-MIB", "calibrationStatusBox"), ("WHISP-BOX-MIBV2-MIB", "calibrationStatusBool"), ("WHISP-BOX-MIBV2-MIB", "agcGainRxCH1"), ("WHISP-BOX-MIBV2-MIB", "agcGainRxCH2"), ("WHISP-BOX-MIBV2-MIB", "whispBoxSoftwareVer"), ("WHISP-BOX-MIBV2-MIB", "whispBoxFPGAVer"), ("WHISP-BOX-MIBV2-MIB", "whispBoxEsn"), ("WHISP-BOX-MIBV2-MIB", "whispBoxBoot"), ("WHISP-BOX-MIBV2-MIB", "boxTemperature"), ("WHISP-BOX-MIBV2-MIB", "boxDeviceType"), ("WHISP-BOX-MIBV2-MIB", "boxDeviceTypeID"), ("WHISP-BOX-MIBV2-MIB", "boxEncryption"), ("WHISP-BOX-MIBV2-MIB", "etherLinkStatus"), ("WHISP-BOX-MIBV2-MIB", "boxFrequency"), ("WHISP-BOX-MIBV2-MIB", "platformVer"), ("WHISP-BOX-MIBV2-MIB", "platformType"), ("WHISP-BOX-MIBV2-MIB", "dhcpLanIp"), ("WHISP-BOX-MIBV2-MIB", "dhcpLanSubnetMask"), ("WHISP-BOX-MIBV2-MIB", "dhcpLanGateway"), ("WHISP-BOX-MIBV2-MIB", "dhcpRfPublicIp"), ("WHISP-BOX-MIBV2-MIB", "dhcpRfPublicSubnetMask"), ("WHISP-BOX-MIBV2-MIB", "dhcpRfPublicGateway"), ("WHISP-BOX-MIBV2-MIB", "lanDhcpStatus"), ("WHISP-BOX-MIBV2-MIB", "rfPublicDhcpStatus"), ("WHISP-BOX-MIBV2-MIB", "natDhcpStatus"), ("WHISP-BOX-MIBV2-MIB", "inSyncCount"), ("WHISP-BOX-MIBV2-MIB", "outSyncCount"), ("WHISP-BOX-MIBV2-MIB", "pllOutLockCount"), ("WHISP-BOX-MIBV2-MIB", "txCalFailure"), ("WHISP-BOX-MIBV2-MIB", "swVersion"), ("WHISP-BOX-MIBV2-MIB", "pldVersion"), ("WHISP-BOX-MIBV2-MIB", "platformInfo"), ("WHISP-BOX-MIBV2-MIB", "antType"), ("WHISP-BOX-MIBV2-MIB", "antPolarization"), ("WHISP-BOX-MIBV2-MIB", "packetOverloadCounter"), ("WHISP-BOX-MIBV2-MIB", "whispBoxP11Personality"), ("WHISP-BOX-MIBV2-MIB", "whispBoxP11FPGAType"), ("WHISP-BOX-MIBV2-MIB", "whispBoxP11BstrapFPGAVer"), ("WHISP-BOX-MIBV2-MIB", "numDFSDetections"), ("WHISP-BOX-MIBV2-MIB", "rxOverrunPkts"), ("WHISP-BOX-MIBV2-MIB", "boxTemperatureC"), ("WHISP-BOX-MIBV2-MIB", "boxTemperatureF"), ("WHISP-BOX-MIBV2-MIB", "linkNegoSpeed"), ("WHISP-BOX-MIBV2-MIB", "installationColorCode"), ("WHISP-BOX-MIBV2-MIB", "colorCode"), ("WHISP-BOX-MIBV2-MIB", "displayOnlyAccess"), ("WHISP-BOX-MIBV2-MIB", "fullAccess"), ("WHISP-BOX-MIBV2-MIB", "webAutoUpdate"), ("WHISP-BOX-MIBV2-MIB", "pass1Status"), ("WHISP-BOX-MIBV2-MIB", "pass2Status"), ("WHISP-BOX-MIBV2-MIB", "bridgeEntryTimeout"), ("WHISP-BOX-MIBV2-MIB", "snmpMibPerm"), ("WHISP-BOX-MIBV2-MIB", "bhTimingMode"), ("WHISP-BOX-MIBV2-MIB", "powerControl"), ("WHISP-BOX-MIBV2-MIB", "extFilterDelay"), 
("WHISP-BOX-MIBV2-MIB", "antennaGain"), ("WHISP-BOX-MIBV2-MIB", "eirp"), ("WHISP-BOX-MIBV2-MIB", "dynamicLearning"), ("WHISP-BOX-MIBV2-MIB", "managementVID"), ("WHISP-BOX-MIBV2-MIB", "agingTimeout"), ("WHISP-BOX-MIBV2-MIB", "frameType"), ("WHISP-BOX-MIBV2-MIB", "addVlanMember"), ("WHISP-BOX-MIBV2-MIB", "removeVlanMember"), ("WHISP-BOX-MIBV2-MIB", "scheduling"), ("WHISP-BOX-MIBV2-MIB", "transmitterOP"), ("WHISP-BOX-MIBV2-MIB", "bridgeEnable"), ("WHISP-BOX-MIBV2-MIB", "fecEnable"), ("WHISP-BOX-MIBV2-MIB", "trapIP1"), ("WHISP-BOX-MIBV2-MIB", "trapIP2"), ("WHISP-BOX-MIBV2-MIB", "trapIP3"), ("WHISP-BOX-MIBV2-MIB", "trapIP4"), ("WHISP-BOX-MIBV2-MIB", "trapIP5"), ("WHISP-BOX-MIBV2-MIB", "trapIP6"), ("WHISP-BOX-MIBV2-MIB", "trapIP7"), ("WHISP-BOX-MIBV2-MIB", "trapIP8"), ("WHISP-BOX-MIBV2-MIB", "trapIP9"), ("WHISP-BOX-MIBV2-MIB", "trapIP10"), ("WHISP-BOX-MIBV2-MIB", "commStringRWrite"), ("WHISP-BOX-MIBV2-MIB", "subnetMask"), ("WHISP-BOX-MIBV2-MIB", "mngtIP"), ("WHISP-BOX-MIBV2-MIB", "allowVIDAccess"), ("WHISP-BOX-MIBV2-MIB", "setDefaultPlug"), ("WHISP-BOX-MIBV2-MIB", "hwsCompatibility"), ("WHISP-BOX-MIBV2-MIB", "gpsInput"), ("WHISP-BOX-MIBV2-MIB", "ism"), ("WHISP-BOX-MIBV2-MIB", "hiPriority"), ("WHISP-BOX-MIBV2-MIB", "userName"), ("WHISP-BOX-MIBV2-MIB", "userPassword"), ("WHISP-BOX-MIBV2-MIB", "userAccessLevel"), ("WHISP-BOX-MIBV2-MIB", "deleteUser"), ("WHISP-BOX-MIBV2-MIB", "twoXRate"), ("WHISP-BOX-MIBV2-MIB", "lanDhcpState"), ("WHISP-BOX-MIBV2-MIB", "dnsIpState"), ("WHISP-BOX-MIBV2-MIB", "sessionTimeout"), ("WHISP-BOX-MIBV2-MIB", "vlanMemberSource"), ("WHISP-BOX-MIBV2-MIB", "addCustomFreqList"), ("WHISP-BOX-MIBV2-MIB", "removeCustomFreqList"), ("WHISP-BOX-MIBV2-MIB", "allowColocation"), ("WHISP-BOX-MIBV2-MIB", "changeUsrPwd"), ("WHISP-BOX-MIBV2-MIB", "mngtIP2"), ("WHISP-BOX-MIBV2-MIB", "subnetMask2"), ("WHISP-BOX-MIBV2-MIB", "mngtIP3"), ("WHISP-BOX-MIBV2-MIB", "subnetMask3"), ("WHISP-BOX-MIBV2-MIB", "mngtIP4"), ("WHISP-BOX-MIBV2-MIB", "subnetMask4"), ("WHISP-BOX-MIBV2-MIB", "mngtIP5"), ("WHISP-BOX-MIBV2-MIB", "subnetMask5"), ("WHISP-BOX-MIBV2-MIB", "mngtIP6"), ("WHISP-BOX-MIBV2-MIB", "subnetMask6"), ("WHISP-BOX-MIBV2-MIB", "mngtIP7"), ("WHISP-BOX-MIBV2-MIB", "subnetMask7"), ("WHISP-BOX-MIBV2-MIB", "mngtIP8"), ("WHISP-BOX-MIBV2-MIB", "subnetMask8"), ("WHISP-BOX-MIBV2-MIB", "mngtIP9"), ("WHISP-BOX-MIBV2-MIB", "subnetMask9"), ("WHISP-BOX-MIBV2-MIB", "mngtIP10"), ("WHISP-BOX-MIBV2-MIB", "subnetMask10"), ("WHISP-BOX-MIBV2-MIB", "bhvlanEnable"), ("WHISP-BOX-MIBV2-MIB", "lldpBroadcastEnable"), ("WHISP-BOX-MIBV2-MIB", "radioRateAdapt"), ("WHISP-BOX-MIBV2-MIB", "fpgaBuildDate"), ("WHISP-BOX-MIBV2-MIB", "fpgaCompileInfo"), ("WHISP-BOX-MIBV2-MIB", "syslogDomainNameAppend"), ("WHISP-BOX-MIBV2-MIB", "syslogServerAddr"), ("WHISP-BOX-MIBV2-MIB", "syslogServerPort"), ("WHISP-BOX-MIBV2-MIB", "syslogMinLevel"), ("WHISP-BOX-MIBV2-MIB", "syslogStatTxSuccesses"), ("WHISP-BOX-MIBV2-MIB", "syslogStatDropped"), ("WHISP-BOX-MIBV2-MIB", "apType"), ("WHISP-BOX-MIBV2-MIB", "apSmMode"), ("WHISP-BOX-MIBV2-MIB", "region"), ("WHISP-BOX-MIBV2-MIB", "regionCode"), ("WHISP-BOX-MIBV2-MIB", "regionAsia"), ("WHISP-BOX-MIBV2-MIB", "regionEurope"), ("WHISP-BOX-MIBV2-MIB", "regionNorthAmerica"), ("WHISP-BOX-MIBV2-MIB", "regionOceania"), ("WHISP-BOX-MIBV2-MIB", "regionSouthAmerica"), ("WHISP-BOX-MIBV2-MIB", "regionAfrica"), ("WHISP-BOX-MIBV2-MIB", "regionOtherRegulatory"), ("WHISP-BOX-MIBV2-MIB", "radioRateAdaptUL"), ("WHISP-BOX-MIBV2-MIB", "dnsPrimaryMgmtIP"), ("WHISP-BOX-MIBV2-MIB", "dnsAlternateMgmtIP"), ("WHISP-BOX-MIBV2-MIB", 
"dnsMgmtDomainName"), ("WHISP-BOX-MIBV2-MIB", "addCustomFreqMimo"), ("WHISP-BOX-MIBV2-MIB", "removeCustomFreqMimo"), ("WHISP-BOX-MIBV2-MIB", "ftpPort"), ("WHISP-BOX-MIBV2-MIB", "httpPort"), ("WHISP-BOX-MIBV2-MIB", "snmpPort"), ("WHISP-BOX-MIBV2-MIB", "snmpTrapPort"), ("WHISP-BOX-MIBV2-MIB", "lan1DhcpRelease"), ("WHISP-BOX-MIBV2-MIB", "lan1DhcpRenew"), ("WHISP-BOX-MIBV2-MIB", "lan3DhcpRelease"), ("WHISP-BOX-MIBV2-MIB", "lan3DhcpRenew"), ("WHISP-BOX-MIBV2-MIB", "natDhcpRelease"), ("WHISP-BOX-MIBV2-MIB", "natDhcpRenew"), ("WHISP-BOX-MIBV2-MIB", "radioEngKeyed"), ("WHISP-BOX-MIBV2-MIB", "priorityPrecedence"), ("WHISP-BOX-MIBV2-MIB", "pppoeCtlPriority"), ("WHISP-BOX-MIBV2-MIB", "rfStatXmtUDataCnt"), ("WHISP-BOX-MIBV2-MIB", "rfStatXmtBDataCnt"), ("WHISP-BOX-MIBV2-MIB", "rfStatXmtMDataCnt"), ("WHISP-BOX-MIBV2-MIB", "rfStatRcvUDataCnt"), ("WHISP-BOX-MIBV2-MIB", "rfStatRcvBDataCnt"), ("WHISP-BOX-MIBV2-MIB", "rfStatRcvMDataCnt"), ("WHISP-BOX-MIBV2-MIB", "rfStatXmtCntlCnt"), ("WHISP-BOX-MIBV2-MIB", "rfStatRcvCntlCnt"), ("WHISP-BOX-MIBV2-MIB", "rfStatInSyncCount"), ("WHISP-BOX-MIBV2-MIB", "rfStatOutSyncCount"), ("WHISP-BOX-MIBV2-MIB", "rfStatOverrunCount"), ("WHISP-BOX-MIBV2-MIB", "rfStatUnderrunCount"), ("WHISP-BOX-MIBV2-MIB", "rfStatRcvCorruptDataCount"), ("WHISP-BOX-MIBV2-MIB", "rfStatRcvCorruptControlCount"), ("WHISP-BOX-MIBV2-MIB", "rfStatBadBcastCtlCnt"), ("WHISP-BOX-MIBV2-MIB", "rfStatPLLOutOfLockCnt"), ("WHISP-BOX-MIBV2-MIB", "rfStatBeaconVerMismatchCnt"), ("WHISP-BOX-MIBV2-MIB", "rfStatBadFreqBcnRcvCnt"), ("WHISP-BOX-MIBV2-MIB", "rfStatnonLiteBcnRcvCnt"), ("WHISP-BOX-MIBV2-MIB", "rfStatUnsupFeatBcnRcvCnt"), ("WHISP-BOX-MIBV2-MIB", "rfStatUnkwnFeatBcnRcvCnt"), ("WHISP-BOX-MIBV2-MIB", "rfStatTxCalFailCnt"), ("WHISP-BOX-MIBV2-MIB", "rfStatBadInSyncIDRcv"), ("WHISP-BOX-MIBV2-MIB", "rfStatTempOutOfRange"), ("WHISP-BOX-MIBV2-MIB", "rfStatRSSIOutOfRange"), ("WHISP-BOX-MIBV2-MIB", "rfStatRangeCapEnf"), ("WHISP-BOX-MIBV2-MIB", "rfStatRcvLTStart"), ("WHISP-BOX-MIBV2-MIB", "rfStatRcvLTStartHS"), ("WHISP-BOX-MIBV2-MIB", "rfStatRcvLTResult"), ("WHISP-BOX-MIBV2-MIB", "rfStatXmtLTResult"), ("WHISP-BOX-MIBV2-MIB", "whispFeatureKeyOrigin"), ("WHISP-BOX-MIBV2-MIB", "updateStatus"), ("WHISP-BOX-MIBV2-MIB", "bridgeCbFecStatbin"), ("WHISP-BOX-MIBV2-MIB", "bridgeCbFecStatbout"), ("WHISP-BOX-MIBV2-MIB", "bridgeCbFecStatbtoss"), ("WHISP-BOX-MIBV2-MIB", "bridgeCbFecStatbtosscap"), ("WHISP-BOX-MIBV2-MIB", "bridgeCbFecStatuin"), ("WHISP-BOX-MIBV2-MIB", "bridgeCbFecStatuout"), ("WHISP-BOX-MIBV2-MIB", "bridgeCbFecStatutoss"), ("WHISP-BOX-MIBV2-MIB", "bridgeCbFecStatutosscap"), ("WHISP-BOX-MIBV2-MIB", "bridgeCbRFStatbin"), ("WHISP-BOX-MIBV2-MIB", "bridgeCbRFStatbout"), ("WHISP-BOX-MIBV2-MIB", "bridgeCbFecStatfloods"), ("WHISP-BOX-MIBV2-MIB", "bridgeCbRFStatfloods"), ("WHISP-BOX-MIBV2-MIB", "bridgeCbRFStatbtoss"), ("WHISP-BOX-MIBV2-MIB", "bridgeCbRFStatbtosscap"), ("WHISP-BOX-MIBV2-MIB", "bridgeCbRFStatuin"), ("WHISP-BOX-MIBV2-MIB", "bridgeCbRFStatuout"), ("WHISP-BOX-MIBV2-MIB", "bridgeCbRFStatutoss"), ("WHISP-BOX-MIBV2-MIB", "bridgeCbRFStatutosscap"), ("WHISP-BOX-MIBV2-MIB", "bridgeCbErrStatNI1QSend"), ("WHISP-BOX-MIBV2-MIB", "bridgeCbErrStatNI2QSend"), ("WHISP-BOX-MIBV2-MIB", "bridgeCbErrStatBridgeFull"), ("WHISP-BOX-MIBV2-MIB", "bridgeCbErrStatSendMsg"), ("WHISP-BOX-MIBV2-MIB", "bridgeCbErrStatAPFecQSend"), ("WHISP-BOX-MIBV2-MIB", "bridgeCbErrStatApRfQSend"), ("WHISP-BOX-MIBV2-MIB", "fecStatLinkDetected"), ("WHISP-BOX-MIBV2-MIB", "fecStatLinkLost"), ("WHISP-BOX-MIBV2-MIB", "fecInDiscardsCount"), ("WHISP-BOX-MIBV2-MIB", 
"fecInErrorsCount"), ("WHISP-BOX-MIBV2-MIB", "fecOutDiscardsCount"), ("WHISP-BOX-MIBV2-MIB", "fecOutErrorsCount"), ("WHISP-BOX-MIBV2-MIB", "rfInDiscardsCount"), ("WHISP-BOX-MIBV2-MIB", "rfInErrorsCount"), ("WHISP-BOX-MIBV2-MIB", "rfOutDiscardsCount"), ("WHISP-BOX-MIBV2-MIB", "rfOutErrorsCount"), ("WHISP-BOX-MIBV2-MIB", "fecInDiscardsOverloadCount"), ("WHISP-BOX-MIBV2-MIB", "fecOutDiscardsOverloadCount"), ("WHISP-BOX-MIBV2-MIB", "rfInDiscardsOverloadCount"), ("WHISP-BOX-MIBV2-MIB", "rfOutDiscardsOverloadCount"), ("WHISP-BOX-MIBV2-MIB", "interleave")], [("WHISP-BOX-MIBV2-MIB", "radioMSN"), ("WHISP-BOX-MIBV2-MIB", "latitude"), ("WHISP-BOX-MIBV2-MIB", "longitude"), ("WHISP-BOX-MIBV2-MIB", "height"), ("WHISP-BOX-MIBV2-MIB", "bandwidth"), ("WHISP-BOX-MIBV2-MIB", "dataScramblingMethod"), ("WHISP-BOX-MIBV2-MIB", "whispWebUserAccessMode"), ("WHISP-BOX-MIBV2-MIB", "usrAccountEnableAccounting"), ("WHISP-BOX-MIBV2-MIB", "allowRejectThenLocal"), ("WHISP-BOX-MIBV2-MIB", "pppoeFilter"), ("WHISP-BOX-MIBV2-MIB", "smbFilter"), ("WHISP-BOX-MIBV2-MIB", "snmpFilter"), ("WHISP-BOX-MIBV2-MIB", "userP1Filter"), ("WHISP-BOX-MIBV2-MIB", "userP2Filter"), ("WHISP-BOX-MIBV2-MIB", "userP3Filter"), ("WHISP-BOX-MIBV2-MIB", "allOtherIpFilter"), ("WHISP-BOX-MIBV2-MIB", "allIpv4Filter"), ("WHISP-BOX-MIBV2-MIB", "arpFilter"), ("WHISP-BOX-MIBV2-MIB", "allOthersFilter"), ("WHISP-BOX-MIBV2-MIB", "userDefinedPort1"), ("WHISP-BOX-MIBV2-MIB", "port1TCPFilter"), ("WHISP-BOX-MIBV2-MIB", "port1UDPFilter"), ("WHISP-BOX-MIBV2-MIB", "userDefinedPort2"), ("WHISP-BOX-MIBV2-MIB", "port2TCPFilter"), ("WHISP-BOX-MIBV2-MIB", "port2UDPFilter"), ("WHISP-BOX-MIBV2-MIB", "userDefinedPort3"), ("WHISP-BOX-MIBV2-MIB", "port3TCPFilter"), ("WHISP-BOX-MIBV2-MIB", "port3UDPFilter"), ("WHISP-BOX-MIBV2-MIB", "bootpcFilter"), ("WHISP-BOX-MIBV2-MIB", "bootpsFilter"), ("WHISP-BOX-MIBV2-MIB", "ip4MultFilter"), ("WHISP-BOX-MIBV2-MIB", "packetFilterDirection"), ("WHISP-BOX-MIBV2-MIB", "encryptionConfig")]]:
    if getattr(mibBuilder, 'version', (0, 0, 0)) < (4, 4, 2):
# WARNING: leading objects get lost here!
whispBoxAttributesGroup = whispBoxAttributesGroup.setObjects(*_whispBoxAttributesGroup_obj)
else:
whispBoxAttributesGroup = whispBoxAttributesGroup.setObjects(*_whispBoxAttributesGroup_obj, **dict(append=True))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
whispBoxAttributesGroup = whispBoxAttributesGroup.setStatus('current')
whispBoxControlGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 6, 1, 2)).setObjects(("WHISP-BOX-MIBV2-MIB", "saveFlash"), ("WHISP-BOX-MIBV2-MIB", "reboot"), ("WHISP-BOX-MIBV2-MIB", "clearEventLog"), ("WHISP-BOX-MIBV2-MIB", "rebootIfRequired"), ("WHISP-BOX-MIBV2-MIB", "clearBERStats"), ("WHISP-BOX-MIBV2-MIB", "updateDevice"), ("WHISP-BOX-MIBV2-MIB", "siteInfoViewable"), ("WHISP-BOX-MIBV2-MIB", "largeVCQ"), ("WHISP-BOX-MIBV2-MIB", "snrCalculation"), ("WHISP-BOX-MIBV2-MIB", "receiveQualityDebug"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
whispBoxControlGroup = whispBoxControlGroup.setStatus('current')
whispBoxBTGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 6, 1, 3)).setObjects(("WHISP-BOX-MIBV2-MIB", "whispBridgeMacAddr"), ("WHISP-BOX-MIBV2-MIB", "whispBridgeDesLuid"), ("WHISP-BOX-MIBV2-MIB", "whispBridgeAge"), ("WHISP-BOX-MIBV2-MIB", "whispBridgeExt"), ("WHISP-BOX-MIBV2-MIB", "whispBridgeHash"), ("WHISP-BOX-MIBV2-MIB", "whispBridgeCAM"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
whispBoxBTGroup = whispBoxBTGroup.setStatus('current')
whispBoxVLANTableGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 6, 1, 4)).setObjects(("WHISP-BOX-MIBV2-MIB", "whispVID"), ("WHISP-BOX-MIBV2-MIB", "whispVType"), ("WHISP-BOX-MIBV2-MIB", "whispVAge"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
whispBoxVLANTableGroup = whispBoxVLANTableGroup.setStatus('current')
whispBoxCPTableGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 6, 1, 5)).setObjects(("WHISP-BOX-MIBV2-MIB", "codePoint0"), ("WHISP-BOX-MIBV2-MIB", "codePoint1"), ("WHISP-BOX-MIBV2-MIB", "codePoint2"), ("WHISP-BOX-MIBV2-MIB", "codePoint3"), ("WHISP-BOX-MIBV2-MIB", "codePoint4"), ("WHISP-BOX-MIBV2-MIB", "codePoint5"), ("WHISP-BOX-MIBV2-MIB", "codePoint6"), ("WHISP-BOX-MIBV2-MIB", "codePoint7"), ("WHISP-BOX-MIBV2-MIB", "codePoint8"), ("WHISP-BOX-MIBV2-MIB", "codePoint9"), ("WHISP-BOX-MIBV2-MIB", "codePoint10"), ("WHISP-BOX-MIBV2-MIB", "codePoint11"), ("WHISP-BOX-MIBV2-MIB", "codePoint12"), ("WHISP-BOX-MIBV2-MIB", "codePoint13"), ("WHISP-BOX-MIBV2-MIB", "codePoint14"), ("WHISP-BOX-MIBV2-MIB", "codePoint15"), ("WHISP-BOX-MIBV2-MIB", "codePoint16"), ("WHISP-BOX-MIBV2-MIB", "codePoint17"), ("WHISP-BOX-MIBV2-MIB", "codePoint18"), ("WHISP-BOX-MIBV2-MIB", "codePoint19"), ("WHISP-BOX-MIBV2-MIB", "codePoint20"), ("WHISP-BOX-MIBV2-MIB", "codePoint21"), ("WHISP-BOX-MIBV2-MIB", "codePoint22"), ("WHISP-BOX-MIBV2-MIB", "codePoint23"), ("WHISP-BOX-MIBV2-MIB", "codePoint24"), ("WHISP-BOX-MIBV2-MIB", "codePoint25"), ("WHISP-BOX-MIBV2-MIB", "codePoint26"), ("WHISP-BOX-MIBV2-MIB", "codePoint27"), ("WHISP-BOX-MIBV2-MIB", "codePoint28"), ("WHISP-BOX-MIBV2-MIB", "codePoint29"), ("WHISP-BOX-MIBV2-MIB", "codePoint30"), ("WHISP-BOX-MIBV2-MIB", "codePoint31"), ("WHISP-BOX-MIBV2-MIB", "codePoint32"), ("WHISP-BOX-MIBV2-MIB", "codePoint33"), ("WHISP-BOX-MIBV2-MIB", "codePoint34"), ("WHISP-BOX-MIBV2-MIB", "codePoint35"), ("WHISP-BOX-MIBV2-MIB", "codePoint36"), ("WHISP-BOX-MIBV2-MIB", "codePoint37"), ("WHISP-BOX-MIBV2-MIB", "codePoint38"), ("WHISP-BOX-MIBV2-MIB", "codePoint39"), ("WHISP-BOX-MIBV2-MIB", "codePoint40"), ("WHISP-BOX-MIBV2-MIB", "codePoint41"), ("WHISP-BOX-MIBV2-MIB", "codePoint42"), ("WHISP-BOX-MIBV2-MIB", "codePoint43"), ("WHISP-BOX-MIBV2-MIB", "codePoint44"), ("WHISP-BOX-MIBV2-MIB", "codePoint45"), ("WHISP-BOX-MIBV2-MIB", "codePoint46"), ("WHISP-BOX-MIBV2-MIB", "codePoint47"), ("WHISP-BOX-MIBV2-MIB", "codePoint48"), ("WHISP-BOX-MIBV2-MIB", "codePoint49"), ("WHISP-BOX-MIBV2-MIB", "codePoint50"), ("WHISP-BOX-MIBV2-MIB", "codePoint51"), ("WHISP-BOX-MIBV2-MIB", "codePoint52"), ("WHISP-BOX-MIBV2-MIB", "codePoint53"), ("WHISP-BOX-MIBV2-MIB", "codePoint54"), ("WHISP-BOX-MIBV2-MIB", "codePoint55"), ("WHISP-BOX-MIBV2-MIB", "codePoint56"), ("WHISP-BOX-MIBV2-MIB", "codePoint57"), ("WHISP-BOX-MIBV2-MIB", "codePoint58"), ("WHISP-BOX-MIBV2-MIB", "codePoint59"), ("WHISP-BOX-MIBV2-MIB", "codePoint60"), ("WHISP-BOX-MIBV2-MIB", "codePoint61"), ("WHISP-BOX-MIBV2-MIB", "codePoint62"), ("WHISP-BOX-MIBV2-MIB", "codePoint63"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
whispBoxCPTableGroup = whispBoxCPTableGroup.setStatus('current')
whispBoxUserTableGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 6, 1, 6)).setObjects(("WHISP-BOX-MIBV2-MIB", "entryIndex"), ("WHISP-BOX-MIBV2-MIB", "userLoginName"), ("WHISP-BOX-MIBV2-MIB", "userPswd"), ("WHISP-BOX-MIBV2-MIB", "accessLevel"), ("WHISP-BOX-MIBV2-MIB", "loginStatus"), ("WHISP-BOX-MIBV2-MIB", "loginMethod"), ("WHISP-BOX-MIBV2-MIB", "sessionTime"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
whispBoxUserTableGroup = whispBoxUserTableGroup.setStatus('current')
whispLayer2NeighborTableGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 6, 1, 7)).setObjects(("WHISP-BOX-MIBV2-MIB", "entryL2Index"), ("WHISP-BOX-MIBV2-MIB", "neighborMAC"), ("WHISP-BOX-MIBV2-MIB", "neighborIP"), ("WHISP-BOX-MIBV2-MIB", "neighborSiteName"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
whispLayer2NeighborTableGroup = whispLayer2NeighborTableGroup.setStatus('current')
whispBoxNotifGroup = NotificationGroup((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 6, 1, 8)).setObjects(("WHISP-BOX-MIBV2-MIB", "boxLan1DHCPClientEvent"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
whispBoxNotifGroup = whispBoxNotifGroup.setStatus('current')
whispBridgeTbUsed = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 7, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: whispBridgeTbUsed.setStatus('current')
whispBridgeTbFree = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 7, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: whispBridgeTbFree.setStatus('current')
whispBridgeTbErr = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 7, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: whispBridgeTbErr.setStatus('current')
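# codePoint0..codePoint63 appear to be per-code-point (DSCP 0-63) priority
# settings, one scalar per code point. Most are read-write; codePoint0,
# codePoint48 and codePoint56 are read-only in this MIB.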
codePoint0 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: codePoint0.setStatus('current')
codePoint1 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 2), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint1.setStatus('current')
codePoint2 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 3), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint2.setStatus('current')
codePoint3 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 4), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint3.setStatus('current')
codePoint4 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 5), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint4.setStatus('current')
codePoint5 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 6), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint5.setStatus('current')
codePoint6 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 7), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint6.setStatus('current')
codePoint7 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 8), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint7.setStatus('current')
codePoint8 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 9), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint8.setStatus('current')
codePoint9 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 10), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint9.setStatus('current')
codePoint10 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 11), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint10.setStatus('current')
codePoint11 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 12), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint11.setStatus('current')
codePoint12 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 13), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint12.setStatus('current')
codePoint13 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 14), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint13.setStatus('current')
codePoint14 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 15), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint14.setStatus('current')
codePoint15 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 16), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint15.setStatus('current')
codePoint16 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 17), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint16.setStatus('current')
codePoint17 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 18), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint17.setStatus('current')
codePoint18 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 19), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint18.setStatus('current')
codePoint19 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 20), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint19.setStatus('current')
codePoint20 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 21), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint20.setStatus('current')
codePoint21 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 22), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint21.setStatus('current')
codePoint22 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 23), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint22.setStatus('current')
codePoint23 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 24), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint23.setStatus('current')
codePoint24 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 25), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint24.setStatus('current')
codePoint25 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 26), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint25.setStatus('current')
codePoint26 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 27), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint26.setStatus('current')
codePoint27 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 28), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint27.setStatus('current')
codePoint28 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 29), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint28.setStatus('current')
codePoint29 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 30), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint29.setStatus('current')
codePoint30 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 31), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint30.setStatus('current')
codePoint31 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 32), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint31.setStatus('current')
codePoint32 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 33), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint32.setStatus('current')
codePoint33 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 34), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint33.setStatus('current')
codePoint34 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 35), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint34.setStatus('current')
codePoint35 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 36), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint35.setStatus('current')
codePoint36 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 37), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint36.setStatus('current')
codePoint37 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 38), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint37.setStatus('current')
codePoint38 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 39), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint38.setStatus('current')
codePoint39 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 40), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint39.setStatus('current')
codePoint40 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 41), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint40.setStatus('current')
codePoint41 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 42), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint41.setStatus('current')
codePoint42 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 43), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint42.setStatus('current')
codePoint43 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 44), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint43.setStatus('current')
codePoint44 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 45), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint44.setStatus('current')
codePoint45 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 46), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint45.setStatus('current')
codePoint46 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 47), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint46.setStatus('current')
codePoint47 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 48), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint47.setStatus('current')
codePoint48 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 49), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: codePoint48.setStatus('current')
codePoint49 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 50), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint49.setStatus('current')
codePoint50 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 51), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint50.setStatus('current')
codePoint51 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 52), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint51.setStatus('current')
codePoint52 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 53), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint52.setStatus('current')
codePoint53 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 54), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint53.setStatus('current')
codePoint54 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 55), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint54.setStatus('current')
codePoint55 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 56), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint55.setStatus('current')
codePoint56 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 57), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: codePoint56.setStatus('current')
codePoint57 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 58), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint57.setStatus('current')
codePoint58 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 59), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint58.setStatus('current')
codePoint59 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 60), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint59.setStatus('current')
codePoint60 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 61), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint60.setStatus('current')
codePoint61 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 62), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint61.setStatus('current')
codePoint62 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 63), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint62.setStatus('current')
codePoint63 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 9, 64), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: codePoint63.setStatus('current')
boxLan1DHCPClientEvent = NotificationType((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 12, 1, 1)).setObjects(("WHISP-BOX-MIBV2-MIB", "dhcpLanIp"), ("WHISP-BOX-MIBV2-MIB", "whispBoxEsn"))
if mibBuilder.loadTexts: boxLan1DHCPClientEvent.setStatus('current')
dnsIpState = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 13, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("manual", 0), ("automatic", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dnsIpState.setStatus('current')
dnsPrimaryMgmtIP = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 13, 2), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dnsPrimaryMgmtIP.setStatus('current')
dnsAlternateMgmtIP = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 13, 3), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dnsAlternateMgmtIP.setStatus('current')
dnsMgmtDomainName = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 13, 4), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dnsMgmtDomainName.setStatus('current')
trapDomainNameAppend = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 13, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disableDomain", 0), ("appendDomain", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: trapDomainNameAppend.setStatus('current')
trap1 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 13, 6), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: trap1.setStatus('current')
trap2 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 13, 7), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: trap2.setStatus('current')
trap3 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 13, 8), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: trap3.setStatus('current')
trap4 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 13, 9), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: trap4.setStatus('current')
trap5 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 13, 10), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: trap5.setStatus('current')
trap6 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 13, 11), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: trap6.setStatus('current')
trap7 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 13, 12), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: trap7.setStatus('current')
trap8 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 13, 13), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: trap8.setStatus('current')
trap9 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 13, 14), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: trap9.setStatus('current')
trap10 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 13, 15), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: trap10.setStatus('current')
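# RF tables: physical radios (indexed by radioIndex), their paths and supported
# frequencies, plus the per-radio configuration table with its frequency band.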
whispBoxRFPhysicalRadios = MibTable((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 15, 1), )
if mibBuilder.loadTexts: whispBoxRFPhysicalRadios.setStatus('current')
whispBoxRFPhysicalRadioEntry = MibTableRow((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 15, 1, 1), ).setIndexNames((0, "WHISP-BOX-MIBV2-MIB", "radioIndex"))
if mibBuilder.loadTexts: whispBoxRFPhysicalRadioEntry.setStatus('current')
radioIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 15, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 256))).setMaxAccess("readonly")
if mibBuilder.loadTexts: radioIndex.setStatus('current')
radioType = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 15, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("fsk", 0), ("ofdm", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: radioType.setStatus('current')
radioPaths = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 15, 1, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 256))).setMaxAccess("readonly")
if mibBuilder.loadTexts: radioPaths.setStatus('current')
whispBoxRFPhysicalRadioPaths = MibTable((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 15, 2), )
if mibBuilder.loadTexts: whispBoxRFPhysicalRadioPaths.setStatus('current')
whispBoxRFPhysicalRadioPathEntry = MibTableRow((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 15, 2, 1), ).setIndexNames((0, "WHISP-BOX-MIBV2-MIB", "radioIndex"), (0, "WHISP-BOX-MIBV2-MIB", "pathIndex"))
if mibBuilder.loadTexts: whispBoxRFPhysicalRadioPathEntry.setStatus('current')
pathIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 15, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 256))).setMaxAccess("readonly")
if mibBuilder.loadTexts: pathIndex.setStatus('current')
whispBoxRFPhysicalRadioFrequencies = MibTable((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 15, 3), )
if mibBuilder.loadTexts: whispBoxRFPhysicalRadioFrequencies.setStatus('current')
whispBoxRFPhysicalRadioFrequencyEntry = MibTableRow((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 15, 3, 1), ).setIndexNames((0, "WHISP-BOX-MIBV2-MIB", "radioIndex"), (0, "WHISP-BOX-MIBV2-MIB", "frequency"))
if mibBuilder.loadTexts: whispBoxRFPhysicalRadioFrequencyEntry.setStatus('current')
frequency = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 15, 3, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 900000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: frequency.setStatus('current')
whispBoxRFConfigRadios = MibTable((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 16, 1), )
if mibBuilder.loadTexts: whispBoxRFConfigRadios.setStatus('current')
whispBoxRFConfigRadioEntry = MibTableRow((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 16, 1, 1), ).setIndexNames((0, "WHISP-BOX-MIBV2-MIB", "radioConfigIndex"))
if mibBuilder.loadTexts: whispBoxRFConfigRadioEntry.setStatus('current')
radioConfigIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 16, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 256))).setMaxAccess("readonly")
if mibBuilder.loadTexts: radioConfigIndex.setStatus('current')
radioFrequencyBand = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 16, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12))).clone(namedValues=NamedValues(("band700", 0), ("band900", 1), ("band2400", 2), ("band3500", 3), ("band3700", 4), ("band4900", 5), ("band5100", 6), ("band5200", 7), ("band5400", 8), ("band5700", 9), ("band5800", 10), ("band5900", 11), ("band6050", 12)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: radioFrequencyBand.setStatus('current')
whispBoxBridgeTable = MibTable((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 4), )
if mibBuilder.loadTexts: whispBoxBridgeTable.setStatus('current')
whispBoxBridgeEntry = MibTableRow((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 4, 1), ).setIndexNames((0, "WHISP-BOX-MIBV2-MIB", "whispBridgeMacAddr"))
if mibBuilder.loadTexts: whispBoxBridgeEntry.setStatus('current')
whispBridgeMacAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 4, 1, 1), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: whispBridgeMacAddr.setStatus('current')
whispBridgeDesLuid = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 4, 1, 2), WhispLUID()).setMaxAccess("readonly")
if mibBuilder.loadTexts: whispBridgeDesLuid.setStatus('current')
whispBridgeAge = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 4, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: whispBridgeAge.setStatus('current')
whispBridgeExt = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 4, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: whispBridgeExt.setStatus('current')
whispBridgeHash = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 4, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: whispBridgeHash.setStatus('current')
whispBridgeCAM = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 4, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: whispBridgeCAM.setStatus('obsolete')
whispVLANTable = MibTable((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 8), )
if mibBuilder.loadTexts: whispVLANTable.setStatus('current')
whispVLANEntry = MibTableRow((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 8, 1), ).setIndexNames((0, "WHISP-BOX-MIBV2-MIB", "whispVID"))
if mibBuilder.loadTexts: whispVLANEntry.setStatus('current')
whispVID = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 8, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 4095))).setMaxAccess("readonly")
if mibBuilder.loadTexts: whispVID.setStatus('current')
whispVType = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 8, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: whispVType.setStatus('current')
whispVAge = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 8, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: whispVAge.setStatus('current')
whispUserTable = MibTable((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 10), )
if mibBuilder.loadTexts: whispUserTable.setStatus('current')
whispUserEntry = MibTableRow((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 10, 1), ).setIndexNames((0, "WHISP-BOX-MIBV2-MIB", "entryIndex"))
if mibBuilder.loadTexts: whispUserEntry.setStatus('current')
entryIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 10, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 5))).setMaxAccess("readonly")
if mibBuilder.loadTexts: entryIndex.setStatus('current')
userLoginName = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 10, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: userLoginName.setStatus('current')
userPswd = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 10, 1, 3), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: userPswd.setStatus('current')
accessLevel = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 10, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: accessLevel.setStatus('current')
loginStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 10, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: loginStatus.setStatus('current')
loginMethod = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 10, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: loginMethod.setStatus('current')
sessionTime = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 10, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sessionTime.setStatus('current')
whispLayer2NeighborTable = MibTable((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 11), )
if mibBuilder.loadTexts: whispLayer2NeighborTable.setStatus('current')
whispLayer2NeighborEntry = MibTableRow((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 11, 1), ).setIndexNames((0, "WHISP-BOX-MIBV2-MIB", "entryL2Index"))
if mibBuilder.loadTexts: whispLayer2NeighborEntry.setStatus('current')
entryL2Index = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 11, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 20))).setMaxAccess("readonly")
if mibBuilder.loadTexts: entryL2Index.setStatus('current')
neighborMAC = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 11, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: neighborMAC.setStatus('current')
neighborIP = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 11, 1, 3), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: neighborIP.setStatus('current')
neighborSiteName = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 11, 1, 4), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: neighborSiteName.setStatus('current')
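# Register every symbol defined above under the module name so that other MIB
# modules and the pysnmp engine can import and resolve them by (module, symbol).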
mibBuilder.exportSymbols("WHISP-BOX-MIBV2-MIB", lan3DhcpRenew=lan3DhcpRenew, mngtIP=mngtIP, rfStatOutSyncCount=rfStatOutSyncCount, hiPriority=hiPriority, trap7=trap7, codePoint25=codePoint25, allowVIDAccess=allowVIDAccess, codePoint46=codePoint46, codePoint59=codePoint59, bootpcFilter=bootpcFilter, rfStatRcvLTStartHS=rfStatRcvLTStartHS, mac10VIDMapVid=mac10VIDMapVid, lldpBroadcastEnable=lldpBroadcastEnable, mac7VIDMapAddr=mac7VIDMapAddr, inSyncCount=inSyncCount, codePoint17=codePoint17, codePoint27=codePoint27, ftpPort=ftpPort, codePoint20=codePoint20, whispBoxConf=whispBoxConf, fpgaCompileInfo=fpgaCompileInfo, whispBoxDHCPClientEvent=whispBoxDHCPClientEvent, longitude=longitude, mngtIP10=mngtIP10, rfOutErrorsCount=rfOutErrorsCount, bootpsFilter=bootpsFilter, mngtIP9=mngtIP9, codePoint19=codePoint19, codePoint22=codePoint22, dnsPrimaryMgmtIP=dnsPrimaryMgmtIP, dnsMgmtDomainName=dnsMgmtDomainName, syslogStatTxSuccesses=syslogStatTxSuccesses, codePoint2=codePoint2, rfInDiscardsOverloadCount=rfInDiscardsOverloadCount, whispBridgeExt=whispBridgeExt, bridgeCbFecStatfloods=bridgeCbFecStatfloods, bhTimingMode=bhTimingMode, russiaRegion=russiaRegion, entryIndex=entryIndex, regionEurope=regionEurope, removeCustomFreqList=removeCustomFreqList, codePoint10=codePoint10, mngtIP8=mngtIP8, whispBoxEvent=whispBoxEvent, rfStatRcvMDataCnt=rfStatRcvMDataCnt, accessLevel=accessLevel, codePoint7=codePoint7, whispBoxRFPhysical=whispBoxRFPhysical, mac1VIDMapVid=mac1VIDMapVid, syslogServerPort=syslogServerPort, codePoint32=codePoint32, pass2Status=pass2Status, allOtherIpFilter=allOtherIpFilter, snmpFilter=snmpFilter, rfStatUnderrunCount=rfStatUnderrunCount, snrCalculation=snrCalculation, syslogServerAddr=syslogServerAddr, codePoint33=codePoint33, userP3Filter=userP3Filter, bridgeCbFecStatbtosscap=bridgeCbFecStatbtosscap, whispBoxP11FPGAType=whispBoxP11FPGAType, portVID=portVID, whispBridgeCAM=whispBridgeCAM, trap5=trap5, mngtIP5=mngtIP5, bridgeEnable=bridgeEnable, mac8VIDMapAddr=mac8VIDMapAddr, mac6VIDMapAddr=mac6VIDMapAddr, radioConfigIndex=radioConfigIndex, codePoint55=codePoint55, whispBoxP11BstrapFPGAVer=whispBoxP11BstrapFPGAVer, whispLayer2NeighborEntry=whispLayer2NeighborEntry, sessionTimeout=sessionTimeout, whispFeatureKeyOrigin=whispFeatureKeyOrigin, whispUserTable=whispUserTable, mac10VIDMapAddr=mac10VIDMapAddr, userDefinedPort3=userDefinedPort3, changeUsrPwd=changeUsrPwd, bhModulation=bhModulation, bridgeCbFecStatutosscap=bridgeCbFecStatutosscap, region=region, bandwidth=bandwidth, lan3DhcpRelease=lan3DhcpRelease, whispWebUserAccessMode=whispWebUserAccessMode, syslogDomainNameAppend=syslogDomainNameAppend, whispBoxEsn=whispBoxEsn, transmitterOP=transmitterOP, vlanAcceptQinQFrames=vlanAcceptQinQFrames, radioType=radioType, trapIP2=trapIP2, siteInfoViewable=siteInfoViewable, vlanPortType=vlanPortType, subnetMask9=subnetMask9, subnetMask5=subnetMask5, boxDeviceType=boxDeviceType, sessionTime=sessionTime, rfOutDiscardsCount=rfOutDiscardsCount, channelBandwidth=channelBandwidth, fecInDiscardsOverloadCount=fecInDiscardsOverloadCount, allOthersFilter=allOthersFilter, mac8VIDMapVid=mac8VIDMapVid, pppoeFilter=pppoeFilter, rfStatXmtLTResult=rfStatXmtLTResult, trapIP9=trapIP9, trap3=trap3, dhcpRfPublicSubnetMask=dhcpRfPublicSubnetMask, whispBoxRFPhysicalRadioFrequencyEntry=whispBoxRFPhysicalRadioFrequencyEntry, userAccessLevel=userAccessLevel, rfPublicDhcpStatus=rfPublicDhcpStatus, dhcpRfPublicGateway=dhcpRfPublicGateway, codePoint42=codePoint42, numberCustomFreq=numberCustomFreq, agcGainRxCH2=agcGainRxCH2, 
fullAccess=fullAccess, neighborMAC=neighborMAC, whispBoxControls=whispBoxControls, whispBoxCPTableGroup=whispBoxCPTableGroup, rfOutDiscardsOverloadCount=rfOutDiscardsOverloadCount, trapDomainNameAppend=trapDomainNameAppend, installationColorCode=installationColorCode, whispBoxRFPhysicalRadioPaths=whispBoxRFPhysicalRadioPaths, powerControl=powerControl, ip4MultFilter=ip4MultFilter, codePoint8=codePoint8, outSyncCount=outSyncCount, rfStatUnkwnFeatBcnRcvCnt=rfStatUnkwnFeatBcnRcvCnt, fecOutDiscardsOverloadCount=fecOutDiscardsOverloadCount, userP2Filter=userP2Filter, codePoint39=codePoint39, rfStatXmtUDataCnt=rfStatXmtUDataCnt, codePoint18=codePoint18, packetOverloadCounter=packetOverloadCounter, radioIndex=radioIndex, antPolarization=antPolarization, syslogStatDropped=syslogStatDropped, codePoint45=codePoint45, calibrationStatusBool=calibrationStatusBool, commStringRWrite=commStringRWrite, whispBoxFPGAVer=whispBoxFPGAVer, mac1VIDMapAddr=mac1VIDMapAddr, regionCode=regionCode, boxEncryption=boxEncryption, port3UDPFilter=port3UDPFilter, codePoint11=codePoint11, pass1Status=pass1Status, whispBoxRFPhysicalRadios=whispBoxRFPhysicalRadios, smbFilter=smbFilter, whispBoxConfig=whispBoxConfig, whispBoxUserTableGroup=whispBoxUserTableGroup, subnetMask3=subnetMask3, bridgeCbRFStatbout=bridgeCbRFStatbout, subnetMask6=subnetMask6, radioEngKeyed=radioEngKeyed, rfStatBadInSyncIDRcv=rfStatBadInSyncIDRcv, whispBoxDNS=whispBoxDNS, trapIP6=trapIP6, userP1Filter=userP1Filter, vlanMemberSource=vlanMemberSource, bridgeCbFecStatutoss=bridgeCbFecStatutoss, whispBoxSoftwareVer=whispBoxSoftwareVer, whispBoxBridgeEntry=whispBoxBridgeEntry, mngtIP4=mngtIP4, fecEnable=fecEnable, userDefinedPort2=userDefinedPort2, bridgeCbErrStatBridgeFull=bridgeCbErrStatBridgeFull, bhvlanEnable=bhvlanEnable, fecOutDiscardsCount=fecOutDiscardsCount, scheduling=scheduling, codePoint30=codePoint30, codePoint43=codePoint43, subnetMask10=subnetMask10, etherLinkStatus=etherLinkStatus, spectrumAnalysisActionBox=spectrumAnalysisActionBox, whispBoxRFPhysicalRadioPathEntry=whispBoxRFPhysicalRadioPathEntry, whispBridgeTbErr=whispBridgeTbErr, userDefinedPort1=userDefinedPort1, dhcpLanSubnetMask=dhcpLanSubnetMask, whispBridgeHash=whispBridgeHash, whispBoxCPVar=whispBoxCPVar, codePoint52=codePoint52, dataScramblingMethod=dataScramblingMethod, clearEventLog=clearEventLog, snmpPort=snmpPort, bridgeCbRFStatutosscap=bridgeCbRFStatutosscap, trap10=trap10, codePoint14=codePoint14, regionOtherRegulatory=regionOtherRegulatory, codePoint31=codePoint31, rfStatRcvLTStart=rfStatRcvLTStart, codePoint34=codePoint34, mac2VIDMapVid=mac2VIDMapVid, rfStatInSyncCount=rfStatInSyncCount, whispBridgeTbFree=whispBridgeTbFree, codePoint35=codePoint35, addCustomFreqMimo=addCustomFreqMimo, codePoint38=codePoint38, mac5VIDMapAddr=mac5VIDMapAddr, platformType=platformType, apType=apType, setDefaultPlug=setDefaultPlug, deleteUser=deleteUser, reboot=reboot, trap4=trap4, rfStatRcvBDataCnt=rfStatRcvBDataCnt, subnetMask2=subnetMask2, updateStatus=updateStatus, codePoint57=codePoint57, port1UDPFilter=port1UDPFilter, numDFSDetections=numDFSDetections, codePoint41=codePoint41, pathIndex=pathIndex, bridgeCbRFStatuout=bridgeCbRFStatuout, codePoint49=codePoint49, natDhcpStatus=natDhcpStatus, rfStatOverrunCount=rfStatOverrunCount, rebootIfRequired=rebootIfRequired, subnetMask7=subnetMask7, trap6=trap6, loginMethod=loginMethod, bridgeCbErrStatApRfQSend=bridgeCbErrStatApRfQSend, managementVID=managementVID, codePoint36=codePoint36, rfStatRcvUDataCnt=rfStatRcvUDataCnt, codePoint5=codePoint5, 
bridgeCbRFStatfloods=bridgeCbRFStatfloods, rfStatRcvCntlCnt=rfStatRcvCntlCnt, trap2=trap2, codePoint13=codePoint13, bridgeCbErrStatNI2QSend=bridgeCbErrStatNI2QSend, snmpMibPerm=snmpMibPerm, userPswd=userPswd, trap8=trap8, rfStatRcvCorruptControlCount=rfStatRcvCorruptControlCount, whispVLANEntry=whispVLANEntry, platformInfo=platformInfo, codePoint50=codePoint50, fecInDiscardsCount=fecInDiscardsCount, codePoint12=codePoint12, removeCustomFreqMimo=removeCustomFreqMimo, whispBoxEventLog=whispBoxEventLog, bridgeCbErrStatNI1QSend=bridgeCbErrStatNI1QSend, linkNegoSpeed=linkNegoSpeed, whispBridgeAge=whispBridgeAge, ism=ism, fecInErrorsCount=fecInErrorsCount, whispVType=whispVType, trap1=trap1, allowColocation=allowColocation, agingTimeout=agingTimeout, antType=antType, userLoginName=userLoginName, whispBoxBoot=whispBoxBoot, neighborSiteName=neighborSiteName, pldVersion=pldVersion)
mibBuilder.exportSymbols("WHISP-BOX-MIBV2-MIB", pllOutLockCount=pllOutLockCount, bridgeCbFecStatbin=bridgeCbFecStatbin, codePoint1=codePoint1, interleave=interleave, whispBoxVLANTableGroup=whispBoxVLANTableGroup, addCustomFreqList=addCustomFreqList, dhcpLanGateway=dhcpLanGateway, codePoint29=codePoint29, subnetMask4=subnetMask4, codePoint0=codePoint0, mac3VIDMapAddr=mac3VIDMapAddr, lan1DhcpRenew=lan1DhcpRenew, regionSouthAmerica=regionSouthAmerica, rfStatXmtMDataCnt=rfStatXmtMDataCnt, rfStatRangeCapEnf=rfStatRangeCapEnf, dynamicLearning=dynamicLearning, ethernetLinkSpeed=ethernetLinkSpeed, rfStatBeaconVerMismatchCnt=rfStatBeaconVerMismatchCnt, trapIP10=trapIP10, dnsIpState=dnsIpState, boxDeviceTypeID=boxDeviceTypeID, height=height, trapIP3=trapIP3, snmpTrapPort=snmpTrapPort, bridgeCbErrStatSendMsg=bridgeCbErrStatSendMsg, codePoint63=codePoint63, mac4VIDMapAddr=mac4VIDMapAddr, dhcpRfPublicIp=dhcpRfPublicIp, boxTemperature=boxTemperature, trapIP5=trapIP5, rfInDiscardsCount=rfInDiscardsCount, saveFlash=saveFlash, rfStatnonLiteBcnRcvCnt=rfStatnonLiteBcnRcvCnt, bridgeCbRFStatutoss=bridgeCbRFStatutoss, bridgeCbFecStatbout=bridgeCbFecStatbout, rfStatTempOutOfRange=rfStatTempOutOfRange, whispBridgeDesLuid=whispBridgeDesLuid, twoXRate=twoXRate, codePoint44=codePoint44, whispVAge=whispVAge, hwsCompatibility=hwsCompatibility, codePoint26=codePoint26, allIpv4Filter=allIpv4Filter, whispVID=whispVID, radioPaths=radioPaths, radioFrequencyBand=radioFrequencyBand, trapIP1=trapIP1, lanDhcpState=lanDhcpState, whispBoxBTGroup=whispBoxBTGroup, whispBoxBridgeVar=whispBoxBridgeVar, aggregateBandwidthCap=aggregateBandwidthCap, codePoint21=codePoint21, port1TCPFilter=port1TCPFilter, webAutoUpdate=webAutoUpdate, trapIP4=trapIP4, agcGainRxCH1=agcGainRxCH1, fecStatLinkDetected=fecStatLinkDetected, whispBoxNotifGroup=whispBoxNotifGroup, mngtIP6=mngtIP6, port3TCPFilter=port3TCPFilter, fpgaBuildDate=fpgaBuildDate, regionAfrica=regionAfrica, httpPort=httpPort, port2UDPFilter=port2UDPFilter, whispLayer2NeighborTableGroup=whispLayer2NeighborTableGroup, whispBridgeTbUsed=whispBridgeTbUsed, setDefaults=setDefaults, regionAsia=regionAsia, neighborIP=neighborIP, antennaGain=antennaGain, rfStatRcvCorruptDataCount=rfStatRcvCorruptDataCount, whispVLANTable=whispVLANTable, codePoint60=codePoint60, usrAccountEnableAccounting=usrAccountEnableAccounting, rfStatBadBcastCtlCnt=rfStatBadBcastCtlCnt, rfStatRSSIOutOfRange=rfStatRSSIOutOfRange, encryptionConfig=encryptionConfig, cyclicPrefix=cyclicPrefix, whispBoxEvntLog=whispBoxEvntLog, codePoint23=codePoint23, priorityPrecedence=priorityPrecedence, rfStatBadFreqBcnRcvCnt=rfStatBadFreqBcnRcvCnt, boxFrequency=boxFrequency, calibrationStatusBox=calibrationStatusBox, codePoint56=codePoint56, codePoint24=codePoint24, mac4VIDMapVid=mac4VIDMapVid, mac9VIDMapAddr=mac9VIDMapAddr, codePoint47=codePoint47, codePoint9=codePoint9, rfStatRcvLTResult=rfStatRcvLTResult, latitude=latitude, lanDhcpStatus=lanDhcpStatus, subnetMask8=subnetMask8, codePoint4=codePoint4, addVlanMember=addVlanMember, trapIP7=trapIP7, boxTemperatureC=boxTemperatureC, swVersion=swVersion, whispBoxRFConfig=whispBoxRFConfig, mac7VIDMapVid=mac7VIDMapVid, updateDevice=updateDevice, arpFilter=arpFilter, lan1DhcpRelease=lan1DhcpRelease, displayOnlyAccess=displayOnlyAccess, PYSNMP_MODULE_ID=whispBoxLevelMibModule, timedSpectrumAnalysisDurationBox=timedSpectrumAnalysisDurationBox, codePoint51=codePoint51, frequency=frequency, radioRateAdaptUL=radioRateAdaptUL, gpsInput=gpsInput, allowRejectThenLocal=allowRejectThenLocal, eirp=eirp, 
codePoint53=codePoint53, boxLan1DHCPClientEvent=boxLan1DHCPClientEvent, bridgeEntryTimeout=bridgeEntryTimeout, natDhcpRelease=natDhcpRelease, commStringROnly=commStringROnly, syslogMinLevel=syslogMinLevel, txCalFailure=txCalFailure, userName=userName, natDhcpRenew=natDhcpRenew, mngtIP3=mngtIP3, rfStatUnsupFeatBcnRcvCnt=rfStatUnsupFeatBcnRcvCnt, regionOceania=regionOceania, codePoint62=codePoint62, removeVlanMember=removeVlanMember, codePoint61=codePoint61, providerVID=providerVID, codePoint15=codePoint15, mngtIP2=mngtIP2, boxTemperatureF=boxTemperatureF, rfInErrorsCount=rfInErrorsCount, clearBERStats=clearBERStats, frameType=frameType, whispLayer2NeighborTable=whispLayer2NeighborTable, whispBoxStatus=whispBoxStatus, trapIP8=trapIP8, whispBoxRFConfigRadioEntry=whispBoxRFConfigRadioEntry, bridgeCbErrStatAPFecQSend=bridgeCbErrStatAPFecQSend, radioRateAdapt=radioRateAdapt, codePoint28=codePoint28, whispBoxRFPhysicalRadioEntry=whispBoxRFPhysicalRadioEntry, whispBoxLevelMibModule=whispBoxLevelMibModule, regionNorthAmerica=regionNorthAmerica, codePoint37=codePoint37, bridgeCbRFStatbin=bridgeCbRFStatbin, dnsAlternateMgmtIP=dnsAlternateMgmtIP, radioMSN=radioMSN, bridgeCbRFStatbtoss=bridgeCbRFStatbtoss, pppoeCtlPriority=pppoeCtlPriority, loginStatus=loginStatus, fecOutErrorsCount=fecOutErrorsCount, extFilterDelay=extFilterDelay, subnetMask=subnetMask, largeVCQ=largeVCQ, packetFilterDirection=packetFilterDirection, userPassword=userPassword, whispBoxAttributesGroup=whispBoxAttributesGroup, mac3VIDMapVid=mac3VIDMapVid, codePoint16=codePoint16, codePoint40=codePoint40, rxOverrunPkts=rxOverrunPkts, bridgeCbRFStatbtosscap=bridgeCbRFStatbtosscap, fecStatLinkLost=fecStatLinkLost, whispBoxP11Personality=whispBoxP11Personality, codePoint3=codePoint3, bridgeCbRFStatuin=bridgeCbRFStatuin, trap9=trap9, apSmMode=apSmMode, rfStatTxCalFailCnt=rfStatTxCalFailCnt, mac2VIDMapAddr=mac2VIDMapAddr, codePoint6=codePoint6, rfStatPLLOutOfLockCnt=rfStatPLLOutOfLockCnt, whispBoxBridgeTable=whispBoxBridgeTable, whispBoxRFConfigRadios=whispBoxRFConfigRadios, bridgeCbFecStatuin=bridgeCbFecStatuin, mac6VIDMapVid=mac6VIDMapVid, whispBridgeMacAddr=whispBridgeMacAddr, port2TCPFilter=port2TCPFilter, codePoint48=codePoint48, receiveQualityDebug=receiveQualityDebug, mac5VIDMapVid=mac5VIDMapVid, bridgeCbFecStatbtoss=bridgeCbFecStatbtoss, platformVer=platformVer, rfStatXmtCntlCnt=rfStatXmtCntlCnt, dhcpLanIp=dhcpLanIp, colorCode=colorCode, whispBoxRFPhysicalRadioFrequencies=whispBoxRFPhysicalRadioFrequencies, codePoint58=codePoint58, mac9VIDMapVid=mac9VIDMapVid, whispUserEntry=whispUserEntry, codePoint54=codePoint54, whispBoxControlGroup=whispBoxControlGroup, entryL2Index=entryL2Index, whispBoxGroups=whispBoxGroups, mngtIP7=mngtIP7, rfStatXmtBDataCnt=rfStatXmtBDataCnt, bridgeCbFecStatuout=bridgeCbFecStatuout)
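# A minimal usage sketch (not part of the generated module), assuming this file
# is saved as WHISP-BOX-MIBV2-MIB.py on a pysnmp MIB search path:
#
#     from pysnmp.smi import builder
#     mib_builder = builder.MibBuilder()
#     mib_builder.addMibSources(builder.DirMibSource('.'))
#     mib_builder.loadModules('WHISP-BOX-MIBV2-MIB')
#     (reboot_obj,) = mib_builder.importSymbols('WHISP-BOX-MIBV2-MIB', 'reboot')
#     print(reboot_obj.getName())   # -> (1, 3, 6, 1, 4, 1, 161, 19, 3, 3, 3, 2)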
|
nilq/baby-python
|
python
|
from setuptools import setup
setup(name='safygiphy',
version='1.1.1',
description='API Wrapper for the online Gif library, Giphy',
url='https://code.tetraetc.com/SafyGiphy/',
author="TetraEtc",
author_email="administrator@tetraetc.com",
install_requires=[
'requests'
],
packages=['safygiphy']
)
|
nilq/baby-python
|
python
|
from mypy import api
from redun.tests.utils import get_test_file
def test_task_types() -> None:
"""
mypy should find type errors related to redun task calls.
"""
workflow_file = get_test_file("test_data/typing/workflow_fail.py.txt")
stdout, stderr, ret_code = api.run(
[
"--show-traceback",
workflow_file,
"redun",
]
)
print(stdout)
assert ret_code == 1
# Parse found type check errors.
stdout_lines = stdout.split("\n")[:-2]
found_errors = {line.split(":", 2)[1] for line in stdout_lines}
# Get lines with expected errors.
with open(workflow_file) as infile:
expected_errors = {str(i) for i, line in enumerate(infile, 1) if "ERROR" in line}
assert found_errors == expected_errors
|
nilq/baby-python
|
python
|
from collections import Counter
l = [1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 3, 4, 4, 4, 4,
4, 5, 5, 5, 5, 5, 6, 6, 6, 6, 7, 7, 8, 8, 8, 8]
# print(Counter(l))
s = 'aaassssvvvveeeeedddddccccccceeelllll'
# print(Counter(s))
word = 'How many times does each word show up in this sentence word word show up'
words = word.split()
# print(Counter(words))
c = Counter(words)
# print(c.most_common(2))
# print(sum(c.values()))
|
nilq/baby-python
|
python
|
"""Process ACL states"""
from __future__ import absolute_import
import logger
from utils import dict_proto
from proto.acl_counting_pb2 import RuleCounts
LOGGER = logger.get_logger('aclstate')
class AclStateCollector:
"""Processing ACL states for ACL counting"""
def __init__(self):
self._switch_configs = {}
def get_port_rule_counts(self, switch, port, rule_samples):
"""Return the ACL count for a port"""
acl_config, error_map = self._verify_port_acl_config(switch, port)
if not acl_config:
return dict_proto(error_map, RuleCounts)
rule_counts = self._get_port_rule_counts(switch, port, acl_config, rule_samples)
return dict_proto(rule_counts, RuleCounts)
# pylint: disable=protected-access
def _get_port_rule_counts(self, switch, port, acl_config, rule_samples):
rule_counts_map = {'rules': {}, 'errors': []}
rules_map = rule_counts_map['rules']
errors = rule_counts_map['errors']
for rule_config in acl_config.rules:
cookie_num = rule_config.get('cookie')
if not cookie_num:
LOGGER.error(
'Cookie is not generated for ACL rule: %s, %s',
acl_config._id, rule_config.get('description'))
continue
rule_description = rule_config.get('description')
if not rule_description:
LOGGER.error('Rule with cookie %s does not have a description', cookie_num)
continue
has_sample = False
for sample in rule_samples:
if str(sample.labels.get('cookie')) != str(cookie_num):
continue
if sample.labels.get('dp_name') != switch:
continue
if int(sample.labels.get('in_port')) != port:
continue
rule_map = rules_map.setdefault(rule_description, {})
rule_map['packet_count'] = int(sample.value)
has_sample = True
break
if not has_sample:
error = (f'No ACL metric sample available for switch, port, ACL, rule: '
f'{switch}, {port}, {acl_config._id}, {rule_description} '
f'(cookie={cookie_num})')
errors.append(error)
LOGGER.error(error)
return rule_counts_map
def _verify_port_acl_config(self, switch, port):
error_map = {'errors': []}
error_list = error_map['errors']
switch_config = self._switch_configs.get(switch)
if not switch_config:
error = f'Switch not defined in Faucet dps config: {switch}'
LOGGER.error(error)
error_list.append(error)
return None, error_map
port_config = switch_config.ports.get(port)
if not port_config:
error = f'Port not defined in Faucet dps config: {switch}, {port}'
LOGGER.error(error)
error_list.append(error)
return None, error_map
acls_config = port_config.acls_in
if not acls_config:
error = f'No ACLs applied to port: {switch}, {port}'
LOGGER.error(error)
error_list.append(error)
return None, error_map
if len(acls_config) != 1:
            error = f'More than one ACL is applied to port: {switch}, {port}'
LOGGER.error(error)
error_list.append(error)
return None, error_map
return acls_config[0], None
def update_switch_configs(self, switch_configs):
"""Update cache of switch configs"""
self._switch_configs = switch_configs
|
nilq/baby-python
|
python
|
from sqlalchemy.orm import joinedload
from FlaskRTBCTF.utils.models import db, TimeMixin, ReprMixin
from FlaskRTBCTF.utils.cache import cache
# Machine Table
class Machine(TimeMixin, ReprMixin, db.Model):
__tablename__ = "machine"
__repr_fields__ = (
"name",
"os",
)
id = db.Column(db.Integer, primary_key=True, index=True)
name = db.Column(db.String(64), nullable=False, unique=True)
user_hash = db.Column(db.String(32), nullable=False)
root_hash = db.Column(db.String(32), nullable=False)
user_points = db.Column(db.Integer, default=0)
root_points = db.Column(db.Integer, default=0)
os = db.Column(db.String, nullable=False, default="linux")
ip = db.Column(db.String(64), nullable=False)
difficulty = db.Column(db.String, nullable=False, default="Easy")
@staticmethod
@cache.cached(timeout=3600 * 3, key_prefix="machines")
def get_all():
return Machine.query.all()
# UserMachine: N to N relationship
class UserMachine(TimeMixin, db.Model):
__tablename__ = "user_machine"
user_id = db.Column(
db.Integer,
db.ForeignKey("user.id"),
nullable=False,
primary_key=True,
index=True,
)
machine_id = db.Column(
db.Integer,
db.ForeignKey("machine.id"),
nullable=False,
primary_key=True,
index=True,
)
owned_user = db.Column(db.Boolean, nullable=False, default=False)
owned_root = db.Column(db.Boolean, nullable=False, default=False)
@classmethod
@cache.memoize(timeout=3600 * 3)
def completed_machines(cls, user_id):
completed = dict()
_ids1 = (
cls.query.with_entities(cls.machine_id)
.filter_by(user_id=user_id, owned_user=True)
.all()
)
_ids2 = (
cls.query.with_entities(cls.machine_id)
.filter_by(user_id=user_id, owned_root=True)
.all()
)
completed["user"] = [int(id[0]) for id in _ids1]
completed["root"] = [int(id[0]) for id in _ids2]
return completed
# Tag Model
class Tag(ReprMixin, db.Model):
__tablename__ = "tag"
__repr_fields__ = ("label",)
id = db.Column(db.Integer, primary_key=True)
label = db.Column(db.String(32), nullable=False)
color = db.Column(db.String(16), nullable=False)
# Tags table
tags = db.Table(
"tags",
db.Column("tag_id", db.Integer, db.ForeignKey("tag.id"), primary_key=True),
db.Column(
"challenge_id", db.Integer, db.ForeignKey("challenge.id"), primary_key=True
),
)
# Challenges Model
class Challenge(TimeMixin, ReprMixin, db.Model):
__tablename__ = "challenge"
__repr_fields__ = ("title", "category")
id = db.Column(db.Integer, primary_key=True, index=True)
title = db.Column(db.String(64), nullable=False, unique=True)
description = db.Column(db.TEXT, nullable=True)
flag = db.Column(db.TEXT, nullable=False)
points = db.Column(db.Integer, nullable=False, default=0)
url = db.Column(db.TEXT, nullable=True)
difficulty = db.Column(db.String, nullable=True)
category_id = db.Column(db.Integer, db.ForeignKey("category.id"), nullable=False)
category = db.relationship("Category", backref=db.backref("challenges", lazy=True))
tags = db.relationship(
"Tag",
secondary=tags,
lazy="subquery",
backref=db.backref("challenges", lazy="noload"),
)
# UserChallenge: N to N relationship
class UserChallenge(TimeMixin, db.Model):
__tablename__ = "user_challenge"
user_id = db.Column(
db.Integer,
db.ForeignKey("user.id"),
nullable=False,
primary_key=True,
index=True,
)
challenge_id = db.Column(
db.Integer,
db.ForeignKey("challenge.id"),
nullable=False,
primary_key=True,
index=True,
)
completed = db.Column(db.Boolean, nullable=False, default=False)
@classmethod
@cache.memoize(timeout=3600 * 3)
def completed_challenges(cls, user_id):
_ids = (
cls.query.with_entities(cls.challenge_id)
.filter_by(user_id=user_id, completed=True)
.all()
)
_ids = [int(id[0]) for id in _ids]
return _ids
# Category Model
class Category(ReprMixin, db.Model):
__tablename__ = "category"
__repr_fields__ = ("name",)
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(32), nullable=False)
@staticmethod
@cache.cached(timeout=3600 * 3, key_prefix="challenges")
def get_challenges():
categories = (
Category.query.options(joinedload("challenges"))
.filter(Category.challenges)
.all()
)
return categories
|
nilq/baby-python
|
python
|
# This file is part of Peach-Py package and is licensed under the Simplified BSD license.
# See license.rst for the full text of the license.
from enum import IntEnum
class FileType(IntEnum):
# No file type
null = 0
# Relocatable file
object = 1
# Executable file
executable = 2
# Fixed VM shared library (?)
fixed_vm_library = 3
# Core dump file
core_dump = 4
# Preloaded executable file
preloaded_executable = 5
# Dynamically bound shared library
dynamic_library = 6
# Dynamic linker (dyld)
dynamic_linker = 7
# Dynamically bound bundle file
dynamic_bundle = 8
# Shared library stub for build-time linking (no section content)
dynamic_library_stub = 9
# Companion file with debug sections only
debug_symbols = 10
# Kernel-mode driver
kext_bundle = 11
class CpuType(IntEnum):
x86 = 0x00000007
x86_64 = 0x01000007
arm = 0x0000000C
arm64 = 0x0100000C
ppc = 0x00000012
ppc64 = 0x01000012
abi64 = 0x01000000
class PPCCpuSubType(IntEnum):
all = 0
# PowerPC G3
powerpc750 = 9
# PowerPC G4
powerpc7400 = 10
# PowerPC G4+
powerpc7450 = 11
# PowerPC G5
powerpc970 = 100
class X86CpuSubType(IntEnum):
all = 3
class ARMCpuSubType(IntEnum):
all = 0
# ARM 1176
v6 = 6
# ARM Cortex-A8
v7 = 9
# Cortex-A9 (ARMv7 + MP extension + NEON-HP, de-facto useless, removed from Clang)
v7f = 10
# Swift (ARMv7 + MP extension + VFPv4/NEONv2 + DIV)
v7s = 11
# Marvell Kirkwood (ARMv7 + XScale extension + WMMXv2 + Armada extension, no NEON)
v7k = 12
# Cyclone
v8 = 13
class ARM64CpuSubType(IntEnum):
all = 0
# Cyclone
v8 = 1
class MachHeader:
def __init__(self, abi):
import peachpy.x86_64
import peachpy.arm
self.abi = abi
self.size = {4: 28, 8: 32}[abi.pointer_size]
if abi == peachpy.x86_64.abi.system_v_x86_64_abi:
# 64-bit
self.magic = 0xFEEDFACF
self.cpu_type = CpuType.x86_64
self.cpu_subtype = X86CpuSubType.all
else:
raise ValueError("Unsupported ABI: %s" % str(abi))
self.file_type = FileType.object
self.commands_count = 0
self.commands_size = 0
self.flags = 0
@staticmethod
def get_size(abi):
from peachpy.abi import ABI
assert isinstance(abi, ABI)
assert abi.pointer_size in [4, 8]
        # mach_header is 28 bytes and mach_header_64 is 32 bytes (matches __init__ above)
        return {4: 28, 8: 32}[abi.pointer_size]
def encode(self, encoder):
bytes = encoder.uint32(self.magic) + \
encoder.uint32(self.cpu_type) + \
encoder.uint32(self.cpu_subtype) + \
encoder.uint32(self.file_type) + \
encoder.uint32(self.commands_count) + \
encoder.uint32(self.commands_size) + \
encoder.uint32(self.flags)
if self.abi.pointer_size == 8:
bytes += bytearray(4)
return bytes
|
nilq/baby-python
|
python
|
from requests.exceptions import ConnectionError, HTTPError, SSLError
from sentry.exceptions import PluginError
from django.utils.translation import ugettext_lazy as _
from sentry_youtrack.forms import VERIFY_SSL_CERTIFICATE
from sentry_youtrack.youtrack import YouTrackClient
class YouTrackConfiguration(object):
error_message = {
'client': _("Unable to connect to YouTrack."),
'project_unknown': _('Unable to fetch project'),
'project_not_found': _('Project not found: %s'),
'invalid_ssl': _("SSL certificate verification failed."),
'invalid_password': _('Invalid username or password.'),
'invalid_project': _('Invalid project: \'%s\''),
'missing_fields': _('Missing required fields.'),
'perms': _("User doesn't have Low-level Administration permissions."),
'required': _("This field is required.")}
def __init__(self, initial):
self.config = self.build_default_fields(initial)
self.client_errors = {}
if self.has_client_fields(initial):
client = self.get_youtrack_client(initial)
yt_project = initial.get('project')
if client:
choices = []
if yt_project:
choices = self.get_ignore_field_choices(client, yt_project)
self.config.append({
'name':'ignore_fields',
'label':'Ignore Fields',
'type':'select',
'choices':choices,
'required':False,
'help': 'These fields will not appear on the form.',
})
choices = self.get_project_field_choices(client, yt_project)
self.config.append({
'name':'project',
'label':'Linked Project',
'type':'select',
'choices': choices,
'required':True,})
self.__add_default_tags()
def has_client_fields(self, initial):
return initial.get('password') and initial.get('username') and initial.get('url')
def build_default_fields(self, initial):
url = {'name':'url',
'label':'YouTrack Instance URL',
'type':'text',
'required':True,
'placeholder': 'e.g. "https://yoursitename.myjetbrains.com/youtrack/"',}
username = {'name':'username',
'label':'Username',
'type':'text',
'required':True,
'help': 'User should have admin rights.',}
password = {'name':'password',
'label':'Password',
'type':'secret',
'required':False,
'help': 'Only enter a password if you want to change it.',}
if initial.get('password'):
password['has_saved_value'] = True
return [url, username, password]
def __add_default_tags(self):
self.config.append({'name':'default_tags',
'label':'Default Tags',
'type':'text',
'required':False,
'placeholder': 'e.g. sentry',
'help': 'Comma-separated list of tags.',})
def get_youtrack_client(self, data, additional_params=None):
yt_settings = {
'url': data.get('url'),
'username': data.get('username'),
'password': data.get('password'),
'verify_ssl_certificate': VERIFY_SSL_CERTIFICATE}
if additional_params:
yt_settings.update(additional_params)
client = None
try:
client = YouTrackClient(**yt_settings)
except (HTTPError, ConnectionError) as e:
if e.response is not None and e.response.status_code == 403:
self.client_errors['username'] = self.error_message[
'invalid_password']
else:
self.client_errors['url'] = self.error_message['client']
except (SSLError, TypeError) as e:
self.client_errors['url'] = self.error_message['invalid_ssl']
if client:
try:
client.get_user(yt_settings.get('username'))
except HTTPError as e:
if e.response.status_code == 403:
self.client_errors['username'] = self.error_message['perms']
client = None
return client
def get_ignore_field_choices(self, client, project):
try:
fields = list(client.get_project_fields_list(project))
except HTTPError:
self.client_errors['project'] = self.error_message[
'invalid_project'] % (project,)
else:
names = [field['name'] for field in fields]
return zip(names, names)
return []
def get_project_field_choices(self, client, project):
choices = [(' ', u"- Choose project -")]
try:
projects = list(client.get_projects())
except HTTPError:
self.client_errors['project'] = self.error_message[
'invalid_project'] % (project, )
else:
for project in projects:
display = "%s (%s)" % (project['name'], project['id'])
choices.append((project['id'], display))
return choices
def get_project_fields_list(self, client, project_id):
try:
return list(client.get_project_fields_list(project_id))
except (HTTPError, ConnectionError) as e:
if e.response is not None and e.response.status_code == 404:
self.client_errors['project'] = self.error_message['project_not_found'] % project_id
else:
self.client_errors['project'] = self.error_message['project_unknown']
def get_projects(self, client):
try:
return list(client.get_projects())
        except (HTTPError, ConnectionError):
            self.client_errors['project'] = self.error_message['project_unknown']
|
nilq/baby-python
|
python
|
from typing import List


class Solution:
    def searchMatrix(self, matrix: List[List[int]], target: int) -> bool:
        # Rows are sorted and chained, so a per-row membership test is sufficient.
        return any(target in row for row in matrix)
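if __name__ == "__main__":
    # Quick illustrative check (not part of the original stub): 3 appears in the first row.
    print(Solution().searchMatrix([[1, 3, 5, 7], [10, 11, 16, 20], [23, 30, 34, 60]], 3))  # True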
|
nilq/baby-python
|
python
|
import signal
import sys
import time
from collections import deque
import traceback
from picrosolve.game.cell import CellList
from .strategies.all import ALL_STRATEGIES
class Solver(object):
def __init__(self, board, strategies=None, debug=False):
self._board = board
if not strategies:
strategies = ALL_STRATEGIES(debug)
self._strategies = strategies
self._queue = deque()
self._debug = debug
self.working = None
self.working_strat_name = None
@property
def board(self):
return self._board
@property
def strategies(self):
return self._strategies
@property
def queue(self):
return self._queue
@property
def debug(self):
return self._debug
def dump_status(self, message):
self.print_status(message)
print("=== Dumping Queue ===")
for seq in self.queue:
print(u"> Clues: {}, Cells: {}".format(seq[0], CellList(seq[1])))
if self.working:
print(u"> Working Strategy: {}, Clue: {}, Cells: {}".format(self.working_strat_name, self.working[0], CellList(self.working[1])))
def solve(self):
signal.signal(
signal.SIGUSR1,
lambda x, y: self.dump_status(signal.getsignal(x))
)
signal.signal(
signal.SIGUSR2,
lambda x, y: print(x, y)
)
try:
self._solve()
except (Exception, KeyboardInterrupt) as e:
self.dump_status("Caught Exception")
print(str(e))
traceback.print_tb(sys.exc_info()[2])
def d(self, *args, **kwargs):
if self.debug:
print(*args, **kwargs)
def _solve(self):
all_sequences = self.board.rows + self.board.cols
solved_sequences = lambda: sum([1 if seq.solved else 0 for seq in all_sequences])
solved_l = len(all_sequences)
solved = lambda: (solved_sequences() - solved_l) == 0
self.queue.extend([(s.clues, s.cells) for s in all_sequences])
while len(self.queue) > 0:
self.d("Queue depth: {}".format(len(self.queue)))
self.working = self.queue.popleft()
for strat in self.strategies:
self.working_strat_name = strat.name
new_seqs = strat.apply_strategy(*self.working)
if len(new_seqs):
self.working = new_seqs[0]
if len(new_seqs) > 1:
self.queue.extend(new_seqs[1:])
else:
self.working = None
self.working_strat_name = None
break
if self.working:
self.queue.append(self.working)
if solved():
self.d("Queue: ")
for row in self.queue:
self.d(">> Clues: {}, Cells: {}".format(row[0], CellList(row[1])))
self.print_status("Solved")
def print_status(self, message):
solved = sum([1 if s.solved else 0 for s in self.board.rows + self.board.cols])
len_seq = len(self.board.rows) + len(self.board.cols)
print(" ==== {} ====".format(message))
print(self.board)
print("There are {}/{} solved sequences".format(solved, len_seq))
|
nilq/baby-python
|
python
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import aredis
import asyncio
import pytest
import sys
from unittest.mock import Mock
from distutils.version import StrictVersion
_REDIS_VERSIONS = {}
async def get_version(**kwargs):
params = {'host': 'localhost', 'port': 6379, 'db': 0}
params.update(kwargs)
key = '%s:%s' % (params['host'], params['port'])
if key not in _REDIS_VERSIONS:
client = aredis.StrictRedis(**params)
_REDIS_VERSIONS[key] = (await client.info())['redis_version']
client.connection_pool.disconnect()
return _REDIS_VERSIONS[key]
def skip_if_server_version_lt(min_version):
loop = asyncio.get_event_loop()
version = StrictVersion(loop.run_until_complete(get_version()))
check = version < StrictVersion(min_version)
return pytest.mark.skipif(check, reason="")
def skip_python_vsersion_lt(min_version):
min_version = tuple(map(int, min_version.split('.')))
check = sys.version_info[:2] < min_version
return pytest.mark.skipif(check, reason="")
@pytest.fixture()
def r(event_loop):
return aredis.StrictRedis(client_name='test', loop=event_loop)
class AsyncMock(Mock):
def __init__(self, *args, **kwargs):
super(AsyncMock, self).__init__(*args, **kwargs)
def __await__(self):
future = asyncio.Future(loop=self.loop)
future.set_result(self)
result = yield from future
return result
@staticmethod
def pack_response(response, *, loop):
future = asyncio.Future(loop=loop)
future.set_result(response)
return future
def _gen_mock_resp(r, response, *, loop):
mock_connection_pool = AsyncMock(loop=loop)
connection = AsyncMock(loop=loop)
connection.read_response.return_value = AsyncMock.pack_response(response, loop=loop)
mock_connection_pool.get_connection.return_value = connection
r.connection_pool = mock_connection_pool
return r
@pytest.fixture()
def mock_resp_role(event_loop):
r = aredis.StrictRedis(loop=event_loop)
response = [b'master', 169, [[b'172.17.0.2', b'7004', b'169']]]
return _gen_mock_resp(r, response, loop=event_loop)
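# Hypothetical companion test showing how the fixtures above might be used; it would normally
# live in a separate test module. Assumes pytest-asyncio is installed and a local Redis server
# is running on localhost:6379 -- both are assumptions, not part of the original file.
@pytest.mark.asyncio
@skip_if_server_version_lt('2.6.0')
async def test_set_and_get_sketch(r):
    assert await r.set('conftest-example-key', 'value')
    assert await r.get('conftest-example-key') == b'value'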
|
nilq/baby-python
|
python
|
import urllib2
from bs4 import BeautifulSoup
import re
response = urllib2.urlopen("http://www.baidu.com")
# html_doc = response.read()
html_doc = '<div id="u_sp" class="s-isindex-wrap s-sp-menu"> <a href="http://www.nuomi.com/?cid=002540" target="_blank" class="mnav">糯米</a> <a href="http://news.baidu.com" target="_blank" class="mnav">新闻</a> <a href="http://www.hao123.com" target="_blank" class="mnav">hao123</a> <a href="http://map.baidu.com" target="_blank" class="mnav">地图</a> <a href="http://v.baidu.com" target="_blank" class="mnav">视频</a> <a href="http://tieba.baidu.com" target="_blank" class="mnav">贴吧</a><a id="s_username_top" class="s-user-name-top" data-tid="2004" href="http://i.baidu.com/" target="_blank"><span class="user-name">枼心</span></a><a id="s_usersetting_top" href="javascript:;" name="tj_settingicon" class="pf s-user-setting-top"><span class="setting-text">设置</span></a><a href="http://www.baidu.com/more/" name="tj_briicon" class="s_bri" target="_blank"> 更多产品</a><div id="s_user_name_menu" class="s-isindex-wrap s-user-set-menu menu-top" style="right: 128px; display: none;"><div><a href="http://vip.baidu.com/pcui/show/ucenterindex?vip_frm=super_account" target="_blank"> 我的VIP </a> <a href="http://i.baidu.com/center" target="_blank" data-tid="1000"> 个人中心 </a> <a href="http://passport.baidu.com/" data-tid="1001" target="_blank"> 帐号设置 </a> <a class="s-feedback" style="overflow:hidden" href="#" onclick="return false;">意见反馈</a> <a class="quit" style="overflow:hidden" href="#" onclick="return false;"> 退出 </a> </div> <span class="menu-arrow"> <em></em> </span> </div><div id="s_user_setting_menu" class="s-isindex-wrap s-user-set-menu menu-top" style="display:none;"><div> <a href="//www.baidu.com/gaoji/preferences.html" target="_blank"> 搜索设置 </a> <a href="//www.baidu.com/gaoji/advanced.html" target="_blank"> 高级搜索 </a> <a href="http://i.baidu.com/my/history?from=index" target="_blank"> 搜索历史 </a> <a class="s-feedback" style="overflow:hidden" href="#" onclick="return false;"> 意见反馈 </a> </div> <span class="menu-arrow"> <em></em> </span> </div></div>'
soup = BeautifulSoup(html_doc, 'html.parser', from_encoding='utf-8')
print "Get all the links"
links = soup.find_all('a')
for link in links:
print link.name, link['href'], link.get_text()
print "Get a single link"
link_node = soup.find('a', href="http://passport.baidu.com/")
print link_node.name, link_node["href"], link_node["data-tid"], link_node.get_text()
print "Get a regex match"
link_node = soup.find('a', href=re.compile(r"history"))
print link_node.name, link_node["href"], link_node.get_text()
# print link_node["data-tid"]
print "Get the node by class"
span_node = soup.find('span', class_="menu-arrow")
print span_node.name, span_node.get_text()
|
nilq/baby-python
|
python
|
from kapteyn import maputils
from matplotlib import pylab as plt
header = {'NAXIS': 2 ,'NAXIS1':100 , 'NAXIS2': 100 ,
'CDELT1': -7.165998823000E-03, 'CRPIX1': 5.100000000000E+01 ,
'CRVAL1': -5.128208479590E+01, 'CTYPE1': 'RA---NCP', 'CUNIT1': 'DEGREE ',
'CDELT2': 7.165998823000E-03 , 'CRPIX2': 5.100000000000E+01,
'CRVAL2': 6.015388802060E+01 , 'CTYPE2': 'DEC--NCP ', 'CUNIT2': 'DEGREE',
'CROTA2': 80
}
fig = plt.figure(figsize=(7,7))
fig.suptitle("Messy plot. Rotation is 80 deg.", fontsize=14, color='r')
fig.subplots_adjust(left=0.18, bottom=0.10, right=0.90,
top=0.90, wspace=0.95, hspace=0.20)
frame = fig.add_subplot(2,2,1)
f = maputils.FITSimage(externalheader=header)
annim = f.Annotatedimage(frame)
xpos = -0.42
ypos = 1.2
grat = annim.Graticule()
grat.setp_axislabel(plotaxis=0, xpos=xpos)
frame.set_title("Default", y=ypos)
frame2 = fig.add_subplot(2,2,2)
annim2 = f.Annotatedimage(frame2)
grat2 = annim2.Graticule()
grat2.setp_axislabel(plotaxis=0, xpos=xpos)
grat2.set_tickmode(mode="sw")
frame2.set_title("Switched ticks", y=ypos)
frame3 = fig.add_subplot(2,2,3)
annim3 = f.Annotatedimage(frame3)
grat3 = annim3.Graticule()
grat3.setp_axislabel(plotaxis=0, xpos=xpos)
grat3.set_tickmode(mode="na")
frame3.set_title("Only native ticks", y=ypos)
frame4 = fig.add_subplot(2,2,4)
annim4 = f.Annotatedimage(frame4)
grat4 = annim4.Graticule()
grat4.setp_axislabel(plotaxis=0, xpos=xpos)
grat4.set_tickmode(plotaxis=['bottom','left'], mode="Switch")
grat4.setp_ticklabel(plotaxis=['top','right'], visible=False)
frame4.set_title("Switched and cleaned", y=ypos)
maputils.showall()
|
nilq/baby-python
|
python
|
import neptune.new as neptune
import os
from GTApack.GTA_hotloader import GTA_hotloader
from GTApack.GTA_Unet import GTA_Unet
from GTApack.GTA_tester import GTA_tester
from torchvision import datasets, transforms
from torch.optim import SGD, Adam
from torch.optim.lr_scheduler import (ReduceLROnPlateau, CyclicLR,
CosineAnnealingLR)
from torch.utils.data import DataLoader, random_split
import torch
import torch.nn as nn
import numpy as np
import time
from neptune.new.types import File
import matplotlib.pyplot as plt
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
print(device)
# Set up the datasets
np.random.seed(42)
val_set, train_set = torch.utils.data.random_split(
np.random.randint(low = 1, high = 4962, size = 500),
[60, 440],
generator=torch.Generator().manual_seed(42))
test_set = np.random.randint(low = 1, high = 858, size = 100)
valload = GTA_hotloader(path = "C:/Users/Marc/Desktop/Billeder/train/",
width = 400, height = 300, ind = val_set,
device = device)
trainload = GTA_hotloader(path = "C:/Users/Marc/Desktop/Billeder/train/",
width = 400, height = 300, ind = train_set,
device = device)
testload = GTA_hotloader(path = "C:/Users/Marc/Desktop/Billeder/test-val/",
width = 400, height = 300, ind = test_set,
device = device)
batch_size = 1
# Set up the dataloaders:
valloader = torch.utils.data.DataLoader(valload,
batch_size=batch_size,
shuffle=True,
num_workers=0)
trainloader = torch.utils.data.DataLoader(trainload,
batch_size=batch_size,
shuffle=True,
num_workers=0)
testloader = torch.utils.data.DataLoader(testload,
batch_size=batch_size,
shuffle=True,
num_workers=0)
token = os.getenv('Neptune_api')
run = neptune.init(
project="Deep-Learning-test/Deep-Learning-Test",
api_token=token,
)
nEpoch = 50
# Network 1
params = {"optimizer":"SGD", "optimizer_learning_rate":0.01,
"optimizer_momentum": 0.9, "loss_function":"MSEloss",
"model":"GTA_Unet"}
run[f"network1/parameters"] = params
lossFunc = nn.MSELoss()
model = GTA_Unet(n_channels = 3, n_classes = 9).to(device)
optimizer = SGD(model.parameters(), lr=0.01, momentum=0.9)
valid_loss, train_loss = [], []
avg_train_loss, avg_valid_loss = [], []
for iEpoch in range(nEpoch):
print(f"Training epoch {iEpoch}")
for img, lab in trainloader:
y_pred = model(img)
model.zero_grad()
loss = lossFunc(y_pred, lab)
loss.backward()
optimizer.step()
train_loss.append(loss.item())
avg_train_loss.append(w := (np.mean(np.array(train_loss))))
run[f"network1/train_loss"].log(w)
train_loss = []
for img, lab in valloader:
y_pred = model(img)
loss = lossFunc(y_pred, lab)
valid_loss.append(loss.item())
avg_valid_loss.append(w := (np.mean(np.array(valid_loss))))
run[f"network1/validation_loss"].log(w)
valid_loss = []
torch.save(model.state_dict(), "C:/Users/Marc/Desktop/Billeder/params/network1.pt")
run[f"network1/network_weights"].upload(File("C:/Users/Marc/Desktop/Billeder/params/network1.pt"))
test_acc_per_pic = GTA_tester(model, testloader)
print(np.mean(test_acc_per_pic))
run[f"network1/test_accuracy_per_pic"].log(test_acc_per_pic)
run[f"network1/mean_test_accuracy"].log(np.mean(test_acc_per_pic))
# Network 2:
params = {"optimizer":"SGD", "optimizer_momentum": 0.9,
"optimizer_learning_rate": 0.1, "loss_function":"MSEloss",
"model":"GTA_Unet", "scheduler":"ReduceLROnPlateau",
"scheduler_patience":3, "scheduler_threshold":0.01}
run[f"network2/parameters"] = params
lossFunc = nn.MSELoss()
model = GTA_Unet(n_channels = 3, n_classes = 9).to(device)
optimizer = SGD(model.parameters(), lr=0.1, momentum=0.9)
scheduler = ReduceLROnPlateau(optimizer, 'min', patience=3, threshold=0.01)
valid_loss, train_loss = [], []
avg_train_loss, avg_valid_loss = [], []
for iEpoch in range(nEpoch):
print(f"Training epoch {iEpoch}")
run[f"network2/learning_rate"].log(optimizer.param_groups[0]['lr'])
for img, lab in trainloader:
y_pred = model(img)
model.zero_grad()
loss = lossFunc(y_pred, lab)
loss.backward()
optimizer.step()
train_loss.append(loss.item())
avg_train_loss.append(w := (np.mean(np.array(train_loss))))
run[f"network2/train_loss"].log(w)
train_loss = []
for img, lab in valloader:
y_pred = model(img)
loss = lossFunc(y_pred, lab)
valid_loss.append(loss.item())
avg_valid_loss.append(w := (np.mean(np.array(valid_loss))))
run[f"network2/validation_loss"].log(w)
scheduler.step(w)
valid_loss = []
torch.save(model.state_dict(), "C:/Users/Marc/Desktop/Billeder/params/network2.pt")
run[f"network2/network_weights"].upload(File("C:/Users/Marc/Desktop/Billeder/params/network2.pt"))
test_acc_per_pic = GTA_tester(model, testloader)
print(np.mean(test_acc_per_pic))
run[f"network2/test_accuracy_per_pic"].log(test_acc_per_pic)
run[f"network2/mean_test_accuracy"].log(np.mean(test_acc_per_pic))
# Network 3
params = {"optimizer":"Adam", "optimizer_learning_rate": 0.1,
"loss_function":"MSEloss", "model":"GTA_Unet",
"scheduler":"ReduceLROnPlateau", "scheduler_patience":3,
"scheduler_threshold":0.01}
run[f"network3/parameters"] = params
lossFunc = nn.MSELoss()
model = GTA_Unet(n_channels = 3, n_classes = 9).to(device)
optimizer = Adam(model.parameters(), lr=0.1)
scheduler = ReduceLROnPlateau(optimizer, 'min', patience=3, threshold=0.01)
valid_loss, train_loss = [], []
avg_train_loss, avg_valid_loss = [], []
for iEpoch in range(nEpoch):
print(f"Training epoch {iEpoch}")
run[f"network3/learning_rate"].log(optimizer.param_groups[0]['lr'])
for img, lab in trainloader:
y_pred = model(img)
model.zero_grad()
loss = lossFunc(y_pred, lab)
loss.backward()
optimizer.step()
train_loss.append(loss.item())
avg_train_loss.append(w := (np.mean(np.array(train_loss))))
run[f"network3/train_loss"].log(w)
train_loss = []
for img, lab in valloader:
y_pred = model(img)
loss = lossFunc(y_pred, lab)
valid_loss.append(loss.item())
avg_valid_loss.append(w := (np.mean(np.array(valid_loss))))
run[f"network3/validation_loss"].log(w)
scheduler.step(w)
valid_loss = []
torch.save(model.state_dict(), "C:/Users/Marc/Desktop/Billeder/params/network3.pt")
run[f"network3/network_weights"].upload(File("C:/Users/Marc/Desktop/Billeder/params/network3.pt"))
test_acc_per_pic = GTA_tester(model, testloader)
print(np.mean(test_acc_per_pic))
run[f"network3/test_accuracy_per_pic"].log(test_acc_per_pic)
run[f"network3/mean_test_accuracy"].log(np.mean(test_acc_per_pic))
# Network 4
params = {"optimizer":"SGD", "optimizer_momentum": 0.9,
"optimizer_learning_rate": 0.1, "loss_function":"MSEloss",
"model":"GTA_Unet", "scheduler":"CyclicLR",
"scheduler_base_lr":0.01, "scheduler_max_lr":0.1,
"scheduler_step_size_up":10}
run[f"network4/parameters"] = params
lossFunc = nn.MSELoss()
model = GTA_Unet(n_channels = 3, n_classes = 9).to(device)
optimizer = SGD(model.parameters(), lr=0.01, momentum=0.9)
scheduler = CyclicLR(optimizer, base_lr=0.01, max_lr=0.1, step_size_up=10)
valid_loss, train_loss = [], []
avg_train_loss, avg_valid_loss = [], []
for iEpoch in range(nEpoch):
print(f"Training epoch {iEpoch}")
run[f"network4/learning_rate"].log(optimizer.param_groups[0]['lr'])
for img, lab in trainloader:
y_pred = model(img)
model.zero_grad()
loss = lossFunc(y_pred, lab)
loss.backward()
optimizer.step()
train_loss.append(loss.item())
avg_train_loss.append(w := (np.mean(np.array(train_loss))))
run[f"network4/train_loss"].log(w)
train_loss = []
for img, lab in valloader:
y_pred = model(img)
loss = lossFunc(y_pred, lab)
valid_loss.append(loss.item())
avg_valid_loss.append(w := (np.mean(np.array(valid_loss))))
run[f"network4/validation_loss"].log(w)
scheduler.step()
valid_loss = []
torch.save(model.state_dict(), "C:/Users/Marc/Desktop/Billeder/params/network4.pt")
run[f"network4/network_weights"].upload(File("C:/Users/Marc/Desktop/Billeder/params/network4.pt"))
test_acc_per_pic = GTA_tester(model, testloader)
print(np.mean(test_acc_per_pic))
run[f"network4/test_accuracy_per_pic"].log(test_acc_per_pic)
run[f"network4/mean_test_accuracy"].log(np.mean(test_acc_per_pic))
# network 5
params = {"optimizer":"Adam", "optimizer_learning_rate": 0.1,
"loss_function":"MSEloss", "model":"GTA_Unet", "scheduler":"CyclicLR",
"scheduler_base_lr":0.01, "scheduler_max_lr":0.1,
"scheduler_step_size_up":10, "scheduler_cycle_momentum":"False"}
run[f"network5/parameters"] = params
lossFunc = nn.MSELoss()
model = GTA_Unet(n_channels = 3, n_classes = 9).to(device)
optimizer = Adam(model.parameters(), lr=0.1)
scheduler = CyclicLR(optimizer, base_lr=0.001, max_lr=0.1, step_size_up=10, cycle_momentum = False)
valid_loss, train_loss = [], []
avg_train_loss, avg_valid_loss = [], []
for iEpoch in range(nEpoch):
print(f"Training epoch {iEpoch}")
run[f"network5/learning_rate"].log(optimizer.param_groups[0]['lr'])
for img, lab in trainloader:
y_pred = model(img)
model.zero_grad()
loss = lossFunc(y_pred, lab)
loss.backward()
optimizer.step()
train_loss.append(loss.item())
avg_train_loss.append(w := (np.mean(np.array(train_loss))))
run[f"network5/train_loss"].log(w)
train_loss = []
for img, lab in valloader:
y_pred = model(img)
loss = lossFunc(y_pred, lab)
valid_loss.append(loss.item())
avg_valid_loss.append(w := (np.mean(np.array(valid_loss))))
run[f"network5/validation_loss"].log(w)
scheduler.step()
valid_loss = []
torch.save(model.state_dict(), "C:/Users/Marc/Desktop/Billeder/params/network5.pt")
run[f"network5/network_weights"].upload(File("C:/Users/Marc/Desktop/Billeder/params/network5.pt"))
test_acc_per_pic = GTA_tester(model, testloader)
print(np.mean(test_acc_per_pic))
run[f"network5/test_accuracy_per_pic"].log(test_acc_per_pic)
run[f"network5/mean_test_accuracy"].log(np.mean(test_acc_per_pic))
# network 6
params = {"optimizer":"SGD", "optimizer_learning_rate": 0.1,
"loss_function":"MSEloss", "model":"GTA_Unet", "scheduler":"CyclicLR",
"scheduler_base_lr":0.01, "scheduler_max_lr":0.1,
"scheduler_step_size_up":1, "scheduler_step_size_down":4}
run[f"network6/parameters"] = params
lossFunc = nn.MSELoss()
model = GTA_Unet(n_channels = 3, n_classes = 9).to(device)
optimizer = SGD(model.parameters(), 0.1)
scheduler = CyclicLR(optimizer, base_lr=0.01, max_lr=0.1, step_size_up=1, step_size_down=4)
valid_loss, train_loss = [], []
avg_train_loss, avg_valid_loss = [], []
for iEpoch in range(nEpoch):
print(f"Training epoch {iEpoch}")
run[f"network6/learning_rate"].log(optimizer.param_groups[0]['lr'])
for img, lab in trainloader:
y_pred = model(img)
model.zero_grad()
loss = lossFunc(y_pred, lab)
loss.backward()
optimizer.step()
train_loss.append(loss.item())
avg_train_loss.append(w := (np.mean(np.array(train_loss))))
run[f"network6/train_loss"].log(w)
train_loss = []
for img, lab in valloader:
y_pred = model(img)
loss = lossFunc(y_pred, lab)
valid_loss.append(loss.item())
avg_valid_loss.append(w := (np.mean(np.array(valid_loss))))
run[f"network6/validation_loss"].log(w)
scheduler.step()
valid_loss = []
torch.save(model.state_dict(), "C:/Users/Marc/Desktop/Billeder/params/network6.pt")
run[f"network6/network_weights"].upload(File("C:/Users/Marc/Desktop/Billeder/params/network6.pt"))
test_acc_per_pic = GTA_tester(model, testloader)
print(np.mean(test_acc_per_pic))
run[f"network6/test_accuracy_per_pic"].log(test_acc_per_pic)
run[f"network6/mean_test_accuracy"].log(np.mean(test_acc_per_pic))
# Network 7
params = {"optimizer1":"SGD", "optimizer1_learning_rate": 0.5,
"loss_function":"MSEloss", "model":"GTA_Unet",
"scheduler1":"CosineAnnealingLR",
"scheduler1_T_max":10, "scheduler1_eta_min":0.1,
"optimizer2":"SGD", "optimizer2_learning_rate": 0.1,
"scheduler2":"CosineAnnealingLR",
"scheduler2_T_max":10, "scheduler2_eta_min":0.01,}
run[f"network7/parameters"] = params
lossFunc = nn.MSELoss()
model = GTA_Unet(n_channels = 3, n_classes = 9).to(device)
optimizer = SGD(model.parameters(), 0.5)
scheduler = CosineAnnealingLR(optimizer, T_max = 10, eta_min = 0.1)
valid_loss, train_loss = [], []
avg_train_loss, avg_valid_loss = [], []
for iEpoch in range(nEpoch):
print(f"Training epoch {iEpoch}")
run[f"network7/learning_rate"].log(optimizer.param_groups[0]['lr'])
for img, lab in trainloader:
y_pred = model(img)
model.zero_grad()
loss = lossFunc(y_pred, lab)
loss.backward()
optimizer.step()
train_loss.append(loss.item())
avg_train_loss.append(w := (np.mean(np.array(train_loss))))
run[f"network7/train_loss"].log(w)
train_loss = []
for img, lab in valloader:
y_pred = model(img)
loss = lossFunc(y_pred, lab)
valid_loss.append(loss.item())
avg_valid_loss.append(w := (np.mean(np.array(valid_loss))))
run[f"network7/validation_loss"].log(w)
if iEpoch == 30:
optimizer = SGD(model.parameters(), 0.1)
scheduler = CosineAnnealingLR(optimizer, T_max = 10, eta_min = 0.01)
scheduler.step()
valid_loss = []
torch.save(model.state_dict(), "C:/Users/Marc/Desktop/Billeder/params/network7.pt")
run[f"network7/network_weights"].upload(File("C:/Users/Marc/Desktop/Billeder/params/network7.pt"))
test_acc_per_pic = GTA_tester(model, testloader)
print(np.mean(test_acc_per_pic))
run[f"network7/test_accuracy_per_pic"].log(test_acc_per_pic)
run[f"network7/mean_test_accuracy"].log(np.mean(test_acc_per_pic))
run.stop()
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
import json
import re
import requests
import urllib
import logging
logger = logging.getLogger('nova-playlist')
class YouTubeAPI(object):
clientID = 'CLIENTID'
clientSecret = 'CLIENTSECRET'
refreshToken = 'REFRESHTOKEN'
accessToken = None
def get_access_token(self):
payload = {'client_id': self.clientID,
'client_secret': self.clientSecret,
'refresh_token': self.refreshToken,
'grant_type': 'refresh_token'}
r = requests.post('https://accounts.google.com/o/oauth2/token', data=payload)
self.accessToken = r.json()['access_token']
def search_youtube_id(self, title):
try:
if not self.accessToken:
self.get_access_token()
headers = {'Authorization': 'Bearer ' + self.accessToken}
url = 'https://www.googleapis.com/youtube/v3/search'
r = requests.get(url, params={'part': 'snippet',
'q': title,
'type': 'video'}, headers=headers)
items = r.json()['items']
if len(items) == 0:
youtube_id = None
logger.warning("No video found for %s" % title)
else:
youtube_id = items[0]['id']['videoId']
logger.info("Found %s for song %s" % (youtube_id, title))
return youtube_id
except:
logger.warning('YouTube API search error, fallback on scraper')
return self.scrap_youtube_id(title)
def scrap_youtube_id(self, title):
url = "http://www.youtube.com/results?search_query=%s" % urllib.quote_plus(title)
page = requests.get(url, timeout=15)
if 'Aucune vid' in page.content:
            logger.warning("No video found for %s" % title)
            return None
        else:
            youtube_id = re.findall(r'href="\/watch\?v=(.*?)[&;"]', page.content)[0]
            logger.info("Found %s for song %s" % (youtube_id, title))
return youtube_id
def clean_channel_playlist(self, playlist_id):
if not self.accessToken:
self.get_access_token()
headers = {'Authorization': 'Bearer ' + self.accessToken}
url = 'https://www.googleapis.com/youtube/v3/playlistItems'
r = requests.get(url, params={'part': 'snippet',
'playlistId': playlist_id,
'maxResults': 50}, headers=headers)
for video in r.json()['items']:
vd = requests.delete(url, params={'id': video['id']}, headers=headers)
if vd.status_code != 204:
logger.error("Error removing song from playlist %s" % (vd.text))
def build_channel_playlist(self, playlist_id, songs):
if not self.accessToken:
self.get_access_token()
url = 'https://www.googleapis.com/youtube/v3/playlistItems?part=snippet'
headers = {'Authorization': 'Bearer ' + self.accessToken,
'Content-Type': 'application/json'}
songPosition = -1
for song in songs:
if song.youtube_id:
songPosition += 1
payload = json.dumps({'snippet':
{
'playlistId': playlist_id,
'resourceId': {
'kind': 'youtube#video',
'videoId': song.youtube_id
},
'position': songPosition
}
})
logger.debug('Sending payload %s' % (payload))
r = requests.post(url, data=payload, headers=headers)
if r.status_code != 200:
logger.error("Error publishing %s : %s" % (song.artist + ' / ' + song.title, r.text))
|
nilq/baby-python
|
python
|
import asyncio
import datetime
def get_time():
d = datetime.datetime.now()
return d.strftime('%M:%S')
async def coro(group_id, coro_id):
print('group{}-task{} started at:{}'.format(group_id, coro_id, get_time()))
    await asyncio.sleep(coro_id)  # simulate time-consuming IO such as reading a file
return 'group{}-task{} done at:{}'.format(group_id, coro_id, get_time())
loop = asyncio.get_event_loop()
# create three groups of tasks
tasks1 = [asyncio.ensure_future(coro(1, i)) for i in range(1, 5)]
tasks2 = [asyncio.ensure_future(coro(2, i)) for i in range(5, 6)]
tasks3 = [asyncio.ensure_future(coro(3, i)) for i in range(7, 10)]
group1 = asyncio.gather(*tasks1)  # gather the coroutines of group 1 into group1
group2 = asyncio.gather(*tasks2)  # gather the coroutines of group 2 into group2
group3 = asyncio.gather(*tasks3)  # gather the coroutines of group 3 into group3
all_groups = asyncio.gather(group1, group2, group3)  # aggregate the three groups into one big group covering every coroutine
loop = asyncio.get_event_loop()
all_group_result = loop.run_until_complete(all_groups)
for index, group in enumerate(all_group_result):  # get the output of each group of coroutines
print('group {} result:{}'.format(index + 1, group))
loop.close()
|
nilq/baby-python
|
python
|
import cv2
def draw_yolo_detections(image, detections, color=(0,255,0)):
img = image.copy()
with open("..//Data//model//yolov4/coco.names", 'rt') as f:
classes = f.read().rstrip('\n').split('\n')
for detect in detections:
bbox = detect[1]
category = classes[int(detect[0])]
cv2.rectangle(img, bbox, color, 2)
cv2.putText(img, str(category), (bbox[0], bbox[1] - 5),
cv2.FONT_HERSHEY_SIMPLEX, 0.5, color, 2, cv2.LINE_AA)
return img
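# Hypothetical usage sketch. The image path and the hard-coded detections are placeholders only;
# each detection is assumed to be (class_id, (x, y, w, h)), matching draw_yolo_detections above.
if __name__ == "__main__":
    image = cv2.imread("example.jpg")
    detections = [(0, (50, 60, 120, 200)), (2, (300, 150, 80, 60))]
    annotated = draw_yolo_detections(image, detections)
    cv2.imwrite("example_annotated.jpg", annotated)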
|
nilq/baby-python
|
python
|
import dataclasses
import vk_api
from vk_api import VkUpload
from vk_api.bot_longpoll import VkBotLongPoll
from vk_api.longpoll import VkLongPoll, VkEventType
from vk_api.utils import get_random_id
@dataclasses.dataclass
class __cfg__:
""" Bot config is struct for every bot. Easy to use because of fields """
name: str
logger: str
platform: str
logindata: str
description: str
def __post_init__(self):
self.compressed: dict = {
"name": self.name,
"logger": self.logger,
"platform": self.platform,
"logindata": self.logindata,
"description": self.description,
}
class __vk__:
def __init__(self, cfg):
try:
self.vk_session = vk_api.VkApi(token=cfg.logindata)
self.vk = self.vk_session.get_api()
self.longpool = VkBotLongPoll(self.vk_session)
self.cfg = cfg
except vk_api.exceptions.ApiError as err:
            print(f"\033[1;31m║\033[0m --------------------- Error! ---------------------\n\033[1;31m║\033[0m - Error: {err}\n\033[1;31m║\033[0m - Token: {cfg.logindata}")
def listen(self, private: object, public: object):
print(f"\033[1;32m║\033[0m ------------------ Entered main loop ------------------\n\033[1;32m║\033[0m - Name: {self.cfg.name}\n\033[1;32m║\033[0m - Platform: {self.cfg.platform}\n\033[1;32m║\033[0m - Description: {self.cfg.description}")
for event in self.longpool.listen():
if event.type == VkEventType.MESSAGE_NEW:
if event.from_user and event.text:
text = private(event)
                    print("This scenario")
if text == "None": continue
self.vk.messages.send(
user_id=event.user_id,
random_id=get_random_id(),
message=text,
)
elif event.from_chat and event.text:
print("Another one")
text = public(event)
print("Another one")
if text == "None": continue
self.vk.messages.send(
group_id=event.chat_id,
random_id=get_random_id(),
message=text,
)
else:
print(event)
class Core:
def __init__(self, cfg: dict, prettyPrint: bool = True):
__cfg = __cfg__(name=cfg['name'],
logger=cfg['logger'],
platform=cfg['platform'],
logindata=cfg['token'],
description=cfg['description'])
self.cfg = __cfg
self.prt = prettyPrint
if self.prt:
print(f"""\033[1;34m║\033[0m ------------------ Initiated ... ------------------
\033[1;34m║\033[0m - Name: {self.cfg.name}
\033[1;34m║\033[0m - Platform: {self.cfg.platform}
\033[1;34m║\033[0m - Description: {self.cfg.description}
\033[1;34m║\033[0m - Logger Name: {self.cfg.logger}
""")
def __vk_run__(self, private: object, public: object):
if self.prt:
print(f"""\033[1;34m║\033[0m ------------------ Entered vk ... ------------------
\033[1;34m║\033[0m - Name: {self.cfg.name}
\033[1;34m║\033[0m - Description: {self.cfg.description}
""")
b = __vk__(self.cfg)
b.listen(private, public)
def run(self, commands: tuple[object]):
if self.cfg.platform == 'vk':
            self.__vk_run__(commands[0], commands[1])
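# Hypothetical usage sketch (the token, handler bodies, and config values are placeholders only).
# Core expects a config dict with the keys read in __init__ and a (private, public) handler pair.
if __name__ == "__main__":
    def on_private(event):
        return "Echo: " + event.text

    def on_public(event):
        return "None"  # the listen loop skips replies equal to the string "None"

    bot = Core({
        "name": "demo-bot",
        "logger": "demo-logger",
        "platform": "vk",
        "token": "PUT-A-REAL-GROUP-TOKEN-HERE",
        "description": "illustrative configuration only",
    })
    bot.run((on_private, on_public))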
|
nilq/baby-python
|
python
|
# F2x installation script (setup.py)
#
# Copyright 2018 German Aerospace Center (DLR)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import setuptools
from distutils.core import setup
package_data = {
'F2x.parser.plyplus.grammar': ["*.g"],
}
cmdclass = {}
command_options = {}
sys.path.append(os.path.abspath('src'))
try:
# Try to extract program information from sources.
import F2x
from F2x.template import collect_template_data
name = F2x.program_name
version = F2x.get_version_string()
release = F2x.get_version_string(full=True)
for package, data_files in collect_template_data():
package_data[package.__name__] = package_data.get(package.__name__, []) + data_files
except ImportError:
# Fallback: set them manual :(
name = 'F2x'
version = '0.0.0'
release = '0.0'
try:
from F2x.distutils.command import build_sphinx
cmdclass['build_sphinx'] = build_sphinx.build_sphinx
command_options['build_sphinx'] = {
'project': ('setup.py', name),
'version': ('setup.py', version),
'release': ('setup.py', release),
'source_dir': ('setup.py', 'doc/src'),
'build_dir': ('setup.py', 'doc'),
}
except ImportError:
pass
setup(
name=name,
version=version,
description='Template-based Fortran wrapper.',
author='Michael Meinel',
author_email='michael.meinel@dlr.de',
url='http://www.dlr.de/sc',
cmdclass=cmdclass,
command_options=command_options,
packages=setuptools.find_packages('src'),
package_dir={ '': 'src' },
package_data=package_data,
install_requires=[
'plyplus',
'jinja2',
'numpy',
],
extras_require={
'cython': ['Cython', ],
'docs': [
'six',
'sphinx',
'sphinx-argparse',
],
'tests': [
'pytest-runner',
'pytest',
],
},
entry_points={
'console_scripts': [
'F2x=F2x.runtime.main:main',
'F2x-d=F2x.runtime.daemon:main',
],
},
)
|
nilq/baby-python
|
python
|
from hashlib import md5
def part_1(data):
i, p = 0, ""
while True:
if len(p) == 8:
break
hash = md5((data + str(i)).encode()).hexdigest()
if hash[:5] == "00000":
p += hash[5]
i += 1
return p
def part_2(data):
i, p = 0, "________"
while True:
if "_" not in p:
break
hash = md5((data + str(i)).encode()).hexdigest()
if hash[:5] == "00000":
if hash[5].isdigit() and int(hash[5]) < 8 and p[int(hash[5])] == "_":
p = p[:int(hash[5])] + hash[6] + p[int(hash[5])+1:]
i += 1
return p
def test():
assert(part_1("abc") == "18f47a30")
assert(part_2("abc") == "05ace8e3")
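# Hypothetical entry point added for illustration; "abc" is just the example door ID from the
# tests above, not real puzzle input.
if __name__ == "__main__":
    test()
    door_id = "abc"
    print(part_1(door_id))
    print(part_2(door_id))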
|
nilq/baby-python
|
python
|
import logging
from typing import Iterable
from septentrion import core, db, files, migration, style, versions
logger = logging.getLogger(__name__)
def initialize(settings_kwargs):
quiet = settings_kwargs.pop("quiet", False)
stylist = style.noop_stylist if quiet else style.stylist
settings = core.initialize(**settings_kwargs)
return {"settings": settings, "stylist": stylist}
def show_migrations(**settings_kwargs):
lib_kwargs = initialize(settings_kwargs)
core.describe_migration_plan(**lib_kwargs)
def migrate(**settings_kwargs):
lib_kwargs = initialize(settings_kwargs)
migration.migrate(**lib_kwargs)
def is_schema_initialized(**settings_kwargs):
lib_kwargs = initialize(settings_kwargs)
return db.is_schema_initialized(settings=lib_kwargs["settings"])
def build_migration_plan(**settings_kwargs):
lib_kwargs = initialize(settings_kwargs)
schema_version = core.get_best_schema_version(settings=lib_kwargs["settings"])
return core.build_migration_plan(
settings=lib_kwargs["settings"], schema_version=schema_version
)
def fake(version: str, **settings_kwargs):
lib_kwargs = initialize(settings_kwargs)
fake_version = versions.Version.from_string(version)
migration.create_fake_entries(version=fake_version, **lib_kwargs)
def load_fixtures(version: str, **settings_kwargs) -> None:
lib_kwargs = initialize(settings_kwargs)
init_version = versions.Version.from_string(version)
migration.load_fixtures(init_version=init_version, **lib_kwargs)
def get_known_versions(**settings_kwargs) -> Iterable[str]:
lib_kwargs = initialize(settings_kwargs)
known_versions = files.get_known_versions(settings=lib_kwargs["settings"])
return [version.original_string for version in known_versions]
|
nilq/baby-python
|
python
|
# Copyright (c) 2020, Huawei Technologies.All rights reserved.
# Licensed under the BSD 3-Clause License (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://opensource.org/licenses/BSD-3-Clause
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import torch
import numpy as np
import sys
import copy
from common_utils import TestCase, run_tests
from common_device_type import dtypes, instantiate_device_type_tests
from util_test import create_common_tensor
class TestTrueDivide(TestCase):
def generate_data(self,min_d, max_d, shape, dtype):
input1 = np.random.uniform(min_d, max_d, shape).astype(dtype)
input2 = np.random.uniform(min_d, max_d, shape).astype(dtype)
# modify from numpy.ndarray to torch.tensor
npu_input1 = torch.from_numpy(input1)
npu_input2 = torch.from_numpy(input2)
return npu_input1, npu_input2
def generate_single_data(self, min_d, max_d, shape, dtype):
input1 = np.random.uniform(min_d, max_d, shape).astype(dtype)
npu_input1 = torch.from_numpy(input1)
return npu_input1
def generate_single_bool_data(self, min_d, max_d, shape):
input1 = np.random.uniform(min_d, max_d, shape)
input1 = input1.reshape(-1)
for i in range(len(input1)):
if input1[i]<0.5:
input1[i] = 0
input1 = input1.astype(np.bool)
input1 = input1.reshape(shape)
npu_input1 = torch.from_numpy(input1)
return npu_input1
def cpu_op_exec(self, input1, input2):
output = torch.true_divide(input1,input2)
output = output.numpy()
return output
def npu_op_exec(self, input1, input2):
input1 = input1.to("npu")
input2 = input2.to("npu")
output = torch.true_divide(input1,input2)
output = output.to("cpu")
output = output.numpy()
return output
def npu_op_exec_scalar(self, input1, input2):
input1 = input1.to("npu")
output = torch.true_divide(input1,input2)
output = output.to("cpu")
output = output.numpy()
return output
def test_true_divide_float32_broadcast(self,device):
npu_input1 = self.generate_single_data(0, 100, (2,2), np.float32)
npu_input2 = self.generate_single_data(0, 100, (2), np.float32)
cpu_output = self.cpu_op_exec(npu_input1, npu_input2)
npu_output = self.npu_op_exec(npu_input1, npu_input2)
self.assertRtolEqual(cpu_output, npu_output)
def test_true_divide_float32(self,device):
npu_input1, npu_input2 = self.generate_data(0, 100, (4, 3), np.float32)
cpu_output = self.cpu_op_exec(npu_input1, npu_input2)
npu_output = self.npu_op_exec(npu_input1, npu_input2)
self.assertRtolEqual(cpu_output, npu_output)
def test_true_divide_int32(self,device):
npu_input1, npu_input2 = self.generate_data(0, 100, (4, 3), np.int32)
cpu_output = self.cpu_op_exec(npu_input1, npu_input2)
npu_output = self.npu_op_exec(npu_input1, npu_input2)
self.assertRtolEqual(cpu_output, npu_output)
def test_true_divide_bool(self, device):
npu_input1, npu_input2 = self.generate_data(0, 100, (2,2), np.float32)
npu_input3 = self.generate_single_bool_data(1, 1, (2, 2))
cpu_output = self.cpu_op_exec(npu_input1, npu_input3)
npu_output = self.npu_op_exec(npu_input1, npu_input3)
print(cpu_output, npu_output)
self.assertRtolEqual(cpu_output, npu_output)
def test_true_divide_bool_scalar(self, device):
npu_input1, npu_input2 = self.generate_data(0, 100, (2, 2), np.float32)
cpu_output = self.cpu_op_exec(npu_input1,True)
npu_output = self.npu_op_exec_scalar(npu_input1, True)
self.assertRtolEqual(cpu_output, npu_output)
def test_true_divide_scalar_int32_1(self,device):
npu_input1, npu_input2 = self.generate_data(0, 100, (2, 3), np.float32)
cpu_output = self.cpu_op_exec(npu_input1, 2)
npu_output = self.npu_op_exec_scalar(npu_input1, 2)
self.assertRtolEqual(cpu_output, npu_output)
def test_true_divide_scalar_int32_2(self,device):
npu_input1, npu_input2 = self.generate_data(0, 100, (2, 3), np.int32)
cpu_output = self.cpu_op_exec(npu_input1, 2)
npu_output = self.npu_op_exec_scalar(npu_input1, 2)
self.assertRtolEqual(cpu_output, npu_output)
def test_true_divide_scalar_float32(self,device):
npu_input1, npu_input2 = self.generate_data(0, 100, (2, 3), np.float32)
cpu_output = self.cpu_op_exec(npu_input1, 2.0)
npu_output = self.npu_op_exec_scalar(npu_input1, 2.0)
self.assertRtolEqual(cpu_output, npu_output)
instantiate_device_type_tests(TestTrueDivide, globals() , except_for='cpu')
if __name__ == "__main__":
run_tests()
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2016-2018 CERN.
#
# Invenio is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Date string field."""
from __future__ import absolute_import, print_function
import arrow
from arrow.parser import ParserError
from marshmallow import fields, missing
class DateString(fields.Date):
"""ISO8601-formatted date string."""
def _serialize(self, value, attr, obj):
"""Serialize an ISO8601-formatted date."""
try:
return super(DateString, self)._serialize(
arrow.get(value).date(), attr, obj)
except ParserError:
return missing
def _deserialize(self, value, attr, data):
"""Deserialize an ISO8601-formatted date."""
return super(DateString, self)._deserialize(value, attr,
data).isoformat()
|
nilq/baby-python
|
python
|
import unittest
import numpy as np
from modem.util.channel import Channel
def get_random(samples=2048):
    """Return a sequence of random complex samples."""
return 2 * (np.random.sample((samples,)) + 1j * np.random.sample((samples,))) - (1 + 1j)
class test_channel(unittest.TestCase):
def setUp(self):
self.ch = Channel()
def test_awgn(self):
data_in = get_random(1024 * 1000)
data_out = self.ch.awgn(data_in, snr_db=0)
self.assertEqual(len(data_in), len(data_out))
self.assertAlmostEqual(
np.var(data_in), np.var(data_out) / 2.0, places=2)
def test_multipath(self):
data_in = np.zeros(10, dtype=complex)
data_in[2] = 1.0 + 0.0j
self.ch.impulse_response = np.arange(10) + 1j * np.arange(10)
data_out = self.ch.multipath(data_in)
np.testing.assert_array_almost_equal(
data_out[2:12], self.ch.last_impulse_response)
#self.assertAlmostEqual(np.linalg.norm(data_in), np.linalg.norm(data_out))
if __name__ == "__main__":
unittest.main()
|
nilq/baby-python
|
python
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import urllib.request
requestUrl = 'http://www.tvapi.cn/movie/getMovieInfo'
webhead = {'User-Agent':'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:23.0) Gecko/20100101 Firefox/23.0', 'charset':'utf-8'}
urlRequest = urllib.request.Request(url = requestUrl, headers = webhead)
with urllib.request.urlopen(urlRequest) as f:
pass
|
nilq/baby-python
|
python
|
import ckan.logic as logic
import ckan.model as model
import unicodedata
import ckanext.hdx_users.model as umodel
import ckanext.hdx_user_extra.model as ue_model
import ckanext.hdx_theme.tests.hdx_test_base as hdx_test_base
class TestAboutPageController(hdx_test_base.HdxBaseTest):
#loads missing plugins
@classmethod
def _load_plugins(cls):
hdx_test_base.load_plugin('hdx_users hdx_user_extra hdx_theme')
@classmethod
def setup_class(cls):
super(TestAboutPageController, cls).setup_class()
umodel.setup()
ue_model.create_table()
def test_resulting_page(self):
testsysadmin = model.User.by_name('testsysadmin')
page = self._getAboutPage('license')
assert 'Data Licenses' in str(page.response), 'the url /about/license should redirect to the Data Licenses page when no user is logged in'
page = self._getAboutPage('license', testsysadmin.apikey)
assert 'Data Licenses' in str(page.response), 'the url /about/license should redirect to the Data Licenses page, even when the user is logged in'
page = self._getAboutPage('terms')
assert 'Terms of Service' in str(page.response), 'the url /about/terms should redirect to the Terms of Service page when no user is logged in'
page = self._getAboutPage('terms', testsysadmin.apikey)
assert 'Terms of Service' in str(page.response), 'the url /about/terms should redirect to the Terms of Service page, even when the user is logged in'
try:
page = self._getAboutPage('fake')
assert "The requested about page doesn't exist" in str(page.response), 'the url /about/fake should throw an error when no user is logged in'
except logic.ValidationError:
assert True
try:
page = self._getAboutPage('fake', testsysadmin.apikey)
assert "The requested about page doesn't exist" in str(page.response), 'the url /about/terms should throw an error, even when the user is logged in'
except logic.ValidationError:
assert True
def _getAboutPage(self, page, apikey=None):
url = '/about/' + page
if apikey:
page = self.app.get(url,headers={'Authorization':unicodedata.normalize('NFKD', apikey).encode('ascii','ignore')})
else:
page = self.app.get(url)
return page
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
"""
Utilities for analysis
Author: G.J.J. van den Burg
License: See LICENSE file.
Copyright: 2021, The Alan Turing Institute
"""
from collections import namedtuple
Line = namedtuple("Line", ["xs", "ys", "style", "label"])
def dict2tex(d):
items = []
for key, value in d.items():
if isinstance(value, dict):
value = "{\n" + dict2tex(value) + "%\n}"
if value is None:
items.append(f"{key}")
else:
items.append(f"{key}={value}")
return ",%\n".join(items)
|
nilq/baby-python
|
python
|
"""
Dexter Legaspi - dlegaspi@bu.edu
Class: CS 521 - Summer 2
Date: 07/22/2021
Term Project
Main view/window
"""
import tkinter as tk
from tkinter import messagebox, filedialog
from PIL import ImageTk, Image as PILImage
import appglobals
from appcontroller import AppController
from appstate import AppState
from image import Image
from imageattributes import ImageAttributes
from imagecatalog import ImageCatalog
import logging
from imagerating import ImageRating
logger = logging.getLogger(__name__)
BACKGROUND_LOGO_IMAGE_PATH = appglobals.path_data_root_door / 'bg-logo2.jpg'
DOGE_IMAGE_PATH = appglobals.path_data_root_door / 'doge.jpg'
class MainView(tk.Frame):
"""
Main View/frame
"""
def __init__(self, master, controller: AppController):
"""
Constructor
:param master:
:param controller:
"""
tk.Frame.__init__(self, master)
self.controller = controller
self.master = master
self.statusbar = None
self.img_frame = None
self.img_label = None
self.exif_label = None
self.rating_slider = None
self.exif_label_text = tk.StringVar()
self.render()
self.render_menu()
def render_menu(self):
"""
Render menu
:return:
"""
menu_bar = tk.Menu(self.master)
main_menu = tk.Menu(menu_bar, tearoff=False)
main_menu.add_command(label='Open...',
command=self.menu_open_directory)
main_menu.add_command(label='Save Ratings',
command=self.menu_save_ratings)
main_menu.add_command(label='Quit',
command=self.menu_command_quit)
menu_bar.add_cascade(label='File',
menu=main_menu)
self.master.config(menu=menu_bar)
def menu_save_ratings(self):
"""
save ratings
:return:
"""
logger.debug('save ratings...')
self.controller.save_ratings()
def menu_command_quit(self):
"""
Quit app
:return:
"""
logger.debug('quitting...')
if appglobals.app_config_confirm_on_exit:
logger.info('quit confirmation enabled')
if messagebox.askokcancel("Quit", "Do you want to quit?"):
self.master.destroy()
else:
self.master.destroy()
def menu_open_directory(self):
"""
open/select directory dialog
:return:
"""
selected_dir = filedialog.askdirectory(parent=self.master,
initialdir='~',
title='Select Images Directory')
logger.info("open directory: %s", selected_dir)
self.set_status_bar_text(f'Open directory: {selected_dir}')
catalog = ImageCatalog(directory=selected_dir)
logger.info("catalog statistics: %s", catalog.get_stats())
new_state = AppState(catalog)
self.controller.set_state(new_state)
initial_img = self.controller.get_image_at_current_index()
if initial_img is not None:
self.set_img(initial_img)
return selected_dir
def set_status_bar_text(self, status_text):
"""
sets the status bar text
:param status_text:
:return:
"""
self.statusbar.config(text=status_text)
def __init_img_widget(self):
"""
Image widget init
:return:
"""
path = BACKGROUND_LOGO_IMAGE_PATH
img = PILImage.open(path)
# this is a test for resizing
# https://stackoverflow.com/a/24745969/918858
maxsize = (appglobals.app_config_img_dimension[0],
appglobals.app_config_img_dimension[1])
logger.info('resizing image...')
img.thumbnail(maxsize, PILImage.ANTIALIAS)
self.img_frame = tk.Frame(self,
width=appglobals.app_config_win_dimension[0],
height=appglobals.app_config_win_dimension[1])
self.img_frame.pack(fill=tk.BOTH, expand=True)
pimg = ImageTk.PhotoImage(img)
img_attr = ImageAttributes(img)
self.img_label = tk.Label(self.img_frame, image=pimg)
self.img_label.image = pimg
self.img_label.pack()
self.exif_label_text.set(img_attr.get_formatted_exif())
self.exif_label = tk.Label(self.img_frame,
textvariable=self.exif_label_text,
justify=tk.LEFT)
# place the EXIF relative to the image
# https://stackoverflow.com/a/63625317/918858
self.exif_label.place(in_=self.img_label, y=10, x=10)
def set_img(self, img: Image):
"""
Set the image in the Main View
:param img:
:return:
"""
# load the raw image
raw_img = img.get_image_object()
maxsize = (appglobals.app_config_img_dimension[0],
appglobals.app_config_img_dimension[1])
logger.info('resizing image...')
raw_img.thumbnail(maxsize, PILImage.ANTIALIAS)
# create the new Tk image object
pimg = ImageTk.PhotoImage(raw_img)
# get the image attribute and update the label
img_attr = img.get_attributes()
self.exif_label_text.set(img_attr.get_formatted_exif())
# set the rating slider
self.rating_slider.set(int(img.get_rating()))
# set the status bar
self.set_status_bar_text(f'Current image: {img.get_name().upper()}')
# finally update the actual image
self.img_label.configure(image=pimg)
self.img_label.image = pimg
self.img_label.pack()
def render(self):
"""
Render UI
:return:
"""
self.master.title('Image Viewer')
self.pack(fill=tk.BOTH, expand=1)
self.__init_img_widget()
# status bar
self.statusbar = tk.Label(self, text='Ready.', bd=1,
relief=tk.SUNKEN, anchor=tk.W)
self.statusbar.pack(side=tk.BOTTOM, fill=tk.X)
# rating slider
self.rating_slider = tk.Scale(self, from_=0, to=5, orient=tk.HORIZONTAL,
command=self.slider_handle_rating)
self.rating_slider.pack(side=tk.LEFT, padx=20, pady=20)
self.rating_slider.set(0)
# navigation buttons
next_button = tk.Button(self,
text='Next Image',
command=self.button_next_image)
previous_button = tk.Button(self,
text='Previous Image',
command=self.button_previous_image)
next_button.pack(side=tk.RIGHT, padx=2, pady=10)
previous_button.pack(side=tk.RIGHT, padx=2, pady=10)
def button_next_image(self):
"""
next image button handler
:return:
"""
logger.info("next image button pressed")
self.controller.next_image()
img = self.controller.get_image_at_current_index()
if img is not None:
self.set_img(img)
def button_previous_image(self):
"""
previous image button handler
:return:
"""
logger.info("previous image button pressed")
self.controller.previous_image()
img = self.controller.get_image_at_current_index()
if img is not None:
self.set_img(img)
def slider_handle_rating(self, value):
"""
handling of rating slider
:return:
"""
logger.info("setting rating slider...")
logger.info("current slider setting: %s", value)
img = self.controller.get_image_at_current_index()
img.set_rating(ImageRating(int(value)))
pass
def render_main_view(controller):
"""
render the main view
:return:
"""
root = tk.Tk()
main_view = MainView(root, controller)
root.geometry('{}x{}'
.format(appglobals.app_config_win_dimension[0],
appglobals.app_config_win_dimension[1]))
# https://www.tutorialspoint.com/how-to-center-a-window-on-the-screen-in-tkinter
root.eval('tk::PlaceWindow . center')
root.mainloop()
|
nilq/baby-python
|
python
|
import typing
import pytest
from energuide import bilingual
from energuide import element
from energuide.embedded import code
from energuide.exceptions import InvalidEmbeddedDataTypeError
@pytest.fixture
def raw_wall_code() -> element.Element:
data = """
<Code id='Code 1'>
<Label>1201101121</Label>
<Layers>
<StructureType>
<English>Wood frame</English>
<French>Ossature de bois</French>
</StructureType>
<ComponentTypeSize>
<English>38x89 mm (2x4 in)</English>
<French>38x89 (2x4)</French>
</ComponentTypeSize>
</Layers>
</Code>
"""
return element.Element.from_string(data)
BAD_WALL_CODE_XML = [
# This XML block is missing the id attribute on the <Code> tag
"""
<Code>
<Label>1201101121</Label>
<Layers>
<StructureType>
<English>Wood frame</English>
<French>Ossature de bois</French>
</StructureType>
<ComponentTypeSize>
<English>38x89 mm (2x4 in)</English>
<French>38x89 (2x4)</French>
</ComponentTypeSize>
</Layers>
</Code>
""",
]
BAD_WINDOW_CODE_XML = [
# This XML block is missing the id attribute on the <Code> tag
"""
<Code>
<Label>202002</Label>
<Layers>
<GlazingTypes>
<English>Double/double with 1 coat</English>
<French>Double/double, 1 couche</French>
</GlazingTypes>
<CoatingsTints>
<English>Clear</English>
<French>Transparent</French>
</CoatingsTints>
<FillType>
<English>6 mm Air</English>
<French>6 mm d'air</French>
</FillType>
<SpacerType>
<English>Metal</English>
<French>Métal</French>
</SpacerType>
<Type>
<English>Picture</English>
<French>Fixe</French>
</Type>
<FrameMaterial>
<English>Wood</English>
<French>Bois</French>
</FrameMaterial>
</Layers>
</Code>
"""
]
@pytest.fixture
def wall_code() -> code.WallCode:
return code.WallCode(
identifier='Code 1',
label='1201101121',
tags={
code.WallCodeTag.STRUCTURE_TYPE: bilingual.Bilingual(
english='Wood frame',
french='Ossature de bois',
),
code.WallCodeTag.COMPONENT_TYPE_SIZE: bilingual.Bilingual(
english='38x89 mm (2x4 in)',
french='38x89 (2x4)',
)
},
)
@pytest.fixture
def raw_window_code() -> element.Element:
data = """
<Code id='Code 11'>
<Label>202002</Label>
<Layers>
<GlazingTypes>
<English>Double/double with 1 coat</English>
<French>Double/double, 1 couche</French>
</GlazingTypes>
<CoatingsTints>
<English>Clear</English>
<French>Transparent</French>
</CoatingsTints>
<FillType>
<English>6 mm Air</English>
<French>6 mm d'air</French>
</FillType>
<SpacerType>
<English>Metal</English>
<French>Métal</French>
</SpacerType>
<Type>
<English>Picture</English>
<French>Fixe</French>
</Type>
<FrameMaterial>
<English>Wood</English>
<French>Bois</French>
</FrameMaterial>
</Layers>
</Code>
"""
return element.Element.from_string(data)
@pytest.fixture
def window_code() -> code.WindowCode:
return code.WindowCode(
identifier='Code 11',
label='202002',
tags={
code.WindowCodeTag.GLAZING_TYPE: bilingual.Bilingual(
english='Double/double with 1 coat',
french='Double/double, 1 couche',
),
code.WindowCodeTag.COATING_TINTS: bilingual.Bilingual(english='Clear', french='Transparent'),
code.WindowCodeTag.FILL_TYPE: bilingual.Bilingual(english='6 mm Air', french="6 mm d'air"),
code.WindowCodeTag.SPACER_TYPE: bilingual.Bilingual(english='Metal', french='Métal'),
code.WindowCodeTag.CODE_TYPE: bilingual.Bilingual(english='Picture', french='Fixe'),
code.WindowCodeTag.FRAME_MATERIAL: bilingual.Bilingual(english='Wood', french='Bois'),
}
)
@pytest.fixture
def raw_codes(raw_wall_code: element.Element,
raw_window_code: element.Element) -> typing.Dict[str, typing.List[element.Element]]:
return {
'wall': [raw_wall_code],
'window': [raw_window_code],
}
def test_wall_code_from_data(raw_wall_code: element.Element, wall_code: code.WallCode) -> None:
output = code.WallCode.from_data(raw_wall_code)
assert output == wall_code
def test_window_code_from_data(raw_window_code: element.Element, window_code: code.WindowCode) -> None:
output = code.WindowCode.from_data(raw_window_code)
assert output == window_code
@pytest.mark.parametrize("bad_xml", BAD_WALL_CODE_XML)
def test_bad_wall_code(bad_xml: str) -> None:
code_node = element.Element.from_string(bad_xml)
with pytest.raises(InvalidEmbeddedDataTypeError) as excinfo:
code.WallCode.from_data(code_node)
assert excinfo.value.data_class == code.WallCode
@pytest.mark.parametrize("bad_xml", BAD_WINDOW_CODE_XML)
def test_bad_window_code(bad_xml: str) -> None:
code_node = element.Element.from_string(bad_xml)
with pytest.raises(InvalidEmbeddedDataTypeError) as excinfo:
code.WindowCode.from_data(code_node)
assert excinfo.value.data_class == code.WindowCode
def test_code_from_data(raw_wall_code: element.Element,
raw_window_code: element.Element,
wall_code: code.WallCode,
window_code: code.WindowCode) -> None:
output = code.Codes.from_data(
{'wall': [raw_wall_code], 'window': [raw_window_code]}
)
assert output == code.Codes(
wall={wall_code.identifier: wall_code},
window={window_code.identifier: window_code}
)
|
nilq/baby-python
|
python
|
__doc__ = """
Example: the content passed to the topic_metadata function
args = {
'pattern' : "https://mirrors.tuna.tsinghua.edu.cn/help/%s",
'themes' :["AOSP", "AUR","CocoaPods"
, "anaconda","archlinux","archlinuxcn"
,"bananian","centos","chromiumos","cygwin"
,"docker","elpa","epel","fedora","git-repo"
,"gitlab-ce","gitlab-ci-multi-runner"
,"hackage","homebrew","homebrew-bottles"
,"linux-stable.git","linux.git","lxc-images"
,"mongodb","msys2","nodesource"
,"pybombs","pypi"
,"raspbian","repo-ck","repoforge","rpmfusion","rubygems"
,"tensorflow","termux","ubuntu","virtualbox","weave"],
'filter': {'method' : 'id', 'pattern' : 'help-content'}
}
Tasks are generated in the following order: first the task list is read in; if a task is an HTML-type task, the HTML is converted.
In other words, the key point is that we need to tell the task when the task-type information becomes available.
For example, a task type might be an HTML->Markdown conversion.
For such a type we should prepare the parsing expressions it needs.
More concretely, such a type is also bound to a specific website. That is, a task type alone cannot determine the processing function;
a type only serves to construct tasks.
This also means that tasks usually appear grouped together, in the form of a Python dict.
Ideally the metadata could be read directly from a YAML file, so it would not have to be written out separately for each file.
"""
import re
## taskfunc: the task name; savefunc: the save name.
## The task name actually depends on the website. For a wiki, for example, we want names ending in .md.
## In that case we need the ability to customize the save name per website.
def to_metadata(args):
"""
Further organize the mirror metadata, adding constructed type data derived from it
"""
taskfunc = lambda theme: re.sub(r'[=]', r'_', args['pattern'] % theme)
savefunc = lambda theme: re.sub(r'[=:/,\'\(\)]', r'_', theme) + ".md"
args['taskfunc'] = args.get('taskfunc', taskfunc)
args['savefunc'] = args.get('savefunc', savefunc)
args['savename'] = args['savename'] \
if 'savename' in args.keys() else 'download/' + savefunc(args['themes'][0])
# Add a task_list attribute to the mirror, holding the task names used to fetch the corresponding topics
args['task_list'] = [args['taskfunc'](theme)
for theme in args['themes']]
return args
def to_separate_metadata_list(args):
"""
Split the data into separate metadata items, each item appearing as its own data list; each fetch then generates its own page.
"""
result = []
taskfunc = lambda theme: re.sub(r'[=]', r'_', args['pattern'] % theme)
#savefunc = lambda theme: re.sub(r'[=]', r'_', theme) + '.md'
savefunc = lambda theme: re.sub(r'[=:/,\'\(\)]', r'_', theme) + ".md"
args['taskfunc'] = args.get('taskfunc', taskfunc)
args['savefunc'] = args.get('savefunc', savefunc)
for theme in args['themes'] :
result += [{'pattern' : args['pattern'],
'filter' : args['filter'] if 'filter' in args.keys() else None,
'themes' : [theme],
'taskfunc' : args['taskfunc'],
'savename' : args['savefunc'](theme)
}]
return result
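# Illustrative usage sketch (run ad hoc; the sample args mirror the module docstring,
# trimmed to two themes for brevity).
if __name__ == '__main__':
    sample = {
        'pattern': "https://mirrors.tuna.tsinghua.edu.cn/help/%s",
        'themes': ["AOSP", "pypi"],
        'filter': {'method': 'id', 'pattern': 'help-content'},
    }
    meta = to_metadata(dict(sample))
    print(meta['task_list'])   # one task URL per theme
    print(meta['savename'])    # default save name, e.g. 'download/AOSP.md'
    for item in to_separate_metadata_list(dict(sample)):
        print(item['savename'])  # 'AOSP.md', 'pypi.md'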
|
nilq/baby-python
|
python
|
"""This module demonstrates basic Sphinx usage with Python modules.
Submodules
==========
.. autosummary::
:toctree: _autosummary
"""
VERSION = "0.0.1"
"""The version of this module."""
|
nilq/baby-python
|
python
|
#!/usr/bin/env python3
# pyreverse -p contexts_basecontext_basecontext ../Lib/pagebot/contexts/basecontext/basecontext.py
# dot -Tpng classes_contexts_basecontext_basecontext.dot -o classes_contexts_basecontext_basecontext.png
import os
import subprocess
def getDirs(root):
return [d for d in os.listdir(root) if os.path.isdir(os.path.join(root, d))]
def getFiles(root):
return [(f, os.path.join(root, f)) for f in os.listdir(root) if os.path.isfile(os.path.join(root, f))]
def reverse(files):
for (f, p) in files:
if f.startswith('.'):
continue
else:
name = f.split('.')[0]
subprocess.call(['pyreverse', '-p', name, p])
print(name, p)
break
if __name__ == "__main__":
root = '../Lib/pagebot'
files = getFiles(root)
reverse(files)
|
nilq/baby-python
|
python
|
#!/usr/bin/env python3
#
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit test for downloading gzip-ed objects."""
import gzip
import json
import os
import re
import unittest
from testbench import rest_server
from tests.format_multipart_upload import format_multipart_upload_bytes
UPLOAD_QUANTUM = 256 * 1024
class TestTestbenchObjectGzip(unittest.TestCase):
def setUp(self):
rest_server.db.clear()
rest_server.server.config["PREFERRED_URL_SCHEME"] = "https"
rest_server.server.config["SERVER_NAME"] = "storage.googleapis.com"
rest_server.root.config["PREFERRED_URL_SCHEME"] = "https"
rest_server.root.config["SERVER_NAME"] = "storage.googleapis.com"
self.client = rest_server.server.test_client(allow_subdomain_redirects=True)
# Avoid magic buckets in the test
os.environ.pop("GOOGLE_CLOUD_CPP_STORAGE_TEST_BUCKET_NAME", None)
response = self.client.post(
"/storage/v1/b", data=json.dumps({"name": "bucket-name"})
)
self.assertEqual(response.status_code, 200)
def _insert_compressed_object(self, name):
media = "How vexingly quick daft zebras jump!"
compressed = gzip.compress(media.encode("utf-8"))
response = self.client.post(
"/upload/storage/v1/b/bucket-name/o",
query_string={
"name": name,
"uploadType": "media",
"contentEncoding": "gzip",
},
content_type="application/octet-stream",
data=compressed,
)
self.assertEqual(response.status_code, 200)
self.assertTrue(
response.headers.get("content-type").startswith("application/json")
)
insert_rest = json.loads(response.data)
self.assertEqual(insert_rest.get("kind"), "storage#object")
self.assertEqual(insert_rest.get("contentEncoding", ""), "gzip")
return media
def test_download_gzip_data_simple_upload(self):
media = self._insert_compressed_object("zebra")
response = self.client.get(
"/download/storage/v1/b/bucket-name/o/zebra", query_string={"alt": "media"}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.data.decode("utf-8"), media)
self.assertEqual(
response.headers.get("x-guploader-response-body-transformations", ""),
"gunzipped",
)
def test_download_gzip_compressed(self):
media = self._insert_compressed_object("zebra")
response = self.client.get(
"/download/storage/v1/b/bucket-name/o/zebra",
query_string={"alt": "media"},
headers={"Accept-Encoding": "gzip"},
)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.data, gzip.compress(media.encode("utf-8")))
self.assertEqual(
response.headers.get("x-guploader-response-body-transformations", ""), ""
)
def test_download_gzip_range_ignored(self):
media = self._insert_compressed_object("zebra")
response = self.client.get(
"/download/storage/v1/b/bucket-name/o/zebra",
query_string={"alt": "media"},
headers={"Range": "4-8"},
)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.data, media.encode("utf-8"))
self.assertEqual(
response.headers.get("x-guploader-response-body-transformations", ""),
"gunzipped",
)
self.assertEqual(
response.headers.get("content-range", ""),
"bytes 0-%d/%d" % (len(media) - 1, len(media)),
)
def test_download_gzip_uncompressed_xml(self):
media = self._insert_compressed_object("zebra")
response = self.client.get(
"/zebra", base_url="https://bucket-name.storage.googleapis.com"
)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.data, media.encode("utf-8"))
self.assertEqual(
response.headers.get("x-guploader-response-body-transformations", ""),
"gunzipped",
)
self.assertEqual(
response.headers.get("x-goog-stored-content-encoding", ""), "gzip"
)
def test_download_gzip_compressed_xml(self):
media = self._insert_compressed_object("zebra")
response = self.client.get(
"/zebra",
base_url="https://bucket-name.storage.googleapis.com",
headers={"Accept-Encoding": "gzip"},
)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.data, gzip.compress(media.encode("utf-8")))
self.assertEqual(
response.headers.get("x-guploader-response-body-transformations", ""),
"",
)
self.assertEqual(
response.headers.get("x-goog-stored-content-encoding", ""), "gzip"
)
def test_download_of_multipart_upload(self):
media = "How vexingly quick daft zebras jump!"
compressed = gzip.compress(media.encode("utf-8"))
boundary, payload = format_multipart_upload_bytes(
{"contentEncoding": "gzip"}, compressed
)
response = self.client.post(
"/upload/storage/v1/b/bucket-name/o",
query_string={"uploadType": "multipart", "name": "zebra"},
content_type="multipart/related; boundary=" + boundary,
data=payload,
)
self.assertEqual(response.status_code, 200)
self.assertTrue(
response.headers.get("content-type").startswith("application/json")
)
insert_rest = json.loads(response.data)
self.assertEqual(insert_rest.get("kind"), "storage#object")
self.assertEqual(insert_rest.get("contentEncoding", ""), "gzip")
self.assertEqual(
response.headers.get("x-guploader-response-body-transformations", ""), ""
)
response = self.client.get(
"/download/storage/v1/b/bucket-name/o/zebra", query_string={"alt": "media"}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.data.decode("utf-8"), media)
self.assertEqual(
response.headers.get("x-guploader-response-body-transformations", ""),
"gunzipped",
)
def test_download_of_resumable_upload(self):
media = "How vexingly quick daft zebras jump!"
compressed = gzip.compress(media.encode("utf-8"))
response = self.client.post(
"/upload/storage/v1/b/bucket-name/o",
query_string={"uploadType": "resumable", "name": "zebra"},
content_type="application/json",
data=json.dumps({"name": "zebra", "contentEncoding": "gzip"}),
)
self.assertEqual(response.status_code, 200)
location = response.headers.get("location")
self.assertIn("upload_id=", location)
match = re.search("[&?]upload_id=([^&]+)", location)
self.assertIsNotNone(match, msg=location)
upload_id = match.group(1)
finalized = self.client.put(
"/upload/storage/v1/b/bucket-name/o",
query_string={"upload_id": upload_id},
data=compressed,
)
self.assertEqual(finalized.status_code, 200)
self.assertTrue(
finalized.headers.get("content-type").startswith("application/json")
)
insert_rest = json.loads(finalized.data)
self.assertIn("metadata", insert_rest)
self.assertEqual(insert_rest.get("kind"), "storage#object")
self.assertEqual(insert_rest.get("contentEncoding", ""), "gzip")
response = self.client.get(
"/download/storage/v1/b/bucket-name/o/zebra", query_string={"alt": "media"}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.data.decode("utf-8"), media)
self.assertEqual(
response.headers.get("x-guploader-response-body-transformations", ""),
"gunzipped",
)
if __name__ == "__main__":
unittest.main()
|
nilq/baby-python
|
python
|
# from django.contrib.oauth.models import User
from rest_framework import authentication
from rest_framework import exceptions
import logging
log = logging.getLogger(__name__)
import json, re
from django.core.cache import cache
from django.conf import settings
class TokenAuthentication(authentication.BaseAuthentication):
def authenticate(self, request):
path = request.path
log.debug('TokenAuthentication get path = {0}'.format(path))
#if path start with /rest/* pass it
parser = re.compile(r'^/rest/(\w+/)+.*')
match_group = parser.match(path)
if match_group:
return None
token = request.META.get('HTTP_AUTHORIZATION')
log.debug('TokenAuthentication get token = {0}'.format(token))
if not token:
raise exceptions.AuthenticationFailed('Authorization Token required')
else:
# lookup redis to see if token exists
# if not, return exception, else return user
value = cache.get(token)
if value is None:
# user = None
raise exceptions.AuthenticationFailed('Invalid token in redis')
else:
user = json.loads(value)
cache.set(token, json.dumps(user), settings.REDIS_TOKEN_TIMEOUT_SEC)
return (user, None)
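# Illustrative wiring sketch (hypothetical settings path): the class is enabled
# project-wide through the DRF settings, e.g. in settings.py:
#
#     REST_FRAMEWORK = {
#         'DEFAULT_AUTHENTICATION_CLASSES': (
#             'path.to.this_module.TokenAuthentication',
#         ),
#     }
#
# Requests outside /rest/ must then carry the Redis-backed token in the
# Authorization header; a cache hit also refreshes its TTL
# (settings.REDIS_TOKEN_TIMEOUT_SEC).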
|
nilq/baby-python
|
python
|
# Input: a list of "documents" at least containing: "sentences"
# Output: a list of "documents" at least containing: "text"
from .simplifier import Simplifier
class SimplifierByKGen:
def __init__(self, parameters):
# some preparation
# no parameter is needed
print("Info: Simplifier By KGen has been initialized")
def execute(self, input):
if not 'documents' in input:
print("ERROR: documents is missing in the input for SimplifierByKGen")
return input
for document in input['documents']:
if not 'sentences' in document:
print("ERROR: sentences is missing in a document in documents for SimplifierByKGen")
return input
if not 'originalSentences' in document:
document['originalSentences'] = document['sentences']
document['sentences'] = Simplifier(document['sentences']).simplify()
return input
#def refineDocuments(self):
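# Illustrative pipeline call (a sketch; the module uses a relative import, so it is
# exercised from inside the package rather than run directly):
#
#     simplifier = SimplifierByKGen(parameters={})
#     result = simplifier.execute({
#         'documents': [{'sentences': ['The cat, which was black, sat on the mat.']}]
#     })
#     # result['documents'][0] now holds the simplified 'sentences' plus the
#     # untouched copy under 'originalSentences'.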
|
nilq/baby-python
|
python
|
from django.conf import settings
from django.core.files.storage import get_storage_class
from storages.backends.s3boto3 import S3Boto3Storage
# if settings.DEBUG:
# PublicMediaStorage = get_storage_class()
# PrivateMediaStorage = get_storage_class()
# else:
from config.settings import dev
class PublicMediaStorage(S3Boto3Storage):
location = dev.AWS_PUBLIC_MEDIA_LOCATION
file_overwrite = False
class PrivateMediaStorage(S3Boto3Storage):
location = dev.AWS_PRIVATE_MEDIA_LOCATION
file_overwrite = False
default_acl = "private"
custom_domain = False
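# Illustrative usage sketch (hypothetical model in some app's models.py): attach a
# storage backend to a field so uploads land under the configured S3 location, e.g.
#
#     from django.db import models
#
#     class Report(models.Model):
#         public_copy = models.FileField(storage=PublicMediaStorage())
#         private_copy = models.FileField(storage=PrivateMediaStorage())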
|
nilq/baby-python
|
python
|
#########
# Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
import tempfile
import time
import threading
import ssl
import sys
import os
import copy
from contextlib import contextmanager
import celery
import cloudify.manager
from cloudify import ctx
from cloudify.exceptions import NonRecoverableError
from cloudify.utils import ManagerVersion
from cloudify.utils import get_manager_file_server_url
from cloudify.decorators import operation
from cloudify_agent.api.plugins.installer import PluginInstaller
from cloudify_agent.api.factory import DaemonFactory
from cloudify_agent.api import defaults
from cloudify_agent.api import exceptions
from cloudify_agent.api import utils
from cloudify_agent.app import app
from cloudify_agent.installer.config import configuration
@operation
def install_plugins(plugins, **_):
installer = PluginInstaller(logger=ctx.logger)
for plugin in plugins:
ctx.logger.info('Installing plugin: {0}'.format(plugin['name']))
try:
installer.install(plugin=plugin,
deployment_id=ctx.deployment.id,
blueprint_id=ctx.blueprint.id)
except exceptions.PluginInstallationError as e:
# preserve traceback
tpe, value, tb = sys.exc_info()
raise NonRecoverableError, NonRecoverableError(str(e)), tb
@operation
def uninstall_plugins(plugins, **_):
installer = PluginInstaller(logger=ctx.logger)
for plugin in plugins:
ctx.logger.info('Uninstalling plugin: {0}'.format(plugin['name']))
if plugin.get('wagon'):
installer.uninstall_wagon(
package_name=plugin['package_name'],
package_version=plugin['package_version'])
else:
installer.uninstall(plugin=plugin,
deployment_id=ctx.deployment.id)
@operation
def restart(new_name=None, delay_period=5, **_):
cloudify_agent = ctx.instance.runtime_properties['cloudify_agent']
if new_name is None:
new_name = utils.internal.generate_new_agent_name(
cloudify_agent.get('name', 'agent'))
# update agent name in runtime properties so that the workflow will
# know the name of the worker handling tasks for this instance.
# the update cannot be done by setting a nested property directly
# because they are not recognized as 'dirty'
cloudify_agent['name'] = new_name
ctx.instance.runtime_properties['cloudify_agent'] = cloudify_agent
# must update instance here because the process may shutdown before
# the decorator has a chance to do it.
ctx.instance.update()
daemon = _load_daemon(logger=ctx.logger)
# make the current master stop listening to the current queue
# to avoid a situation where we have two masters listening on the
# same queue.
app.control.cancel_consumer(
queue=daemon.queue,
destination=['celery@{0}'.format(daemon.name)]
)
# clone the current daemon to preserve all the attributes
attributes = utils.internal.daemon_to_dict(daemon)
# give the new daemon the new name
attributes['name'] = new_name
# remove the log file and pid file so that new ones will be created
# for the new agent
del attributes['log_file']
del attributes['pid_file']
# Get the broker credentials for the daemon
attributes.update(ctx.bootstrap_context.broker_config())
new_daemon = DaemonFactory().new(logger=ctx.logger, **attributes)
# create the new daemon
new_daemon.create()
_save_daemon(new_daemon)
# configure the new daemon
new_daemon.configure()
new_daemon.start()
# start a thread that will kill the current master.
# this is done in a thread so that the current task will not result in
# a failure
thread = threading.Thread(target=shutdown_current_master,
args=[delay_period, ctx.logger])
thread.daemon = True
thread.start()
@operation
def stop(delay_period=5, **_):
thread = threading.Thread(target=shutdown_current_master,
args=[delay_period, ctx.logger])
thread.daemon = True
thread.start()
def shutdown_current_master(delay_period, logger):
if delay_period > 0:
time.sleep(delay_period)
daemon = _load_daemon(logger=logger)
daemon.before_self_stop()
daemon.stop()
def _load_daemon(logger):
factory = DaemonFactory(
username=utils.internal.get_daemon_user(),
storage=utils.internal.get_daemon_storage_dir())
return factory.load(utils.internal.get_daemon_name(), logger=logger)
def _save_daemon(daemon):
factory = DaemonFactory(
username=utils.internal.get_daemon_user(),
storage=utils.internal.get_daemon_storage_dir())
factory.save(daemon)
def create_new_agent_dict(old_agent):
new_agent = {}
new_agent['name'] = utils.internal.generate_new_agent_name(
old_agent['name'])
new_agent['remote_execution'] = True
# TODO: broker_ip should be handled as part of fixing agent migration
fields_to_copy = ['windows', 'ip', 'basedir', 'user', 'broker_ip']
for field in fields_to_copy:
if field in old_agent:
new_agent[field] = old_agent[field]
configuration.reinstallation_attributes(new_agent)
new_agent['manager_file_server_url'] = get_manager_file_server_url()
new_agent['old_agent_version'] = old_agent['version']
return new_agent
@contextmanager
def _celery_client(ctx, agent):
# We retrieve broker url from old agent in order to support
# cases when old agent is not connected to current rabbit server.
if 'broker_config' in agent:
broker_config = agent['broker_config']
else:
broker_config = ctx.bootstrap_context.broker_config()
broker_url = utils.internal.get_broker_url(broker_config)
ctx.logger.info('Connecting to {0}'.format(broker_url))
celery_client = celery.Celery()
# We can't pass broker_url to Celery constructor because it would
# be overridden by the value from broker_config.py.
config = {
'BROKER_URL': broker_url,
'CELERY_RESULT_BACKEND': broker_url
}
if not ManagerVersion(agent['version']).equals(ManagerVersion('3.2')):
config['CELERY_TASK_RESULT_EXPIRES'] = \
defaults.CELERY_TASK_RESULT_EXPIRES
fd, cert_path = tempfile.mkstemp()
os.close(fd)
try:
if broker_config.get('broker_ssl_enabled'):
with open(cert_path, 'w') as cert_file:
cert_file.write(broker_config.get('broker_ssl_cert', ''))
broker_ssl = {
'ca_certs': cert_path,
'cert_reqs': ssl.CERT_REQUIRED
}
else:
broker_ssl = False
config['BROKER_USE_SSL'] = broker_ssl
celery_client.conf.update(**config)
yield celery_client
finally:
os.remove(cert_path)
def _celery_task_name(version):
if not version or ManagerVersion(version).greater_than(
ManagerVersion('3.3.1')):
return 'cloudify.dispatch.dispatch'
else:
return 'script_runner.tasks.run'
def _assert_agent_alive(name, celery_client, version=None):
tasks = utils.get_agent_registered(name, celery_client)
if not tasks:
raise NonRecoverableError(
'Could not access tasks list for agent {0}'.format(name))
task_name = _celery_task_name(version)
if task_name not in tasks:
raise NonRecoverableError('Task {0} is not available in agent {1}'.
format(task_name, name))
def _get_manager_version():
version_json = cloudify.manager.get_rest_client().manager.get_version()
return ManagerVersion(version_json['version'])
def _run_install_script(old_agent, timeout, validate_only=False,
install_script=None):
# Assuming that if there is no version info in the agent then
# this agent was installed by current manager.
old_agent = copy.deepcopy(old_agent)
if 'version' not in old_agent:
old_agent['version'] = str(_get_manager_version())
new_agent = create_new_agent_dict(old_agent)
old_agent_version = new_agent['old_agent_version']
with _celery_client(ctx, old_agent) as celery_client:
old_agent_name = old_agent['name']
_assert_agent_alive(old_agent_name, celery_client, old_agent_version)
if install_script is None:
script_format = '{0}/cloudify/install_agent.py'
install_script = script_format.format(
get_manager_file_server_url())
script_runner_task = 'script_runner.tasks.run'
cloudify_context = {
'type': 'operation',
'task_name': script_runner_task,
'task_target': old_agent['queue']
}
kwargs = {'script_path': install_script,
'cloudify_agent': new_agent,
'validate_only': validate_only,
'__cloudify_context': cloudify_context}
task = _celery_task_name(old_agent_version)
result = celery_client.send_task(
task,
kwargs=kwargs,
queue=old_agent['queue']
)
returned_agent = result.get(timeout=timeout)
if returned_agent['name'] != new_agent['name']:
raise NonRecoverableError(
'Expected agent name {0}, received {1}'.format(
new_agent['name'], returned_agent['name'])
)
returned_agent.pop('old_agent_version', None)
return {
'old': old_agent,
'new': returned_agent
}
def create_agent_from_old_agent(operation_timeout=300, install_script=None):
if 'cloudify_agent' not in ctx.instance.runtime_properties:
raise NonRecoverableError(
'cloudify_agent key not available in runtime_properties')
if 'agent_status' not in ctx.instance.runtime_properties:
raise NonRecoverableError(
('agent_status key not available in runtime_properties, '
'validation needs to be performed before new agent installation'))
status = ctx.instance.runtime_properties['agent_status']
if not status['agent_alive_crossbroker']:
raise NonRecoverableError(
('Last validation attempt has shown that agent is dead. '
'Rerun validation.'))
old_agent = ctx.instance.runtime_properties['cloudify_agent']
agents = _run_install_script(old_agent,
operation_timeout,
validate_only=False,
install_script=install_script)
# Make sure that new celery agent was started:
returned_agent = agents['new']
ctx.logger.info('Installed agent {0}'.format(returned_agent['name']))
_assert_agent_alive(returned_agent['name'], app)
# Setting old_cloudify_agent in order to uninstall it later.
ctx.instance.runtime_properties['old_cloudify_agent'] = agents['old']
ctx.instance.runtime_properties['cloudify_agent'] = returned_agent
@operation
def create_agent_amqp(install_agent_timeout, install_script=None, **_):
create_agent_from_old_agent(install_agent_timeout,
install_script=install_script)
@operation
def validate_agent_amqp(validate_agent_timeout, fail_on_agent_dead=False,
fail_on_agent_not_installable=False,
install_script=None, **_):
if 'cloudify_agent' not in ctx.instance.runtime_properties:
raise NonRecoverableError(
'cloudify_agent key not available in runtime_properties')
agent = ctx.instance.runtime_properties['cloudify_agent']
agent_name = agent['name']
result = {}
ctx.logger.info(('Checking if agent can be accessed through '
'current rabbitmq'))
try:
_assert_agent_alive(agent_name, app)
except Exception as e:
result['agent_alive'] = False
result['agent_alive_error'] = str(e)
ctx.logger.info('Agent unavailable, reason {0}'.format(str(e)))
else:
result['agent_alive'] = True
ctx.logger.info(('Checking if agent can be accessed through '
'different rabbitmq'))
try:
_run_install_script(agent, validate_agent_timeout, validate_only=True,
install_script=install_script)
except Exception as e:
result['agent_alive_crossbroker'] = False
result['agent_alive_crossbroker_error'] = str(e)
ctx.logger.info('Agent unavailable, reason {0}'.format(str(e)))
else:
result['agent_alive_crossbroker'] = True
result['timestamp'] = time.time()
ctx.instance.runtime_properties['agent_status'] = result
if fail_on_agent_dead and not result['agent_alive']:
raise NonRecoverableError(result['agent_alive_error'])
if fail_on_agent_not_installable and not result[
'agent_alive_crossbroker']:
raise NonRecoverableError(result['agent_alive_crossbroker_error'])
|
nilq/baby-python
|
python
|
"""
Represents a square stop.
"""
from BeamlineComponents.Stop.StopRectangle import StopRectangle
class StopSquare(StopRectangle):
def __init__(self, side_length):
StopRectangle.__init__(self, side_length, side_length)
def sideLength(self):
return self.lengthVertical()
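# Minimal usage sketch (assumes StopRectangle exposes lengthVertical(), as used above):
# a square stop is just a rectangle with equal sides.
if __name__ == "__main__":
    stop = StopSquare(side_length=2.0)
    print(stop.sideLength())  # 2.0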
|
nilq/baby-python
|
python
|
import urllib, json
from jwcrypto import jwt, jwk
class OpenIDTokenValidator:
def __init__(self, config_url, audience):
"""
Retrieve auth server config and set up the validator
:param config_url: the discovery URI
:param audience: client ID to verify against
"""
# Fetch configuration
self.config = json.loads(OpenIDTokenValidator.__fetch_content__(config_url))
self.config['audience'] = audience
# Fetch signing key/certificate
jwk_response = OpenIDTokenValidator.__fetch_content__(self.config['jwks_uri'])
self.jwk_keyset = jwk.JWKSet.from_json(jwk_response)
@staticmethod
def __fetch_content__(url):
response = urllib.urlopen(url)
return response.read()
def __verify_claim__(self, decoded_token_json):
if decoded_token_json['iss'] != self.config['issuer']:
raise Exception('Invalid Issuer')
if decoded_token_json['aud'] != self.config['audience']:
raise Exception('Invalid Audience')
def verify_and_decode_token(self, token):
"""
Verify the token with the provided JWK certificate and claims
:param token: the token to verify
:return: the decoded ID token body
"""
decoded_token = jwt.JWT(key=self.jwk_keyset, jwt=token)
decoded_json = json.loads(decoded_token.claims)
self.__verify_claim__(decoded_json)
return decoded_json
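# Illustrative usage sketch (hypothetical client ID; fetching the discovery document
# and JWKS requires network access to the provider):
if __name__ == '__main__':
    validator = OpenIDTokenValidator(
        'https://accounts.google.com/.well-known/openid-configuration',
        audience='my-client-id.apps.googleusercontent.com')
    # claims = validator.verify_and_decode_token(raw_id_token)
    # print(claims['sub'])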
|
nilq/baby-python
|
python
|
"""
Tests for attention module
"""
import numpy as np
import theano
import theano.tensor as T
import agentnet
from agentnet.memory import GRUCell
from agentnet.memory.attention import AttentionLayer
from lasagne.layers import *
def test_attention():
"""
minimalstic test that showcases attentive RNN that reads some chunk
of input sequence on each tick and outputs nothing
"""
# step inner graph
class step:
enc_activations = InputLayer((None, None, 12), name='placeholder for encoder activations (to be attended)')
prev_gru = InputLayer((None, 15),name='gru prev state (15 units)')
attention = AttentionLayer(enc_activations,prev_gru,num_units=16)
gru = GRUCell(prev_gru, attention['attn'] , name='rnn that reads enc_sequence with attention')
attn_probs = attention['probs'] #weights from inside attention
# outer graph
encoder_activations = InputLayer((None,None,12),name='encoder sequence (will be sent to enc_sequence)')
rec = agentnet.Recurrence(input_nonsequences={step.enc_activations: encoder_activations},
state_variables={step.gru: step.prev_gru},
tracked_outputs=[step.attn_probs],
unroll_scan=False,
n_steps = 10)
weights = get_all_params(rec)
gru_states,attention_probs_seq = rec[step.gru,step.attn_probs]
run = theano.function([encoder_activations.input_var], get_output([gru_states,attention_probs_seq]),
updates=rec.get_automatic_updates(),allow_input_downcast=True)
#run on surrogate data
gru_seq,probs_seq = run(np.random.randn(5, 25, 12))
assert gru_seq.shape == (5, 10, 15) #hidden GRU states, 5 samples/10ticks/15units
assert probs_seq.shape == (5, 10, 25) #attention sequences, 5 samples/10ticks/25 input seq length
#hard attention
hard_outputs = get_output([gru_states,attention_probs_seq],recurrence_flags={'hard_attention':True})
hard_run = theano.function([encoder_activations.input_var], hard_outputs,
updates=rec.get_automatic_updates(),allow_input_downcast=True)
#run on surrogate data
_,hard_probs_seq = hard_run(np.random.randn(5, 25, 12))
#check if probs are one-hot
assert hard_probs_seq.shape == (5, 10, 25) #attention sequences, 5 samples/10ticks/25 input seq length
assert len(np.unique(hard_probs_seq.ravel()))==2 #only 0's and 1's
def test_attention_2d():
"""
Almost a copy-paste of previous test, but this time attention is applied to an image instead
of a 1d sequence.
"""
# step inner graph
class step:
image = InputLayer((None,3,24,24), name='placeholder for 24x24 image (to be attended)')
prev_gru = InputLayer((None, 15),name='gru prev state (15 units)')
#get image dimensions
n_channels,width,height = image.output_shape[1:]
#flatten all image spots to look like 1d sequence
image_chunks = reshape(dimshuffle(image,[0,2,3,1]),(-1,width*height,n_channels))
attention = AttentionLayer(image_chunks,prev_gru,num_units=16)
gru = GRUCell(prev_gru, attention['attn'] , name='rnn that reads enc_sequence with attention')
#weights from inside attention - reshape back into image
attn_probs = reshape(attention['probs'],(-1,width,height))
# outer graph
input_image = InputLayer((None,3,24,24),name='24x24-pixel RGB image to be sent into step.image')
rec = agentnet.Recurrence(input_nonsequences={step.image: input_image},
state_variables={step.gru: step.prev_gru},
tracked_outputs=[step.attn_probs],
unroll_scan=False,
n_steps = 10)
weights = get_all_params(rec)
gru_states,attention_probs_seq = rec[step.gru,step.attn_probs]
run = theano.function([input_image.input_var], get_output([gru_states,attention_probs_seq]),
updates=rec.get_automatic_updates(),allow_input_downcast=True)
#run on surrogate data
gru_seq,probs_seq = run(np.random.randn(5, 3, 24,24))
assert gru_seq.shape == (5, 10, 15) #hidden GRU states, 5 samples/10ticks/15units
assert probs_seq.shape == (5, 10, 24,24) #attention sequences, 5 samples/10ticks/24width/24height
|
nilq/baby-python
|
python
|
# coding: utf-8
# $Id: $
from celery import Celery
from celery.utils.log import get_task_logger, get_logger
CELERY_CONFIG = {
'BROKER_URL': 'amqp://guest@localhost/',
'CELERY_RESULT_BACKEND': "redis://localhost/0",
'CELERY_TASK_SERIALIZER': "pickle",
'CELERY_RESULT_SERIALIZER': "pickle",
'CELERYD_LOG_FORMAT': '[%(asctime)s] %(levelname)s: %(message)s',
'CELERYD_TASK_LOG_FORMAT': '[%(asctime)s] %(levelname)s <%(sid)s> %(task_name)s: %(message)s',
}
from logcollect.boot import celery_config
celery_config('amqp://guest:guest@127.0.0.1/',
collect_root_logs=True,
activity_identity={'project': 'logcollect',
'subsystem': 'celery_test'})
celery = Celery(CELERY_CONFIG)
@celery.task
def sample_task(msg='CELERY'):
get_task_logger("sample_task").info("get task logger message")
get_logger("celery_sample_logger").info("get logger message")
|
nilq/baby-python
|
python
|
from pyparsing import *
import act
topnum = Forward().setParseAction(act.topnum)
attacking = Forward().setParseAction(act.attacking)
blocking = Forward().setParseAction(act.blocking)
tapped = Forward().setParseAction(act.tapped)
untapped = Forward().setParseAction(act.untapped)
enchanted = Forward().setParseAction(act.enchanted)
equipped = Forward().setParseAction(act.equipped)
exiled = Forward().setParseAction(act.exiled)
sacrificed = Forward().setParseAction(act.sacrificed)
haunted = Forward().setParseAction(act.haunted)
adjective = Forward()
andadjectives = Forward()
oradjectives = Forward().setParseAction(act.oradjectives)
adjectives = Forward().setParseAction(act.adjectives)
|
nilq/baby-python
|
python
|
import functools
import numpy as np
import pytest
from ansys import dpf
from ansys.dpf.core import examples
from ansys.dpf.core import misc
NO_PLOTTING = True
if misc.module_exists("pyvista"):
from pyvista.plotting import system_supports_plotting
NO_PLOTTING = not system_supports_plotting()
@pytest.fixture()
def static_model():
return dpf.core.Model(dpf.core.upload_file_in_tmp_folder(examples.static_rst))
def test_model_from_data_source(simple_bar):
data_source = dpf.core.DataSources(simple_bar)
model = dpf.core.Model(data_source)
assert "displacement" in model.metadata.result_info
def test_model_metadata_from_data_source(simple_bar):
data_source = dpf.core.DataSources(simple_bar)
model = dpf.core.Model(data_source)
assert model.metadata.result_info is not None
assert model.metadata.time_freq_support is not None
assert model.metadata.meshed_region is not None
assert model.metadata.data_sources is not None
def test_displacements_eval(static_model):
disp = static_model.results.displacement()
fc = disp.outputs.fields_container()
disp_field_from_eval = fc[0]
fc_from_outputs = disp.outputs.fields_container()[0]
assert np.allclose(disp_field_from_eval.data, fc_from_outputs.data)
def test_extract_component(static_model):
disp = static_model.results.displacement()
disp = disp.X()
disp_field = disp.outputs.fields_container()[0]
assert isinstance(disp_field.data, np.ndarray)
def test_kinetic(static_model):
e = static_model.results.kinetic_energy()
energy = e.outputs.fields_container()[0]
assert isinstance(energy.data, np.ndarray)
def test_str_model(static_model):
assert "Static" in str(static_model)
assert "81" in str(static_model)
assert "Unit: m" in str(static_model)
def test_connect_inputs_in_constructor_model(plate_msup):
model = dpf.core.Model(plate_msup)
u = model.results.displacement(0.015)
fc = u.outputs.fields_container()
assert len(fc) == 1
assert np.allclose(fc[0].data[0], [5.12304110e-14, 3.64308310e-04, 5.79805917e-06])
scop = dpf.core.Scoping()
scop.ids = list(range(1, 21))
u = model.results.displacement(0.015, scop)
fc = u.outputs.fields_container()
assert len(fc) == 1
assert np.allclose(fc[0].data[0], [9.66814331e-16, 6.82591973e-06, 1.35911110e-06])
assert fc[0].shape == (20, 3)
def test_named_selection_model(allkindofcomplexity):
model = dpf.core.Model(allkindofcomplexity)
ns = model.metadata.available_named_selections
assert ns == [
"_CM82",
"_CM86UX_XP",
"_DISPNONZEROUX",
"_DISPZEROUZ",
"_ELMISC",
"_FIXEDSU",
]
scop = model.metadata.named_selection("_CM86UX_XP")
assert len(scop) == 481
assert scop.location == dpf.core.locations().nodal
def test_all_result_operators_exist(allkindofcomplexity):
model = dpf.core.Model(allkindofcomplexity)
res = model.results
for key in res.__dict__:
if isinstance(res.__dict__[key], functools.partial):
res.__dict__[key]()
def test_iterate_results_model(allkindofcomplexity):
model = dpf.core.Model(allkindofcomplexity)
res = model.results
for key in res:
key()
def test_result_not_overrided(plate_msup):
model1 = dpf.core.Model(examples.electric_therm)
size = len(model1.results)
model2 = dpf.core.Model(plate_msup)
assert len(model1.results) == size
assert len(model2.results) > len(model1.results)
def test_result_displacement_model():
model = dpf.core.Model(examples.download_all_kinds_of_complexity_modal())
results = model.results
assert isinstance(results.displacement(), dpf.core.Operator)
assert len(results.displacement.on_all_time_freqs.eval()) == 45
assert results.displacement.on_first_time_freq.eval().get_label_scoping().ids == [1]
assert results.displacement.on_last_time_freq.eval().get_label_scoping().ids == [45]
assert len(results.displacement.split_by_body.eval()) == 32
assert len(results.displacement.split_by_shape.eval()) == 4
assert (
len(results.displacement.on_named_selection("_FIXEDSU").eval()[0].scoping)
== 222
)
all_time_ns = results.displacement.on_named_selection(
"_FIXEDSU"
).on_all_time_freqs.eval()
assert len(all_time_ns) == 45
assert len(all_time_ns[0].scoping) == 222
assert len(all_time_ns[19].scoping) == 222
def test_result_stress_model():
model = dpf.core.Model(examples.download_all_kinds_of_complexity_modal())
results = model.results
assert isinstance(results.stress(), dpf.core.Operator)
assert len(results.stress.on_all_time_freqs.eval()) == 45
assert results.stress.on_first_time_freq.eval().get_label_scoping().ids == [1]
assert results.stress.on_last_time_freq.eval().get_label_scoping().ids == [45]
assert len(results.stress.split_by_body.eval()) == 32
assert len(results.stress.split_by_shape.eval()) == 4
assert len(results.stress.on_named_selection("_FIXEDSU").eval()[0].scoping) == 222
all_time_ns = results.stress.on_named_selection("_FIXEDSU").on_all_time_freqs.eval()
assert len(all_time_ns) == 45
assert len(all_time_ns[0].scoping) == 222
assert len(all_time_ns[19].scoping) == 222
def test_result_no_memory(plate_msup):
model = dpf.core.Model(plate_msup)
assert len(model.results.elastic_strain.on_all_time_freqs.eval()) == 20
assert len(model.results.elastic_strain.eval()) == 1
def test_result_stress_location_model(plate_msup):
model = dpf.core.Model(plate_msup)
stress = model.results.stress
fc = (
stress.on_mesh_scoping(
dpf.core.Scoping(ids=[1, 2], location=dpf.core.locations.elemental)
)
.on_location(dpf.core.locations.nodal)
.eval()
)
assert fc[0].location == "Nodal"
def test_result_time_scoping(plate_msup):
model = dpf.core.Model(plate_msup)
stress = model.results.stress
fc = stress.on_time_scoping([1, 2, 3, 19]).eval()
assert len(fc) == 4
fc = stress.on_time_scoping([0.115, 0.125]).eval()
assert len(fc) == 2
assert np.allclose(
fc.time_freq_support.time_frequencies.data, np.array([0.115, 0.125])
)
def test_result_splitted_subset(allkindofcomplexity):
model = dpf.core.Model(allkindofcomplexity)
vol = model.results.elemental_volume
assert len(vol.split_by_body.eval()) == 11
assert len(vol.split_by_body.eval()[0].scoping) == 105
assert len(vol.on_mesh_scoping([1, 2, 3, 10992]).split_by_body.eval()) == 2
assert len(vol.eval()[0].scoping) == 3
assert len(vol.eval()[1].scoping) == 1
def test_result_not_dynamic(plate_msup):
dpf.core.settings.set_dynamic_available_results_capability(False)
model = dpf.core.Model(plate_msup)
assert isinstance(model.results, dpf.core.results.CommonResults)
stress = model.results.stress
fc = stress.on_time_scoping([1, 2, 3, 19]).eval()
assert len(fc) == 4
fc = stress.on_time_scoping([0.115, 0.125]).eval()
assert len(fc) == 2
assert np.allclose(
fc.time_freq_support.time_frequencies.data, np.array([0.115, 0.125])
)
assert fc[0].unit == "Pa"
dis = model.results.displacement().eval()
dpf.core.settings.set_dynamic_available_results_capability(True)
# @pytest.mark.skipif(NO_PLOTTING, reason="Requires system to support plotting")
# def test_displacements_plot(static_model):
# from pyvista import CameraPosition
# disp = static_model.results.displacement()
# cpos = disp.outputs.fields_container()[0].plot('x')
# assert isinstance(cpos, CameraPosition)
|
nilq/baby-python
|
python
|
import os.path as osp
from .builder import DATASETS
from .custom import CustomDataset
@DATASETS.register_module()
class Combine(CustomDataset):
"""PascalContext dataset.
In segmentation map annotation for PascalContext, 0 stands for background,
which is included in 60 categories. ``reduce_zero_label`` is fixed to
False. The ``img_suffix`` is fixed to '.jpg' and ``seg_map_suffix`` is
fixed to '.png'.
Args:
split (str): Split txt file for PascalContext.
"""
CLASSES = ('other','water')
PALETTE = [[0, 0, 0], [128, 0, 0]]
def __init__(self, split, **kwargs):
super(Combine, self).__init__(
split=split,
reduce_zero_label=False,
#att_metrics = ['PRE','REC','F-measure','F-max','FPR','FNR'],
#att_metrics=['Grmse','Gmax'], ## att_metrics cannot be set during training because of pre_eval_to_metrics(results, metric)
**kwargs)
assert osp.exists(self.img_dir) and self.split is not None
|
nilq/baby-python
|
python
|
<CustButton@Button>:
    font_size: 32

<SudGridLayout>:
    id: sudoku
    cols: 9
    rows: 9
    spacing: 10
    BoxLayout:
        spacing: 10
        CustButton:
            text: "7"
        CustButton:
            text: "8"
nilq/baby-python
|
python
|
#!/usr/bin/env python
# Returns a list of datetimes ranging from yesterday's
# date back to 2014-03-30 or if passed a first argument
# back to the first argument
import sys
import datetime
yesterday = (datetime.datetime.today() - datetime.timedelta(days=1))
opening_date = datetime.datetime(2014, 3, 30)
if len(sys.argv) > 1:
opening_date = datetime.datetime.strptime(sys.argv[1], "%Y-%m-%d")
days = (yesterday - opening_date).days
for x in range(0, days+1):
print (yesterday - datetime.timedelta(days=x)).strftime("%Y-%m-%d")
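# Example (illustrative): run with "2014-04-05" as the first argument to print one
# ISO date per line, from yesterday back to 2014-04-05 inclusive; with no argument
# the range extends back to the 2014-03-30 opening date.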
|
nilq/baby-python
|
python
|
from patchify import patchify, unpatchify
from matplotlib import image as mpimg
from matplotlib import pyplot as plt
import cv2 as cv
from PIL import Image
import numpy as np
from patchfly import patchfly, unpatchfly
import os
# ----------------------
# get a image from internet
# ----------------------
# url = "https://gimg2.baidu.com/image_search/src=http%3A%2F%2Fwww.petsid.us%2Fwp-content%2Fuploads%2F2018%2F07%2FCats-Health-The-Dos-And-Donts-For-Cat-owners.jpg&refer=http%3A%2F%2Fwww.petsid.us&app=2002&size=f9999,10000&q=a80&n=0&g=0n&fmt=jpeg?sec=1642598122&t=976acf48cb6e5dc77b17048b24efdaa8"
# def request_download(IMAGE_URL):
# import requests
# r = requests.get(IMAGE_URL)
# with open('./data/img.png', 'wb') as f:
# f.write(r.content)
# request_download(url)
# ----------------------
# My patchfly
# ----------------------
img = Image.open(r"/mnt/4t/ljt/project/patchfly/data/img.png")
img_copy = img.copy()
img_array = np.array(img_copy)
img_patches = patchfly(img_array, (256, 256, 3))
print(img_patches.shape)
recon = unpatchfly(img_patches, img_array.shape)
# def main():
# os.makedirs("/mnt/4t/ljt/project/patchfly/data/patch", exist_ok=True)
# img = Image.open(r"/mnt/4t/ljt/project/patchfly/data/img.png")
# img_copy = img.copy()
# img_array = np.array(img_copy)
# img_patches = patchfly(img_array, (555, 555, 3))
# for i in range(img_patches.shape[0]):
# for j in range(img_patches.shape[1]):
# print(i, j)
# print(img_patches[i][j][0].shape)
# plt.imsave("/mnt/4t/ljt/project/patchfly/data/patch/{}_{}.png".format(i, j), img_patches[i][j][0])
# recon = unpatchfly(img_patches=img_patches, img_size=img_array.shape)
# plt.imsave("recon.jpg", recon)
# print(recon.shape)
# if __name__ == '__main__':
# main()
# from patchify import patchify, unpatchify
# from matplotlib import image as mpimg
# from matplotlib import pyplot as plt
# import cv2 as cv
# from PIL import Image
# import numpy as np
# from patchfly import patchfly
# # ----------------------
# # get a image from internet
# # ----------------------
# # url = "https://gimg2.baidu.com/image_search/src=http%3A%2F%2Fwww.petsid.us%2Fwp-content%2Fuploads%2F2018%2F07%2FCats-Health-The-Dos-And-Donts-For-Cat-owners.jpg&refer=http%3A%2F%2Fwww.petsid.us&app=2002&size=f9999,10000&q=a80&n=0&g=0n&fmt=jpeg?sec=1642598122&t=976acf48cb6e5dc77b17048b24efdaa8"
# # def request_download(IMAGE_URL):
# # import requests
# # r = requests.get(IMAGE_URL)
# # with open('./data/img.png', 'wb') as f:
# # f.write(r.content)
# # request_download(url)
# # ----------------------
# # My patchfly
# # ----------------------
# img = Image.open(r"/mnt/4t/ljt/project/patchfly/data/img.png")
# img_copy = img.copy()
# img_array = np.array(img_copy)
# img_patches = patchfly(img_array, (256, 256, 3))
|
nilq/baby-python
|
python
|
from unittest import TestCase
from moff.parser import Parser
from moff.node import VideoNode, SourceNode, ParagraphNode, LinkNode, TextNode
class TestReadVideo (TestCase):
def test_parse1(self):
parser = Parser()
node1 = parser.parse_string("@video example.mp4")
node2 = VideoNode(
src="example.mp4",
preload="none",
controls=True,
nodes=[
ParagraphNode(nodes=[
TextNode(
"Your browser has not supported playing video with HTML5."),
TextNode("You can download video from "),
LinkNode(
href="example.mp4",
target="_blank",
nodes=[
TextNode("here")
]),
TextNode(".")
])
])
self.assertEqual(str(node1), str(node2))
def test_parse2(self):
parser = Parser()
node1 = parser.parse_string(
"@video example.mp4\n@video @thumbnail thumbnail.jpg")
node2 = VideoNode(
src="example.mp4",
poster="thumbnail.jpg",
preload="none",
controls=True,
nodes=[
ParagraphNode(nodes=[
TextNode(
"Your browser has not supported playing video with HTML5."),
TextNode("You can download video from "),
LinkNode(
href="example.mp4",
target="_blank",
nodes=[
TextNode("here")
]),
TextNode(".")
])
])
self.assertEqual(str(node1), str(node2))
def test_parse3(self):
parser = Parser()
node1 = parser.parse_string(
"@video example.mp4\n@video @src example.mp4\n@video @src example.webm video/webm")
node2 = VideoNode(
preload="none",
controls=True,
nodes=[
SourceNode(
src="example.mp4",
type="video/mp4"),
SourceNode(
src="example.mp4",
type="video/mp4"),
SourceNode(
src="example.webm",
type="video/webm"),
ParagraphNode(nodes=[
TextNode(
"Your browser has not supported playing video with HTML5."),
TextNode("You can download video from "),
LinkNode(
href="example.mp4",
target="_blank",
nodes=[
TextNode("here")
]),
TextNode(".")
])
])
self.assertEqual(str(node1), str(node2))
|
nilq/baby-python
|
python
|
#!/usr/bin/python
import json
import sys
from datetime import datetime
from pprint import pprint
def dateconv(d):
return datetime.strptime(d, "%Y-%m-%dT%H:%M:%S.%fZ").strftime("%Y-%m-%d %a %H:%M")
def printtask(task, lev):
print("%s %s %s" % (
lev,
("DONE" if task["completed"] else "TODO"),
task["title"]
))
print("CREATED: [%s]" % dateconv(task["created_at"]))
if (task["completed"]):
print("CLOSED: [%s]" % dateconv(task["completed_at"]))
with open(sys.argv[1]) as data_f:
data = json.load(data_f)
print("* Wunderlist")
print("EXPORTED: [%s]" % datetime.strptime(data["exported"], "%a %b %d %Y %H:%M:%S GMT%z (%Z)").strftime("%Y-%m-%d %a %H:%M"))
data = data["data"]
for wlist in data["lists"]:
print("** %s" % wlist["title"])
print("CREATED: [%s]" % dateconv(wlist["created_at"]))
for task in (task for task in data["tasks"] if task["list_id"] == wlist["id"]):
printtask(task, "***")
for note in (note for note in data["notes"] if note["task_id"] == task["id"] and note["content"]):
print(note["content"])
for subtask in (subtask for subtask in data["subtasks"] if subtask["task_id"] == task["id"]):
printtask(subtask, "****")
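# Usage sketch (illustrative; assumes the script above is saved as wunderlist2org.py and
# that export.json is a Wunderlist JSON export -- both names are made up for this example):
#
#     python wunderlist2org.py export.json > wunderlist.org
#
# The output is an org-mode outline: one top-level "* Wunderlist" heading, a "**" heading
# per list, "***" TODO/DONE entries per task (with notes inlined) and "****" entries per subtask.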
|
nilq/baby-python
|
python
|
import unittest
from mitama._extra import _classproperty
class TestClassProperty(unittest.TestCase):
def test_getter(self):
class ClassA:
@_classproperty
def value(cls):
return "hello, world!"
self.assertEqual(ClassA.value, "hello, world!")
|
nilq/baby-python
|
python
|
from asyncio.events import AbstractEventLoop
import inspect
from typing import Any, Coroutine, List, Tuple, Protocol, Union
from xml.etree.ElementTree import Element, XML
from aiohttp import web
import xml.etree.ElementTree as ET
import socket
XMLRPCValue = Any #TODO FIXME
def parse_args(params: List[Element]):
args: List[Any] = []
for p in params:
if p.tag == "int" or p.tag == "i4":
assert p.text
args.append(int(p.text))
elif p.tag == "string":
args.append(p.text)
elif p.tag == "array":
data = p.find("data")
            assert data is not None
args.append(parse_args([e[0] for e in data.findall("value")]))
# TODO implement the other xmlrpc value types
return args
def to_xml(value: Any) -> Element:
v = ET.Element("value")
if isinstance(value, int):
i = ET.SubElement(v, "int")
i.text = str(value)
elif isinstance(value, str):
i = ET.SubElement(v, "string")
i.text = value
elif isinstance(value, list) or isinstance(value, tuple):
arr = ET.SubElement(v, "array")
data = ET.SubElement(arr, "data")
for e in value:
data.append(to_xml(e))
# TODO implement the other xmlrpc value types
return v
class XMLRPCServer:
loop: AbstractEventLoop
addr: Tuple[str, int]
def __init__(self, loop: AbstractEventLoop) -> None:
self.loop = loop
def create_server(self):
self.loop.run_until_complete(self.start_server())
async def start_server(self):
self.server = web.Server(self.handler)
host_name = socket.gethostname()
self.loop_server = await self.loop.create_server(self.server, "0.0.0.0", 0)
_, port = self.loop_server.sockets[0].getsockname()
self.addr = (host_name, port)
print("Started the XMLRPC endpoint at address:", self.addr)
async def handler(self, request):
root = ET.fromstring(await request.text())
method = root.find("methodName").text
params = [e.find("value")[0] for e in root.find("params").findall("param")]
args = parse_args(params)
fun = getattr(self, method)
if inspect.iscoroutinefunction(fun):
ret = await fun(*args)
else:
ret = fun(*args)
response = ET.Element("methodResponse")
responseParams = ET.SubElement(response, "params")
try:
for p in ret:
param = ET.SubElement(responseParams, "param")
param.append(to_xml(p))
except TypeError:
param = ET.SubElement(responseParams, "param")
param.append(to_xml(ret))
return web.Response(body=ET.tostring(response))
@property
def uri(self):
addr, port = self.addr
return f"http://{addr}:{port}"
# TODO add default function which can return a proper rpc error instead of raising an exception
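# Usage sketch (not part of the original module): the handler above dispatches the XML-RPC
# <methodName> to an attribute of the same name, so exposing a method only requires
# subclassing. The class name CalculatorServer and the method "add" are assumptions made up
# for illustration.
if __name__ == "__main__":
    import asyncio

    class CalculatorServer(XMLRPCServer):
        def add(self, a: int, b: int) -> int:
            # parse_args has already turned the <int> params into Python ints; the return
            # value is serialized back through to_xml into the <methodResponse>.
            return a + b

    loop = asyncio.new_event_loop()
    server = CalculatorServer(loop)
    server.create_server()             # binds the aiohttp web.Server to a free port
    print("XML-RPC endpoint at", server.uri)
    loop.run_forever()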
|
nilq/baby-python
|
python
|
from abc import ABC
from abc import abstractmethod
from dataclasses import dataclass
from dataclasses import field
from io import IOBase
from numpy import integer
from syntax import SyntaxBlock
from syntax import SyntaxStatement
from syntax import SyntaxTerm
from typing import Dict
from typing import List
from typing import Optional
from typing import Type
from typing import Union
@dataclass
class MachineValue:
pass
@dataclass
class MachineBlob(MachineValue):
value: bytearray
@dataclass
class MachineNumber(MachineValue):
value: integer
value_type: Type = field(init=False)
def __post_init__(self):
self.value_type = type(self.value)
assert issubclass(
self.value_type, integer
), self.value_type
@dataclass
class MachineStream(MachineValue):
value: IOBase
@dataclass
class MachineExpressionStack:
values: List[MachineValue]
def push(self, value: MachineValue) -> None:
self.values.append(value)
def push_many(self, values: List[MachineValue]) -> None:
for value in values:
self.push(value)
def pop(self) -> MachineValue:
return self.values.pop()
    def pop_many(self, count: int) -> List[MachineValue]:
assert len(self) >= count, (self, count)
values = []
for _ in range(count):
values.append(self.values.pop())
return values
def __len__(self) -> int:
return len(self.values)
@dataclass
class MachineCall(ABC):
@abstractmethod
def __call__(
self, frame_stack: "MachineFrameStack"
) -> None:
pass
@dataclass
class MachineBinding:
name: str
value_or_call: Union[MachineValue, MachineCall]
@property
def value(self):
assert isinstance(self.value_or_call, MachineValue)
return self.value_or_call
@property
def call(self):
assert isinstance(self.value_or_call, MachineCall)
return self.value_or_call
@dataclass
class MachineEnvironment:
bindings: Dict[str, Union[MachineValue, MachineCall]]
base: Optional["MachineEnvironment"]
def extend(
self,
bindings: Optional[
Dict[str, Union[MachineValue, MachineCall]]
] = None,
) -> "MachineEnvironment":
return MachineEnvironment(bindings or {}, base=self)
def __contains__(self, key: str) -> bool:
if key in self.bindings:
return True
elif self.base:
return key in self.base
else:
return False
def __getitem__(
self, key: str
) -> Union[MachineValue, MachineCall]:
value = self.bindings.get(key)
if value is None:
if self.base:
return self.base[key]
else:
raise KeyError(key)
else:
return value
def __setitem__(
self,
key: str,
value: Union[MachineValue, MachineCall],
) -> None:
self.bindings[key] = value
@staticmethod
def from_bindings(
bindings: List[MachineBinding]
) -> "MachineEnvironment":
return MachineEnvironment(
bindings={
binding.name: binding.value_or_call
for binding in bindings
},
base=None,
)
@dataclass
class MachineInstructionPointer:
block: SyntaxBlock
statement_index: int
term_index: int
@dataclass
class MachineFrame:
instruction_pointer: MachineInstructionPointer
expression_stack: MachineExpressionStack
environment: MachineEnvironment
@property
def block(self) -> SyntaxBlock:
return self.instruction_pointer.block
@property
def statement_index(self) -> int:
return self.instruction_pointer.statement_index
@statement_index.setter
    def statement_index(self, value) -> None:
self.instruction_pointer.statement_index = value
@property
def statement(self) -> SyntaxStatement:
return self.block.statements[self.statement_index]
@property
def term_index(self) -> int:
return self.instruction_pointer.term_index
@term_index.setter
    def term_index(self, value) -> None:
self.instruction_pointer.term_index = value
@property
def term(self) -> SyntaxTerm:
return self.statement.terms[self.term_index]
@dataclass
class MachineFrameStack:
frames: List[MachineFrame]
def push(self, frame: MachineFrame) -> None:
self.frames.append(frame)
    def pop(self) -> MachineFrame:
        return self.frames.pop()
def __bool__(self) -> bool:
return bool(self.frames)
def __len__(self) -> int:
return len(self.frames)
@property
def current(self) -> MachineFrame:
assert self.frames
return self.frames[-1]
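# Minimal usage sketch (added for illustration, not part of the original module): exercises
# the expression stack and the chained environment lookup defined above; the binding names
# and numbers are arbitrary.
if __name__ == "__main__":
    import numpy as np

    stack = MachineExpressionStack(values=[])
    stack.push_many([MachineNumber(np.int64(1)), MachineNumber(np.int64(2))])
    top_two = stack.pop_many(2)                     # pops in LIFO order
    assert [v.value for v in top_two] == [2, 1]

    base = MachineEnvironment.from_bindings(
        [MachineBinding("answer", MachineNumber(np.int64(42)))]
    )
    child = base.extend({"other": MachineNumber(np.int64(7))})
    assert "answer" in child                        # lookup falls back to the base environment
    assert child["answer"].value == 42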
|
nilq/baby-python
|
python
|
from flask import render_template_string
from datetime import datetime
from actions.action import BaseAction
from models import ISTHISLEGIT_SVC
from models.email import EmailResponse
from models.event import EventReportResponded
from models.template import Template
from services.email import email_provider
def get_templates(**kwargs):
""" Gets the list of templates that are accessible to our current user. """
templates = Template.domain_query(kwargs.get('domain')).fetch()
return [template.name for template in templates]
class SendTemplateAction(BaseAction):
"""
Sends a template specified by the provided template_name to the
user who sent the initial report.
"""
action_id = 'send_template'
name = "Send Template"
description = "Sends a template in response to the report"
options = {
"template_name": {
"name": "Template Name",
"choiceFunc": get_templates
}
}
def execute(self, report, options):
template_name = options.get('template_name')
if not template_name:
return
base_query = Template.domain_query(report.reported_domain)
template = Template.get_by_name(base_query, template_name)
if not template:
return
subject = render_template_string(template.subject, report=report)
body = render_template_string(template.text, report=report)
response = EmailResponse(
responder=ISTHISLEGIT_SVC,
sender=template.sender,
content=body,
subject=subject)
try:
response_key = response.put()
report.responses.append(response_key)
if not report.date_responded:
report.date_responded = datetime.now()
event_key = EventReportResponded(
response=response, report=report).put()
report.events.append(event_key)
report.put()
email_provider.send(
to=report.reported_by,
sender=response.sender,
subject=subject,
body=body)
except Exception as e:
return
|
nilq/baby-python
|
python
|
from account.conf import settings
from account.models import Account
def account(request):
ctx = {
"account": Account.for_request(request),
"ACCOUNT_OPEN_SIGNUP": settings.ACCOUNT_OPEN_SIGNUP,
}
return ctx
|
nilq/baby-python
|
python
|
def create_info_2dfaces(cellid:'int[:,:]', nodeid:'int[:,:]', namen:'int[:]', vertex:'double[:,:]',
centerc:'double[:,:]', nbfaces:'int', normalf:'double[:,:]', mesuref:'double[:]',
centerf:'double[:,:]', namef:'int[:]'):
from numpy import double, zeros, sqrt
norm = zeros(3, dtype=double)
snorm = zeros(3, dtype=double)
    # Boundary faces are named (1, 2, 3, 4); interior faces are named 0. TO CHECK!
for i in range(nbfaces):
if (cellid[i][1] == -1 and cellid[i][1] != -10):
if namen[nodeid[i][0]] == namen[nodeid[i][1]]:
namef[i] = namen[nodeid[i][0]]
elif ((namen[nodeid[i][0]] == 3 and namen[nodeid[i][1]] != 0) or
(namen[nodeid[i][0]] != 0 and namen[nodeid[i][1]] == 3)):
namef[i] = 3
elif ((namen[nodeid[i][0]] == 4 and namen[nodeid[i][1]] != 0) or
(namen[nodeid[i][0]] != 0 and namen[nodeid[i][1]] == 4)):
namef[i] = 4
else:
namef[i] = 100
norm[0] = vertex[nodeid[i][0]][1] - vertex[nodeid[i][1]][1]
norm[1] = vertex[nodeid[i][1]][0] - vertex[nodeid[i][0]][0]
centerf[i][:] = 0.5 * (vertex[nodeid[i][0]][0:3] + vertex[nodeid[i][1]][0:3])
snorm[:] = centerc[cellid[i][0]][:] - centerf[i][:]
if (snorm[0] * norm[0] + snorm[1] * norm[1]) > 0:
normalf[i][:] = -1*norm[:]
else:
normalf[i][:] = norm[:]
mesuref[i] = sqrt(normalf[i][0]**2 + normalf[i][1]**2)
return 0
def create_info_3dfaces(cellid:'int[:,:]', nodeid:'int[:,:]', namen:'int[:]', vertex:'double[:,:]',
centerc:'double[:,:]', nbfaces:'int', normalf:'double[:,:]', mesuref:'double[:]',
centerf:'double[:,:]', namef:'int[:]'):
from numpy import double, zeros, sqrt
norm = zeros(3, dtype=double)
snorm = zeros(3, dtype=double)
u = zeros(3, dtype=double)
v = zeros(3, dtype=double)
for i in range(nbfaces):
if (cellid[i][1] == -1 ):
if namen[nodeid[i][0]] == namen[nodeid[i][1]] and namen[nodeid[i][0]] == namen[nodeid[i][2]] :
namef[i] = namen[nodeid[i][0]]
elif ((namen[nodeid[i][0]] == 3 and namen[nodeid[i][1]] != 0 and namen[nodeid[i][2]] != 0) or
(namen[nodeid[i][0]] != 0 and namen[nodeid[i][1]] == 3 and namen[nodeid[i][2]] != 0) or
                  (namen[nodeid[i][0]] != 0 and namen[nodeid[i][1]] != 0 and namen[nodeid[i][2]] == 3)):
namef[i] = 3
elif ((namen[nodeid[i][0]] == 4 and namen[nodeid[i][1]] != 0 and namen[nodeid[i][2]] != 0) or
(namen[nodeid[i][0]] != 0 and namen[nodeid[i][1]] == 4 and namen[nodeid[i][2]] != 0) or
                  (namen[nodeid[i][0]] != 0 and namen[nodeid[i][1]] != 0 and namen[nodeid[i][2]] == 4)):
namef[i] = 4
else:
namef[i] = 100
u[:] = vertex[nodeid[i][1]][0:3]-vertex[nodeid[i][0]][0:3]
v[:] = vertex[nodeid[i][2]][0:3]-vertex[nodeid[i][0]][0:3]
norm[0] = 0.5*(u[1]*v[2] - u[2]*v[1])
norm[1] = 0.5*(u[2]*v[0] - u[0]*v[2])
norm[2] = 0.5*(u[0]*v[1] - u[1]*v[0])
centerf[i][:] = 1./3 * (vertex[nodeid[i][0]][:3] + vertex[nodeid[i][1]][:3] + vertex[nodeid[i][2]][:3])
snorm[:] = centerc[cellid[i][0]][:] - centerf[i][:]
if (snorm[0] * norm[0] + snorm[1] * norm[1] + snorm[2] * norm[2]) > 0:
normalf[i][:] = -1*norm[:]
else:
normalf[i][:] = norm[:]
mesuref[i] = sqrt(normalf[i][0]**2 + normalf[i][1]**2 + normalf[i][2]**2)
return 0
def Compute_2dcentervolumeOfCell(nodeid:'int[:,:]', vertex:'double[:,:]', nbelements:'int',
center:'double[:,:]', volume:'double[:]'):
    # compute the barycenter and the volume of each cell
for i in range(nbelements):
s_1 = nodeid[i][0]
s_2 = nodeid[i][1]
s_3 = nodeid[i][2]
x_1 = vertex[s_1][0]; y_1 = vertex[s_1][1]; z_1 = vertex[s_1][2]
x_2 = vertex[s_2][0]; y_2 = vertex[s_2][1]; z_2 = vertex[s_2][2]
x_3 = vertex[s_3][0]; y_3 = vertex[s_3][1]; z_3 = vertex[s_3][2]
center[i][0] = 1./3 * (x_1 + x_2 + x_3); center[i][1] = 1./3*(y_1 + y_2 + y_3); center[i][2] = 1./3*(z_1 + z_2 + z_3)
volume[i] = (1./2) * abs((x_1-x_2)*(y_1-y_3)-(x_1-x_3)*(y_1-y_2))
var1 = (x_2-x_1)*(y_3-y_1)-(y_2-y_1)*(x_3-x_1)
if var1 < 0:
nodeid[i][0] = s_1; nodeid[i][1] = s_3; nodeid[i][2] = s_2
return 0
def Compute_3dcentervolumeOfCell(nodeid:'int[:,:]', vertex:'double[:,:]', nbelements:'int',
center:'double[:,:]', volume:'double[:]'):
from numpy import zeros, fabs
wedge = zeros(3)
u = zeros(3)
v = zeros(3)
w = zeros(3)
    # compute the barycenter and the volume of each cell
for i in range(nbelements):
s_1 = nodeid[i][0]
s_2 = nodeid[i][1]
s_3 = nodeid[i][2]
s_4 = nodeid[i][3]
x_1 = vertex[s_1][0]; y_1 = vertex[s_1][1]; z_1 = vertex[s_1][2]
x_2 = vertex[s_2][0]; y_2 = vertex[s_2][1]; z_2 = vertex[s_2][2]
x_3 = vertex[s_3][0]; y_3 = vertex[s_3][1]; z_3 = vertex[s_3][2]
x_4 = vertex[s_4][0]; y_4 = vertex[s_4][1]; z_4 = vertex[s_4][2]
center[i][0] = 1./4*(x_1 + x_2 + x_3 + x_4)
center[i][1] = 1./4*(y_1 + y_2 + y_3 + y_4)
center[i][2] = 1./4*(z_1 + z_2 + z_3 + z_4)
u[:] = vertex[s_2][:]-vertex[s_1][:]
v[:] = vertex[s_3][:]-vertex[s_1][:]
w[:] = vertex[s_4][:]-vertex[s_1][:]
wedge[0] = v[1]*w[2] - v[2]*w[1]
wedge[1] = v[2]*w[0] - v[0]*w[2]
wedge[2] = v[0]*w[1] - v[1]*w[0]
volume[i] = 1./6*fabs(u[0]*wedge[0] + u[1]*wedge[1] + u[2]*wedge[2])
return 0
def create_cellsOfFace(faceid:'int[:,:]', nbelements:'int', nbfaces:'int', cellid:'int[:,:]', dim:'int'):
for i in range(nbelements):
for j in range(dim+1):
if cellid[faceid[i][j]][0] == -1 :
cellid[faceid[i][j]][0] = i
if cellid[faceid[i][j]][0] != i:
cellid[faceid[i][j]][0] = cellid[faceid[i][j]][0]
cellid[faceid[i][j]][1] = i
return 0
def create_2dfaces(nodeidc:'int[:,:]', nbelements:'int', faces:'int[:,:]',
cellf:'int[:,:]'):
#Create 2d faces
k = 0
for i in range(nbelements):
faces[k][0] = nodeidc[i][0]; faces[k][1] = nodeidc[i][1]
faces[k+1][0] = nodeidc[i][1]; faces[k+1][1] = nodeidc[i][2]
faces[k+2][0] = nodeidc[i][2]; faces[k+2][1] = nodeidc[i][0]
cellf[i][0] = k; cellf[i][1] = k+1; cellf[i][2] = k+2
k = k+3
return 0
def create_cell_faceid(nbelements:'int', oldTonewIndex:'int[:]', cellf:'int[:,:]',
faceid:'int[:,:]', dim:'int'):
for i in range(nbelements):
for j in range(dim+1):
faceid[i][j] = oldTonewIndex[cellf[i][j]]
return 0
def create_3dfaces(nodeidc:'int[:,:]', nbelements:'int',faces:'int[:,:]',
cellf:'int[:,:]'):
#Create 3d faces
k = 0
for i in range(nbelements):
faces[k][0] = nodeidc[i][0]; faces[k][1] = nodeidc[i][1]; faces[k][2] = nodeidc[i][2]
faces[k+1][0] = nodeidc[i][2]; faces[k+1][1] = nodeidc[i][3]; faces[k+1][2] = nodeidc[i][0]
faces[k+2][0] = nodeidc[i][0]; faces[k+2][1] = nodeidc[i][1]; faces[k+2][2] = nodeidc[i][3]
faces[k+3][0] = nodeidc[i][3]; faces[k+3][1] = nodeidc[i][1]; faces[k+3][2] = nodeidc[i][2]
cellf[i][0] = k; cellf[i][1] = k+1; cellf[i][2] = k+2; cellf[i][3] = k+3
k = k+4
return 0
def create_NormalFacesOfCell(centerc:'double[:,:]', centerf:'double[:,:]', faceid:'int[:,:]', normal:'double[:,:]',
nbelements:'int', nf:'double[:,:,:]', dim:'int'):
from numpy import zeros, double
ss = zeros(3, dtype=double)
#compute the outgoing normal faces for each cell
for i in range(nbelements):
G = centerc[i]
for j in range(dim+1):
f = faceid[i][j]
c = centerf[f]
if ((G[0]-c[0])*normal[f][0] + (G[1]-c[1])*normal[f][1] + (G[2]-c[2])*normal[f][2]) < 0.:
ss[:] = normal[f][:]
else:
ss[:] = -1.0*normal[f][:]
nf[i][j][:] = ss[:]
return 0
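# Minimal sanity check (illustrative only; kept as a comment so nothing runs when the
# annotated kernels above are imported or transpiled). For a single right triangle with
# unit legs, Compute_2dcentervolumeOfCell should give area 0.5 and barycenter (1/3, 1/3, 0):
#
#     import numpy as np
#     nodeid = np.array([[0, 1, 2]], dtype=np.int32)
#     vertex = np.array([[0., 0., 0.], [1., 0., 0.], [0., 1., 0.]])
#     center = np.zeros((1, 3)); volume = np.zeros(1)
#     Compute_2dcentervolumeOfCell(nodeid, vertex, 1, center, volume)
#     print(center[0], volume[0])    # approximately [0.3333 0.3333 0.] and 0.5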
|
nilq/baby-python
|
python
|
import datetime
import hashlib
import json
from typing import Dict
import uuid
class Utility(object):
@staticmethod
def make_json_serializable(doc: Dict):
"""
Make the document JSON serializable. This is a poor man's implementation that handles dates and nothing else.
This method modifies the given document in place.
Args:
doc: A Python Dictionary, typically a CDR object.
Returns: None
"""
for k, v in doc.items():
if isinstance(v, datetime.date):
doc[k] = v.strftime("%Y-%m-%d")
elif isinstance(v, datetime.datetime):
doc[k] = v.isoformat()
@staticmethod
def create_doc_id_from_json(doc) -> str:
"""
Docs with identical contents get the same ID.
Args:
doc:
Returns: a string with the hash of the given document.
"""
return hashlib.sha256(json.dumps(doc, sort_keys=True).encode('utf-8')).hexdigest()
@staticmethod
def create_uuid():
return str(uuid.uuid4())
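# Usage sketch (illustrative, not part of the original module; the document contents and
# dates below are made up):
if __name__ == "__main__":
    doc = {
        "title": "example",
        "crawl_date": datetime.date(2019, 1, 1),
        "timestamp": datetime.datetime(2019, 1, 1, 12, 30),
    }
    Utility.make_json_serializable(doc)              # dates become plain strings, in place
    assert doc["crawl_date"] == "2019-01-01"
    doc_id = Utility.create_doc_id_from_json(doc)    # stable sha256 over the sorted JSON
    assert doc_id == Utility.create_doc_id_from_json(dict(doc))
    print(doc_id, Utility.create_uuid())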
|
nilq/baby-python
|
python
|
image_directory = "./images/"
|
nilq/baby-python
|
python
|
# coding=utf-8
# --------------------------------------------------------------------------
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
try:
from ._models_py3 import Error, ErrorException
from ._models_py3 import Key
from ._models_py3 import KeyListResult
from ._models_py3 import KeyValue
from ._models_py3 import KeyValueListResult
from ._models_py3 import Label
from ._models_py3 import LabelListResult
except (SyntaxError, ImportError):
from ._models import Error, ErrorException
from ._models import Key
from ._models import KeyListResult
from ._models import KeyValue
from ._models import KeyValueListResult
from ._models import Label
from ._models import LabelListResult
__all__ = [
'Error', 'ErrorException',
'Key',
'KeyListResult',
'KeyValue',
'KeyValueListResult',
'Label',
'LabelListResult',
]
|
nilq/baby-python
|
python
|
from abc import ABC, abstractmethod
import pandas as pd
class ReducerAbstract(ABC):
@abstractmethod
def transform(self, df: pd.DataFrame) -> pd.DataFrame:
...
|
nilq/baby-python
|
python
|
import difflib
import json
import re
from itertools import zip_longest
try:
import html
except ImportError:
html = None
def _mark_text(text):
return '<span style="color: red;">{}</span>'.format(text)
def _mark_span(text):
return [_mark_text(token) for token in text]
def _markup_diff(a,
b,
mark=_mark_span,
default_mark=lambda x: x,
isjunk=None):
"""Returns a and b with any differences processed by mark
Junk is ignored by the differ
"""
seqmatcher = difflib.SequenceMatcher(isjunk=isjunk, a=a, b=b, autojunk=False)
out_a, out_b = [], []
for tag, a0, a1, b0, b1 in seqmatcher.get_opcodes():
markup = default_mark if tag == 'equal' else mark
out_a += markup(a[a0:a1])
out_b += markup(b[b0:b1])
assert len(out_a) == len(a)
assert len(out_b) == len(b)
return out_a, out_b
def _align_seqs(a, b, fill=''):
out_a, out_b = [], []
seqmatcher = difflib.SequenceMatcher(a=a, b=b, autojunk=False)
for tag, a0, a1, b0, b1 in seqmatcher.get_opcodes():
delta = (a1 - a0) - (b1 - b0)
out_a += a[a0:a1] + [fill] * max(-delta, 0)
out_b += b[b0:b1] + [fill] * max(delta, 0)
assert len(out_a) == len(out_b)
return out_a, out_b
def _html_sidebyside(a, b):
# Set the panel display
out = '<div style="display: grid;grid-template-columns: 1fr 1fr;grid-gap: 0;">'
# There's some CSS in Jupyter notebooks that makes the first pair unalign.
# This is a workaround
out += '<p></p><p></p>'
for left, right in zip_longest(a, b, fillvalue=''):
out += '<pre style="margin-top:0;padding:0">{}</pre>'.format(left)
out += '<pre style="margin-top:0";padding:0>{}</pre>'.format(right)
out += '</div>'
return out
def _html_diffs(a, b):
if not html:
raise ImportError('html package not found; Python 3.x required')
a = html.escape(a)
b = html.escape(b)
out_a, out_b = [], []
for sent_a, sent_b in zip(*_align_seqs(a.splitlines(), b.splitlines())):
mark_a, mark_b = _markup_diff(sent_a.split(' '), sent_b.split(' '))
out_a.append(' '.join(mark_a))
out_b.append(' '.join(mark_b))
return _html_sidebyside(out_a, out_b)
def _show_diffs(a, b):
from IPython.display import HTML, display
display(HTML(_html_diffs(a, b)))
def envdiff(a, b):
""" Compare 2 JSON environments using visual diff
a and b should be either pandas Series or strings of JSON objects
"""
try:
import pandas
except ImportError:
pandas = None
if pandas:
if isinstance(a, pandas.Series):
a = a.to_json()
if isinstance(b, pandas.Series):
b = b.to_json()
return _show_diffs(json.dumps(json.loads(a), indent=2),
json.dumps(json.loads(b), indent=2))
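# Usage sketch (assumes a Jupyter/IPython session, since _show_diffs renders HTML; the two
# JSON strings are made up for illustration):
#
#     env_a = '{"PATH": "/usr/bin", "LANG": "en_US.UTF-8"}'
#     env_b = '{"PATH": "/usr/local/bin:/usr/bin", "LANG": "en_US.UTF-8"}'
#     envdiff(env_a, env_b)   # side-by-side view with differing tokens highlighted in red
#
# pandas Series are also accepted; they are converted with .to_json() before diffing.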
|
nilq/baby-python
|
python
|
project = "Programmation en Python"
copyright = "2020, Dimitri Merejkowsky"
author = "Dimitri Merejkowsky - Contenu placé sous licence CC BY 4.0"
version = "0.3"
language = "fr"
copyright = "CC BY 4.0"
templates_path = ["_templates"]
exclude_patterns = []
keep_warnings = True
extensions = [
"notfound.extension",
]
notfound_context = {
"title": "Page non trouvée",
"body": "<h1>Page non trouvée</h1>",
}
notfound_urls_prefix = "/python/"
html_show_sourcelink = False
html_show_copyright = False
html_theme = "sphinx_book_theme"
# Don't use default "<project> <version> documentation"
html_title = project
html_static_path = ["_static"]
html_use_index = False
|
nilq/baby-python
|
python
|
"""
Implements a decorator that counts the number of times a function was called,
and collects statistics on how long it took to execute every single function call.
"""
from time import time
from sys import stderr, stdout
import numpy as np
class FunctionLogger(object):
"""
stores two dictionaries:
- call_frequencies: mapping of function names to counts of how often they've been called
- call_times: mapping of function names to lists of how long it took to execute a fn call
"""
call_frequencies = {}
call_times = {}
def __init__(self, function, naming):
"""
initialize an instance of FunctionLogger. Notably, the user should not ever have
to do this: this exists solely to create a context manager for function_profiler.
"""
self.start_time = None
if naming == 'qualname':
self.function_name = function.__qualname__
elif naming == 'name':
self.function_name = function.__name__
else:
            raise ValueError(
                "Invalid naming argument supplied to function_profiler: {}"
                .format(naming)
            )
def __enter__(self):
FunctionLogger.call_frequencies[self.function_name] = (
FunctionLogger.call_frequencies.get(self.function_name, 0) + 1
)
self.start_time = time()
def __exit__(self, type_, value, traceback):
seconds_taken = time() - self.start_time
call_times_so_far = FunctionLogger.call_times.get(self.function_name, [])
FunctionLogger.call_times[self.function_name] = call_times_so_far + [seconds_taken]
def clear_data():
"""
Clears the data stored in the class variables. No 'self' argument
because this is not run on an instance, but on the class itself.
"""
FunctionLogger.call_frequencies = {}
FunctionLogger.call_times = {}
def log_data(output_option='stderr'):
"""
logs the class variables to stdout, stderr, or to a file. No 'self' arg
because this is not run on an instance, but on the class itself.
"""
# for when we're logging to a file, rather than stderr or stdout
log_file_strings = []
for function_key in sorted(FunctionLogger.call_frequencies.keys()):
call_freq = FunctionLogger.call_frequencies.get(function_key, 0)
call_times = FunctionLogger.call_times.get(function_key, [])
out_string = make_output_string(function_key, call_times, call_freq)
if output_option == 'stderr':
stderr.write(out_string)
elif output_option == 'stdout':
stdout.write(out_string)
elif output_option == 'suppress':
# this is mostly intended for testing
pass
else:
log_file_strings.append(out_string)
if log_file_strings:
with open(output_option, 'w') as out_file:
for out_string in log_file_strings:
out_file.write(out_string)
def make_output_string(fn_name, call_times, call_freq):
"""
Construct a string that represents the log for this one particular function.
- fn_name: string, name of the function
- call_times: list of floats (lengths of function calls)
- call_freq: integer, number of times the function was called
"""
if call_times == []:
# call_times == [] iff __enter__ was called with this fn, but __exit__ was not
stats_string = (
"No time stats were recorded for this function, "
"despite it having been called. This is an error.\n"
)
else:
stats_string = (
"Min: {:08f}, Max: {:08f}, Mean: {:08f}, Median: {:08f}, Stddev: {:08f}\n"
.format(np.min(call_times), np.max(call_times), np.mean(call_times),
np.median(call_times), np.std(call_times))
)
if call_freq != len(call_times):
# for at least one call of this function, __enter__ was called but __exit__ was not.
stats_string += (
("WARNING: number of call times ({}) is not equal to call frequency count ({}). "
"This suggests the function was called, but did not return as normal. Check "
"for errors or program termination.\n").format(len(call_times), call_freq)
)
call_text = "call" if (call_freq == 1) else "calls"
return "{}: {} {}. Time stats (s): {}".format(fn_name, call_freq, call_text, stats_string)
def function_profiler(naming='qualname'):
"""
decorator that uses FunctionLogger as a context manager to
log information about this call of the function.
"""
def layer(function):
def wrapper(*args, **kwargs):
with FunctionLogger(function, naming):
return function(*args, **kwargs)
return wrapper
return layer
def with_logger(output='stderr'):
"""
decorator that calls FunctionLogger.log_data when the decorated function
terminates, whether due to an exception or not.
"""
def layer(function):
def wrapper(*args, **kwargs):
try:
function(*args, **kwargs)
finally:
FunctionLogger.log_data(output)
return wrapper
return layer
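# Usage sketch (illustrative, not part of the original module): profile a throwaway function
# and dump the collected statistics when the outer function returns. The names demo and
# square are made up for this example.
if __name__ == "__main__":
    @with_logger(output='stdout')
    def demo():
        @function_profiler()
        def square(x):
            return x * x

        for i in range(5):
            square(i)

    demo()
    # Expected shape of the log line (times vary):
    # demo.<locals>.square: 5 calls. Time stats (s): Min: ..., Max: ..., Mean: ..., Median: ..., Stddev: ...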
|
nilq/baby-python
|
python
|
# coding=utf-8
# Copyright 2018 The Google AI Language Team Authors and The HuggingFace Inc. team.
# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Finetuning the library models for question-answering on SQuAD (DistilBERT, Bert, XLM, XLNet)."""
import glob
import logging
import os
import torch
from args import get_args, preprocessing_data
from utils import load_and_cache_examples
from train_eval import train, evaluate, set_seed
from prettytable import PrettyTable
from model import *
from transformers import (
MODEL_FOR_QUESTION_ANSWERING_MAPPING,
WEIGHTS_NAME,
AutoConfig,
AutoModelForQuestionAnswering,
AutoTokenizer,
get_linear_schedule_with_warmup,
squad_convert_examples_to_features,
)
LANGS = ['en', 'es', 'de', 'ar', 'hi', 'vi', 'zh']
XQ_LANGS = ['en', 'ar', 'de', 'el', 'es', 'hi', 'ru', 'tr', 'vi', 'zh']
# LANGS = ['en', 'es', 'de', 'hi', 'vi', 'zh']
logger = logging.getLogger(__name__)
MODEL_CONFIG_CLASSES = list(MODEL_FOR_QUESTION_ANSWERING_MAPPING.keys())
MODEL_TYPES = tuple(conf.model_type for conf in MODEL_CONFIG_CLASSES)
def main():
args = get_args()
if args.doc_stride >= args.max_seq_length - args.max_query_length:
logger.warning(
"WARNING - You've set a doc stride which may be superior to the document length in some "
"examples. This could result in errors when building features from the examples. Please reduce the doc "
"stride or increase the maximum length to ensure the features are correctly built."
)
if (
os.path.exists(args.output_dir)
and os.listdir(args.output_dir)
and args.do_train
and not args.overwrite_output_dir
):
print(
"Output directory ({}) already exists and is not empty. Use --overwrite_output_dir to overcome.".format(
args.output_dir
)
)
# Setup distant debugging if needed
if args.server_ip and args.server_port:
# Distant debugging - see https://code.visualstudio.com/docs/python/debugging#_attach-to-a-local-script
import ptvsd
print("Waiting for debugger attach")
ptvsd.enable_attach(address=(args.server_ip, args.server_port), redirect_output=True)
ptvsd.wait_for_attach()
# Setup CUDA, GPU & distributed training
if args.local_rank == -1 or args.no_cuda:
device = torch.device("cuda" if torch.cuda.is_available() and not args.no_cuda else "cpu")
args.n_gpu = 0 if args.no_cuda else torch.cuda.device_count()
else: # Initializes the distributed backend which will take care of sychronizing nodes/GPUs
torch.cuda.set_device(args.local_rank)
device = torch.device("cuda", args.local_rank)
torch.distributed.init_process_group(backend="nccl")
args.n_gpu = 1
args.device = device
# Setup logging
logging.basicConfig(
format="%(asctime)s - %(levelname)s - %(name)s - %(message)s",
datefmt="%m/%d/%Y %H:%M:%S",
level=logging.INFO if args.local_rank in [-1, 0] else logging.WARN,
)
logger.warning(
"Process rank: %s, device: %s, n_gpu: %s, distributed training: %s, 16-bits training: %s",
args.local_rank,
device,
args.n_gpu,
bool(args.local_rank != -1),
args.fp16,
)
# Set seed
set_seed(args)
# Load pretrained model and tokenizer
if args.local_rank not in [-1, 0]:
# Make sure only the first process in distributed training will download model & vocab
torch.distributed.barrier()
logging.getLogger("transformers.modeling_utils").setLevel(logging.WARN) # Reduce model loading logs
logging.getLogger("transformers.configuration_utils").setLevel(logging.WARN) # Reduce model loading logs
args.model_type = args.model_type.lower()
config = AutoConfig.from_pretrained(
args.config_name if args.config_name else args.model_name_or_path,
cache_dir=args.cache_dir if args.cache_dir else None,
)
config.addtional_feature_size = args.addtional_feature_size
config.gan_dropout_prob = args.gan_dropout_prob
tokenizer = AutoTokenizer.from_pretrained(
args.tokenizer_name if args.tokenizer_name else args.model_name_or_path,
do_lower_case=args.do_lower_case,
cache_dir=args.cache_dir if args.cache_dir else None,
)
if args.model_type == 'bert':
QAModel = mBertForQuestionAnswering_dep_beta_v3
model = QAModel.from_pretrained(
args.model_name_or_path,
from_tf=bool(".ckpt" in args.model_name_or_path),
config=config,
cache_dir=args.cache_dir if args.cache_dir else None,
)
if args.local_rank == 0:
# Make sure only the first process in distributed training will download model & vocab
torch.distributed.barrier()
model.to(args.device)
logger.info("Training/evaluation parameters %s", args)
# Before we do anything with models, we want to ensure that we get fp16 execution of torch.einsum if args.fp16 is set.
# Otherwise it'll default to "promote" mode, and we'll get fp32 operations. Note that running `--fp16_opt_level="O2"` will
# remove the need for this code, but it is still valid.
if args.fp16:
try:
import apex
apex.amp.register_half_function(torch, "einsum")
except ImportError:
raise ImportError("Please install apex from https://www.github.com/nvidia/apex to use fp16 training.")
# Data preprocessing with the dev and test data firstly (prevent fp16 issue when facing Stanza)
if args.do_preprocess:
for set_name, lang in preprocessing_data:
logger.info("Now process train/dev/test/xquad data: {}/{}".format(set_name, lang))
dataset, examples, features = load_and_cache_examples(args,
tokenizer,
evaluate=set_name,
context_lang=lang,
query_lang=lang,
output_examples=True)
# Training
if args.do_train:
# train_dataset = load_and_cache_examples(args, tokenizer, evaluate='train', output_examples=False)
global_step, tr_loss, time_stamp = train(args, model, tokenizer)
logger.info(" global_step = %s, average loss = %s", global_step, tr_loss)
# Evaluation - we can ask to evaluate all the checkpoints (sub-directories) in a directory
results = {}
if args.do_eval and args.local_rank in [-1, 0]:
if not args.do_train:
time_stamp = '12-02-11-14'
checkpoints = [args.output_dir]
if args.eval_all_checkpoints:
checkpoints = list(
os.path.dirname(c) for c in sorted(glob.glob(args.output_dir + '/**/' + WEIGHTS_NAME, recursive=True)))
checkpoints = [i for i in checkpoints if time_stamp in i]
logger.info("Evaluate the following checkpoints for dev: %s", checkpoints)
best_f1 = 0
best_em = 0
best_ckpt = checkpoints[0]
for checkpoint in checkpoints:
# Reload the model
global_step = checkpoint.split("-")[-1] if len(checkpoints) > 1 else ""
logger.info("Load the checkpoint: {}".format(checkpoint))
model = QAModel.from_pretrained(checkpoint) # , force_download=True)
model.to(args.device)
# Evaluate
result = evaluate(args, model, tokenizer, prefix=global_step, set='dev')
if result['f1'] > best_f1:
best_f1 = result['f1']
best_em = result['exact_match']
best_ckpt = checkpoint
result = dict((k + ("_{}".format(global_step) if global_step else ""), v) for k, v in result.items())
results.update(result)
logger.info("Dev Results: {}".format(results))
logger.info("Best checkpoint and its dev em/f1 result: {}, {}/{}".format(best_ckpt, best_em, best_f1))
if args.do_test and args.local_rank in [-1, 0]:
model = QAModel.from_pretrained(best_ckpt) # , force_download=True)
model.to(args.device)
logger.info("Evaluate on MLQA dataset!")
mean_em = 0
mean_f1 = 0
table = PrettyTable()
table.add_column(' ', ['EM', 'F1'])
for lang in LANGS:
result = evaluate(args, model, tokenizer, set='test', context_lang=lang, query_lang=lang, prefix=global_step)
table.add_column(lang, [round(result['exact_match'], 2), round(result['f1'], 2)])
# logger.info("Test Results for {}-{}: {}".format(lang,lang,result))
mean_em += result['exact_match']
mean_f1 += result['f1']
mean_em = mean_em/len(LANGS)
mean_f1 = mean_f1/len(LANGS)
table.add_column('Avg', [round(mean_em, 2), round(mean_f1, 2)])
print(table)
logger.info("Evaluate on XQUAD dataset!")
mean_em = 0
mean_f1 = 0
table = PrettyTable()
table.add_column(' ', ['EM', 'F1'])
for lang in XQ_LANGS:
result = evaluate(args, model, tokenizer, set='xquad', context_lang=lang, query_lang=lang, prefix=global_step)
table.add_column(lang, [round(result['exact_match'], 2), round(result['f1'], 2)])
# logger.info("Test Results for {}-{}: {}".format(lang, lang, result))
mean_em += result['exact_match']
mean_f1 += result['f1']
mean_em = mean_em / len(XQ_LANGS)
mean_f1 = mean_f1 / len(XQ_LANGS)
table.add_column('Avg', [round(mean_em, 2), round(mean_f1, 2)])
print(table)
return results
if __name__ == "__main__":
main()
|
nilq/baby-python
|
python
|
from .walker import RandomWalker
class Node2Path:
def __init__(self, graph, walk_length, num_walks, p=1.0, q=1.0, workers=1):
self.graph = graph
self.walk_length = walk_length
self.num_walks = num_walks
self.p = p
self.q = q
self.workers = workers
def get_path(self):
self.walker = RandomWalker(self.graph, p=self.p, q=self.q)
print("Preprocess transition probs...")
self.walker.preprocess_transition_probs()
self.sentences = self.walker.simulate_walks(
num_walks=self.num_walks,
walk_length=self.walk_length,
workers=self.workers,
verbose=1,
)
return self.sentences
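# Usage sketch (assumptions: RandomWalker accepts a networkx graph and follows the
# node2vec-style interface used above; the toy graph and the p/q values are illustrative only).
if __name__ == "__main__":
    import networkx as nx

    g = nx.karate_club_graph()
    n2p = Node2Path(g, walk_length=10, num_walks=5, p=0.5, q=2.0, workers=1)
    walks = n2p.get_path()      # list of node sequences, one per simulated walk
    print(len(walks), walks[0])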
|
nilq/baby-python
|
python
|
#!/usr/bin/env python
import json
import argparse
import docker
# A Simple module that returns stats for given ids.
def get_nested_elements(info, elements):
# Function to traverse dictionaries and print when value is
# not a dict (instead it's a str)
# pdb.set_trace()
if isinstance(elements, str):
keys = elements.split('.')
else:
keys = elements
for key in keys:
value = info[key]
if isinstance(value, dict):
keys.pop(0)
if keys:
get_nested_elements(value, keys)
elif value is not None:
print(value)
else:
return('Not Encountered Value')
def get_container_attr(container_id, attr, addr):
# Find a container info and return desired attr.
cli = docker.from_env()
container = cli.containers.get(container_id)
stats = container.stats(stream=False, decode=True)
get_nested_elements(stats, attr)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('--cid', type=str, required=True)
parser.add_argument('--attr', type=str, required=True)
parser.add_argument('--addr', type=str, required=False)
args = parser.parse_args()
    addr = args.addr if args.addr else 'http://127.0.0.1:2376'
    get_container_attr(container_id=args.cid, attr=args.attr, addr=addr)
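# Usage sketch (illustrative container id and attribute path; assumes the script above is
# saved as container_stats.py and that a Docker daemon is reachable through the environment
# picked up by docker.from_env()):
#
#     ./container_stats.py --cid 1a2b3c4d5e6f --attr memory_stats.usage
#     ./container_stats.py --cid 1a2b3c4d5e6f --attr cpu_stats.cpu_usage.total_usage
#
# --attr is a dot-separated path into the dict returned by container.stats(); the matching
# value is printed by get_nested_elements. Note that --addr is accepted but the connection
# itself is made through docker.from_env().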
|
nilq/baby-python
|
python
|
from pyspark.sql import SparkSession
from pyspark.ml import Pipeline
from pyspark.ml.feature import VectorAssembler
from pyspark.ml.regression import LinearRegression
from pyspark.ml.evaluation import RegressionEvaluator
import matplotlib.pyplot as plt
import numpy as np
def read_data(file_path):
cars_df = spark.read.load(file_path, format="csv", sep=",", inferSchema="true", header="true")
result_df = cars_df.toDF("make", "model", "year", "engine_fuel_type", "engine_hp", "engine_cylinders",
"transmission_type", "driven_wheels", "number_of_doors", "market_category", "vehicle_size",
"vehicle_style", "highway_mpg", "city_mpg", "popularity", "price")
result_df.select("make", "model", "year", "engine_hp", "number_of_doors", "highway_mpg", "city_mpg", "popularity",
"price").show(5)
return result_df
def split_data(data_df):
train_df, test_df = data_df.randomSplit([.8, .2], seed=42)
print(f"""Zbior danych wejsciowych podzielono na:\n
Rekordy trenujace:\t{train_df.count()}\n
Rekordy testujace:\t{test_df.count()}""")
return train_df, test_df
def train_model(train_dataset):
numeric_cols = ["year", "engine_hp", "number_of_doors", "highway_mpg", "city_mpg", "popularity"]
vec_assembler = VectorAssembler(inputCols=numeric_cols, outputCol="features", handleInvalid="skip")
vec_train_df = vec_assembler.transform(train_dataset)
vec_train_df.select("features", "price").show(10)
lr = LinearRegression(featuresCol="features", labelCol="price")
lr_model = lr.fit(vec_train_df)
year = round(lr_model.coefficients[0], 2)
engine_hp = round(lr_model.coefficients[1], 2)
number_of_doors = round(lr_model.coefficients[2], 2)
highway_mpg = round(lr_model.coefficients[3], 2)
city_mpg = round(lr_model.coefficients[4], 2)
popularity = round(lr_model.coefficients[5], 2)
b = round(lr_model.intercept, 2)
print(
f"""Wzor nauczonego modelu:
cena = {year}*rok + {engine_hp}*konie_mechaniczne + {number_of_doors}*drzwi + {highway_mpg}*mpg_autostrada
+ {city_mpg}*mpg_miasto + {popularity}*popularnosc + {b}""")
estimator = Pipeline(stages=[vec_assembler, lr])
trained_model = estimator.fit(train_dataset)
return trained_model
def make_predictions(trained_model, test_df):
prediction_df = trained_model.transform(test_df)
prediction_df.select("features", "price", "prediction").show(10)
return prediction_df
def evaluate_model(model):
regression_evaluator = RegressionEvaluator(
predictionCol="prediction",
labelCol="price",
metricName="rmse")
rmse = regression_evaluator.evaluate(model)
print(f"RMSE = {rmse:.1f}")
r2 = regression_evaluator.setMetricName("r2").evaluate(model)
print(f"R2 = {r2}")
def plot_histogram(real_data, prediction):
numbers_of_records = 2000
input_data = [np.log(row['price']) for row in real_data.take(numbers_of_records)]
    predicted = [np.log(row['prediction']) for row in prediction.take(numbers_of_records)]
plt.figure()
plt.hist([predicted, input_data], bins=30, log=False)
plt.legend(('prognozowane ceny', 'rzeczywiste ceny'))
plt.xlabel('cena')
plt.ylabel('ilość')
plt.savefig('result_histogram.png')
if __name__ == '__main__':
spark = SparkSession.builder \
.appName("Ceny pojazdow") \
.getOrCreate()
spark.sparkContext.setLogLevel("ERROR")
data_path = """./data/car-data.csv"""
cars_df = read_data(data_path)
(train_df, test_df) = split_data(cars_df)
estimate_model = train_model(train_df)
predictions_df = make_predictions(estimate_model, test_df)
evaluate_model(predictions_df)
plot_histogram(cars_df, predictions_df)
spark.stop()
|
nilq/baby-python
|
python
|
from collections import OrderedDict
from typing import Optional
from queryfs.db.schema import Schema
class File(Schema):
table_name: str = "files"
fields: OrderedDict[str, str] = OrderedDict(
{
"id": "integer primary key autoincrement",
"name": "text",
"hash": "text",
"ctime": "real",
"atime": "real",
"mtime": "real",
"size": "integer",
"directory_id": "integer null",
}
)
id: int = 0
name: str = ""
hash: str = ""
ctime: float = 0.0
atime: float = 0.0
mtime: float = 0.0
size: int = 0
directory_id: Optional[int] = 0
|
nilq/baby-python
|
python
|
#!/usr/bin/env python2.7
import os
import sys
sys.path.append(os.path.realpath(__file__ + '/../../../../lib'))
import udf
from udf import useData, expectedFailure
class GetpassTest(udf.TestCase):
def setUp(self):
self.query('CREATE SCHEMA getpass', ignore_errors=True)
self.query('OPEN SCHEMA getpass', ignore_errors=True)
def tearDown(self):
self.query('DROP SCHEMA getpass CASCADE', ignore_errors=True)
def test_getuser(self):
self.query(udf.fixindent('''
CREATE OR REPLACE python3 SCALAR SCRIPT
get_user_from_passwd()
RETURNS VARCHAR(10000) AS
def run(ctx):
import getpass
return getpass.getuser()
/
'''))
rows = self.query("select get_user_from_passwd()")
expected = u"exadefusr"
        self.assertEqual(expected, rows[0][0])
if __name__ == '__main__':
udf.main()
# vim: ts=4:sts=4:sw=4:et:fdm=indent
|
nilq/baby-python
|
python
|
from scipy.io import netcdf
import numpy as np
import numpy.matlib
tave = 900
basedir = '/marconi_work/FUA34_MULTEI/stonge0_FUA34/rad_test/2nd_deriv/T1/'
basedir = '/marconi_work/FUA34_MULTEI/stonge0_FUA34/rad_test/2nd_deriv/rho_scan2/r0.001/'
basedir = '/marconi_work/FUA34_MULTEI/stonge0_FUA34/rad_test/fg_drive/rhoscan_hr/0.002/'
basedir = '/marconi_work/FUA34_MULTEI/stonge0_FUA34/rad_test/fg_drive/0.001d/'
basedir = '/marconi_work/FUA35_OXGK/stonge0/rad_scan/0.001d_aht/'
#basedir = '/marconi_work/FUA35_OXGK/stonge0/rad_scan/0.001t_2/'
right_file = basedir + 'left.out.nc'
center_file = basedir + 'center.out.nc'
left_file = basedir + 'right.out.nc'
right_nc = netcdf.netcdf_file(right_file,'r')
center_nc = netcdf.netcdf_file(center_file,'r')
left_nc = netcdf.netcdf_file(left_file,'r')
def read_stella_float(infile, var):
import numpy as np
try:
#print('a')
#arr = np.copy(infile.variables[var][:])
arr = infile.variables[var][:]
#print('b')
flag = True
except KeyError:
print('INFO: '+var+' not found in netcdf file')
        arr = np.arange(1, dtype=float)
        flag = False
return arr, flag
def phi_vs_t_to_x(infile,var,ny,nx):
# t ntube z kx ky ri
avt, present = read_stella_float(infile,var)
#print('c')
avt_kxky = ny*nx*(avt[:,0,:,:,:,0] + 1j*avt[:,0,:,:,:,1])
#print('d')
arr = np.fft.ifft(avt_kxky,axis=2)
#print('e')
return arr
def mom_vs_t_to_x(infile,var,ny,nx):
#in: t nspec ntube z kx ky ri
#out: t z kx ky
avt, present = read_stella_float(infile,var)
avt_kxky = ny*nx*(avt[:,0,0,:,:,:,0] + 1j*avt[:,0,0,:,:,:,1])
arr = np.fft.ifft(avt_kxky,axis=2)
return arr
print('0')
naky = center_nc.dimensions['ky']
nakxl = left_nc.dimensions['kx']
nakxc = center_nc.dimensions['kx']
nakxr = right_nc.dimensions['kx']
ky = np.copy(center_nc.variables['ky'][:])
kxc = np.copy(center_nc.variables['kx'][:])
t = np.copy(center_nc.variables['t'][:])
nt = t.size
Lxc = 2.*np.pi/kxc[1]
dxc = Lxc / nakxc
zed = np.copy(center_nc.variables['zed'][:])
nzed = zed.size
omp = ((nzed+1)/2) - 1
delzed = zed[1]-zed[0]
radgrid = np.copy(center_nc.variables['rad_grid'][:])
fac = 2*np.ones(naky)
fac[0] = 1
jacobl = np.copy( left_nc.variables['jacob'][:])
jacobc = np.copy(center_nc.variables['jacob'][:])
jacobr = np.copy( right_nc.variables['jacob'][:])
# nalpha nzed
print('1')
dl_over_bl = np.squeeze(delzed*jacobl)
dl_over_bc = np.squeeze(delzed*jacobc)
dl_over_br = np.squeeze(delzed*jacobr)
dl_over_bl[nzed-1] = 0.0
dl_over_bc[nzed-1] = 0.0
dl_over_br[nzed-1] = 0.0
dl_over_bl = dl_over_bl/sum(dl_over_bl)
dl_over_bc = dl_over_bc/sum(dl_over_bc)
dl_over_br = dl_over_br/sum(dl_over_br)
dobl = np.transpose(np.matlib.tile(dl_over_bl,(naky,nakxl,1)))
dobc = np.transpose(np.matlib.tile(dl_over_bc,(naky,nakxc,1)))
dobr = np.transpose(np.matlib.tile(dl_over_br,(naky,nakxr,1)))
print('2')
# t spec x
pfluxl = np.copy( left_nc.variables['pflux_x'][:])
pfluxc = np.copy(center_nc.variables['pflux_x'][:])
pfluxr = np.copy( right_nc.variables['pflux_x'][:])
vfluxl = np.copy( left_nc.variables['vflux_x'][:])
vfluxc = np.copy(center_nc.variables['vflux_x'][:])
vfluxr = np.copy( right_nc.variables['vflux_x'][:])
qfluxl = np.copy( left_nc.variables['qflux_x'][:])
qfluxc = np.copy(center_nc.variables['qflux_x'][:])
qfluxr = np.copy( right_nc.variables['qflux_x'][:])
print('3')
densl_xky = mom_vs_t_to_x(left_nc,'density',naky,nakxc)
uparl_xky = mom_vs_t_to_x(left_nc,'upar',naky,nakxc)
templ_xky = mom_vs_t_to_x(left_nc,'temperature',naky,nakxc)
densc_xky = mom_vs_t_to_x(center_nc,'density',naky,nakxc)
uparc_xky = mom_vs_t_to_x(center_nc,'upar',naky,nakxc)
tempc_xky = mom_vs_t_to_x(center_nc,'temperature',naky,nakxc)
densr_xky = mom_vs_t_to_x(right_nc,'density',naky,nakxc)
uparr_xky = mom_vs_t_to_x(right_nc,'upar',naky,nakxc)
tempr_xky = mom_vs_t_to_x(right_nc,'temperature',naky,nakxc)
dens_zf = np.real(np.sum(dobc[:,:,0]*densc_xky[:,:,:,0],1))
upar_zf = np.real(np.sum(dobc[:,:,0]*uparc_xky[:,:,:,0],1))
temp_zf = np.real(np.sum(dobc[:,:,0]*tempc_xky[:,:,:,0],1))
densl_zf = np.real(np.sum(dobl[:,:,0]*densl_xky[:,:,:,0],1))
uparl_zf = np.real(np.sum(dobl[:,:,0]*uparl_xky[:,:,:,0],1))
templ_zf = np.real(np.sum(dobl[:,:,0]*templ_xky[:,:,:,0],1))
densr_zf = np.real(np.sum(dobr[:,:,0]*densr_xky[:,:,:,0],1))
uparr_zf = np.real(np.sum(dobr[:,:,0]*uparr_xky[:,:,:,0],1))
tempr_zf = np.real(np.sum(dobr[:,:,0]*tempr_xky[:,:,:,0],1))
dens_zero = np.mean(dens_zf,1)
upar_zero = np.mean(upar_zf,1)
temp_zero = np.mean(temp_zf,1)
print('4')
cout = open(basedir + 'left.fluxes_t','w')
cout.write('[1] t ')
cout.write('[2] x ')
cout.write('[3] flux_d')
cout.write('[4] flux_u')
cout.write('[5] flux_t')
cout.write('[6] dens ')
cout.write('[7] upar ')
cout.write('[8] temp ')
cout.write('\n')
print('5')
for i in range (0, nt):
for j in range (0, nakxl):
cout.write('%e ' % t[i])
cout.write('%e ' % (dxc*j))
cout.write('%e ' % pfluxl[i,0,j])
cout.write('%e ' % vfluxl[i,0,j])
cout.write('%e ' % qfluxl[i,0,j])
cout.write('%e ' % densl_zf[i,j])
cout.write('%e ' % uparl_zf[i,j])
cout.write('%e ' % templ_zf[i,j])
cout.write('\n')
cout.write('\n')
cout.close()
cout = open(basedir + 'center.fluxes_t','w')
cout.write('[1] t ')
cout.write('[2] x ')
cout.write('[3] x simp')
cout.write('[4] r ')
cout.write('[5] flux_d')
cout.write('[6] flux_u')
cout.write('[7] flux_t')
cout.write('[8] dens ')
cout.write('[9] upar ')
cout.write('[10] temp')
cout.write('\n')
print('6')
for i in range (0, nt):
for j in range (0, nakxc):
cout.write('%e ' % t[i])
cout.write('%e ' % radgrid[j,0])
cout.write('%e ' % (dxc*j))
cout.write('%e ' % radgrid[j,2])
cout.write('%e ' % pfluxc[i,0,j])
cout.write('%e ' % vfluxc[i,0,j])
cout.write('%e ' % qfluxc[i,0,j])
cout.write('%e ' % dens_zf[i,j])
cout.write('%e ' % upar_zf[i,j])
cout.write('%e ' % temp_zf[i,j])
cout.write('\n')
cout.write('\n')
cout.close()
cout = open(basedir + 'right.fluxes_t','w')
cout.write('[1] t ')
cout.write('[2] x ')
cout.write('[3] flux_d')
cout.write('[4] flux_u')
cout.write('[5] flux_t')
cout.write('[6] dens ')
cout.write('[7] upar ')
cout.write('[8] temp ')
cout.write('\n')
print('7')
for i in range (0, nt):
for j in range (0, nakxr):
cout.write('%e ' % t[i])
cout.write('%e ' % (dxc*j))
cout.write('%e ' % pfluxr[i,0,j])
cout.write('%e ' % vfluxr[i,0,j])
cout.write('%e ' % qfluxr[i,0,j])
cout.write('%e ' % densr_zf[i,j])
cout.write('%e ' % uparr_zf[i,j])
cout.write('%e ' % tempr_zf[i,j])
cout.write('\n')
cout.write('\n')
cout.close()
tind=nt-1
for i in range (0, nt):
if(t[i]> tave):
tind = i
break
print(str(tind) + ' ' + str(nt))
print('8')
plave = np.mean(pfluxl[tind:nt,0,:],0)
vlave = np.mean(vfluxl[tind:nt,0,:],0)
qlave = np.mean(qfluxl[tind:nt,0,:],0)
pcave = np.mean(pfluxc[tind:nt,0,:],0)
vcave = np.mean(vfluxc[tind:nt,0,:],0)
qcave = np.mean(qfluxc[tind:nt,0,:],0)
dlave = np.mean(densl_zf[tind:nt,:],0)
ulave = np.mean(uparl_zf[tind:nt,:],0)
tlave = np.mean(templ_zf[tind:nt,:],0)
dcave = np.mean(dens_zf[tind:nt,:],0)
ucave = np.mean(upar_zf[tind:nt,:],0)
tcave = np.mean(temp_zf[tind:nt,:],0)
drave = np.mean(densr_zf[tind:nt,:],0)
urave = np.mean(uparr_zf[tind:nt,:],0)
trave = np.mean(tempr_zf[tind:nt,:],0)
prave = np.mean(pfluxr[tind:nt,0,:],0)
vrave = np.mean(vfluxr[tind:nt,0,:],0)
qrave = np.mean(qfluxr[tind:nt,0,:],0)
print('9')
cout = open(basedir + 'center.prof_ave','w')
cout.write('#Average from t=' + str(t[tind])+ ' to t=' + str(t[nt-1]) + '\n')
cout.write('#')
cout.write('[1] x ')
cout.write('[2] x simp ')
cout.write('[3] r ')
cout.write('[4] pflux ')
cout.write('[5] vflux ')
cout.write('[6] qflux ')
cout.write('[7] dens ')
cout.write('[8] upar ')
cout.write('[9] temp ')
cout.write('\n')
for i in range (0, nakxc):
cout.write('%e ' % radgrid[i,0])
cout.write('%e ' % (dxc*i))
cout.write('%e ' % radgrid[i,2])
cout.write('%e ' % pcave[i])
cout.write('%e ' % vcave[i])
cout.write('%e ' % qcave[i])
cout.write('%e ' % dcave[i])
cout.write('%e ' % ucave[i])
cout.write('%e ' % tcave[i])
cout.write('\n')
cout.close()
cout = open(basedir + 'left.prof_ave','w')
cout.write('#Average from t=' + str(t[tind])+ ' to t=' + str(t[nt-1]) + '\n')
cout.write('#')
cout.write('[1] x ')
cout.write('[2] pflux ')
cout.write('[3] vflux ')
cout.write('[4] qflux ')
cout.write('[5] dens ')
cout.write('[6] upar  ')
cout.write('[7] temp  ')
cout.write('\n')
for i in range (0, nakxl):
cout.write('%e ' % (dxc*i-0.5*Lxc))
cout.write('%e ' % plave[i])
cout.write('%e ' % vlave[i])
cout.write('%e ' % qlave[i])
cout.write('%e ' % dlave[i])
cout.write('%e ' % ulave[i])
cout.write('%e ' % tlave[i])
cout.write('\n')
cout.close()
cout = open(basedir + 'right.prof_ave','w')
cout.write('#Average from t=' + str(t[tind])+ ' to t=' + str(t[nt-1]) + '\n')
cout.write('#')
cout.write('[1] x ')
cout.write('[2] pflux ')
cout.write('[3] vflux ')
cout.write('[4] qflux ')
cout.write('[5] dens ')
cout.write('[6] upar  ')
cout.write('[7] temp  ')
cout.write('\n')
for i in range (0, nakxr):
cout.write('%e ' % (dxc*i-0.5*Lxc))
cout.write('%e ' % prave[i])
cout.write('%e ' % vrave[i])
cout.write('%e ' % qrave[i])
cout.write('%e ' % drave[i])
cout.write('%e ' % urave[i])
cout.write('%e ' % trave[i])
cout.write('\n')
cout.close()
cout = open(basedir + 'center.zero_mode','w')
cout.write('[1] t ')
cout.write('[2] dens ')
cout.write('[3] upar ')
cout.write('[4] temp ')
cout.write('\n')
print('4')
for i in range (0, nt):
cout.write('%e ' % t[i])
cout.write('%e ' % dens_zero[i])
cout.write('%e ' % upar_zero[i])
cout.write('%e ' % temp_zero[i])
cout.write('\n')
cout.close()
exit()
|
nilq/baby-python
|
python
|
"""Data structures supporting the who wrote this news crawler.
----
Copyright 2019 Data Driven Empathy LLC
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
associated documentation files (the "Software"), to deal in the Software without restriction,
including without limitation the rights to use, copy, modify, merge, publish, distribute,
sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial
portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT
NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES
OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
class Article:
"""Data structure describing an article."""
def __init__(self, source, source_feed, title, description, publish_date,
crawl_date, link, author):
"""Create a new article record.
Args:
source: The name of the agency that published this article.
source_feed: The name for the RSS feed, helping disambiguate if there are many.
title: The title for the article.
description: The description for the article.
publish_date: The datetime.datetime for when this article was published.
crawl_date: The datetime.datetime for when this article was crawled.
link: URL where the full article can be found.
author: The author of the article.
"""
self.__source = source
self.__source_feed = source_feed
self.__title = title
self.__description = description
self.__publish_date = publish_date
self.__crawl_date = crawl_date
self.__link = link
self.__author = author
def get_source(self):
"""Get the name of the agency that published this article.
Returns:
The name of the agency that published this article like NPR.
"""
return self.__source
def get_source_feed(self):
"""Get the bame for the RSS feed in which this application was found.
Returns:
The string name for the RSS feed, helping disambiguate if there are many.
"""
return self.__source_feed
def get_title(self):
"""Get the text of the article title.
Returns:
The title for the article as a string.
"""
return self.__title
def get_description(self):
"""Get the description contents for this article.
Returns:
The description for the article as a string.
"""
return self.__description
def get_publish_date(self):
"""Get the datetime for when this article was published.
Returns:
The datetime.datetime for when this article was published.
"""
return self.__publish_date
def get_crawl_date(self):
"""Get the datetime for when this article was crawled.
Returns:
The datetime.datetime for when this article was crawled.
"""
return self.__crawl_date
def get_link(self):
"""Get the URL at which the full article can be found.
Returns:
URL where the full article can be found.
"""
return self.__link
def get_author(self):
"""Get the name of the author if provided.
Returns:
The author of the article as a string. None if no author given.
"""
return self.__author
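# Usage sketch (illustrative values only; the dates, URL and author below are made up):
if __name__ == "__main__":
    import datetime

    article = Article(
        source='NPR',
        source_feed='news',
        title='Example headline',
        description='Example description.',
        publish_date=datetime.datetime(2019, 6, 1, 8, 0),
        crawl_date=datetime.datetime(2019, 6, 1, 9, 30),
        link='https://example.com/article',
        author='Jane Doe',
    )
    print(article.get_source(), article.get_title(), article.get_link())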
|
nilq/baby-python
|
python
|
""" Module for controlling motors. """
__all__ = [
"stepper",
]
|
nilq/baby-python
|
python
|
from rest_framework import serializers
from .models import CrashCourse, CourseChapter, ChapterSection
class CrashCourseSerializer(serializers.ModelSerializer):
no_of_chapter = serializers.SerializerMethodField()
class Meta:
model = CrashCourse
fields = ('id', 'title', 'slug', 'no_of_chapter')
def get_no_of_chapter(self, obj):
return obj.coursechapter_set.count()
class CourseChapterSerializer(serializers.ModelSerializer):
class Meta:
model = CourseChapter
fields = ('id', 'title', 'slug', 'course')
class ChapterSectionListSerializer(serializers.ModelSerializer):
class Meta:
model = ChapterSection
fields = ('id', 'title', 'slug', )
class ChapterSectionDetailSerializer(serializers.ModelSerializer):
class Meta:
model = ChapterSection
fields = ('id', 'title', 'slug', 'description')
|
nilq/baby-python
|
python
|
#!/usr/bin/env python3
# Test whether a PUBLISH to a topic with QoS 2 results in the correct packet flow.
from mosq_test_helper import *
rc = 1
keepalive = 60
connect_packet = mosq_test.gen_connect("test-helper", keepalive=keepalive)
connack_packet = mosq_test.gen_connack(rc=0)
mid = 128
publish_packet = mosq_test.gen_publish("qos1/timeout/test", qos=1, mid=mid, payload="timeout-message")
puback_packet = mosq_test.gen_puback(mid)
sock = mosq_test.do_client_connect(connect_packet, connack_packet, connack_error="helper connack")
mosq_test.do_send_receive(sock, publish_packet, puback_packet, "helper puback")
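# At this point the QoS 1 exchange is complete: CONNECT/CONNACK, then a PUBLISH
# with mid=128 and qos=1 answered by the matching PUBACK.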
rc = 0
sock.close()
exit(rc)
|
nilq/baby-python
|
python
|
import time
class Logger:
def __init__(self) -> None:
pass
def log(self, message: str) -> None:
print(f'{time.ctime()}: {message}')
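# Minimal usage sketch (illustrative): each message is prefixed with time.ctime(),
# e.g. "Mon Jan  4 12:00:00 2021: starting up".
if __name__ == '__main__':
    logger = Logger()
    logger.log('starting up')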
|
nilq/baby-python
|
python
|
# coding: utf-8
from enum import Enum
from six import string_types, iteritems
from bitmovin_api_sdk.common.poscheck import poscheck_model
from bitmovin_api_sdk.models.av1_adaptive_quant_mode import Av1AdaptiveQuantMode
from bitmovin_api_sdk.models.av1_key_placement_mode import Av1KeyPlacementMode
from bitmovin_api_sdk.models.color_config import ColorConfig
from bitmovin_api_sdk.models.display_aspect_ratio import DisplayAspectRatio
from bitmovin_api_sdk.models.encoding_mode import EncodingMode
from bitmovin_api_sdk.models.pixel_format import PixelFormat
from bitmovin_api_sdk.models.video_configuration import VideoConfiguration
import pprint
import six
class Av1VideoConfiguration(VideoConfiguration):
@poscheck_model
def __init__(self,
id_=None,
name=None,
description=None,
created_at=None,
modified_at=None,
custom_data=None,
width=None,
height=None,
bitrate=None,
rate=None,
pixel_format=None,
color_config=None,
sample_aspect_ratio_numerator=None,
sample_aspect_ratio_denominator=None,
display_aspect_ratio=None,
encoding_mode=None,
key_placement_mode=None,
adaptive_quant_mode=None,
lag_in_frames=None,
min_q=None,
max_q=None,
undershoot_pct=None,
overshoot_pct=None,
client_buffer_size=None,
client_initial_buffer_size=None,
client_optimal_buffer_size=None,
tile_columns=None,
tile_rows=None,
is_automatic_alt_ref_frames_enabled=None,
arnr_max_frames=None,
arnr_strength=None,
max_intra_rate=None,
is_lossless=None,
is_frame_parallel=None,
sharpness=None,
is_frame_boost_enabled=None,
noise_sensitivity=None,
min_gf_interval=None,
max_gf_interval=None,
num_tile_groups=None,
mtu_size=None):
# type: (string_types, string_types, string_types, datetime, datetime, dict, int, int, int, float, PixelFormat, ColorConfig, int, int, DisplayAspectRatio, EncodingMode, Av1KeyPlacementMode, Av1AdaptiveQuantMode, int, int, int, int, int, int, int, int, int, int, bool, int, int, int, bool, bool, int, bool, bool, int, int, int, int) -> None
super(Av1VideoConfiguration, self).__init__(id_=id_, name=name, description=description, created_at=created_at, modified_at=modified_at, custom_data=custom_data, width=width, height=height, bitrate=bitrate, rate=rate, pixel_format=pixel_format, color_config=color_config, sample_aspect_ratio_numerator=sample_aspect_ratio_numerator, sample_aspect_ratio_denominator=sample_aspect_ratio_denominator, display_aspect_ratio=display_aspect_ratio, encoding_mode=encoding_mode)
self._key_placement_mode = None
self._adaptive_quant_mode = None
self._lag_in_frames = None
self._min_q = None
self._max_q = None
self._undershoot_pct = None
self._overshoot_pct = None
self._client_buffer_size = None
self._client_initial_buffer_size = None
self._client_optimal_buffer_size = None
self._tile_columns = None
self._tile_rows = None
self._is_automatic_alt_ref_frames_enabled = None
self._arnr_max_frames = None
self._arnr_strength = None
self._max_intra_rate = None
self._is_lossless = None
self._is_frame_parallel = None
self._sharpness = None
self._is_frame_boost_enabled = None
self._noise_sensitivity = None
self._min_gf_interval = None
self._max_gf_interval = None
self._num_tile_groups = None
self._mtu_size = None
self.discriminator = None
if key_placement_mode is not None:
self.key_placement_mode = key_placement_mode
if adaptive_quant_mode is not None:
self.adaptive_quant_mode = adaptive_quant_mode
if lag_in_frames is not None:
self.lag_in_frames = lag_in_frames
if min_q is not None:
self.min_q = min_q
if max_q is not None:
self.max_q = max_q
if undershoot_pct is not None:
self.undershoot_pct = undershoot_pct
if overshoot_pct is not None:
self.overshoot_pct = overshoot_pct
if client_buffer_size is not None:
self.client_buffer_size = client_buffer_size
if client_initial_buffer_size is not None:
self.client_initial_buffer_size = client_initial_buffer_size
if client_optimal_buffer_size is not None:
self.client_optimal_buffer_size = client_optimal_buffer_size
if tile_columns is not None:
self.tile_columns = tile_columns
if tile_rows is not None:
self.tile_rows = tile_rows
if is_automatic_alt_ref_frames_enabled is not None:
self.is_automatic_alt_ref_frames_enabled = is_automatic_alt_ref_frames_enabled
if arnr_max_frames is not None:
self.arnr_max_frames = arnr_max_frames
if arnr_strength is not None:
self.arnr_strength = arnr_strength
if max_intra_rate is not None:
self.max_intra_rate = max_intra_rate
if is_lossless is not None:
self.is_lossless = is_lossless
if is_frame_parallel is not None:
self.is_frame_parallel = is_frame_parallel
if sharpness is not None:
self.sharpness = sharpness
if is_frame_boost_enabled is not None:
self.is_frame_boost_enabled = is_frame_boost_enabled
if noise_sensitivity is not None:
self.noise_sensitivity = noise_sensitivity
if min_gf_interval is not None:
self.min_gf_interval = min_gf_interval
if max_gf_interval is not None:
self.max_gf_interval = max_gf_interval
if num_tile_groups is not None:
self.num_tile_groups = num_tile_groups
if mtu_size is not None:
self.mtu_size = mtu_size
@property
def openapi_types(self):
types = {}
if hasattr(super(Av1VideoConfiguration, self), 'openapi_types'):
types = getattr(super(Av1VideoConfiguration, self), 'openapi_types')
types.update({
'key_placement_mode': 'Av1KeyPlacementMode',
'adaptive_quant_mode': 'Av1AdaptiveQuantMode',
'lag_in_frames': 'int',
'min_q': 'int',
'max_q': 'int',
'undershoot_pct': 'int',
'overshoot_pct': 'int',
'client_buffer_size': 'int',
'client_initial_buffer_size': 'int',
'client_optimal_buffer_size': 'int',
'tile_columns': 'int',
'tile_rows': 'int',
'is_automatic_alt_ref_frames_enabled': 'bool',
'arnr_max_frames': 'int',
'arnr_strength': 'int',
'max_intra_rate': 'int',
'is_lossless': 'bool',
'is_frame_parallel': 'bool',
'sharpness': 'int',
'is_frame_boost_enabled': 'bool',
'noise_sensitivity': 'bool',
'min_gf_interval': 'int',
'max_gf_interval': 'int',
'num_tile_groups': 'int',
'mtu_size': 'int'
})
return types
@property
def attribute_map(self):
attributes = {}
if hasattr(super(Av1VideoConfiguration, self), 'attribute_map'):
attributes = getattr(super(Av1VideoConfiguration, self), 'attribute_map')
attributes.update({
'key_placement_mode': 'keyPlacementMode',
'adaptive_quant_mode': 'adaptiveQuantMode',
'lag_in_frames': 'lagInFrames',
'min_q': 'minQ',
'max_q': 'maxQ',
'undershoot_pct': 'undershootPct',
'overshoot_pct': 'overshootPct',
'client_buffer_size': 'clientBufferSize',
'client_initial_buffer_size': 'clientInitialBufferSize',
'client_optimal_buffer_size': 'clientOptimalBufferSize',
'tile_columns': 'tileColumns',
'tile_rows': 'tileRows',
'is_automatic_alt_ref_frames_enabled': 'isAutomaticAltRefFramesEnabled',
'arnr_max_frames': 'arnrMaxFrames',
'arnr_strength': 'arnrStrength',
'max_intra_rate': 'maxIntraRate',
'is_lossless': 'isLossless',
'is_frame_parallel': 'isFrameParallel',
'sharpness': 'sharpness',
'is_frame_boost_enabled': 'isFrameBoostEnabled',
'noise_sensitivity': 'noiseSensitivity',
'min_gf_interval': 'minGfInterval',
'max_gf_interval': 'maxGfInterval',
'num_tile_groups': 'numTileGroups',
'mtu_size': 'mtuSize'
})
return attributes
@property
def key_placement_mode(self):
# type: () -> Av1KeyPlacementMode
"""Gets the key_placement_mode of this Av1VideoConfiguration.
:return: The key_placement_mode of this Av1VideoConfiguration.
:rtype: Av1KeyPlacementMode
"""
return self._key_placement_mode
@key_placement_mode.setter
def key_placement_mode(self, key_placement_mode):
# type: (Av1KeyPlacementMode) -> None
"""Sets the key_placement_mode of this Av1VideoConfiguration.
:param key_placement_mode: The key_placement_mode of this Av1VideoConfiguration.
:type: Av1KeyPlacementMode
"""
if key_placement_mode is not None:
if not isinstance(key_placement_mode, Av1KeyPlacementMode):
raise TypeError("Invalid type for `key_placement_mode`, type has to be `Av1KeyPlacementMode`")
self._key_placement_mode = key_placement_mode
@property
def adaptive_quant_mode(self):
# type: () -> Av1AdaptiveQuantMode
"""Gets the adaptive_quant_mode of this Av1VideoConfiguration.
:return: The adaptive_quant_mode of this Av1VideoConfiguration.
:rtype: Av1AdaptiveQuantMode
"""
return self._adaptive_quant_mode
@adaptive_quant_mode.setter
def adaptive_quant_mode(self, adaptive_quant_mode):
# type: (Av1AdaptiveQuantMode) -> None
"""Sets the adaptive_quant_mode of this Av1VideoConfiguration.
:param adaptive_quant_mode: The adaptive_quant_mode of this Av1VideoConfiguration.
:type: Av1AdaptiveQuantMode
"""
if adaptive_quant_mode is not None:
if not isinstance(adaptive_quant_mode, Av1AdaptiveQuantMode):
raise TypeError("Invalid type for `adaptive_quant_mode`, type has to be `Av1AdaptiveQuantMode`")
self._adaptive_quant_mode = adaptive_quant_mode
@property
def lag_in_frames(self):
# type: () -> int
"""Gets the lag_in_frames of this Av1VideoConfiguration.
Number of frames to look ahead for alternate reference frame selection
:return: The lag_in_frames of this Av1VideoConfiguration.
:rtype: int
"""
return self._lag_in_frames
@lag_in_frames.setter
def lag_in_frames(self, lag_in_frames):
# type: (int) -> None
"""Sets the lag_in_frames of this Av1VideoConfiguration.
Number of frames to look ahead for alternate reference frame selection
:param lag_in_frames: The lag_in_frames of this Av1VideoConfiguration.
:type: int
"""
if lag_in_frames is not None:
if lag_in_frames is not None and lag_in_frames > 25:
raise ValueError("Invalid value for `lag_in_frames`, must be a value less than or equal to `25`")
if lag_in_frames is not None and lag_in_frames < 0:
raise ValueError("Invalid value for `lag_in_frames`, must be a value greater than or equal to `0`")
if not isinstance(lag_in_frames, int):
raise TypeError("Invalid type for `lag_in_frames`, type has to be `int`")
self._lag_in_frames = lag_in_frames
@property
def min_q(self):
# type: () -> int
"""Gets the min_q of this Av1VideoConfiguration.
Minimum (best quality) quantizer
:return: The min_q of this Av1VideoConfiguration.
:rtype: int
"""
return self._min_q
@min_q.setter
def min_q(self, min_q):
# type: (int) -> None
"""Sets the min_q of this Av1VideoConfiguration.
Minimum (best quality) quantizer
:param min_q: The min_q of this Av1VideoConfiguration.
:type: int
"""
if min_q is not None:
if not isinstance(min_q, int):
raise TypeError("Invalid type for `min_q`, type has to be `int`")
self._min_q = min_q
@property
def max_q(self):
# type: () -> int
"""Gets the max_q of this Av1VideoConfiguration.
Maximum (worst quality) quantizer
:return: The max_q of this Av1VideoConfiguration.
:rtype: int
"""
return self._max_q
@max_q.setter
def max_q(self, max_q):
# type: (int) -> None
"""Sets the max_q of this Av1VideoConfiguration.
Maximum (worst quality) quantizer
:param max_q: The max_q of this Av1VideoConfiguration.
:type: int
"""
if max_q is not None:
if not isinstance(max_q, int):
raise TypeError("Invalid type for `max_q`, type has to be `int`")
self._max_q = max_q
@property
def undershoot_pct(self):
# type: () -> int
"""Gets the undershoot_pct of this Av1VideoConfiguration.
Rate control adaptation undershoot control
:return: The undershoot_pct of this Av1VideoConfiguration.
:rtype: int
"""
return self._undershoot_pct
@undershoot_pct.setter
def undershoot_pct(self, undershoot_pct):
# type: (int) -> None
"""Sets the undershoot_pct of this Av1VideoConfiguration.
Rate control adaptation undershoot control
:param undershoot_pct: The undershoot_pct of this Av1VideoConfiguration.
:type: int
"""
if undershoot_pct is not None:
if undershoot_pct is not None and undershoot_pct > 1000:
raise ValueError("Invalid value for `undershoot_pct`, must be a value less than or equal to `1000`")
if undershoot_pct is not None and undershoot_pct < 0:
raise ValueError("Invalid value for `undershoot_pct`, must be a value greater than or equal to `0`")
if not isinstance(undershoot_pct, int):
raise TypeError("Invalid type for `undershoot_pct`, type has to be `int`")
self._undershoot_pct = undershoot_pct
@property
def overshoot_pct(self):
# type: () -> int
"""Gets the overshoot_pct of this Av1VideoConfiguration.
Rate control adaptation overshoot control
:return: The overshoot_pct of this Av1VideoConfiguration.
:rtype: int
"""
return self._overshoot_pct
@overshoot_pct.setter
def overshoot_pct(self, overshoot_pct):
# type: (int) -> None
"""Sets the overshoot_pct of this Av1VideoConfiguration.
Rate control adaptation overshoot control
:param overshoot_pct: The overshoot_pct of this Av1VideoConfiguration.
:type: int
"""
if overshoot_pct is not None:
if overshoot_pct is not None and overshoot_pct > 1000:
raise ValueError("Invalid value for `overshoot_pct`, must be a value less than or equal to `1000`")
if overshoot_pct is not None and overshoot_pct < 0:
raise ValueError("Invalid value for `overshoot_pct`, must be a value greater than or equal to `0`")
if not isinstance(overshoot_pct, int):
raise TypeError("Invalid type for `overshoot_pct`, type has to be `int`")
self._overshoot_pct = overshoot_pct
@property
def client_buffer_size(self):
# type: () -> int
"""Gets the client_buffer_size of this Av1VideoConfiguration.
Decoder buffer size in milliseconds
:return: The client_buffer_size of this Av1VideoConfiguration.
:rtype: int
"""
return self._client_buffer_size
@client_buffer_size.setter
def client_buffer_size(self, client_buffer_size):
# type: (int) -> None
"""Sets the client_buffer_size of this Av1VideoConfiguration.
Decoder buffer size in milliseconds
:param client_buffer_size: The client_buffer_size of this Av1VideoConfiguration.
:type: int
"""
if client_buffer_size is not None:
if not isinstance(client_buffer_size, int):
raise TypeError("Invalid type for `client_buffer_size`, type has to be `int`")
self._client_buffer_size = client_buffer_size
@property
def client_initial_buffer_size(self):
# type: () -> int
"""Gets the client_initial_buffer_size of this Av1VideoConfiguration.
Decoder buffer initial size in milliseconds
:return: The client_initial_buffer_size of this Av1VideoConfiguration.
:rtype: int
"""
return self._client_initial_buffer_size
@client_initial_buffer_size.setter
def client_initial_buffer_size(self, client_initial_buffer_size):
# type: (int) -> None
"""Sets the client_initial_buffer_size of this Av1VideoConfiguration.
Decoder buffer initial size in milliseconds
:param client_initial_buffer_size: The client_initial_buffer_size of this Av1VideoConfiguration.
:type: int
"""
if client_initial_buffer_size is not None:
if not isinstance(client_initial_buffer_size, int):
raise TypeError("Invalid type for `client_initial_buffer_size`, type has to be `int`")
self._client_initial_buffer_size = client_initial_buffer_size
@property
def client_optimal_buffer_size(self):
# type: () -> int
"""Gets the client_optimal_buffer_size of this Av1VideoConfiguration.
Decoder buffer optimal size in milliseconds
:return: The client_optimal_buffer_size of this Av1VideoConfiguration.
:rtype: int
"""
return self._client_optimal_buffer_size
@client_optimal_buffer_size.setter
def client_optimal_buffer_size(self, client_optimal_buffer_size):
# type: (int) -> None
"""Sets the client_optimal_buffer_size of this Av1VideoConfiguration.
Decoder buffer optimal size in milliseconds
:param client_optimal_buffer_size: The client_optimal_buffer_size of this Av1VideoConfiguration.
:type: int
"""
if client_optimal_buffer_size is not None:
if not isinstance(client_optimal_buffer_size, int):
raise TypeError("Invalid type for `client_optimal_buffer_size`, type has to be `int`")
self._client_optimal_buffer_size = client_optimal_buffer_size
@property
def tile_columns(self):
# type: () -> int
"""Gets the tile_columns of this Av1VideoConfiguration.
Number of tile columns to use, log2
:return: The tile_columns of this Av1VideoConfiguration.
:rtype: int
"""
return self._tile_columns
@tile_columns.setter
def tile_columns(self, tile_columns):
# type: (int) -> None
"""Sets the tile_columns of this Av1VideoConfiguration.
Number of tile columns to use, log2
:param tile_columns: The tile_columns of this Av1VideoConfiguration.
:type: int
"""
if tile_columns is not None:
if tile_columns is not None and tile_columns > 6:
raise ValueError("Invalid value for `tile_columns`, must be a value less than or equal to `6`")
if tile_columns is not None and tile_columns < 0:
raise ValueError("Invalid value for `tile_columns`, must be a value greater than or equal to `0`")
if not isinstance(tile_columns, int):
raise TypeError("Invalid type for `tile_columns`, type has to be `int`")
self._tile_columns = tile_columns
@property
def tile_rows(self):
# type: () -> int
"""Gets the tile_rows of this Av1VideoConfiguration.
Number of tile rows to use, log2
:return: The tile_rows of this Av1VideoConfiguration.
:rtype: int
"""
return self._tile_rows
@tile_rows.setter
def tile_rows(self, tile_rows):
# type: (int) -> None
"""Sets the tile_rows of this Av1VideoConfiguration.
Number of tile rows to use, log2
:param tile_rows: The tile_rows of this Av1VideoConfiguration.
:type: int
"""
if tile_rows is not None:
if tile_rows is not None and tile_rows > 2:
raise ValueError("Invalid value for `tile_rows`, must be a value less than or equal to `2`")
if tile_rows is not None and tile_rows < 0:
raise ValueError("Invalid value for `tile_rows`, must be a value greater than or equal to `0`")
if not isinstance(tile_rows, int):
raise TypeError("Invalid type for `tile_rows`, type has to be `int`")
self._tile_rows = tile_rows
@property
def is_automatic_alt_ref_frames_enabled(self):
# type: () -> bool
"""Gets the is_automatic_alt_ref_frames_enabled of this Av1VideoConfiguration.
        Enable automatic setting and use of alt-ref frames
:return: The is_automatic_alt_ref_frames_enabled of this Av1VideoConfiguration.
:rtype: bool
"""
return self._is_automatic_alt_ref_frames_enabled
@is_automatic_alt_ref_frames_enabled.setter
def is_automatic_alt_ref_frames_enabled(self, is_automatic_alt_ref_frames_enabled):
# type: (bool) -> None
"""Sets the is_automatic_alt_ref_frames_enabled of this Av1VideoConfiguration.
        Enable automatic setting and use of alt-ref frames
:param is_automatic_alt_ref_frames_enabled: The is_automatic_alt_ref_frames_enabled of this Av1VideoConfiguration.
:type: bool
"""
if is_automatic_alt_ref_frames_enabled is not None:
if not isinstance(is_automatic_alt_ref_frames_enabled, bool):
raise TypeError("Invalid type for `is_automatic_alt_ref_frames_enabled`, type has to be `bool`")
self._is_automatic_alt_ref_frames_enabled = is_automatic_alt_ref_frames_enabled
@property
def arnr_max_frames(self):
# type: () -> int
"""Gets the arnr_max_frames of this Av1VideoConfiguration.
The max number of frames to create arf
:return: The arnr_max_frames of this Av1VideoConfiguration.
:rtype: int
"""
return self._arnr_max_frames
@arnr_max_frames.setter
def arnr_max_frames(self, arnr_max_frames):
# type: (int) -> None
"""Sets the arnr_max_frames of this Av1VideoConfiguration.
The max number of frames to create arf
:param arnr_max_frames: The arnr_max_frames of this Av1VideoConfiguration.
:type: int
"""
if arnr_max_frames is not None:
if not isinstance(arnr_max_frames, int):
raise TypeError("Invalid type for `arnr_max_frames`, type has to be `int`")
self._arnr_max_frames = arnr_max_frames
@property
def arnr_strength(self):
# type: () -> int
"""Gets the arnr_strength of this Av1VideoConfiguration.
The filter strength for the arf
:return: The arnr_strength of this Av1VideoConfiguration.
:rtype: int
"""
return self._arnr_strength
@arnr_strength.setter
def arnr_strength(self, arnr_strength):
# type: (int) -> None
"""Sets the arnr_strength of this Av1VideoConfiguration.
The filter strength for the arf
:param arnr_strength: The arnr_strength of this Av1VideoConfiguration.
:type: int
"""
if arnr_strength is not None:
if not isinstance(arnr_strength, int):
raise TypeError("Invalid type for `arnr_strength`, type has to be `int`")
self._arnr_strength = arnr_strength
@property
def max_intra_rate(self):
# type: () -> int
"""Gets the max_intra_rate of this Av1VideoConfiguration.
Maximum data rate for intra frames, expressed as a percentage of the average per-frame bitrate. Default value 0 meaning unlimited
:return: The max_intra_rate of this Av1VideoConfiguration.
:rtype: int
"""
return self._max_intra_rate
@max_intra_rate.setter
def max_intra_rate(self, max_intra_rate):
# type: (int) -> None
"""Sets the max_intra_rate of this Av1VideoConfiguration.
Maximum data rate for intra frames, expressed as a percentage of the average per-frame bitrate. Default value 0 meaning unlimited
:param max_intra_rate: The max_intra_rate of this Av1VideoConfiguration.
:type: int
"""
if max_intra_rate is not None:
if not isinstance(max_intra_rate, int):
raise TypeError("Invalid type for `max_intra_rate`, type has to be `int`")
self._max_intra_rate = max_intra_rate
@property
def is_lossless(self):
# type: () -> bool
"""Gets the is_lossless of this Av1VideoConfiguration.
Lossless encoding mode
:return: The is_lossless of this Av1VideoConfiguration.
:rtype: bool
"""
return self._is_lossless
@is_lossless.setter
def is_lossless(self, is_lossless):
# type: (bool) -> None
"""Sets the is_lossless of this Av1VideoConfiguration.
Lossless encoding mode
:param is_lossless: The is_lossless of this Av1VideoConfiguration.
:type: bool
"""
if is_lossless is not None:
if not isinstance(is_lossless, bool):
raise TypeError("Invalid type for `is_lossless`, type has to be `bool`")
self._is_lossless = is_lossless
@property
def is_frame_parallel(self):
# type: () -> bool
"""Gets the is_frame_parallel of this Av1VideoConfiguration.
Enable frame parallel decoding feature
:return: The is_frame_parallel of this Av1VideoConfiguration.
:rtype: bool
"""
return self._is_frame_parallel
@is_frame_parallel.setter
def is_frame_parallel(self, is_frame_parallel):
# type: (bool) -> None
"""Sets the is_frame_parallel of this Av1VideoConfiguration.
Enable frame parallel decoding feature
:param is_frame_parallel: The is_frame_parallel of this Av1VideoConfiguration.
:type: bool
"""
if is_frame_parallel is not None:
if not isinstance(is_frame_parallel, bool):
raise TypeError("Invalid type for `is_frame_parallel`, type has to be `bool`")
self._is_frame_parallel = is_frame_parallel
@property
def sharpness(self):
# type: () -> int
"""Gets the sharpness of this Av1VideoConfiguration.
Sets the sharpness
:return: The sharpness of this Av1VideoConfiguration.
:rtype: int
"""
return self._sharpness
@sharpness.setter
def sharpness(self, sharpness):
# type: (int) -> None
"""Sets the sharpness of this Av1VideoConfiguration.
Sets the sharpness
:param sharpness: The sharpness of this Av1VideoConfiguration.
:type: int
"""
if sharpness is not None:
if not isinstance(sharpness, int):
raise TypeError("Invalid type for `sharpness`, type has to be `int`")
self._sharpness = sharpness
@property
def is_frame_boost_enabled(self):
# type: () -> bool
"""Gets the is_frame_boost_enabled of this Av1VideoConfiguration.
Enable quality boost by lowering frame level Q periodically
:return: The is_frame_boost_enabled of this Av1VideoConfiguration.
:rtype: bool
"""
return self._is_frame_boost_enabled
@is_frame_boost_enabled.setter
def is_frame_boost_enabled(self, is_frame_boost_enabled):
# type: (bool) -> None
"""Sets the is_frame_boost_enabled of this Av1VideoConfiguration.
Enable quality boost by lowering frame level Q periodically
:param is_frame_boost_enabled: The is_frame_boost_enabled of this Av1VideoConfiguration.
:type: bool
"""
if is_frame_boost_enabled is not None:
if not isinstance(is_frame_boost_enabled, bool):
raise TypeError("Invalid type for `is_frame_boost_enabled`, type has to be `bool`")
self._is_frame_boost_enabled = is_frame_boost_enabled
@property
def noise_sensitivity(self):
# type: () -> bool
"""Gets the noise_sensitivity of this Av1VideoConfiguration.
Enable noise sensitivity on Y channel
:return: The noise_sensitivity of this Av1VideoConfiguration.
:rtype: bool
"""
return self._noise_sensitivity
@noise_sensitivity.setter
def noise_sensitivity(self, noise_sensitivity):
# type: (bool) -> None
"""Sets the noise_sensitivity of this Av1VideoConfiguration.
Enable noise sensitivity on Y channel
:param noise_sensitivity: The noise_sensitivity of this Av1VideoConfiguration.
:type: bool
"""
if noise_sensitivity is not None:
if not isinstance(noise_sensitivity, bool):
raise TypeError("Invalid type for `noise_sensitivity`, type has to be `bool`")
self._noise_sensitivity = noise_sensitivity
@property
def min_gf_interval(self):
# type: () -> int
"""Gets the min_gf_interval of this Av1VideoConfiguration.
Minimum interval between GF/ARF frames
:return: The min_gf_interval of this Av1VideoConfiguration.
:rtype: int
"""
return self._min_gf_interval
@min_gf_interval.setter
def min_gf_interval(self, min_gf_interval):
# type: (int) -> None
"""Sets the min_gf_interval of this Av1VideoConfiguration.
Minimum interval between GF/ARF frames
:param min_gf_interval: The min_gf_interval of this Av1VideoConfiguration.
:type: int
"""
if min_gf_interval is not None:
if not isinstance(min_gf_interval, int):
raise TypeError("Invalid type for `min_gf_interval`, type has to be `int`")
self._min_gf_interval = min_gf_interval
@property
def max_gf_interval(self):
# type: () -> int
"""Gets the max_gf_interval of this Av1VideoConfiguration.
Maximum interval between GF/ARF frames
:return: The max_gf_interval of this Av1VideoConfiguration.
:rtype: int
"""
return self._max_gf_interval
@max_gf_interval.setter
def max_gf_interval(self, max_gf_interval):
# type: (int) -> None
"""Sets the max_gf_interval of this Av1VideoConfiguration.
Maximum interval between GF/ARF frames
:param max_gf_interval: The max_gf_interval of this Av1VideoConfiguration.
:type: int
"""
if max_gf_interval is not None:
if not isinstance(max_gf_interval, int):
raise TypeError("Invalid type for `max_gf_interval`, type has to be `int`")
self._max_gf_interval = max_gf_interval
@property
def num_tile_groups(self):
# type: () -> int
"""Gets the num_tile_groups of this Av1VideoConfiguration.
Maximum number of tile groups
:return: The num_tile_groups of this Av1VideoConfiguration.
:rtype: int
"""
return self._num_tile_groups
@num_tile_groups.setter
def num_tile_groups(self, num_tile_groups):
# type: (int) -> None
"""Sets the num_tile_groups of this Av1VideoConfiguration.
Maximum number of tile groups
:param num_tile_groups: The num_tile_groups of this Av1VideoConfiguration.
:type: int
"""
if num_tile_groups is not None:
if not isinstance(num_tile_groups, int):
raise TypeError("Invalid type for `num_tile_groups`, type has to be `int`")
self._num_tile_groups = num_tile_groups
@property
def mtu_size(self):
# type: () -> int
"""Gets the mtu_size of this Av1VideoConfiguration.
Maximum number of bytes in a tile group
:return: The mtu_size of this Av1VideoConfiguration.
:rtype: int
"""
return self._mtu_size
@mtu_size.setter
def mtu_size(self, mtu_size):
# type: (int) -> None
"""Sets the mtu_size of this Av1VideoConfiguration.
Maximum number of bytes in a tile group
:param mtu_size: The mtu_size of this Av1VideoConfiguration.
:type: int
"""
if mtu_size is not None:
if not isinstance(mtu_size, int):
raise TypeError("Invalid type for `mtu_size`, type has to be `int`")
self._mtu_size = mtu_size
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
if hasattr(super(Av1VideoConfiguration, self), "to_dict"):
result = super(Av1VideoConfiguration, self).to_dict()
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if value is None:
continue
if isinstance(value, list):
if len(value) == 0:
continue
result[self.attribute_map.get(attr)] = [y.value if isinstance(y, Enum) else y for y in [x.to_dict() if hasattr(x, "to_dict") else x for x in value]]
elif hasattr(value, "to_dict"):
result[self.attribute_map.get(attr)] = value.to_dict()
elif isinstance(value, Enum):
result[self.attribute_map.get(attr)] = value.value
elif isinstance(value, dict):
result[self.attribute_map.get(attr)] = {k: (v.to_dict() if hasattr(v, "to_dict") else v) for (k, v) in value.items()}
else:
result[self.attribute_map.get(attr)] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, Av1VideoConfiguration):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
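# Hedged usage sketch (not taken from the SDK documentation): constructs a
# configuration with a few of the validated fields above and serialises it with
# to_dict(). The values are illustrative; a real Bitmovin encoding may require
# additional fields.
if __name__ == '__main__':
    config = Av1VideoConfiguration(
        name='av1_1080p_sketch',
        width=1920,
        height=1080,
        bitrate=3000000,
        lag_in_frames=25,                          # within the 0-25 range enforced by the setter
        tile_columns=2,                            # log2, within the 0-6 range enforced by the setter
        tile_rows=1,                               # log2, within the 0-2 range enforced by the setter
        is_automatic_alt_ref_frames_enabled=True)
    print(config.to_dict())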
|
nilq/baby-python
|
python
|
#!/usr/bin/env python
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# This script takes an existing tfrecord dataset and generates a new one
# with the images resized.
# E.g.,
# python tensorflow_image_resizer.py \
# -i /path/to/imagenet-full-tfrecord/ -o /path/to/imagenet-new-tfrecord/ --subset_name train
# python tensorflow_image_resizer.py \
# -i /path/to/imagenet-full-tfrecord/ -o /path/to/imagenet-new-tfrecord/ --subset_name validation
from __future__ import print_function
from builtins import range
from multiprocessing import cpu_count
import os
import tensorflow as tf
import time
global FLAGS
class Dataset(object):
def __init__(self, name, data_dir=None):
self.name = name
if data_dir is None:
data_dir = FLAGS.data_dir
self.data_dir = data_dir
def data_files(self, subset):
tf_record_pattern = os.path.join(self.data_dir, '%s-*' % subset)
data_files = tf.gfile.Glob(tf_record_pattern)
if not data_files:
raise RuntimeError('No files found for %s dataset at %s' %
(subset, self.data_dir))
return data_files
def reader(self):
return tf.TFRecordReader()
def num_classes(self):
raise NotImplementedError
def num_examples_per_epoch(self, subset):
raise NotImplementedError
def __str__(self):
return self.name
class ImagenetData(Dataset):
def __init__(self, data_dir=None):
super(ImagenetData, self).__init__('ImageNet', data_dir)
def num_classes(self):
return 1000
def num_examples_per_epoch(self, subset):
if subset == 'train': return 1281167
elif subset == 'validation': return 50000
else: raise ValueError('Invalid data subset "%s"' % subset)
class FlowersData(Dataset):
def __init__(self, data_dir=None):
super(FlowersData, self).__init__('Flowers', data_dir)
def num_classes(self):
return 5
def num_examples_per_epoch(self, subset):
if subset == 'train': return 3170
elif subset == 'validation': return 500
else: raise ValueError('Invalid data subset "%s"' % subset)
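# Hedged note (not in the original script): Dataset.data_files() above globs
# '<data_dir>/<subset>-*', so e.g. ImagenetData('/data/imagenet').data_files('train')
# would match shards named like 'train-00000-of-01024'; the paths here are
# placeholders, matching the '%s-%05d-of-%05d' naming used when writing output below.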
def resize_example(example):
# Dense features in Example proto.
feature_map = {
'image/encoded': tf.FixedLenFeature([], dtype=tf.string, default_value=''),
'image/height': tf.FixedLenFeature([1], dtype=tf.int64, default_value=-1),
'image/width': tf.FixedLenFeature([1], dtype=tf.int64, default_value=-1),
'image/channels': tf.FixedLenFeature([1], dtype=tf.int64, default_value=-1),
'image/colorspace': tf.FixedLenFeature([], dtype=tf.string, default_value=''),
'image/class/label': tf.FixedLenFeature([1], dtype=tf.int64, default_value=-1),
'image/class/text': tf.FixedLenFeature([], dtype=tf.string, default_value=''),
'image/class/synset': tf.FixedLenFeature([], dtype=tf.string, default_value=''),
'image/format': tf.FixedLenFeature([], dtype=tf.string, default_value=''),
'image/filename': tf.FixedLenFeature([], dtype=tf.string, default_value=''),
}
sparse_float32 = tf.VarLenFeature(dtype=tf.float32)
# Sparse features in Example proto.
feature_map.update(
#{k: sparse_float32 for k in ['image/object/bbox/xmin',
{k: tf.VarLenFeature(dtype=tf.float32) for k in ['image/object/bbox/xmin',
'image/object/bbox/ymin',
'image/object/bbox/xmax',
'image/object/bbox/ymax']})
example = tf.parse_single_example(example, feature_map)
encoded_image = example['image/encoded']
decoded = tf.image.decode_jpeg(encoded_image, channels = 3)
#decoded = tf.Print(decoded, [tf.shape(decoded)])
if FLAGS.stretch:
# Stretch to a fixed square
new_height, new_width = FLAGS.size, FLAGS.size
else:
# Preserve aspect ratio and only resize if shorter side > FLAGS.size
shape = tf.shape(decoded)
h, w = tf.to_float(shape[0]), tf.to_float(shape[1])
min_side = tf.minimum(h, w)
scale = float(FLAGS.size) / min_side
scale = tf.minimum(scale, 1.0) # Shrink only
# HACK TESTING upscaling small images to 320
#dnscale = tf.minimum(float(FLAGS.size) / min_side, 1.0)
#upscale = tf.maximum(320. / min_side, 1.0)
#scale = dnscale * upscale
new_height = tf.cast(scale * h, tf.int32)
new_width = tf.cast(scale * w, tf.int32)
#decoded = tf.Print(decoded, [new_height, new_width])
resized_float = tf.image.resize_images(
images = decoded,
size = [new_height, new_width],
method = tf.image.ResizeMethod.BILINEAR,
align_corners = False)
#resized_float = tf.Print(resized_float, [tf.reduce_min(resized_float),
# tf.reduce_max(resized_float)])
resized_uint8 = tf.cast(resized_float, tf.uint8)
encoded_resized = tf.image.encode_jpeg(
resized_uint8,
format='rgb',
quality=FLAGS.quality,
progressive=False,
optimize_size=True,
chroma_downsampling=True,
density_unit='in')
"""
# HACK TESTING
#print 'xmin, xmax', example['image/object/bbox/xmin'], example['image/object/bbox/xmin']
#example['image/object/bbox/xmin'] = tf.Print(example['image/object/bbox/xmin'].values,
# [example['image/object/bbox/xmin'].values])
# HACK TESTING
print '*******', example['image/object/bbox/xmin'].values
bbox = tf.stack([example['image/object/bbox/%s'%x].values
for x in ['ymin', 'xmin', 'ymax', 'xmax']])
bbox = tf.transpose(tf.expand_dims(bbox, 0), [0,2,1])
encoded_resized = tf.Print(encoded_resized,
[bbox, example['image/object/bbox/xmin'].values])
"""
return [encoded_resized,
example['image/height'],
example['image/width'],
example['image/channels'],
example['image/colorspace'],
example['image/class/label'],
example['image/class/text'],
example['image/class/synset'],
example['image/format'],
example['image/filename'],
example['image/object/bbox/xmin'],
example['image/object/bbox/ymin'],
example['image/object/bbox/xmax'],
example['image/object/bbox/ymax']]
def int64_feature(value):
if not isinstance(value, list):
value = [value]
return tf.train.Feature(int64_list=tf.train.Int64List(value=value))
def bytes_feature(value):
if not isinstance(value, list):
value = [value]
return tf.train.Feature(bytes_list=tf.train.BytesList(value=value))
def float_feature(value):
if not isinstance(value, list):
value = [value]
return tf.train.Feature(float_list=tf.train.FloatList(value=value))
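# Hedged illustration (defined but never called): shows how the three helpers above
# compose into a tf.train.Example, mirroring the record layout written in the main
# loop below. The field values are placeholders, not taken from any real record.
def _example_feature_sketch():
    return tf.train.Example(features=tf.train.Features(feature={
        'image/height': int64_feature(480),
        'image/format': bytes_feature(b'JPEG'),
        'image/object/bbox/xmin': float_feature([0.1, 0.25])}))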
if __name__ == "__main__":
import argparse
import glob
import sys
global FLAGS
cmdline = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
cmdline.add_argument('-i', '--input_dir', required=True)
cmdline.add_argument('-o', '--output_dir', required=True)
cmdline.add_argument('-f', '--force', action="store_true")
cmdline.add_argument('-s', '--subset_name', default='train')
cmdline.add_argument('-R', '--stretch', action="store_true")
cmdline.add_argument('-d', '--dataset_name', default=None)
cmdline.add_argument('-r', '--size', default=480, type=int)
cmdline.add_argument('-Q', '--quality', default=85, type=int)
cmdline.add_argument('--start_offset', default=0, type=int)
cmdline.add_argument('--num_preprocess_threads', default=0, type=int,
help="""Number of preprocessing threads.""")
cmdline.add_argument('--num_intra_threads', default=0, type=int,
help="""Number of threads to use for intra-op
parallelism. If set to 0, the system will pick
an appropriate number.""")
cmdline.add_argument('--num_inter_threads', default=0, type=int,
help="""Number of threads to use for inter-op
parallelism. If set to 0, the system will pick
an appropriate number.""")
FLAGS, unknown_args = cmdline.parse_known_args()
if not FLAGS.num_preprocess_threads:
FLAGS.num_preprocess_threads = cpu_count()
if FLAGS.dataset_name is None:
if "imagenet" in FLAGS.input_dir: FLAGS.dataset_name = "imagenet"
elif "flowers" in FLAGS.input_dir: FLAGS.dataset_name = "flowers"
else: raise ValueError("Could not identify name of dataset. Please specify with --data_name option.")
if FLAGS.dataset_name == "imagenet": dataset = ImagenetData(FLAGS.input_dir)
elif FLAGS.dataset_name == "flowers": dataset = FlowersData(FLAGS.input_dir)
else: raise ValueError("Unknown dataset. Must be one of imagenet or flowers.")
infiles = dataset.data_files(FLAGS.subset_name)
num_shards = len(infiles)
infiles = infiles[FLAGS.start_offset:]
num_examples = dataset.num_examples_per_epoch(FLAGS.subset_name)
examples_per_shard = (num_examples-1) // num_shards + 1
print(" num_preprocess_threads : {}\n examples_per_shard is {}\n "
"num_intra_threads is {}\n num_inter_threads is {}".format(FLAGS.num_preprocess_threads, examples_per_shard,
FLAGS.num_inter_threads, FLAGS.num_intra_threads))
config = tf.ConfigProto(
inter_op_parallelism_threads = FLAGS.num_inter_threads,
intra_op_parallelism_threads = FLAGS.num_intra_threads)
config.gpu_options.allow_growth = True
sess = tf.Session(config=config)
filename_queue = tf.train.string_input_producer(
string_tensor = infiles,
shuffle = False,
capacity = examples_per_shard * FLAGS.num_preprocess_threads,
shared_name = 'filename_queue',
name = 'filename_queue',
num_epochs = 1)
reader = tf.TFRecordReader()
_, read_op = reader.read(filename_queue)
examples_queue = tf.FIFOQueue(
capacity = 2 * examples_per_shard * FLAGS.num_preprocess_threads,
dtypes=[tf.string])
example_enqueue_op = examples_queue.enqueue(read_op)
tf.train.queue_runner.add_queue_runner(
tf.train.QueueRunner(examples_queue, [example_enqueue_op]))
example_dequeue_op = examples_queue.dequeue()
resized_batch = resize_example(example_dequeue_op)
"""
resized_example_ops = []
#output_queue = tf.FIFOQueue(
# capacity=2*examples_per_shard * FLAGS.num_preprocess_threads,
# dtypes=[tf.string])
#output_enqueue_ops = []
for t in xrange(FLAGS.num_preprocess_threads):
output = resize_example(example_dequeue_op)
resized_example_ops.append(output)
#output_enqueue_ops.append(output_queue.enqueue(output))
#output_qr = tf.train.QueueRunner(output_queue, [output_enqueue_op])
#output_dequeue_op = output_queue.dequeue()
resized_batch = tf.train.batch_join(
resized_example_ops,
batch_size = examples_per_shard,
capacity = 3 * examples_per_shard)
print resized_batch
"""
print("Initializing")
#init = tf.initialize_local_variables()
init = tf.local_variables_initializer()
sess.run(init)
coordinator = tf.train.Coordinator()
queue_threads = tf.train.start_queue_runners(sess=sess, coord=coordinator)
print("Running")
batch_num = FLAGS.start_offset
while not coordinator.should_stop():
batch_num += 1
print(batch_num)
output_filename = '%s-%05d-of-%05d' % (FLAGS.subset_name, batch_num, num_shards)
output_file = os.path.join(FLAGS.output_dir, output_filename)
if not os.path.exists(FLAGS.output_dir):
os.mkdir(FLAGS.output_dir)
if os.path.exists(output_file) and not FLAGS.force:
raise IOError("Output file already exists (pass -f to overwrite): " + output_file)
with tf.python_io.TFRecordWriter(output_file) as writer:
for i in range(examples_per_shard):
#print sess.run([t.op for t in resized_batch])
encoded_images, heights, widths, channels, colorspaces, \
labels, texts, synsets, img_format, img_filename, \
xmin, ymin, xmax, ymax = \
sess.run(resized_batch)
#output_filename = '%s-%05d-of-%05d' % (FLAGS.subset_name, batch_num, num_shards)
#output_file = os.path.join(FLAGS.output_dir, output_filename)
#with tf.python_io.TFRecordWriter(output_file) as writer:
#for rec in xrange(len(encoded_images)):
example = tf.train.Example(features=tf.train.Features(feature={
'image/encoded': bytes_feature(encoded_images),
'image/height': int64_feature(heights[0]),
'image/width': int64_feature(widths[0]),
'image/channels': int64_feature(channels[0]),
'image/colorspace': bytes_feature(colorspaces),
'image/class/label': int64_feature(labels[0]),
'image/class/text': bytes_feature(texts),
'image/class/synset': bytes_feature(synsets),
'image/format': bytes_feature(img_format),
'image/filename': bytes_feature(img_filename),
'image/object/bbox/xmin': float_feature(xmin.values.tolist()),
'image/object/bbox/ymin': float_feature(ymin.values.tolist()),
'image/object/bbox/xmax': float_feature(xmax.values.tolist()),
'image/object/bbox/ymax': float_feature(ymax.values.tolist()) }))
writer.write(example.SerializeToString())
coordinator.request_stop()
coordinator.join(queue_threads, stop_grace_period_secs=5.)
sess.close()
|
nilq/baby-python
|
python
|