hexsha
stringlengths 40
40
| size
int64 10
805k
| ext
stringclasses 6
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 4
176
| max_stars_repo_name
stringlengths 7
114
| max_stars_repo_head_hexsha
stringlengths 40
40
| max_stars_repo_licenses
listlengths 1
10
| max_stars_count
int64 1
191k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 4
176
| max_issues_repo_name
stringlengths 7
114
| max_issues_repo_head_hexsha
stringlengths 40
40
| max_issues_repo_licenses
listlengths 1
10
| max_issues_count
int64 1
48.5k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 4
176
| max_forks_repo_name
stringlengths 7
114
| max_forks_repo_head_hexsha
stringlengths 40
40
| max_forks_repo_licenses
listlengths 1
10
| max_forks_count
int64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 10
805k
| avg_line_length
float64 5.53
11k
| max_line_length
int64 10
129k
| alphanum_fraction
float64 0.13
0.93
| content_no_comment
stringlengths 0
449k
| is_comment_constant_removed
bool 2
classes | is_sharp_comment_removed
bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f71874ea181c3b592a78cc7466c520b1e6d8934c
| 352
|
py
|
Python
|
code/examples/MotionTrackingAccelerometer.py
|
SaschaMzH/hucon
|
830b6c5e21c2c7316c61e8afdf708066374b9b62
|
[
"BSD-3-Clause"
] | 2
|
2019-09-25T13:39:22.000Z
|
2019-09-26T10:06:13.000Z
|
code/examples/MotionTrackingAccelerometer.py
|
SaschaMzH/hucon
|
830b6c5e21c2c7316c61e8afdf708066374b9b62
|
[
"BSD-3-Clause"
] | 44
|
2019-09-25T14:35:48.000Z
|
2021-08-20T17:26:12.000Z
|
code/examples/MotionTrackingAccelerometer.py
|
SaschaMzH/hucon
|
830b6c5e21c2c7316c61e8afdf708066374b9b62
|
[
"BSD-3-Clause"
] | 8
|
2019-09-25T13:53:07.000Z
|
2022-02-24T19:23:44.000Z
|
""" Print the gyro sensor data.
Copyright (C) 2019 Basler AG
All rights reserved.
This software may be modified and distributed under the terms
of the BSD license. See the LICENSE file for details.
"""
from hucon import Mpu6050
mpu = None
print('Get the data from the accelerometer.')
mpu = Mpu6050()
print(mpu.get_accel_data())
| 19.555556
| 65
| 0.713068
|
from hucon import Mpu6050
mpu = None
print('Get the data from the accelerometer.')
mpu = Mpu6050()
print(mpu.get_accel_data())
| true
| true
|
f71874ed83499c32090bfa730a8893ccd3cb1572
| 4,292
|
py
|
Python
|
extract_feats/opensmile.py
|
ImmortalSdm/Speech-Emotion-Recognition-1
|
c5f766a0f66c77df30c6d75e86d97c27c2bbb240
|
[
"MIT"
] | 1
|
2021-03-13T09:35:54.000Z
|
2021-03-13T09:35:54.000Z
|
extract_feats/opensmile.py
|
Ulrica-ren/Speech-Emotion-Recognition-1
|
c5f766a0f66c77df30c6d75e86d97c27c2bbb240
|
[
"MIT"
] | null | null | null |
extract_feats/opensmile.py
|
Ulrica-ren/Speech-Emotion-Recognition-1
|
c5f766a0f66c77df30c6d75e86d97c27c2bbb240
|
[
"MIT"
] | 1
|
2021-03-17T10:52:26.000Z
|
2021-03-17T10:52:26.000Z
|
import os
import csv
import sys
import time
import pandas as pd
from sklearn.preprocessing import StandardScaler
from typing import Tuple
from sklearn.externals import joblib
from sklearn.model_selection import train_test_split
# 每个特征集的特征数量
FEATURE_NUM = {
'IS09_emotion': 384,
'IS10_paraling': 1582,
'IS11_speaker_state': 4368,
'IS12_speaker_trait': 6125,
'IS13_ComParE': 6373,
'ComParE_2016': 6373
}
'''
get_feature_opensmile(): Opensmile 提取一个音频的特征
输入:
config(Class)
file_path: 音频路径
输出:
该音频的特征向量
'''
def get_feature_opensmile(config, filepath: str):
# 项目路径
BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir))
# single_feature.csv 路径
single_feat_path = os.path.join(BASE_DIR, config.feature_path, 'single_feature.csv')
# Opensmile 配置文件路径
opensmile_config_path = os.path.join(config.opensmile_path, 'config', config.opensmile_config + '.conf')
# Opensmile 命令
cmd = 'cd ' + config.opensmile_path + ' && ./SMILExtract -C ' + opensmile_config_path + ' -I ' + filepath + ' -O ' + single_feat_path
print("Opensmile cmd: ", cmd)
os.system(cmd)
reader = csv.reader(open(single_feat_path,'r'))
rows = [row for row in reader]
last_line = rows[-1]
return last_line[1: FEATURE_NUM[config.opensmile_config] + 1]
'''
load_feature(): 从 .csv 文件中加载特征数据
输入:
config(Class)
feature_path: 特征文件路径
train: 是否为训练数据
输出:
训练数据、测试数据和对应的标签
'''
def load_feature(config, feature_path: str, train: bool):
# 加载特征数据
df = pd.read_csv(feature_path)
features = [str(i) for i in range(1, FEATURE_NUM[config.opensmile_config] + 1)]
X = df.loc[:,features].values
Y = df.loc[:,'label'].values
# 标准化模型路径
scaler_path = os.path.join(config.checkpoint_path, 'SCALER_OPENSMILE.m')
if train == True:
# 标准化数据
scaler = StandardScaler().fit(X)
# 保存标准化模型
joblib.dump(scaler, scaler_path)
X = scaler.transform(X)
# 划分训练集和测试集
x_train, x_test, y_train, y_test = train_test_split(X, Y, test_size = 0.2, random_state = 42)
return x_train, x_test, y_train, y_test
else:
# 标准化数据
# 加载标准化模型
scaler = joblib.load(scaler_path)
X = scaler.transform(X)
return X
'''
get_data():
提取所有音频的特征: 遍历所有文件夹, 读取每个文件夹中的音频, 提取每个音频的特征,把所有特征保存在 feature_path 中
输入:
config(Class)
data_path: 数据集文件夹/测试文件路径
feature_path: 保存特征的路径
train: 是否为训练数据
输出:
train = True: 训练数据、测试数据特征和对应的标签
train = False: 预测数据特征
'''
# Opensmile 提取特征
def get_data(config, data_path, feature_path: str, train: bool):
writer = csv.writer(open(feature_path, 'w'))
first_row = ['label']
for i in range(1, FEATURE_NUM[config.opensmile_config] + 1):
first_row.append(str(i))
writer.writerow(first_row)
writer = csv.writer(open(feature_path, 'a+'))
print('Opensmile extracting...')
if train == True:
cur_dir = os.getcwd()
sys.stderr.write('Curdir: %s\n' % cur_dir)
os.chdir(data_path)
# 遍历文件夹
for i, directory in enumerate(config.class_labels):
sys.stderr.write("Started reading folder %s\n" % directory)
os.chdir(directory)
# label_name = directory
label = config.class_labels.index(directory)
# 读取该文件夹下的音频
for filename in os.listdir('.'):
if not filename.endswith('wav'):
continue
filepath = os.path.join(os.getcwd(), filename)
# 提取该音频的特征
feature_vector = get_feature_opensmile(config, filepath)
feature_vector.insert(0, label)
# 把每个音频的特征整理到一个 csv 文件中
writer.writerow(feature_vector)
sys.stderr.write("Ended reading folder %s\n" % directory)
os.chdir('..')
os.chdir(cur_dir)
else:
feature_vector = get_feature_opensmile(config, data_path)
feature_vector.insert(0, '-1')
writer.writerow(feature_vector)
print('Opensmile extract done.')
# 一个玄学 bug 的暂时性解决方案
# 这里无法直接加载除了 IS10_paraling 以外的其他特征集的预测数据特征,非常玄学
if(train == True):
return load_feature(config, feature_path, train = train)
| 27.164557
| 137
| 0.635368
|
import os
import csv
import sys
import time
import pandas as pd
from sklearn.preprocessing import StandardScaler
from typing import Tuple
from sklearn.externals import joblib
from sklearn.model_selection import train_test_split
FEATURE_NUM = {
'IS09_emotion': 384,
'IS10_paraling': 1582,
'IS11_speaker_state': 4368,
'IS12_speaker_trait': 6125,
'IS13_ComParE': 6373,
'ComParE_2016': 6373
}
def get_feature_opensmile(config, filepath: str):
BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir))
single_feat_path = os.path.join(BASE_DIR, config.feature_path, 'single_feature.csv')
opensmile_config_path = os.path.join(config.opensmile_path, 'config', config.opensmile_config + '.conf')
cmd = 'cd ' + config.opensmile_path + ' && ./SMILExtract -C ' + opensmile_config_path + ' -I ' + filepath + ' -O ' + single_feat_path
print("Opensmile cmd: ", cmd)
os.system(cmd)
reader = csv.reader(open(single_feat_path,'r'))
rows = [row for row in reader]
last_line = rows[-1]
return last_line[1: FEATURE_NUM[config.opensmile_config] + 1]
def load_feature(config, feature_path: str, train: bool):
df = pd.read_csv(feature_path)
features = [str(i) for i in range(1, FEATURE_NUM[config.opensmile_config] + 1)]
X = df.loc[:,features].values
Y = df.loc[:,'label'].values
scaler_path = os.path.join(config.checkpoint_path, 'SCALER_OPENSMILE.m')
if train == True:
scaler = StandardScaler().fit(X)
joblib.dump(scaler, scaler_path)
X = scaler.transform(X)
x_train, x_test, y_train, y_test = train_test_split(X, Y, test_size = 0.2, random_state = 42)
return x_train, x_test, y_train, y_test
else:
scaler = joblib.load(scaler_path)
X = scaler.transform(X)
return X
def get_data(config, data_path, feature_path: str, train: bool):
writer = csv.writer(open(feature_path, 'w'))
first_row = ['label']
for i in range(1, FEATURE_NUM[config.opensmile_config] + 1):
first_row.append(str(i))
writer.writerow(first_row)
writer = csv.writer(open(feature_path, 'a+'))
print('Opensmile extracting...')
if train == True:
cur_dir = os.getcwd()
sys.stderr.write('Curdir: %s\n' % cur_dir)
os.chdir(data_path)
for i, directory in enumerate(config.class_labels):
sys.stderr.write("Started reading folder %s\n" % directory)
os.chdir(directory)
label = config.class_labels.index(directory)
for filename in os.listdir('.'):
if not filename.endswith('wav'):
continue
filepath = os.path.join(os.getcwd(), filename)
feature_vector = get_feature_opensmile(config, filepath)
feature_vector.insert(0, label)
writer.writerow(feature_vector)
sys.stderr.write("Ended reading folder %s\n" % directory)
os.chdir('..')
os.chdir(cur_dir)
else:
feature_vector = get_feature_opensmile(config, data_path)
feature_vector.insert(0, '-1')
writer.writerow(feature_vector)
print('Opensmile extract done.')
if(train == True):
return load_feature(config, feature_path, train = train)
| true
| true
|
f7187653a74f7b01dca4f137c868aa88c9f636ab
| 20,547
|
py
|
Python
|
elasticapm/base.py
|
mvas/apm-agent-python
|
f4582e90eb5308b915ca51e2e98620fc22af09ec
|
[
"BSD-3-Clause"
] | null | null | null |
elasticapm/base.py
|
mvas/apm-agent-python
|
f4582e90eb5308b915ca51e2e98620fc22af09ec
|
[
"BSD-3-Clause"
] | null | null | null |
elasticapm/base.py
|
mvas/apm-agent-python
|
f4582e90eb5308b915ca51e2e98620fc22af09ec
|
[
"BSD-3-Clause"
] | null | null | null |
"""
elasticapm.base
~~~~~~~~~~
:copyright: (c) 2011-2017 Elasticsearch
Large portions are
:copyright: (c) 2010 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
import datetime
import logging
import os
import platform
import socket
import sys
import threading
import time
import zlib
from copy import deepcopy
import elasticapm
from elasticapm.conf import Config, constants
from elasticapm.traces import TransactionsStore, get_transaction
from elasticapm.transport.base import TransportException
from elasticapm.utils import compat, is_master_process
from elasticapm.utils import json_encoder as json
from elasticapm.utils import stacks, varmap
from elasticapm.utils.encoding import keyword_field, shorten, transform
from elasticapm.utils.module_import import import_string
__all__ = ('Client',)
class ClientState(object):
ONLINE = 1
ERROR = 0
def __init__(self):
self.status = self.ONLINE
self.last_check = None
self.retry_number = 0
def should_try(self):
if self.status == self.ONLINE:
return True
interval = min(self.retry_number, 6) ** 2
if time.time() - self.last_check > interval:
return True
return False
def set_fail(self):
self.status = self.ERROR
self.retry_number += 1
self.last_check = time.time()
def set_success(self):
self.status = self.ONLINE
self.last_check = None
self.retry_number = 0
def did_fail(self):
return self.status == self.ERROR
class Client(object):
"""
The base ElasticAPM client, which handles communication over the
HTTP API to the APM Server.
Will read default configuration from the environment variable
``ELASTIC_APM_APP_NAME`` and ``ELASTIC_APM_SECRET_TOKEN``
if available. ::
>>> from elasticapm import Client
>>> # Read configuration from environment
>>> client = Client()
>>> # Configure the client manually
>>> client = Client(
>>> include_paths=['my.package'],
>>> service_name='myapp',
>>> secret_token='secret_token',
>>> )
>>> # Record an exception
>>> try:
>>> 1/0
>>> except ZeroDivisionError:
>>> ident = client.capture_exception()
>>> print ("Exception caught; reference is %%s" %% ident)
"""
logger = logging.getLogger('elasticapm')
def __init__(self, config=None, **defaults):
# configure loggers first
cls = self.__class__
self.logger = logging.getLogger('%s.%s' % (cls.__module__, cls.__name__))
self.error_logger = logging.getLogger('elasticapm.errors')
self.state = ClientState()
self.instrumentation_store = None
self.processors = []
self.filter_exception_types_dict = {}
self._send_timer = None
self._transports = {}
self._service_info = None
self.config = Config(config, default_dict=defaults)
if self.config.errors:
for msg in self.config.errors.values():
self.error_logger.error(msg)
self.config.disable_send = True
return
self._transport_class = import_string(self.config.transport_class)
for exc_to_filter in (self.config.filter_exception_types or []):
exc_to_filter_type = exc_to_filter.split(".")[-1]
exc_to_filter_module = ".".join(exc_to_filter.split(".")[:-1])
self.filter_exception_types_dict[exc_to_filter_type] = exc_to_filter_module
self.processors = [import_string(p) for p in self.config.processors] if self.config.processors else []
if platform.python_implementation() == 'PyPy':
# PyPy introduces a `_functools.partial.__call__` frame due to our use
# of `partial` in AbstractInstrumentedModule
skip_modules = ('elasticapm.', '_functools')
else:
skip_modules = ('elasticapm.',)
def frames_collector_func():
return self._get_stack_info_for_trace(
stacks.iter_stack_frames(skip_top_modules=skip_modules),
library_frame_context_lines=self.config.source_lines_span_library_frames,
in_app_frame_context_lines=self.config.source_lines_span_app_frames,
with_locals=self.config.collect_local_variables in ('all', 'transactions'),
locals_processor_func=lambda local_var: varmap(lambda k, v: shorten(
v,
list_length=self.config.local_var_list_max_length,
string_length=self.config.local_var_max_length,
), local_var)
)
self.instrumentation_store = TransactionsStore(
frames_collector_func=frames_collector_func,
collect_frequency=self.config.flush_interval,
sample_rate=self.config.transaction_sample_rate,
max_spans=self.config.transaction_max_spans,
span_frames_min_duration=self.config.span_frames_min_duration_ms,
max_queue_size=self.config.max_queue_size,
ignore_patterns=self.config.transactions_ignore_patterns,
)
self.include_paths_re = stacks.get_path_regex(self.config.include_paths) if self.config.include_paths else None
self.exclude_paths_re = stacks.get_path_regex(self.config.exclude_paths) if self.config.exclude_paths else None
compat.atexit_register(self.close)
def get_handler(self, name):
return import_string(name)
def capture(self, event_type, date=None, context=None, custom=None, stack=None, handled=True, **kwargs):
"""
Captures and processes an event and pipes it off to Client.send.
"""
if event_type == 'Exception':
# never gather log stack for exceptions
stack = False
data = self._build_msg_for_logging(event_type, date=date, context=context, custom=custom, stack=stack,
handled=handled, **kwargs)
if data:
url = self.config.server_url + constants.ERROR_API_PATH
self.send(url, **data)
return data['errors'][0]['id']
def capture_message(self, message=None, param_message=None, **kwargs):
"""
Creates an event from ``message``.
>>> client.capture_message('My event just happened!')
"""
return self.capture('Message', message=message, param_message=param_message, **kwargs)
def capture_exception(self, exc_info=None, handled=True, **kwargs):
"""
Creates an event from an exception.
>>> try:
>>> exc_info = sys.exc_info()
>>> client.capture_exception(exc_info)
>>> finally:
>>> del exc_info
If exc_info is not provided, or is set to True, then this method will
perform the ``exc_info = sys.exc_info()`` and the requisite clean-up
for you.
"""
return self.capture('Exception', exc_info=exc_info, handled=handled, **kwargs)
def send(self, url, **data):
"""
Encodes and sends data to remote URL using configured transport
:param url: URL of endpoint
:param data: dictionary of data to send
"""
if self.config.disable_send or self._filter_exception_type(data):
return
payload = self.encode(data)
headers = {
'Content-Type': 'application/json',
'Content-Encoding': 'deflate',
'User-Agent': 'elasticapm-python/%s' % elasticapm.VERSION,
}
if self.config.secret_token:
headers['Authorization'] = "Bearer %s" % self.config.secret_token
if not self.state.should_try():
message = self._get_log_message(payload)
self.error_logger.error(message)
return
try:
self._send_remote(url=url, data=payload, headers=headers)
except Exception as e:
self.handle_transport_fail(exception=e)
def encode(self, data):
"""
Serializes ``data`` into a raw string.
"""
return zlib.compress(json.dumps(data).encode('utf8'))
def decode(self, data):
"""
Unserializes a string, ``data``.
"""
return json.loads(zlib.decompress(data).decode('utf8'))
def begin_transaction(self, transaction_type):
"""Register the start of a transaction on the client
"""
return self.instrumentation_store.begin_transaction(transaction_type)
def end_transaction(self, name, result=''):
transaction = self.instrumentation_store.end_transaction(result, name)
if self.instrumentation_store.should_collect():
self._collect_transactions()
if not self._send_timer:
# send first batch of data after config._wait_to_first_send
self._start_send_timer(timeout=min(self.config._wait_to_first_send, self.config.flush_interval))
return transaction
def close(self):
self._collect_transactions()
if self._send_timer:
self._stop_send_timer()
for url, transport in list(self._transports.items()):
transport.close()
self._transports.pop(url)
def handle_transport_success(self, **kwargs):
"""
Success handler called by the transport
"""
if kwargs.get('url'):
self.logger.info('Logged error at ' + kwargs['url'])
self.state.set_success()
def handle_transport_fail(self, exception=None, **kwargs):
"""
Failure handler called by the transport
"""
if isinstance(exception, TransportException):
message = self._get_log_message(exception.data)
self.error_logger.error(exception.args[0])
else:
# stdlib exception
message = str(exception)
self.error_logger.error(
'Failed to submit message: %r',
message,
exc_info=getattr(exception, 'print_trace', True)
)
self.state.set_fail()
def _collect_transactions(self):
self._stop_send_timer()
transactions = []
if self.instrumentation_store:
for transaction in self.instrumentation_store.get_all():
for processor in self.processors:
transaction = processor(self, transaction)
transactions.append(transaction)
if not transactions:
return
data = self._build_msg({
'transactions': transactions,
})
api_path = constants.TRANSACTIONS_API_PATH
self.send(self.config.server_url + api_path, **data)
self._start_send_timer()
def _start_send_timer(self, timeout=None):
timeout = timeout or self.config.flush_interval
self._send_timer = threading.Timer(timeout, self._collect_transactions)
self._send_timer.start()
def _stop_send_timer(self):
if self._send_timer and self._send_timer.is_alive() and not self._send_timer == threading.current_thread():
self._send_timer.cancel()
self._send_timer.join()
def _send_remote(self, url, data, headers=None):
if headers is None:
headers = {}
parsed = compat.urlparse.urlparse(url)
transport = self._get_transport(parsed)
if transport.async_mode:
transport.send_async(
data, headers,
success_callback=self.handle_transport_success,
fail_callback=self.handle_transport_fail
)
else:
url = transport.send(data, headers, timeout=self.config.server_timeout)
self.handle_transport_success(url=url)
def get_service_info(self):
if self._service_info:
return self._service_info
language_version = platform.python_version()
if hasattr(sys, 'pypy_version_info'):
runtime_version = '.'.join(map(str, sys.pypy_version_info[:3]))
else:
runtime_version = language_version
result = {
'name': keyword_field(self.config.service_name),
'environment': keyword_field(self.config.environment),
'version': keyword_field(self.config.service_version),
'agent': {
'name': 'python',
'version': elasticapm.VERSION,
},
'language': {
'name': 'python',
'version': keyword_field(platform.python_version()),
},
'runtime': {
'name': keyword_field(platform.python_implementation()),
'version': keyword_field(runtime_version),
}
}
if self.config.framework_name:
result['framework'] = {
'name': keyword_field(self.config.framework_name),
'version': keyword_field(self.config.framework_version),
}
self._service_info = result
return result
def get_process_info(self):
return {
'pid': os.getpid(),
'ppid': os.getppid() if hasattr(os, 'getppid') else None,
'argv': sys.argv,
'title': None, # Note: if we implement this, the value needs to be wrapped with keyword_field
}
def get_system_info(self):
return {
'hostname': keyword_field(socket.gethostname()),
'architecture': platform.machine(),
'platform': platform.system().lower(),
}
def _build_msg(self, data=None, **kwargs):
data = data or {}
data['service'] = self.get_service_info()
data['process'] = self.get_process_info()
data['system'] = self.get_system_info()
data.update(**kwargs)
return data
def _build_msg_for_logging(self, event_type, date=None, context=None, custom=None, stack=None,
handled=True, **kwargs):
"""
Captures, processes and serializes an event into a dict object
"""
transaction = get_transaction()
if transaction:
transaction_context = deepcopy(transaction.context)
else:
transaction_context = {}
event_data = {}
if custom is None:
custom = {}
if not date:
date = datetime.datetime.utcnow()
if stack is None:
stack = self.config.auto_log_stacks
if context:
transaction_context.update(context)
context = transaction_context
else:
context = transaction_context
event_data['context'] = context
if transaction and transaction.tags:
context['tags'] = deepcopy(transaction.tags)
# if '.' not in event_type:
# Assume it's a builtin
event_type = 'elasticapm.events.%s' % event_type
handler = self.get_handler(event_type)
result = handler.capture(self, **kwargs)
if self._filter_exception_type(result):
return
# data (explicit) culprit takes over auto event detection
culprit = result.pop('culprit', None)
if custom.get('culprit'):
culprit = custom.pop('culprit')
for k, v in compat.iteritems(result):
if k not in event_data:
event_data[k] = v
log = event_data.get('log', {})
if stack and 'stacktrace' not in log:
if stack is True:
frames = stacks.iter_stack_frames(skip=3)
else:
frames = stack
frames = stacks.get_stack_info(
frames,
with_locals=self.config.collect_local_variables in ('errors', 'all'),
library_frame_context_lines=self.config.source_lines_error_library_frames,
in_app_frame_context_lines=self.config.source_lines_error_app_frames,
include_paths_re=self.include_paths_re,
exclude_paths_re=self.exclude_paths_re,
locals_processor_func=lambda local_var: varmap(lambda k, v: shorten(
v,
list_length=self.config.local_var_list_max_length,
string_length=self.config.local_var_max_length,
), local_var)
)
log['stacktrace'] = frames
if 'stacktrace' in log and not culprit:
culprit = stacks.get_culprit(
log['stacktrace'],
self.config.include_paths, self.config.exclude_paths
)
if 'level' in log and isinstance(log['level'], compat.integer_types):
log['level'] = logging.getLevelName(log['level']).lower()
if log:
event_data['log'] = log
if culprit:
event_data['culprit'] = culprit
if 'custom' in context:
context['custom'].update(custom)
else:
context['custom'] = custom
# Run the data through processors
for processor in self.processors:
event_data = processor(self, event_data)
# Make sure all data is coerced
event_data = transform(event_data)
if 'exception' in event_data:
event_data['exception']['handled'] = bool(handled)
event_data.update({
'timestamp': date.strftime(constants.TIMESTAMP_FORMAT),
})
transaction = get_transaction()
if transaction:
event_data['transaction'] = {'id': transaction.id}
return self._build_msg({'errors': [event_data]})
def _filter_exception_type(self, data):
exception = data.get('exception')
if not exception:
return False
exc_type = exception.get('type')
exc_module = exception.get('module')
if exc_module == 'None':
exc_module = None
if exc_type in self.filter_exception_types_dict:
exc_to_filter_module = self.filter_exception_types_dict[exc_type]
if not exc_to_filter_module or exc_to_filter_module == exc_module:
if exc_module:
exc_name = '%s.%s' % (exc_module, exc_type)
else:
exc_name = exc_type
self.logger.info(
'Ignored %s exception due to exception type filter',
exc_name
)
return True
return False
def _get_log_message(self, data):
# decode message so we can show the actual event
try:
data = self.decode(data)
except Exception:
message = '<failed decoding data>'
else:
message = data.pop('message', '<no message value>')
return message
def _get_transport(self, parsed_url):
if hasattr(self._transport_class, 'sync_transport') and is_master_process():
# when in the master process, always use SYNC mode. This avoids
# the danger of being forked into an inconsistent threading state
self.logger.info('Sending message synchronously while in master '
'process. PID: %s', os.getpid())
return self._transport_class.sync_transport(parsed_url)
if parsed_url not in self._transports:
self._transports[parsed_url] = self._transport_class(
parsed_url, verify_server_cert=self.config.verify_server_cert
)
return self._transports[parsed_url]
def _get_stack_info_for_trace(self, frames,
library_frame_context_lines=None,
in_app_frame_context_lines=None,
with_locals=True,
locals_processor_func=None):
"""Overrideable in derived clients to add frames/info, e.g. templates"""
return stacks.get_stack_info(
frames,
library_frame_context_lines=library_frame_context_lines,
in_app_frame_context_lines=in_app_frame_context_lines,
with_locals=with_locals,
include_paths_re=self.include_paths_re,
exclude_paths_re=self.exclude_paths_re,
locals_processor_func=locals_processor_func,
)
class DummyClient(Client):
"""Sends messages into an empty void"""
def send(self, url, **kwargs):
return None
| 36.174296
| 119
| 0.609627
|
from __future__ import absolute_import
import datetime
import logging
import os
import platform
import socket
import sys
import threading
import time
import zlib
from copy import deepcopy
import elasticapm
from elasticapm.conf import Config, constants
from elasticapm.traces import TransactionsStore, get_transaction
from elasticapm.transport.base import TransportException
from elasticapm.utils import compat, is_master_process
from elasticapm.utils import json_encoder as json
from elasticapm.utils import stacks, varmap
from elasticapm.utils.encoding import keyword_field, shorten, transform
from elasticapm.utils.module_import import import_string
__all__ = ('Client',)
class ClientState(object):
ONLINE = 1
ERROR = 0
def __init__(self):
self.status = self.ONLINE
self.last_check = None
self.retry_number = 0
def should_try(self):
if self.status == self.ONLINE:
return True
interval = min(self.retry_number, 6) ** 2
if time.time() - self.last_check > interval:
return True
return False
def set_fail(self):
self.status = self.ERROR
self.retry_number += 1
self.last_check = time.time()
def set_success(self):
self.status = self.ONLINE
self.last_check = None
self.retry_number = 0
def did_fail(self):
return self.status == self.ERROR
class Client(object):
logger = logging.getLogger('elasticapm')
def __init__(self, config=None, **defaults):
cls = self.__class__
self.logger = logging.getLogger('%s.%s' % (cls.__module__, cls.__name__))
self.error_logger = logging.getLogger('elasticapm.errors')
self.state = ClientState()
self.instrumentation_store = None
self.processors = []
self.filter_exception_types_dict = {}
self._send_timer = None
self._transports = {}
self._service_info = None
self.config = Config(config, default_dict=defaults)
if self.config.errors:
for msg in self.config.errors.values():
self.error_logger.error(msg)
self.config.disable_send = True
return
self._transport_class = import_string(self.config.transport_class)
for exc_to_filter in (self.config.filter_exception_types or []):
exc_to_filter_type = exc_to_filter.split(".")[-1]
exc_to_filter_module = ".".join(exc_to_filter.split(".")[:-1])
self.filter_exception_types_dict[exc_to_filter_type] = exc_to_filter_module
self.processors = [import_string(p) for p in self.config.processors] if self.config.processors else []
if platform.python_implementation() == 'PyPy':
skip_modules = ('elasticapm.', '_functools')
else:
skip_modules = ('elasticapm.',)
def frames_collector_func():
return self._get_stack_info_for_trace(
stacks.iter_stack_frames(skip_top_modules=skip_modules),
library_frame_context_lines=self.config.source_lines_span_library_frames,
in_app_frame_context_lines=self.config.source_lines_span_app_frames,
with_locals=self.config.collect_local_variables in ('all', 'transactions'),
locals_processor_func=lambda local_var: varmap(lambda k, v: shorten(
v,
list_length=self.config.local_var_list_max_length,
string_length=self.config.local_var_max_length,
), local_var)
)
self.instrumentation_store = TransactionsStore(
frames_collector_func=frames_collector_func,
collect_frequency=self.config.flush_interval,
sample_rate=self.config.transaction_sample_rate,
max_spans=self.config.transaction_max_spans,
span_frames_min_duration=self.config.span_frames_min_duration_ms,
max_queue_size=self.config.max_queue_size,
ignore_patterns=self.config.transactions_ignore_patterns,
)
self.include_paths_re = stacks.get_path_regex(self.config.include_paths) if self.config.include_paths else None
self.exclude_paths_re = stacks.get_path_regex(self.config.exclude_paths) if self.config.exclude_paths else None
compat.atexit_register(self.close)
def get_handler(self, name):
return import_string(name)
def capture(self, event_type, date=None, context=None, custom=None, stack=None, handled=True, **kwargs):
if event_type == 'Exception':
stack = False
data = self._build_msg_for_logging(event_type, date=date, context=context, custom=custom, stack=stack,
handled=handled, **kwargs)
if data:
url = self.config.server_url + constants.ERROR_API_PATH
self.send(url, **data)
return data['errors'][0]['id']
def capture_message(self, message=None, param_message=None, **kwargs):
return self.capture('Message', message=message, param_message=param_message, **kwargs)
def capture_exception(self, exc_info=None, handled=True, **kwargs):
return self.capture('Exception', exc_info=exc_info, handled=handled, **kwargs)
def send(self, url, **data):
if self.config.disable_send or self._filter_exception_type(data):
return
payload = self.encode(data)
headers = {
'Content-Type': 'application/json',
'Content-Encoding': 'deflate',
'User-Agent': 'elasticapm-python/%s' % elasticapm.VERSION,
}
if self.config.secret_token:
headers['Authorization'] = "Bearer %s" % self.config.secret_token
if not self.state.should_try():
message = self._get_log_message(payload)
self.error_logger.error(message)
return
try:
self._send_remote(url=url, data=payload, headers=headers)
except Exception as e:
self.handle_transport_fail(exception=e)
def encode(self, data):
return zlib.compress(json.dumps(data).encode('utf8'))
def decode(self, data):
return json.loads(zlib.decompress(data).decode('utf8'))
def begin_transaction(self, transaction_type):
return self.instrumentation_store.begin_transaction(transaction_type)
def end_transaction(self, name, result=''):
transaction = self.instrumentation_store.end_transaction(result, name)
if self.instrumentation_store.should_collect():
self._collect_transactions()
if not self._send_timer:
self._start_send_timer(timeout=min(self.config._wait_to_first_send, self.config.flush_interval))
return transaction
def close(self):
self._collect_transactions()
if self._send_timer:
self._stop_send_timer()
for url, transport in list(self._transports.items()):
transport.close()
self._transports.pop(url)
def handle_transport_success(self, **kwargs):
if kwargs.get('url'):
self.logger.info('Logged error at ' + kwargs['url'])
self.state.set_success()
    def handle_transport_fail(self, exception=None, **kwargs):
        """Log a failed send attempt and flag the back-off state.

        A ``TransportException`` carries the original payload, which is decoded
        into a readable message; any other exception is stringified as-is.
        """
        if isinstance(exception, TransportException):
            message = self._get_log_message(exception.data)
            self.error_logger.error(exception.args[0])
        else:
            message = str(exception)
        self.error_logger.error(
            'Failed to submit message: %r',
            message,
            # TransportException sets print_trace=False-style hints; default to a full traceback.
            exc_info=getattr(exception, 'print_trace', True)
        )
        self.state.set_fail()
    def _collect_transactions(self):
        """Drain the instrumentation store, run processors and send the batch.

        Stops the pending timer first; re-arms it only after a non-empty batch
        has been handed to ``send``.
        """
        self._stop_send_timer()
        transactions = []
        if self.instrumentation_store:
            for transaction in self.instrumentation_store.get_all():
                # Each processor may transform (e.g. sanitize) the transaction.
                for processor in self.processors:
                    transaction = processor(self, transaction)
                transactions.append(transaction)
        if not transactions:
            return
        data = self._build_msg({
            'transactions': transactions,
        })
        api_path = constants.TRANSACTIONS_API_PATH
        self.send(self.config.server_url + api_path, **data)
        self._start_send_timer()
def _start_send_timer(self, timeout=None):
timeout = timeout or self.config.flush_interval
self._send_timer = threading.Timer(timeout, self._collect_transactions)
self._send_timer.start()
def _stop_send_timer(self):
if self._send_timer and self._send_timer.is_alive() and not self._send_timer == threading.current_thread():
self._send_timer.cancel()
self._send_timer.join()
    def _send_remote(self, url, data, headers=None):
        """Hand the encoded payload to the transport responsible for ``url``.

        Async transports report the outcome through the success/fail
        callbacks; synchronous transports return the final URL, which is
        reported as a success immediately.
        """
        if headers is None:
            headers = {}
        parsed = compat.urlparse.urlparse(url)
        transport = self._get_transport(parsed)
        if transport.async_mode:
            transport.send_async(
                data, headers,
                success_callback=self.handle_transport_success,
                fail_callback=self.handle_transport_fail
            )
        else:
            url = transport.send(data, headers, timeout=self.config.server_timeout)
            self.handle_transport_success(url=url)
def get_service_info(self):
if self._service_info:
return self._service_info
language_version = platform.python_version()
if hasattr(sys, 'pypy_version_info'):
runtime_version = '.'.join(map(str, sys.pypy_version_info[:3]))
else:
runtime_version = language_version
result = {
'name': keyword_field(self.config.service_name),
'environment': keyword_field(self.config.environment),
'version': keyword_field(self.config.service_version),
'agent': {
'name': 'python',
'version': elasticapm.VERSION,
},
'language': {
'name': 'python',
'version': keyword_field(platform.python_version()),
},
'runtime': {
'name': keyword_field(platform.python_implementation()),
'version': keyword_field(runtime_version),
}
}
if self.config.framework_name:
result['framework'] = {
'name': keyword_field(self.config.framework_name),
'version': keyword_field(self.config.framework_version),
}
self._service_info = result
return result
def get_process_info(self):
return {
'pid': os.getpid(),
'ppid': os.getppid() if hasattr(os, 'getppid') else None,
'argv': sys.argv,
'title': None,
}
def get_system_info(self):
return {
'hostname': keyword_field(socket.gethostname()),
'architecture': platform.machine(),
'platform': platform.system().lower(),
}
def _build_msg(self, data=None, **kwargs):
data = data or {}
data['service'] = self.get_service_info()
data['process'] = self.get_process_info()
data['system'] = self.get_system_info()
data.update(**kwargs)
return data
    def _build_msg_for_logging(self, event_type, date=None, context=None, custom=None, stack=None,
                               handled=True, **kwargs):
        """Assemble the full error payload for an event of ``event_type``.

        Merges the active transaction's context and tags, runs the registered
        event-type handler, optionally collects a stacktrace, derives the
        culprit, applies processors and wraps everything in the standard
        ``{'errors': [...]}`` envelope. Returns ``None`` when the captured
        exception matches the exception-type filter.
        """
        transaction = get_transaction()
        if transaction:
            transaction_context = deepcopy(transaction.context)
        else:
            transaction_context = {}
        event_data = {}
        if custom is None:
            custom = {}
        if not date:
            date = datetime.datetime.utcnow()
        if stack is None:
            stack = self.config.auto_log_stacks
        if context:
            transaction_context.update(context)
            context = transaction_context
        else:
            context = transaction_context
        event_data['context'] = context
        if transaction and transaction.tags:
            context['tags'] = deepcopy(transaction.tags)
        event_type = 'elasticapm.events.%s' % event_type
        handler = self.get_handler(event_type)
        result = handler.capture(self, **kwargs)
        if self._filter_exception_type(result):
            return
        # data (explicit) culprit takes over auto event detection
        culprit = result.pop('culprit', None)
        if custom.get('culprit'):
            culprit = custom.pop('culprit')
        # Merge the handler's result without clobbering already-set keys.
        for k, v in compat.iteritems(result):
            if k not in event_data:
                event_data[k] = v
        log = event_data.get('log', {})
        if stack and 'stacktrace' not in log:
            # stack=True means "capture the current stack"; otherwise ``stack``
            # is assumed to already be an iterable of frames.
            if stack is True:
                frames = stacks.iter_stack_frames(skip=3)
            else:
                frames = stack
            frames = stacks.get_stack_info(
                frames,
                with_locals=self.config.collect_local_variables in ('errors', 'all'),
                library_frame_context_lines=self.config.source_lines_error_library_frames,
                in_app_frame_context_lines=self.config.source_lines_error_app_frames,
                include_paths_re=self.include_paths_re,
                exclude_paths_re=self.exclude_paths_re,
                # Trim captured locals to the configured size limits.
                locals_processor_func=lambda local_var: varmap(lambda k, v: shorten(
                    v,
                    list_length=self.config.local_var_list_max_length,
                    string_length=self.config.local_var_max_length,
                ), local_var)
            )
            log['stacktrace'] = frames
        if 'stacktrace' in log and not culprit:
            culprit = stacks.get_culprit(
                log['stacktrace'],
                self.config.include_paths, self.config.exclude_paths
            )
        if 'level' in log and isinstance(log['level'], compat.integer_types):
            # Convert numeric logging levels to their lowercase names.
            log['level'] = logging.getLevelName(log['level']).lower()
        if log:
            event_data['log'] = log
        if culprit:
            event_data['culprit'] = culprit
        if 'custom' in context:
            context['custom'].update(custom)
        else:
            context['custom'] = custom
        # Run the data through processors
        for processor in self.processors:
            event_data = processor(self, event_data)
        # Make sure all data is coerced
        event_data = transform(event_data)
        if 'exception' in event_data:
            event_data['exception']['handled'] = bool(handled)
        event_data.update({
            'timestamp': date.strftime(constants.TIMESTAMP_FORMAT),
        })
        transaction = get_transaction()
        if transaction:
            event_data['transaction'] = {'id': transaction.id}
        return self._build_msg({'errors': [event_data]})
def _filter_exception_type(self, data):
exception = data.get('exception')
if not exception:
return False
exc_type = exception.get('type')
exc_module = exception.get('module')
if exc_module == 'None':
exc_module = None
if exc_type in self.filter_exception_types_dict:
exc_to_filter_module = self.filter_exception_types_dict[exc_type]
if not exc_to_filter_module or exc_to_filter_module == exc_module:
if exc_module:
exc_name = '%s.%s' % (exc_module, exc_type)
else:
exc_name = exc_type
self.logger.info(
'Ignored %s exception due to exception type filter',
exc_name
)
return True
return False
def _get_log_message(self, data):
# decode message so we can show the actual event
try:
data = self.decode(data)
except Exception:
message = '<failed decoding data>'
else:
message = data.pop('message', '<no message value>')
return message
    def _get_transport(self, parsed_url):
        """Return (and cache) the transport instance for ``parsed_url``."""
        if hasattr(self._transport_class, 'sync_transport') and is_master_process():
            # when in the master process, always use SYNC mode. This avoids
            # the danger of being forked into an inconsistent threading state
            self.logger.info('Sending message synchronously while in master '
                             'process. PID: %s', os.getpid())
            return self._transport_class.sync_transport(parsed_url)
        if parsed_url not in self._transports:
            # One transport per distinct endpoint, created lazily.
            self._transports[parsed_url] = self._transport_class(
                parsed_url, verify_server_cert=self.config.verify_server_cert
            )
        return self._transports[parsed_url]
def _get_stack_info_for_trace(self, frames,
library_frame_context_lines=None,
in_app_frame_context_lines=None,
with_locals=True,
locals_processor_func=None):
return stacks.get_stack_info(
frames,
library_frame_context_lines=library_frame_context_lines,
in_app_frame_context_lines=in_app_frame_context_lines,
with_locals=with_locals,
include_paths_re=self.include_paths_re,
exclude_paths_re=self.exclude_paths_re,
locals_processor_func=locals_processor_func,
)
class DummyClient(Client):
    """A no-op client that discards every outgoing payload instead of sending it."""

    def send(self, url, **kwargs):
        # Intentionally swallow everything; useful when reporting is disabled.
        return None
| true
| true
|
f7187739be7c1360375442537af54f3bec58e630
| 7,993
|
py
|
Python
|
YoloV3_ezhirko/detect.py
|
eva5covergence/EVA5_AI_Projects
|
7052373c52b6b9901cd0bc05a4758dd4b63f7480
|
[
"MIT"
] | null | null | null |
YoloV3_ezhirko/detect.py
|
eva5covergence/EVA5_AI_Projects
|
7052373c52b6b9901cd0bc05a4758dd4b63f7480
|
[
"MIT"
] | null | null | null |
YoloV3_ezhirko/detect.py
|
eva5covergence/EVA5_AI_Projects
|
7052373c52b6b9901cd0bc05a4758dd4b63f7480
|
[
"MIT"
] | 2
|
2021-07-25T10:24:11.000Z
|
2021-08-13T09:23:30.000Z
|
import argparse
from sys import platform
from models import * # set ONNX_EXPORT in models.py
from utils.datasets import *
from utils.utils import *
def detect(save_img=False):
    """Run YOLOv3 inference over the configured source and save/stream results.

    Reads all options from the module-level ``opt`` namespace (argparse).
    Handles image folders, videos and webcam/RTSP streams, optional FP16
    inference, an (disabled) second-stage classifier, and ONNX export.
    """
    img_size = (320, 192) if ONNX_EXPORT else opt.img_size  # (320, 192) or (416, 256) or (608, 352) for (height, width)
    out, source, weights, half, view_img, save_txt = opt.output, opt.source, opt.weights, opt.half, opt.view_img, opt.save_txt
    webcam = source == '0' or source.startswith('rtsp') or source.startswith('http') or source.endswith('.txt')
    # Initialize
    device = torch_utils.select_device(device='cpu' if ONNX_EXPORT else opt.device)
    if os.path.exists(out):
        shutil.rmtree(out)  # delete output folder
    os.makedirs(out)  # make new output folder
    # Initialize model
    model = Darknet(opt.cfg, img_size)
    # Load weights
    attempt_download(weights)
    if weights.endswith('.pt'):  # pytorch format
        model.load_state_dict(torch.load(weights, map_location=device)['model'])
    else:  # darknet format
        load_darknet_weights(model, weights)
    # Second-stage classifier (disabled by default)
    classify = False
    if classify:
        modelc = torch_utils.load_classifier(name='resnet101', n=2)  # initialize
        modelc.load_state_dict(torch.load('weights/resnet101.pt', map_location=device)['model'])  # load weights
        modelc.to(device).eval()
    # Eval mode
    model.to(device).eval()
    # Fuse Conv2d + BatchNorm2d layers
    # model.fuse()
    # Export mode
    if ONNX_EXPORT:
        model.fuse()
        img = torch.zeros((1, 3) + img_size)  # (1, 3, 320, 192)
        f = opt.weights.replace(opt.weights.split('.')[-1], 'onnx')  # *.onnx filename
        torch.onnx.export(model, img, f, verbose=False, opset_version=11)
        # Validate exported model
        import onnx
        model = onnx.load(f)  # Load the ONNX model
        onnx.checker.check_model(model)  # Check that the IR is well formed
        print(onnx.helper.printable_graph(model.graph))  # Print a human readable representation of the graph
        return
    # Half precision
    half = half and device.type != 'cpu'  # half precision only supported on CUDA
    if half:
        model.half()
    # Set Dataloader
    vid_path, vid_writer = None, None
    if webcam:
        view_img = True
        torch.backends.cudnn.benchmark = True  # set True to speed up constant image size inference
        dataset = LoadStreams(source, img_size=img_size)
    else:
        save_img = True
        dataset = LoadImages(source, img_size=img_size)
    # Get names and colors
    names = load_classes(opt.names)
    colors = [[random.randint(0, 255) for _ in range(3)] for _ in range(len(names))]
    # Run inference
    t0 = time.time()
    _ = model(torch.zeros((1, 3, img_size, img_size), device=device)) if device.type != 'cpu' else None  # run once
    for path, img, im0s, vid_cap in dataset:
        img = torch.from_numpy(img).to(device)
        img = img.half() if half else img.float()  # uint8 to fp16/32
        img /= 255.0  # 0 - 255 to 0.0 - 1.0
        if img.ndimension() == 3:
            img = img.unsqueeze(0)
        # Inference
        t1 = torch_utils.time_synchronized()
        pred = model(img, augment=opt.augment)[0]
        t2 = torch_utils.time_synchronized()
        # to float
        if half:
            pred = pred.float()
        # Apply NMS
        pred = non_max_suppression(pred, opt.conf_thres, opt.iou_thres,
                                   multi_label=False, classes=opt.classes, agnostic=opt.agnostic_nms)
        # Apply Classifier
        if classify:
            pred = apply_classifier(pred, modelc, img, im0s)
        # Process detections
        for i, det in enumerate(pred):  # detections per image
            if webcam:  # batch_size >= 1
                p, s, im0 = path[i], '%g: ' % i, im0s[i]
            else:
                p, s, im0 = path, '', im0s
            save_path = str(Path(out) / Path(p).name)
            s += '%gx%g ' % img.shape[2:]  # print string
            if det is not None and len(det):
                # Rescale boxes from img_size to im0 size
                det[:, :4] = scale_coords(img.shape[2:], det[:, :4], im0.shape).round()
                # Print results
                for c in det[:, -1].unique():
                    n = (det[:, -1] == c).sum()  # detections per class
                    s += '%g %ss, ' % (n, names[int(c)])  # add to string
                # Write results
                for *xyxy, conf, cls in det:
                    if save_txt:  # Write to file
                        with open(save_path + '.txt', 'a') as file:
                            file.write(('%g ' * 6 + '\n') % (*xyxy, cls, conf))
                    if save_img or view_img:  # Add bbox to image
                        label = '%s %.2f' % (names[int(cls)], conf)
                        plot_one_box(xyxy, im0, label=label, color=colors[int(cls)])
            # Print time (inference + NMS)
            print('%sDone. (%.3fs)' % (s, t2 - t1))
            # Stream results
            if view_img:
                cv2.imshow(p, im0)
                if cv2.waitKey(1) == ord('q'):  # q to quit
                    raise StopIteration
            # Save results (image with detections)
            if save_img:
                if dataset.mode == 'images':
                    cv2.imwrite(save_path, im0)
                else:
                    if vid_path != save_path:  # new video
                        vid_path = save_path
                        if isinstance(vid_writer, cv2.VideoWriter):
                            vid_writer.release()  # release previous video writer
                        fps = vid_cap.get(cv2.CAP_PROP_FPS)
                        w = int(vid_cap.get(cv2.CAP_PROP_FRAME_WIDTH))
                        h = int(vid_cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
                        vid_writer = cv2.VideoWriter(save_path, cv2.VideoWriter_fourcc(*opt.fourcc), fps, (w, h))
                    vid_writer.write(im0)
    if save_txt or save_img:
        print('Results saved to %s' % os.getcwd() + os.sep + out)
        if platform == 'darwin':  # MacOS
            os.system('open ' + out + ' ' + save_path)
    print('Done. (%.3fs)' % (time.time() - t0))
# Command-line entry point: parse options, then run detection with autograd disabled.
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--cfg', type=str, default='cfg/yolov3-custom.cfg', help='*.cfg path')
    parser.add_argument('--names', type=str, default='data/customdata/custom.names', help='*.names path')
    parser.add_argument('--weights', type=str, default='weights/last.pt', help='weights path')
    parser.add_argument('--source', type=str, default='data/customdata/images', help='source')  # input file/folder, 0 for webcam
    parser.add_argument('--output', type=str, default='output', help='output folder')  # output folder
    parser.add_argument('--img-size', type=int, default=512, help='inference size (pixels)')
    parser.add_argument('--conf-thres', type=float, default=0.3, help='object confidence threshold')
    parser.add_argument('--iou-thres', type=float, default=0.6, help='IOU threshold for NMS')
    parser.add_argument('--fourcc', type=str, default='mp4v', help='output video codec (verify ffmpeg support)')
    parser.add_argument('--half', action='store_true', help='half precision FP16 inference')
    parser.add_argument('--device', default='', help='device id (i.e. 0 or 0,1) or cpu')
    parser.add_argument('--view-img', action='store_true', help='display results')
    parser.add_argument('--save-txt', action='store_true', help='save results to *.txt')
    parser.add_argument('--classes', nargs='+', type=int, help='filter by class')
    parser.add_argument('--agnostic-nms', action='store_true', help='class-agnostic NMS')
    parser.add_argument('--augment', action='store_true', help='augmented inference')
    opt = parser.parse_args()
    print(opt)
    # Inference only: no gradients needed.
    with torch.no_grad():
        detect()
| 42.743316
| 129
| 0.588515
|
import argparse
from sys import platform
from models import *
from utils.datasets import *
from utils.utils import *
def detect(save_img=False):
img_size = (320, 192) if ONNX_EXPORT else opt.img_size
out, source, weights, half, view_img, save_txt = opt.output, opt.source, opt.weights, opt.half, opt.view_img, opt.save_txt
webcam = source == '0' or source.startswith('rtsp') or source.startswith('http') or source.endswith('.txt')
device = torch_utils.select_device(device='cpu' if ONNX_EXPORT else opt.device)
if os.path.exists(out):
shutil.rmtree(out)
os.makedirs(out)
model = Darknet(opt.cfg, img_size)
attempt_download(weights)
if weights.endswith('.pt'):
model.load_state_dict(torch.load(weights, map_location=device)['model'])
else:
load_darknet_weights(model, weights)
classify = False
if classify:
modelc = torch_utils.load_classifier(name='resnet101', n=2)
modelc.load_state_dict(torch.load('weights/resnet101.pt', map_location=device)['model'])
modelc.to(device).eval()
model.to(device).eval()
if ONNX_EXPORT:
model.fuse()
img = torch.zeros((1, 3) + img_size)
f = opt.weights.replace(opt.weights.split('.')[-1], 'onnx')
torch.onnx.export(model, img, f, verbose=False, opset_version=11)
import onnx
model = onnx.load(f)
onnx.checker.check_model(model)
print(onnx.helper.printable_graph(model.graph))
return
half = half and device.type != 'cpu'
if half:
model.half()
vid_path, vid_writer = None, None
if webcam:
view_img = True
torch.backends.cudnn.benchmark = True
dataset = LoadStreams(source, img_size=img_size)
else:
save_img = True
dataset = LoadImages(source, img_size=img_size)
names = load_classes(opt.names)
colors = [[random.randint(0, 255) for _ in range(3)] for _ in range(len(names))]
t0 = time.time()
_ = model(torch.zeros((1, 3, img_size, img_size), device=device)) if device.type != 'cpu' else None
for path, img, im0s, vid_cap in dataset:
img = torch.from_numpy(img).to(device)
img = img.half() if half else img.float()
img /= 255.0
if img.ndimension() == 3:
img = img.unsqueeze(0)
t1 = torch_utils.time_synchronized()
pred = model(img, augment=opt.augment)[0]
t2 = torch_utils.time_synchronized()
if half:
pred = pred.float()
pred = non_max_suppression(pred, opt.conf_thres, opt.iou_thres,
multi_label=False, classes=opt.classes, agnostic=opt.agnostic_nms)
if classify:
pred = apply_classifier(pred, modelc, img, im0s)
for i, det in enumerate(pred):
if webcam:
p, s, im0 = path[i], '%g: ' % i, im0s[i]
else:
p, s, im0 = path, '', im0s
save_path = str(Path(out) / Path(p).name)
s += '%gx%g ' % img.shape[2:]
if det is not None and len(det):
det[:, :4] = scale_coords(img.shape[2:], det[:, :4], im0.shape).round()
for c in det[:, -1].unique():
n = (det[:, -1] == c).sum()
s += '%g %ss, ' % (n, names[int(c)])
for *xyxy, conf, cls in det:
if save_txt:
with open(save_path + '.txt', 'a') as file:
file.write(('%g ' * 6 + '\n') % (*xyxy, cls, conf))
if save_img or view_img:
label = '%s %.2f' % (names[int(cls)], conf)
plot_one_box(xyxy, im0, label=label, color=colors[int(cls)])
print('%sDone. (%.3fs)' % (s, t2 - t1))
if view_img:
cv2.imshow(p, im0)
if cv2.waitKey(1) == ord('q'):
raise StopIteration
if save_img:
if dataset.mode == 'images':
cv2.imwrite(save_path, im0)
else:
if vid_path != save_path:
vid_path = save_path
if isinstance(vid_writer, cv2.VideoWriter):
vid_writer.release()
fps = vid_cap.get(cv2.CAP_PROP_FPS)
w = int(vid_cap.get(cv2.CAP_PROP_FRAME_WIDTH))
h = int(vid_cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
vid_writer = cv2.VideoWriter(save_path, cv2.VideoWriter_fourcc(*opt.fourcc), fps, (w, h))
vid_writer.write(im0)
if save_txt or save_img:
print('Results saved to %s' % os.getcwd() + os.sep + out)
if platform == 'darwin':
os.system('open ' + out + ' ' + save_path)
print('Done. (%.3fs)' % (time.time() - t0))
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--cfg', type=str, default='cfg/yolov3-custom.cfg', help='*.cfg path')
parser.add_argument('--names', type=str, default='data/customdata/custom.names', help='*.names path')
parser.add_argument('--weights', type=str, default='weights/last.pt', help='weights path')
parser.add_argument('--source', type=str, default='data/customdata/images', help='source')
parser.add_argument('--output', type=str, default='output', help='output folder')
parser.add_argument('--img-size', type=int, default=512, help='inference size (pixels)')
parser.add_argument('--conf-thres', type=float, default=0.3, help='object confidence threshold')
parser.add_argument('--iou-thres', type=float, default=0.6, help='IOU threshold for NMS')
parser.add_argument('--fourcc', type=str, default='mp4v', help='output video codec (verify ffmpeg support)')
parser.add_argument('--half', action='store_true', help='half precision FP16 inference')
parser.add_argument('--device', default='', help='device id (i.e. 0 or 0,1) or cpu')
parser.add_argument('--view-img', action='store_true', help='display results')
parser.add_argument('--save-txt', action='store_true', help='save results to *.txt')
parser.add_argument('--classes', nargs='+', type=int, help='filter by class')
parser.add_argument('--agnostic-nms', action='store_true', help='class-agnostic NMS')
parser.add_argument('--augment', action='store_true', help='augmented inference')
opt = parser.parse_args()
print(opt)
with torch.no_grad():
detect()
| true
| true
|
f71877dabf3bdc4389a088d6b5ff9767e469ea5b
| 11,246
|
py
|
Python
|
pandas/tests/series/test_replace.py
|
kpflugshaupt/pandas
|
c9e3883c630c48b17218e6bcc5593720c1402bf1
|
[
"BSD-3-Clause"
] | 1
|
2019-05-19T13:44:03.000Z
|
2019-05-19T13:44:03.000Z
|
pandas/tests/series/test_replace.py
|
sanjusci/pandas
|
a1fee9199eba7ebf423880243936b9f1501d3d3a
|
[
"BSD-3-Clause"
] | null | null | null |
pandas/tests/series/test_replace.py
|
sanjusci/pandas
|
a1fee9199eba7ebf423880243936b9f1501d3d3a
|
[
"BSD-3-Clause"
] | 3
|
2018-01-08T08:40:55.000Z
|
2019-10-07T02:02:40.000Z
|
# coding=utf-8
# pylint: disable-msg=E1101,W0612
import numpy as np
import pytest
import pandas as pd
import pandas.util.testing as tm
from .common import TestData
class TestSeriesReplace(TestData):
    """Tests for Series.replace: scalar/list/dict replacers, dtype upcasting,
    inplace operation, categorical data and error conditions."""

    def test_replace(self):
        """Core replace behavior on float and object Series, incl. inplace."""
        N = 100
        ser = pd.Series(np.random.randn(N))
        ser[0:4] = np.nan
        ser[6:10] = 0
        # replace list with a single value
        ser.replace([np.nan], -1, inplace=True)
        exp = ser.fillna(-1)
        tm.assert_series_equal(ser, exp)
        rs = ser.replace(0., np.nan)
        ser[ser == 0.] = np.nan
        tm.assert_series_equal(rs, ser)
        ser = pd.Series(np.fabs(np.random.randn(N)), tm.makeDateIndex(N),
                        dtype=object)
        ser[:5] = np.nan
        ser[6:10] = 'foo'
        ser[20:30] = 'bar'
        # replace list with a single value
        rs = ser.replace([np.nan, 'foo', 'bar'], -1)
        assert (rs[:5] == -1).all()
        assert (rs[6:10] == -1).all()
        assert (rs[20:30] == -1).all()
        assert (pd.isna(ser[:5])).all()
        # replace with different values
        rs = ser.replace({np.nan: -1, 'foo': -2, 'bar': -3})
        assert (rs[:5] == -1).all()
        assert (rs[6:10] == -2).all()
        assert (rs[20:30] == -3).all()
        assert (pd.isna(ser[:5])).all()
        # replace with different values with 2 lists
        rs2 = ser.replace([np.nan, 'foo', 'bar'], [-1, -2, -3])
        tm.assert_series_equal(rs, rs2)
        # replace inplace
        ser.replace([np.nan, 'foo', 'bar'], -1, inplace=True)
        assert (ser[:5] == -1).all()
        assert (ser[6:10] == -1).all()
        assert (ser[20:30] == -1).all()
        ser = pd.Series([np.nan, 0, np.inf])
        tm.assert_series_equal(ser.replace(np.nan, 0), ser.fillna(0))
        ser = pd.Series([np.nan, 0, 'foo', 'bar', np.inf, None, pd.NaT])
        tm.assert_series_equal(ser.replace(np.nan, 0), ser.fillna(0))
        filled = ser.copy()
        filled[4] = 0
        tm.assert_series_equal(ser.replace(np.inf, 0), filled)
        ser = pd.Series(self.ts.index)
        tm.assert_series_equal(ser.replace(np.nan, 0), ser.fillna(0))
        # malformed: replacement list length must match
        msg = r"Replacement lists must match in length\. Expecting 3 got 2"
        with pytest.raises(ValueError, match=msg):
            ser.replace([1, 2, 3], [np.nan, 0])
        # make sure that we aren't just masking a TypeError because bools don't
        # implement indexing
        with pytest.raises(TypeError, match='Cannot compare types .+'):
            ser.replace([1, 2], [np.nan, 0])
        ser = pd.Series([0, 1, 2, 3, 4])
        result = ser.replace([0, 1, 2, 3, 4], [4, 3, 2, 1, 0])
        tm.assert_series_equal(result, pd.Series([4, 3, 2, 1, 0]))

    def test_replace_gh5319(self):
        """Replace without an explicit value falls back to pad/ffill; tz data."""
        # API change from 0.12?
        # GH 5319
        ser = pd.Series([0, np.nan, 2, 3, 4])
        expected = ser.ffill()
        result = ser.replace([np.nan])
        tm.assert_series_equal(result, expected)
        ser = pd.Series([0, np.nan, 2, 3, 4])
        expected = ser.ffill()
        result = ser.replace(np.nan)
        tm.assert_series_equal(result, expected)
        # GH 5797
        ser = pd.Series(pd.date_range('20130101', periods=5))
        expected = ser.copy()
        expected.loc[2] = pd.Timestamp('20120101')
        result = ser.replace({pd.Timestamp('20130103'):
                              pd.Timestamp('20120101')})
        tm.assert_series_equal(result, expected)
        result = ser.replace(pd.Timestamp('20130103'),
                             pd.Timestamp('20120101'))
        tm.assert_series_equal(result, expected)
        # GH 11792: Test with replacing NaT in a list with tz data
        ts = pd.Timestamp('2015/01/01', tz='UTC')
        s = pd.Series([pd.NaT, pd.Timestamp('2015/01/01', tz='UTC')])
        result = s.replace([np.nan, pd.NaT], pd.Timestamp.min)
        expected = pd.Series([pd.Timestamp.min, ts], dtype=object)
        tm.assert_series_equal(expected, result)

    def test_replace_with_single_list(self):
        """A bare list replacer pads matches; a bad fill method leaves data intact."""
        ser = pd.Series([0, 1, 2, 3, 4])
        result = ser.replace([1, 2, 3])
        tm.assert_series_equal(result, pd.Series([0, 0, 0, 0, 4]))
        s = ser.copy()
        s.replace([1, 2, 3], inplace=True)
        tm.assert_series_equal(s, pd.Series([0, 0, 0, 0, 4]))
        # make sure things don't get corrupted when fillna call fails
        s = ser.copy()
        msg = (r"Invalid fill method\. Expecting pad \(ffill\) or backfill"
               r" \(bfill\)\. Got crash_cymbal")
        with pytest.raises(ValueError, match=msg):
            s.replace([1, 2, 3], inplace=True, method='crash_cymbal')
        tm.assert_series_equal(s, ser)

    def test_replace_with_empty_list(self):
        """Empty-list replacers are no-ops; list replacement values are invalid."""
        # GH 21977
        s = pd.Series([[1], [2, 3], [], np.nan, [4]])
        expected = s
        result = s.replace([], np.nan)
        tm.assert_series_equal(result, expected)
        # GH 19266
        with pytest.raises(ValueError, match="cannot assign mismatch"):
            s.replace({np.nan: []})
        with pytest.raises(ValueError, match="cannot assign mismatch"):
            s.replace({np.nan: ['dummy', 'alt']})

    def test_replace_mixed_types(self):
        """Replacement values trigger the expected dtype upcasts."""
        s = pd.Series(np.arange(5), dtype='int64')

        def check_replace(to_rep, val, expected):
            # Exercise both the copying and the in-place code paths.
            sc = s.copy()
            r = s.replace(to_rep, val)
            sc.replace(to_rep, val, inplace=True)
            tm.assert_series_equal(expected, r)
            tm.assert_series_equal(expected, sc)
        # MUST upcast to float
        e = pd.Series([0., 1., 2., 3., 4.])
        tr, v = [3], [3.0]
        check_replace(tr, v, e)
        # MUST upcast to float
        e = pd.Series([0, 1, 2, 3.5, 4])
        tr, v = [3], [3.5]
        check_replace(tr, v, e)
        # casts to object
        e = pd.Series([0, 1, 2, 3.5, 'a'])
        tr, v = [3, 4], [3.5, 'a']
        check_replace(tr, v, e)
        # again casts to object
        e = pd.Series([0, 1, 2, 3.5, pd.Timestamp('20130101')])
        tr, v = [3, 4], [3.5, pd.Timestamp('20130101')]
        check_replace(tr, v, e)
        # casts to object
        e = pd.Series([0, 1, 2, 3.5, True], dtype='object')
        tr, v = [3, 4], [3.5, True]
        check_replace(tr, v, e)
        # test an object with dates + floats + integers + strings
        dr = pd.date_range('1/1/2001', '1/10/2001',
                           freq='D').to_series().reset_index(drop=True)
        result = dr.astype(object).replace(
            [dr[0], dr[1], dr[2]], [1.0, 2, 'a'])
        expected = pd.Series([1.0, 2, 'a'] + dr[3:].tolist(), dtype=object)
        tm.assert_series_equal(result, expected)

    def test_replace_bool_with_string_no_op(self):
        """Replacing a value absent from a bool Series changes nothing."""
        s = pd.Series([True, False, True])
        result = s.replace('fun', 'in-the-sun')
        tm.assert_series_equal(s, result)

    def test_replace_bool_with_string(self):
        """Replacing a bool with a string upcasts to object."""
        # nonexistent elements
        s = pd.Series([True, False, True])
        result = s.replace(True, '2u')
        expected = pd.Series(['2u', False, '2u'])
        tm.assert_series_equal(expected, result)

    def test_replace_bool_with_bool(self):
        """Bool-for-bool replacement keeps the bool dtype."""
        s = pd.Series([True, False, True])
        result = s.replace(True, False)
        expected = pd.Series([False] * len(s))
        tm.assert_series_equal(expected, result)

    def test_replace_with_dict_with_bool_keys(self):
        """Mixed str/bool dict keys against a bool Series raise TypeError."""
        s = pd.Series([True, False, True])
        with pytest.raises(TypeError, match='Cannot compare types .+'):
            s.replace({'asdf': 'asdb', True: 'yes'})

    def test_replace2(self):
        """List and dict replacers on an object Series with a date index."""
        N = 100
        ser = pd.Series(np.fabs(np.random.randn(N)), tm.makeDateIndex(N),
                        dtype=object)
        ser[:5] = np.nan
        ser[6:10] = 'foo'
        ser[20:30] = 'bar'
        # replace list with a single value
        rs = ser.replace([np.nan, 'foo', 'bar'], -1)
        assert (rs[:5] == -1).all()
        assert (rs[6:10] == -1).all()
        assert (rs[20:30] == -1).all()
        assert (pd.isna(ser[:5])).all()
        # replace with different values
        rs = ser.replace({np.nan: -1, 'foo': -2, 'bar': -3})
        assert (rs[:5] == -1).all()
        assert (rs[6:10] == -2).all()
        assert (rs[20:30] == -3).all()
        assert (pd.isna(ser[:5])).all()
        # replace with different values with 2 lists
        rs2 = ser.replace([np.nan, 'foo', 'bar'], [-1, -2, -3])
        tm.assert_series_equal(rs, rs2)
        # replace inplace
        ser.replace([np.nan, 'foo', 'bar'], -1, inplace=True)
        assert (ser[:5] == -1).all()
        assert (ser[6:10] == -1).all()
        assert (ser[20:30] == -1).all()

    def test_replace_with_empty_dictlike(self):
        """Empty dict/Series replacers are no-ops."""
        # GH 15289
        s = pd.Series(list('abcd'))
        tm.assert_series_equal(s, s.replace(dict()))
        tm.assert_series_equal(s, s.replace(pd.Series([])))

    def test_replace_string_with_number(self):
        """The string '2' must not match the integer 2."""
        # GH 15743
        s = pd.Series([1, 2, 3])
        result = s.replace('2', np.nan)
        expected = pd.Series([1, 2, 3])
        tm.assert_series_equal(expected, result)

    def test_replace_replacer_equals_replacement(self):
        """Dict replacers match against original values, enabling swaps."""
        # GH 20656
        # make sure all replacers are matching against original values
        s = pd.Series(['a', 'b'])
        expected = pd.Series(['b', 'a'])
        result = s.replace({'a': 'b', 'b': 'a'})
        tm.assert_series_equal(expected, result)

    def test_replace_unicode_with_number(self):
        """A unicode string replacer must not match numeric values."""
        # GH 15743
        s = pd.Series([1, 2, 3])
        result = s.replace('2', np.nan)
        expected = pd.Series([1, 2, 3])
        tm.assert_series_equal(expected, result)

    def test_replace_mixed_types_with_string(self):
        """List replacers mixing int and str match each by exact type."""
        # Testing mixed
        s = pd.Series([1, 2, 3, '4', 4, 5])
        result = s.replace([2, '4'], np.nan)
        expected = pd.Series([1, np.nan, 3, np.nan, 4, 5])
        tm.assert_series_equal(expected, result)

    @pytest.mark.parametrize("categorical, numeric", [
        (pd.Categorical('A', categories=['A', 'B']), [1]),
        (pd.Categorical(('A', ), categories=['A', 'B']), [1]),
        (pd.Categorical(('A', 'B'), categories=['A', 'B']), [1, 2]),
    ])
    def test_replace_categorical(self, categorical, numeric):
        """Dict replace on categorical data yields the mapped numeric values."""
        # GH 24971
        # Do not check if dtypes are equal due to a known issue that
        # Categorical.replace sometimes coerces to object (GH 23305)
        s = pd.Series(categorical)
        result = s.replace({'A': 1, 'B': 2})
        expected = pd.Series(numeric)
        tm.assert_series_equal(expected, result, check_dtype=False)

    def test_replace_with_no_overflowerror(self):
        """Huge integer-like strings cast to object instead of overflowing."""
        # GH 25616
        # casts to object without Exception from OverflowError
        s = pd.Series([0, 1, 2, 3, 4])
        result = s.replace([3], ['100000000000000000000'])
        expected = pd.Series([0, 1, 2, '100000000000000000000', 4])
        tm.assert_series_equal(result, expected)
        s = pd.Series([0, '100000000000000000000',
                       '100000000000000000001'])
        result = s.replace(['100000000000000000000'], [1])
        expected = pd.Series([0, 1, '100000000000000000001'])
        tm.assert_series_equal(result, expected)
| 36.160772
| 79
| 0.55273
|
import numpy as np
import pytest
import pandas as pd
import pandas.util.testing as tm
from .common import TestData
class TestSeriesReplace(TestData):
    """Tests for :meth:`Series.replace` covering scalar/list/dict replacers,
    in-place operation, dtype upcasting, and error handling."""
    def test_replace(self):
        """Core replace behaviour: NaN replacement, scalar->NaN, list and
        dict replacers, inplace, and length-mismatch / type errors."""
        N = 100
        ser = pd.Series(np.random.randn(N))
        ser[0:4] = np.nan
        ser[6:10] = 0
        # replacing NaN with a scalar is equivalent to fillna
        ser.replace([np.nan], -1, inplace=True)
        exp = ser.fillna(-1)
        tm.assert_series_equal(ser, exp)
        rs = ser.replace(0., np.nan)
        ser[ser == 0.] = np.nan
        tm.assert_series_equal(rs, ser)
        # object-dtype series mixing NaN, floats and strings
        ser = pd.Series(np.fabs(np.random.randn(N)), tm.makeDateIndex(N),
                        dtype=object)
        ser[:5] = np.nan
        ser[6:10] = 'foo'
        ser[20:30] = 'bar'
        # replace list with a single value
        rs = ser.replace([np.nan, 'foo', 'bar'], -1)
        assert (rs[:5] == -1).all()
        assert (rs[6:10] == -1).all()
        assert (rs[20:30] == -1).all()
        # original series must be left untouched (not inplace)
        assert (pd.isna(ser[:5])).all()
        # dict replacer maps each value to its own replacement
        rs = ser.replace({np.nan: -1, 'foo': -2, 'bar': -3})
        assert (rs[:5] == -1).all()
        assert (rs[6:10] == -2).all()
        assert (rs[20:30] == -3).all()
        assert (pd.isna(ser[:5])).all()
        # two parallel lists behave like the dict form above
        rs2 = ser.replace([np.nan, 'foo', 'bar'], [-1, -2, -3])
        tm.assert_series_equal(rs, rs2)
        ser.replace([np.nan, 'foo', 'bar'], -1, inplace=True)
        assert (ser[:5] == -1).all()
        assert (ser[6:10] == -1).all()
        assert (ser[20:30] == -1).all()
        # replace(np.nan, 0) should match fillna(0), even with inf present
        ser = pd.Series([np.nan, 0, np.inf])
        tm.assert_series_equal(ser.replace(np.nan, 0), ser.fillna(0))
        ser = pd.Series([np.nan, 0, 'foo', 'bar', np.inf, None, pd.NaT])
        tm.assert_series_equal(ser.replace(np.nan, 0), ser.fillna(0))
        filled = ser.copy()
        filled[4] = 0
        tm.assert_series_equal(ser.replace(np.inf, 0), filled)
        # self.ts is supplied by the TestData fixture mixin
        ser = pd.Series(self.ts.index)
        tm.assert_series_equal(ser.replace(np.nan, 0), ser.fillna(0))
        # malformed requests: mismatched list lengths, incomparable types
        msg = r"Replacement lists must match in length\. Expecting 3 got 2"
        with pytest.raises(ValueError, match=msg):
            ser.replace([1, 2, 3], [np.nan, 0])
        with pytest.raises(TypeError, match='Cannot compare types .+'):
            ser.replace([1, 2], [np.nan, 0])
        ser = pd.Series([0, 1, 2, 3, 4])
        result = ser.replace([0, 1, 2, 3, 4], [4, 3, 2, 1, 0])
        tm.assert_series_equal(result, pd.Series([4, 3, 2, 1, 0]))
    def test_replace_gh5319(self):
        """GH 5319: replace with no value falls back to pad/ffill; also
        Timestamp replacement (scalar and dict) and tz-aware NaT handling."""
        ser = pd.Series([0, np.nan, 2, 3, 4])
        expected = ser.ffill()
        result = ser.replace([np.nan])
        tm.assert_series_equal(result, expected)
        ser = pd.Series([0, np.nan, 2, 3, 4])
        expected = ser.ffill()
        result = ser.replace(np.nan)
        tm.assert_series_equal(result, expected)
        ser = pd.Series(pd.date_range('20130101', periods=5))
        expected = ser.copy()
        expected.loc[2] = pd.Timestamp('20120101')
        result = ser.replace({pd.Timestamp('20130103'):
                              pd.Timestamp('20120101')})
        tm.assert_series_equal(result, expected)
        result = ser.replace(pd.Timestamp('20130103'),
                             pd.Timestamp('20120101'))
        tm.assert_series_equal(result, expected)
        # tz-aware: replacing NaT keeps object dtype
        ts = pd.Timestamp('2015/01/01', tz='UTC')
        s = pd.Series([pd.NaT, pd.Timestamp('2015/01/01', tz='UTC')])
        result = s.replace([np.nan, pd.NaT], pd.Timestamp.min)
        expected = pd.Series([pd.Timestamp.min, ts], dtype=object)
        tm.assert_series_equal(expected, result)
    def test_replace_with_single_list(self):
        """A single list with no value pads (ffill); an invalid method raises
        and must leave the series unmodified."""
        ser = pd.Series([0, 1, 2, 3, 4])
        result = ser.replace([1, 2, 3])
        tm.assert_series_equal(result, pd.Series([0, 0, 0, 0, 4]))
        s = ser.copy()
        s.replace([1, 2, 3], inplace=True)
        tm.assert_series_equal(s, pd.Series([0, 0, 0, 0, 4]))
        # make sure things don't get corrupted when fillna call fails
        s = ser.copy()
        msg = (r"Invalid fill method\. Expecting pad \(ffill\) or backfill"
               r" \(bfill\)\. Got crash_cymbal")
        with pytest.raises(ValueError, match=msg):
            s.replace([1, 2, 3], inplace=True, method='crash_cymbal')
        tm.assert_series_equal(s, ser)
    def test_replace_with_empty_list(self):
        """Empty list replacer is a no-op; list-valued dict replacements raise."""
        # GH 21977
        s = pd.Series([[1], [2, 3], [], np.nan, [4]])
        expected = s
        result = s.replace([], np.nan)
        tm.assert_series_equal(result, expected)
        # GH 19266
        with pytest.raises(ValueError, match="cannot assign mismatch"):
            s.replace({np.nan: []})
        with pytest.raises(ValueError, match="cannot assign mismatch"):
            s.replace({np.nan: ['dummy', 'alt']})
    def test_replace_mixed_types(self):
        """Replacement values of wider types force the expected upcast
        (int -> float -> object)."""
        s = pd.Series(np.arange(5), dtype='int64')
        def check_replace(to_rep, val, expected):
            # verify both the returned copy and the inplace path
            sc = s.copy()
            r = s.replace(to_rep, val)
            sc.replace(to_rep, val, inplace=True)
            tm.assert_series_equal(expected, r)
            tm.assert_series_equal(expected, sc)
        # MUST upcast to float
        e = pd.Series([0., 1., 2., 3., 4.])
        tr, v = [3], [3.0]
        check_replace(tr, v, e)
        # MUST upcast to float
        e = pd.Series([0, 1, 2, 3.5, 4])
        tr, v = [3], [3.5]
        check_replace(tr, v, e)
        # casts to object
        e = pd.Series([0, 1, 2, 3.5, 'a'])
        tr, v = [3, 4], [3.5, 'a']
        check_replace(tr, v, e)
        # again casts to object
        e = pd.Series([0, 1, 2, 3.5, pd.Timestamp('20130101')])
        tr, v = [3, 4], [3.5, pd.Timestamp('20130101')]
        check_replace(tr, v, e)
        # casts to object
        e = pd.Series([0, 1, 2, 3.5, True], dtype='object')
        tr, v = [3, 4], [3.5, True]
        check_replace(tr, v, e)
        # test an object with dates + floats + integers + strings
        dr = pd.date_range('1/1/2001', '1/10/2001',
                           freq='D').to_series().reset_index(drop=True)
        result = dr.astype(object).replace(
            [dr[0], dr[1], dr[2]], [1.0, 2, 'a'])
        expected = pd.Series([1.0, 2, 'a'] + dr[3:].tolist(), dtype=object)
        tm.assert_series_equal(result, expected)
    def test_replace_bool_with_string_no_op(self):
        """Replacing a string absent from a bool series changes nothing."""
        s = pd.Series([True, False, True])
        result = s.replace('fun', 'in-the-sun')
        tm.assert_series_equal(s, result)
    def test_replace_bool_with_string(self):
        """Replacing a bool with a string upcasts to object."""
        # nonexistent elements
        s = pd.Series([True, False, True])
        result = s.replace(True, '2u')
        expected = pd.Series(['2u', False, '2u'])
        tm.assert_series_equal(expected, result)
    def test_replace_bool_with_bool(self):
        """bool -> bool replacement keeps the bool dtype."""
        s = pd.Series([True, False, True])
        result = s.replace(True, False)
        expected = pd.Series([False] * len(s))
        tm.assert_series_equal(expected, result)
    def test_replace_with_dict_with_bool_keys(self):
        """Mixed str/bool dict keys against a bool series raise TypeError."""
        s = pd.Series([True, False, True])
        with pytest.raises(TypeError, match='Cannot compare types .+'):
            s.replace({'asdf': 'asdb', True: 'yes'})
    def test_replace2(self):
        """Object-dtype series: list, dict and inplace replace variants."""
        N = 100
        ser = pd.Series(np.fabs(np.random.randn(N)), tm.makeDateIndex(N),
                        dtype=object)
        ser[:5] = np.nan
        ser[6:10] = 'foo'
        ser[20:30] = 'bar'
        # replace list with a single value
        rs = ser.replace([np.nan, 'foo', 'bar'], -1)
        assert (rs[:5] == -1).all()
        assert (rs[6:10] == -1).all()
        assert (rs[20:30] == -1).all()
        assert (pd.isna(ser[:5])).all()
        # replace with different values
        rs = ser.replace({np.nan: -1, 'foo': -2, 'bar': -3})
        assert (rs[:5] == -1).all()
        assert (rs[6:10] == -2).all()
        assert (rs[20:30] == -3).all()
        assert (pd.isna(ser[:5])).all()
        # replace with different values with 2 lists
        rs2 = ser.replace([np.nan, 'foo', 'bar'], [-1, -2, -3])
        tm.assert_series_equal(rs, rs2)
        # replace inplace
        ser.replace([np.nan, 'foo', 'bar'], -1, inplace=True)
        assert (ser[:5] == -1).all()
        assert (ser[6:10] == -1).all()
        assert (ser[20:30] == -1).all()
    def test_replace_with_empty_dictlike(self):
        """An empty dict or empty Series replacer is a no-op."""
        # GH 15289
        s = pd.Series(list('abcd'))
        tm.assert_series_equal(s, s.replace(dict()))
        tm.assert_series_equal(s, s.replace(pd.Series([])))
    def test_replace_string_with_number(self):
        """The string '2' must not match the integer 2."""
        # GH 15743
        s = pd.Series([1, 2, 3])
        result = s.replace('2', np.nan)
        expected = pd.Series([1, 2, 3])
        tm.assert_series_equal(expected, result)
    def test_replace_replacer_equals_replacement(self):
        """Dict replacements match against the ORIGINAL values, so a swap
        mapping does not chain (a->b then b->a)."""
        # GH 20656
        # make sure all replacers are matching against original values
        s = pd.Series(['a', 'b'])
        expected = pd.Series(['b', 'a'])
        result = s.replace({'a': 'b', 'b': 'a'})
        tm.assert_series_equal(expected, result)
    def test_replace_unicode_with_number(self):
        """Unicode variant of the string-vs-number mismatch check."""
        # GH 15743
        s = pd.Series([1, 2, 3])
        result = s.replace('2', np.nan)
        expected = pd.Series([1, 2, 3])
        tm.assert_series_equal(expected, result)
    def test_replace_mixed_types_with_string(self):
        """Mixed int/str series: each replacer only matches its own type."""
        # Testing mixed
        s = pd.Series([1, 2, 3, '4', 4, 5])
        result = s.replace([2, '4'], np.nan)
        expected = pd.Series([1, np.nan, 3, np.nan, 4, 5])
        tm.assert_series_equal(expected, result)
    @pytest.mark.parametrize("categorical, numeric", [
        (pd.Categorical('A', categories=['A', 'B']), [1]),
        (pd.Categorical(('A', ), categories=['A', 'B']), [1]),
        (pd.Categorical(('A', 'B'), categories=['A', 'B']), [1, 2]),
    ])
    def test_replace_categorical(self, categorical, numeric):
        """Replacing categorical values via a dict produces numeric output."""
        # GH 24971
        # Do not check if dtypes are equal due to a known issue that
        # Categorical.replace sometimes coerces to object (GH 23305)
        s = pd.Series(categorical)
        result = s.replace({'A': 1, 'B': 2})
        expected = pd.Series(numeric)
        tm.assert_series_equal(expected, result, check_dtype=False)
    def test_replace_with_no_overflowerror(self):
        """Huge-integer strings must not raise OverflowError (GH 25616)."""
        # GH 25616
        # casts to object without Exception from OverflowError
        s = pd.Series([0, 1, 2, 3, 4])
        result = s.replace([3], ['100000000000000000000'])
        expected = pd.Series([0, 1, 2, '100000000000000000000', 4])
        tm.assert_series_equal(result, expected)
        s = pd.Series([0, '100000000000000000000',
                       '100000000000000000001'])
        result = s.replace(['100000000000000000000'], [1])
        expected = pd.Series([0, 1, '100000000000000000001'])
        tm.assert_series_equal(result, expected)
| true
| true
|
f71878ecaeeecc9487d63ee73a12842ed9ee5b34
| 73,406
|
py
|
Python
|
src/transformers/models/pegasus/modeling_tf_pegasus.py
|
Shashi456/transformers
|
0f43e742d908772733870730dbddd8e00e0253ef
|
[
"Apache-2.0"
] | 17
|
2020-10-13T06:53:25.000Z
|
2022-02-22T06:12:17.000Z
|
src/transformers/models/pegasus/modeling_tf_pegasus.py
|
Shashi456/transformers
|
0f43e742d908772733870730dbddd8e00e0253ef
|
[
"Apache-2.0"
] | 13
|
2020-10-13T11:41:11.000Z
|
2022-02-16T14:13:31.000Z
|
src/transformers/models/pegasus/modeling_tf_pegasus.py
|
Shashi456/transformers
|
0f43e742d908772733870730dbddd8e00e0253ef
|
[
"Apache-2.0"
] | 13
|
2020-10-04T05:06:00.000Z
|
2022-02-09T01:14:59.000Z
|
# coding=utf-8
# Copyright 2021, Google Inc. and The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" TF 2.0 Pegasus model. """
import random
from typing import Dict, Optional, Tuple, Union
import numpy as np
import tensorflow as tf
from ...activations_tf import get_tf_activation
from ...file_utils import (
add_code_sample_docstrings,
add_end_docstrings,
add_start_docstrings,
add_start_docstrings_to_model_forward,
replace_return_docstrings,
)
from ...modeling_tf_outputs import (
TFBaseModelOutput,
TFBaseModelOutputWithPastAndCrossAttentions,
TFSeq2SeqLMOutput,
TFSeq2SeqModelOutput,
)
# Public API
from ...modeling_tf_utils import (
DUMMY_INPUTS,
TFCausalLanguageModelingLoss,
TFPreTrainedModel,
TFSharedEmbeddings,
TFWrappedEmbeddings,
input_processing,
keras_serializable,
shape_list,
)
from ...utils import logging
from .configuration_pegasus import PegasusConfig
# Module-level logger for this file.
logger = logging.get_logger(__name__)
# Checkpoint / config / tokenizer names substituted into the shared docstrings.
_CHECKPOINT_FOR_DOC = "google/pegasus-large"
_CONFIG_FOR_DOC = "PegasusConfig"
_TOKENIZER_FOR_DOC = "PegasusTokenizer"
# Additive attention-mask bias: effectively -inf after softmax.
LARGE_NEGATIVE = -1e8
# Copied from transformers.models.bart.modeling_tf_bart.shift_tokens_right
def shift_tokens_right(input_ids: tf.Tensor, pad_token_id: int, decoder_start_token_id: int):
    """Shift label ids one position to the right and prepend ``decoder_start_token_id``,
    producing decoder inputs for teacher forcing. ``-100`` (ignored-label) positions
    are rewritten to ``pad_token_id`` so the result contains only valid token ids."""
    start_tokens = tf.fill((shape_list(input_ids)[0], 1), decoder_start_token_id)
    shifted_input_ids = tf.concat([start_tokens, input_ids[:, :-1]], -1)
    # replace possible -100 values in labels by `pad_token_id`
    shifted_input_ids = tf.where(
        shifted_input_ids == -100, tf.fill(shape_list(shifted_input_ids), pad_token_id), shifted_input_ids
    )
    if tf.executing_eagerly():
        # "Verify that `labels` has only positive values and -100"
        assert_gte0 = tf.debugging.assert_greater_equal(shifted_input_ids, tf.constant(0))
        # Make sure the assertion op is called by wrapping the result in an identity no-op
        with tf.control_dependencies([assert_gte0]):
            shifted_input_ids = tf.identity(shifted_input_ids)
    return shifted_input_ids
# Copied from transformers.models.bart.modeling_tf_bart._make_causal_mask
def _make_causal_mask(input_ids_shape: tf.TensorShape, past_key_values_length: int = 0):
    """
    Make causal mask used for bi-directional self-attention.
    """
    bsz, tgt_len = input_ids_shape
    # Start from an all-LARGE_NEGATIVE square, then zero out the lower triangle
    # (each position may attend to itself and everything before it).
    mask = tf.ones((tgt_len, tgt_len)) * LARGE_NEGATIVE
    mask_cond = tf.range(shape_list(mask)[-1])
    mask = tf.where(mask_cond < tf.reshape(mask_cond + 1, (shape_list(mask)[-1], 1)), 0.0, mask)
    if past_key_values_length > 0:
        # Cached (past) positions are always attendable: prepend zero columns.
        mask = tf.concat([tf.zeros((tgt_len, past_key_values_length)), mask], axis=-1)
    # Broadcast to (bsz, 1, tgt_len, tgt_len + past_key_values_length).
    return tf.tile(mask[None, None, :, :], (bsz, 1, 1, 1))
# Copied from transformers.models.bart.modeling_tf_bart._expand_mask
def _expand_mask(mask: tf.Tensor, tgt_len: Optional[int] = None, past_key_values_length: int = 0):
    """
    Expands attention_mask from `[bsz, seq_len]` to `[bsz, 1, tgt_seq_len, src_seq_len]`.
    """
    # NOTE(review): `past_key_values_length` is unused in this function — presumably
    # kept for signature parity with the PyTorch implementation; confirm upstream.
    src_len = shape_list(mask)[1]
    tgt_len = tgt_len if tgt_len is not None else src_len
    one_cst = tf.constant(1.0)
    mask = tf.cast(mask, dtype=one_cst.dtype)
    expanded_mask = tf.tile(mask[:, None, None, :], (1, 1, tgt_len, 1))
    # Invert: 1 (attend) -> additive bias 0; 0 (masked) -> additive bias LARGE_NEGATIVE.
    return (one_cst - expanded_mask) * LARGE_NEGATIVE
# Copied from transformers.models.marian.modeling_tf_marian.TFMarianSinusoidalPositionalEmbedding with Marian->Pegasus
class TFPegasusSinusoidalPositionalEmbedding(tf.keras.layers.Layer):
    """This module produces sinusoidal positional embeddings of any length."""
    def __init__(self, num_positions: int, embedding_dim: int, **kwargs):
        """Store dimensions; the sin/cos split requires an even embedding_dim."""
        super().__init__(**kwargs)
        if embedding_dim % 2 != 0:
            raise NotImplementedError(f"odd embedding_dim {embedding_dim} not supported")
        self.embedding_dim = embedding_dim
        self.num_positions = num_positions
    def build(self, input_shape: tf.TensorShape):
        """
        Build shared token embedding layer Shared weights logic adapted from
        https://github.com/tensorflow/models/blob/a009f4fb9d2fc4949e32192a944688925ef78659/official/transformer/v2/embedding_layer.py#L24
        """
        # Create the variable first, then overwrite it with the (non-trainable
        # in spirit) sinusoidal table computed in numpy.
        weight = self._init_weight(self.num_positions, self.embedding_dim)
        self.weight = self.add_weight(
            name="embeddings",
            shape=[self.num_positions, self.embedding_dim],
        )
        weight = tf.cast(weight, dtype=self.weight.dtype)
        self.weight.assign(weight)
        super().build(input_shape)
    @staticmethod
    def _init_weight(n_pos: int, dim: int):
        """
        Identical to the XLM create_sinusoidal_embeddings except features are not interleaved. The cos features are in
        the 2nd half of the vector. [dim // 2:]
        """
        position_enc = np.array(
            [[pos / np.power(10000, 2 * (j // 2) / dim) for j in range(dim)] for pos in range(n_pos)]
        )
        # index 0 is all zero
        position_enc[:, 0 : dim // 2] = np.sin(position_enc[:, 0::2])
        position_enc[:, dim // 2 :] = np.cos(position_enc[:, 1::2])
        # convert to tensor
        table = tf.convert_to_tensor(position_enc)
        tf.stop_gradient(table)
        return table
    def call(self, input_shape: tf.TensorShape, past_key_values_length: int = 0):
        """Input is expected to be of size [bsz x seqlen]."""
        bsz, seq_len = input_shape[:2]
        # Offset by the cache length so incremental decoding picks up where it left off.
        positions = tf.range(past_key_values_length, seq_len + past_key_values_length, delta=1, name="range")
        return tf.gather(self.weight, positions)
# Copied from transformers.models.bart.modeling_tf_bart.TFBartAttention with Bart->Pegasus
class TFPegasusAttention(tf.keras.layers.Layer):
    """Multi-headed attention from "Attention Is All You Need"""
    def __init__(
        self,
        embed_dim: int,
        num_heads: int,
        dropout: float = 0.0,
        is_decoder: bool = False,
        bias: bool = True,
        **kwargs,
    ):
        """Create the q/k/v/output projections; `is_decoder` enables key/value caching."""
        super().__init__(**kwargs)
        self.embed_dim = embed_dim
        self.num_heads = num_heads
        self.dropout = tf.keras.layers.Dropout(dropout)
        self.head_dim = embed_dim // num_heads
        assert self.head_dim * num_heads == self.embed_dim, "embed_dim must be divisible by num_heads"
        # Standard 1/sqrt(head_dim) scaling applied to the query projection.
        self.scaling = self.head_dim ** -0.5
        self.is_decoder = is_decoder
        self.k_proj = tf.keras.layers.Dense(embed_dim, use_bias=bias, name="k_proj")
        self.q_proj = tf.keras.layers.Dense(embed_dim, use_bias=bias, name="q_proj")
        self.v_proj = tf.keras.layers.Dense(embed_dim, use_bias=bias, name="v_proj")
        self.out_proj = tf.keras.layers.Dense(embed_dim, use_bias=bias, name="out_proj")
    def _shape(self, tensor: tf.Tensor, seq_len: int, bsz: int):
        # (bsz, seq_len, embed_dim) -> (bsz, num_heads, seq_len, head_dim)
        return tf.transpose(tf.reshape(tensor, (bsz, seq_len, self.num_heads, self.head_dim)), (0, 2, 1, 3))
    def call(
        self,
        hidden_states: tf.Tensor,
        key_value_states: Optional[tf.Tensor] = None,
        past_key_value: Optional[Tuple[Tuple[tf.Tensor]]] = None,
        attention_mask: Optional[tf.Tensor] = None,
        layer_head_mask: Optional[tf.Tensor] = None,
        training=False,
    ) -> Tuple[tf.Tensor, Optional[tf.Tensor]]:
        """Input shape: Batch x Time x Channel"""
        # if key_value_states are provided this layer is used as a cross-attention layer
        # for the decoder
        is_cross_attention = key_value_states is not None
        bsz, tgt_len, embed_dim = shape_list(hidden_states)
        # get query proj
        query_states = self.q_proj(hidden_states) * self.scaling
        # get key, value proj
        if is_cross_attention and past_key_value is not None:
            # reuse k,v, cross_attentions
            key_states = past_key_value[0]
            value_states = past_key_value[1]
        elif is_cross_attention:
            # cross_attentions
            key_states = self._shape(self.k_proj(key_value_states), -1, bsz)
            value_states = self._shape(self.v_proj(key_value_states), -1, bsz)
        elif past_key_value is not None:
            # reuse k, v, self_attention
            key_states = self._shape(self.k_proj(hidden_states), -1, bsz)
            value_states = self._shape(self.v_proj(hidden_states), -1, bsz)
            key_states = tf.concat([past_key_value[0], key_states], axis=2)
            value_states = tf.concat([past_key_value[1], value_states], axis=2)
        else:
            # self_attention
            key_states = self._shape(self.k_proj(hidden_states), -1, bsz)
            value_states = self._shape(self.v_proj(hidden_states), -1, bsz)
        if self.is_decoder:
            # if cross_attention save Tuple(tf.Tensor, tf.Tensor) of all cross attention key/value_states.
            # Further calls to cross_attention layer can then reuse all cross-attention
            # key/value_states (first "if" case)
            # if uni-directional self-attention (decoder) save Tuple(tf.Tensor, tf.Tensor) of
            # all previous decoder key/value_states. Further calls to uni-directional self-attention
            # can concat previous decoder key/value_states to current projected key/value_states (third "elif" case)
            # if encoder bi-directional self-attention `past_key_value` is always `None`
            past_key_value = (key_states, value_states)
        # Fold heads into the batch dimension for a single batched matmul.
        proj_shape = (bsz * self.num_heads, -1, self.head_dim)
        query_states = tf.reshape(self._shape(query_states, tgt_len, bsz), proj_shape)
        key_states = tf.reshape(key_states, proj_shape)
        value_states = tf.reshape(value_states, proj_shape)
        src_len = shape_list(key_states)[1]
        attn_weights = tf.matmul(query_states, key_states, transpose_b=True)
        # The tf.debugging asserts are not compliant with XLA then they
        # have to be disabled in other modes than eager.
        if tf.executing_eagerly():
            tf.debugging.assert_equal(
                shape_list(attn_weights),
                [bsz * self.num_heads, tgt_len, src_len],
                message=f"Attention weights should be of size {(bsz * self.num_heads, tgt_len, src_len)}, but is {shape_list(attn_weights)}",
            )
        if attention_mask is not None:
            # The tf.debugging asserts are not compliant with XLA then they
            # have to be disabled in other modes than eager.
            if tf.executing_eagerly():
                tf.debugging.assert_equal(
                    shape_list(attention_mask),
                    [bsz, 1, tgt_len, src_len],
                    message=f"Attention mask should be of size {(bsz, 1, tgt_len, src_len)}, but is {shape_list(attention_mask)}",
                )
            # Additive mask (0 or LARGE_NEGATIVE) applied before the softmax.
            attention_mask = tf.cast(attention_mask, dtype=attn_weights.dtype)
            attn_weights = tf.reshape(attn_weights, (bsz, self.num_heads, tgt_len, src_len)) + attention_mask
            attn_weights = tf.reshape(attn_weights, (bsz * self.num_heads, tgt_len, src_len))
        attn_weights = tf.nn.softmax(attn_weights, axis=-1)
        if layer_head_mask is not None:
            # The tf.debugging asserts are not compliant with XLA then they
            # have to be disabled in other modes than eager.
            if tf.executing_eagerly():
                tf.debugging.assert_equal(
                    shape_list(layer_head_mask),
                    [self.num_heads],
                    message=f"Head mask for a single layer should be of size {(self.num_heads)}, but is {shape_list(layer_head_mask)}",
                )
            # Zero out (or scale) entire heads via broadcasting over (tgt, src).
            attn_weights = tf.reshape(layer_head_mask, (1, -1, 1, 1)) * tf.reshape(
                attn_weights, (bsz, self.num_heads, tgt_len, src_len)
            )
            attn_weights = tf.reshape(attn_weights, (bsz * self.num_heads, tgt_len, src_len))
        attn_probs = self.dropout(attn_weights, training=training)
        attn_output = tf.matmul(attn_probs, value_states)
        # The tf.debugging asserts are not compliant with XLA then they
        # have to be disabled in other modes than eager.
        if tf.executing_eagerly():
            tf.debugging.assert_equal(
                shape_list(attn_output),
                [bsz * self.num_heads, tgt_len, self.head_dim],
                message=f"`attn_output` should be of size {(bsz, self.num_heads, tgt_len, self.head_dim)}, but is {shape_list(attn_output)}",
            )
        # Undo the head folding: back to (bsz, tgt_len, embed_dim).
        attn_output = tf.transpose(
            tf.reshape(attn_output, (bsz, self.num_heads, tgt_len, self.head_dim)), (0, 2, 1, 3)
        )
        attn_output = tf.reshape(attn_output, (bsz, tgt_len, embed_dim))
        attn_output = self.out_proj(attn_output)
        # Note: returns pre-dropout weights, reshaped per head for introspection.
        attn_weights: tf.Tensor = tf.reshape(attn_weights, (bsz, self.num_heads, tgt_len, src_len))
        return attn_output, attn_weights, past_key_value
# Copied from transformers.models.mbart.modeling_tf_mbart.TFMBartEncoderLayer with MBart->Pegasus
class TFPegasusEncoderLayer(tf.keras.layers.Layer):
    # Pre-layer-norm transformer encoder block: self-attention + feed-forward,
    # each wrapped in LayerNorm -> sublayer -> dropout -> residual.
    def __init__(self, config: PegasusConfig, **kwargs):
        super().__init__(**kwargs)
        self.embed_dim = config.d_model
        self.self_attn = TFPegasusAttention(
            self.embed_dim, config.encoder_attention_heads, dropout=config.attention_dropout, name="self_attn"
        )
        self.self_attn_layer_norm = tf.keras.layers.LayerNormalization(epsilon=1e-5, name="self_attn_layer_norm")
        self.dropout = tf.keras.layers.Dropout(config.dropout)
        self.activation_fn = get_tf_activation(config.activation_function)
        self.activation_dropout = tf.keras.layers.Dropout(config.activation_dropout)
        self.fc1 = tf.keras.layers.Dense(config.encoder_ffn_dim, name="fc1")
        self.fc2 = tf.keras.layers.Dense(self.embed_dim, name="fc2")
        self.final_layer_norm = tf.keras.layers.LayerNormalization(epsilon=1e-5, name="final_layer_norm")
    def call(self, hidden_states: tf.Tensor, attention_mask: tf.Tensor, layer_head_mask: tf.Tensor, training=False):
        """
        Args:
            hidden_states (:obj:`tf.Tensor`): input to the layer of shape `(seq_len, batch, embed_dim)`
            attention_mask (:obj:`tf.Tensor`): attention mask of size
                `(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values.
            layer_head_mask (:obj:`tf.Tensor`): mask for attention heads in a given layer of size
                `(encoder_attention_heads,)`

        Returns:
            Tuple of the layer output and the self-attention weights.
        """
        residual = hidden_states
        # Pre-norm: LayerNorm is applied before the attention sublayer.
        hidden_states = self.self_attn_layer_norm(hidden_states)
        hidden_states, self_attn_weights, _ = self.self_attn(
            hidden_states=hidden_states, attention_mask=attention_mask, layer_head_mask=layer_head_mask
        )
        # The tf.debugging asserts are not compliant with XLA then they
        # have to be disabled in other modes than eager.
        if tf.executing_eagerly():
            tf.debugging.assert_equal(
                shape_list(hidden_states),
                shape_list(residual),
                message=f"Self attn modified the shape of query {shape_list(residual)} to {shape_list(hidden_states)}",
            )
        hidden_states = self.dropout(hidden_states, training=training)
        hidden_states = residual + hidden_states
        # Feed-forward sublayer, also pre-norm with residual connection.
        residual = hidden_states
        hidden_states = self.final_layer_norm(hidden_states)
        hidden_states = self.activation_fn(self.fc1(hidden_states))
        hidden_states = self.activation_dropout(hidden_states, training=training)
        hidden_states = self.fc2(hidden_states)
        hidden_states = self.dropout(hidden_states, training=training)
        hidden_states = residual + hidden_states
        return hidden_states, self_attn_weights
# Copied from transformers.models.mbart.modeling_tf_mbart.TFMBartDecoderLayer with MBart->Pegasus
class TFPegasusDecoderLayer(tf.keras.layers.Layer):
    # Pre-layer-norm transformer decoder block: masked self-attention,
    # encoder-decoder cross-attention, then feed-forward; all residual.
    def __init__(self, config: PegasusConfig, **kwargs):
        super().__init__(**kwargs)
        self.embed_dim = config.d_model
        self.self_attn = TFPegasusAttention(
            embed_dim=self.embed_dim,
            num_heads=config.decoder_attention_heads,
            dropout=config.attention_dropout,
            name="self_attn",
            is_decoder=True,
        )
        self.dropout = tf.keras.layers.Dropout(config.dropout)
        self.activation_fn = get_tf_activation(config.activation_function)
        self.activation_dropout = tf.keras.layers.Dropout(config.activation_dropout)
        self.self_attn_layer_norm = tf.keras.layers.LayerNormalization(epsilon=1e-5, name="self_attn_layer_norm")
        self.encoder_attn = TFPegasusAttention(
            self.embed_dim,
            config.decoder_attention_heads,
            dropout=config.attention_dropout,
            name="encoder_attn",
            is_decoder=True,
        )
        self.encoder_attn_layer_norm = tf.keras.layers.LayerNormalization(epsilon=1e-5, name="encoder_attn_layer_norm")
        self.fc1 = tf.keras.layers.Dense(config.decoder_ffn_dim, name="fc1")
        self.fc2 = tf.keras.layers.Dense(self.embed_dim, name="fc2")
        self.final_layer_norm = tf.keras.layers.LayerNormalization(epsilon=1e-5, name="final_layer_norm")
    def call(
        self,
        hidden_states,
        attention_mask: Optional[tf.Tensor] = None,
        encoder_hidden_states: Optional[tf.Tensor] = None,
        encoder_attention_mask: Optional[tf.Tensor] = None,
        layer_head_mask: Optional[tf.Tensor] = None,
        cross_attn_layer_head_mask: Optional[tf.Tensor] = None,
        past_key_value: Optional[Tuple[tf.Tensor]] = None,
        training=False,
    ) -> Tuple[tf.Tensor, tf.Tensor, Tuple[Tuple[tf.Tensor]]]:
        """
        Args:
            hidden_states (:obj:`tf.Tensor`): input to the layer of shape `(seq_len, batch, embed_dim)`
            attention_mask (:obj:`tf.Tensor`): attention mask of size
                `(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values.
            encoder_hidden_states (:obj:`tf.Tensor`): cross attention input to the layer of shape `(seq_len, batch, embed_dim)`
            encoder_attention_mask (:obj:`tf.Tensor`): encoder attention mask of size
                `(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values.
            layer_head_mask (:obj:`tf.Tensor`): mask for attention heads in a given layer of size
                `(decoder_attention_heads,)`
            cross_attn_layer_head_mask (:obj:`tf.Tensor`): mask for heads of the cross-attention module.
                `(decoder_attention_heads,)`
            past_key_value (:obj:`Tuple(tf.Tensor)`): cached past key and value projection states

        Returns:
            Tuple of (hidden_states, self-attn weights, cross-attn weights, present key/values).
        """
        residual = hidden_states
        hidden_states = self.self_attn_layer_norm(hidden_states)
        # Self Attention
        # decoder uni-directional self-attention cached key/values tuple is at positions 1,2
        self_attn_past_key_value = past_key_value[:2] if past_key_value is not None else None
        # add present self-attn cache to positions 1,2 of present_key_value tuple
        hidden_states, self_attn_weights, present_key_value = self.self_attn(
            hidden_states=hidden_states,
            past_key_value=self_attn_past_key_value,
            attention_mask=attention_mask,
            layer_head_mask=layer_head_mask,
        )
        hidden_states = self.dropout(hidden_states, training=training)
        hidden_states = residual + hidden_states
        # Cross-Attention Block
        cross_attn_present_key_value = None
        cross_attn_weights = None
        if encoder_hidden_states is not None:
            residual = hidden_states
            hidden_states = self.encoder_attn_layer_norm(hidden_states)
            # cross_attn cached key/values tuple is at positions 3,4 of present_key_value tuple
            cross_attn_past_key_value = past_key_value[-2:] if past_key_value is not None else None
            hidden_states, cross_attn_weights, cross_attn_present_key_value = self.encoder_attn(
                hidden_states=hidden_states,
                key_value_states=encoder_hidden_states,
                attention_mask=encoder_attention_mask,
                layer_head_mask=cross_attn_layer_head_mask,
                past_key_value=cross_attn_past_key_value,
            )
            hidden_states = self.dropout(hidden_states, training=training)
            hidden_states = residual + hidden_states
            # add cross-attn to positions 3,4 of present_key_value tuple
            present_key_value = present_key_value + cross_attn_present_key_value
        # Fully Connected
        residual = hidden_states
        hidden_states = self.final_layer_norm(hidden_states)
        hidden_states = self.activation_fn(self.fc1(hidden_states))
        hidden_states = self.activation_dropout(hidden_states, training=training)
        hidden_states = self.fc2(hidden_states)
        hidden_states = self.dropout(hidden_states, training=training)
        hidden_states = residual + hidden_states
        return (
            hidden_states,
            self_attn_weights,
            cross_attn_weights,
            present_key_value,
        )
class TFPegasusPreTrainedModel(TFPreTrainedModel):
    """Base class wiring PegasusConfig into the shared TFPreTrainedModel
    machinery (weight init, download/save, SavedModel serving signature)."""
    config_class = PegasusConfig
    base_model_prefix = "model"
    @property
    def dummy_inputs(self):
        """Minimal inputs used to build the model's variables on first call."""
        pad_token = 1
        input_ids = tf.cast(tf.convert_to_tensor(DUMMY_INPUTS), tf.int32)
        decoder_input_ids = tf.cast(tf.convert_to_tensor(DUMMY_INPUTS), tf.int32)
        dummy_inputs = {
            "decoder_input_ids": decoder_input_ids,
            "attention_mask": tf.math.not_equal(input_ids, pad_token),
            "input_ids": input_ids,
        }
        return dummy_inputs
    # Fixed input signature so the model can be exported as a SavedModel.
    @tf.function(
        input_signature=[
            {
                "input_ids": tf.TensorSpec((None, None), tf.int32, name="input_ids"),
                "attention_mask": tf.TensorSpec((None, None), tf.int32, name="attention_mask"),
                "decoder_input_ids": tf.TensorSpec((None, None), tf.int32, name="decoder_input_ids"),
                "decoder_attention_mask": tf.TensorSpec((None, None), tf.int32, name="decoder_attention_mask"),
            }
        ]
    )
    # Copied from transformers.models.bart.modeling_tf_bart.TFBartPretrainedModel.serving
    def serving(self, inputs):
        output = self.call(inputs)
        return self.serving_output(output)
PEGASUS_START_DOCSTRING = r"""
This model inherits from :class:`~transformers.TFPreTrainedModel`. Check the superclass documentation for the
generic methods the library implements for all its model (such as downloading or saving, resizing the input
embeddings, pruning heads etc.)
This model is also a `tf.keras.Model <https://www.tensorflow.org/api_docs/python/tf/keras/Model>`__ subclass. Use
it as a regular TF 2.0 Keras Model and refer to the TF 2.0 documentation for all matter related to general usage
and behavior.
.. note::
TF 2.0 models accepts two formats as inputs:
- having all inputs as keyword arguments (like PyTorch models), or
- having all inputs as a list, tuple or dict in the first positional arguments.
This second option is useful when using :meth:`tf.keras.Model.fit` method which currently requires having all
the tensors in the first argument of the model call function: :obj:`model(inputs)`.
If you choose this second option, there are three possibilities you can use to gather all the input Tensors in
the first positional argument :
- a single Tensor with :obj:`input_ids` only and nothing else: :obj:`model(input_ids)`
- a list of varying length with one or several input Tensors IN THE ORDER given in the docstring:
:obj:`model([input_ids, attention_mask])` or :obj:`model([input_ids, attention_mask, token_type_ids])`
- a dictionary with one or several input Tensors associated to the input names given in the docstring:
:obj:`model({"input_ids": input_ids, "token_type_ids": token_type_ids})`
Args:
config (:class:`~transformers.PegasusConfig`): Model configuration class with all the parameters of the model.
Initializing with a config file does not load the weights associated with the model, only the
configuration. Check out the :meth:`~transformers.TFPreTrainedModel.from_pretrained` method to load the
model weights.
"""
PEGASUS_GENERATION_EXAMPLE = r"""
Summarization example::
>>> from transformers import PegasusTokenizer, TFPegasusForConditionalGeneration
>>> model = TFPegasusForConditionalGeneration.from_pretrained('google/pegasus-xsum')
>>> tokenizer = PegasusTokenizer.from_pretrained('google/pegasus-xsum')
>>> ARTICLE_TO_SUMMARIZE = (
... "PG&E stated it scheduled the blackouts in response to forecasts for high winds "
... "amid dry conditions. The aim is to reduce the risk of wildfires. Nearly 800 thousand customers were "
... "scheduled to be affected by the shutoffs which were expected to last through at least midday tomorrow."
... )
>>> inputs = tokenizer([ARTICLE_TO_SUMMARIZE], max_length=1024, return_tensors='tf')
>>> # Generate Summary
>>> summary_ids = model.generate(inputs['input_ids'])
>>> print([tokenizer.decode(g, skip_special_tokens=True, clean_up_tokenization_spaces=False) for g in summary_ids])
"""
# Shared `call()` argument documentation, injected via
# `add_start_docstrings_to_model_forward` ({0} is filled with the input shape).
# Fix: the `output_attentions` parameter was documented twice; the malformed
# first copy has been removed so each parameter appears exactly once.
PEGASUS_INPUTS_DOCSTRING = r"""
    Args:
        input_ids (:obj:`tf.Tensor` of shape :obj:`({0})`):
            Indices of input sequence tokens in the vocabulary.
            Indices can be obtained using :class:`~transformers.PegasusTokenizer`. See
            :meth:`transformers.PreTrainedTokenizer.encode` and :meth:`transformers.PreTrainedTokenizer.__call__` for
            details.
            `What are input IDs? <../glossary.html#input-ids>`__
        attention_mask (:obj:`tf.Tensor` of shape :obj:`({0})`, `optional`):
            Mask to avoid performing attention on padding token indices. Mask values selected in ``[0, 1]``:
            - 1 for tokens that are **not masked**,
            - 0 for tokens that are **masked**.
            `What are attention masks? <../glossary.html#attention-mask>`__
        decoder_input_ids (:obj:`tf.Tensor` of shape :obj:`(batch_size, target_sequence_length)`, `optional`):
            Indices of decoder input sequence tokens in the vocabulary.
            Indices can be obtained using :class:`~transformers.PegasusTokenizer`. See
            :meth:`transformers.PreTrainedTokenizer.encode` and :meth:`transformers.PreTrainedTokenizer.__call__` for
            details.
            `What are decoder input IDs? <../glossary.html#decoder-input-ids>`__
            Pegasus uses the :obj:`pad_token_id` as the starting token for :obj:`decoder_input_ids` generation. If
            :obj:`past_key_values` is used, optionally only the last :obj:`decoder_input_ids` have to be input (see
            :obj:`past_key_values`).
        decoder_attention_mask (:obj:`tf.Tensor` of shape :obj:`(batch_size, target_sequence_length)`, `optional`):
            will be made by default and ignore pad tokens. It is not recommended to set this for most use cases.
        head_mask (:obj:`tf.Tensor` of shape :obj:`(encoder_layers, encoder_attention_heads)`, `optional`):
            Mask to nullify selected heads of the attention modules in the encoder. Mask values selected in ``[0, 1]``:
            - 1 indicates the head is **not masked**,
            - 0 indicates the head is **masked**.
        decoder_head_mask (:obj:`tf.Tensor` of shape :obj:`(decoder_layers, decoder_attention_heads)`, `optional`):
            Mask to nullify selected heads of the attention modules in the decoder. Mask values selected in ``[0, 1]``:
            - 1 indicates the head is **not masked**,
            - 0 indicates the head is **masked**.
        cross_attn_head_mask (:obj:`tf.Tensor` of shape :obj:`(decoder_layers, decoder_attention_heads)`, `optional`):
            Mask to nullify selected heads of the cross-attention modules. Mask values selected in ``[0, 1]``:
            - 1 indicates the head is **not masked**,
            - 0 indicates the head is **masked**.
        encoder_outputs (:obj:`tf.FloatTensor`, `optional`):
            hidden states at the output of the last layer of the encoder. Used in the cross-attention of the decoder.
            of shape :obj:`(batch_size, sequence_length, hidden_size)` is a sequence of
        past_key_values (:obj:`Tuple[Tuple[tf.Tensor]]` of length :obj:`config.n_layers`)
            contains precomputed key and value hidden states of the attention blocks. Can be used to speed up decoding.
            If :obj:`past_key_values` are used, the user can optionally input only the last :obj:`decoder_input_ids`
            (those that don't have their past key value states given to this model) of shape :obj:`(batch_size, 1)`
            instead of all :obj:`decoder_input_ids` of shape :obj:`(batch_size, sequence_length)`.
        use_cache (:obj:`bool`, `optional`, defaults to :obj:`True`):
            If set to :obj:`True`, :obj:`past_key_values` key value states are returned and can be used to speed up
            decoding (see :obj:`past_key_values`). Set to :obj:`False` during training, :obj:`True` during generation
        output_attentions (:obj:`bool`, `optional`):
            Whether or not to return the attentions tensors of all attention layers. See ``attentions`` under returned
            tensors for more detail. This argument can be used only in eager mode, in graph mode the value in the
            config will be used instead.
        output_hidden_states (:obj:`bool`, `optional`):
            Whether or not to return the hidden states of all layers. See ``hidden_states`` under returned tensors for
            more detail. This argument can be used only in eager mode, in graph mode the value in the config will be
            used instead.
        return_dict (:obj:`bool`, `optional`):
            Whether or not to return a :class:`~transformers.file_utils.ModelOutput` instead of a plain tuple. This
            argument can be used in eager mode, in graph mode the value will always be set to True.
        training (:obj:`bool`, `optional`, defaults to :obj:`False`):
            Whether or not to use the model in training mode (some modules like dropout modules have different
            behaviors between training and evaluation).
"""
@keras_serializable
class TFPegasusEncoder(tf.keras.layers.Layer):
    config_class = PegasusConfig
    """
    Transformer encoder consisting of *config.encoder_layers* self attention layers. Each layer is a
    :class:`TFPegasusEncoderLayer`.
    Args:
        config: PegasusConfig
    """
    def __init__(self, config: PegasusConfig, embed_tokens: Optional[TFSharedEmbeddings] = None, **kwargs):
        super().__init__(**kwargs)
        self.config = config
        self.dropout = tf.keras.layers.Dropout(config.dropout)
        # Probability of dropping an entire encoder layer during training (LayerDrop,
        # https://arxiv.org/abs/1909.11556).
        self.layerdrop = config.encoder_layerdrop
        self.padding_idx = config.pad_token_id
        self.max_source_positions = config.max_position_embeddings
        # Scale token embeddings by sqrt(d_model) when configured, as in the original Transformer.
        self.embed_scale = tf.math.sqrt(float(config.d_model)) if config.scale_embedding else 1.0
        self.embed_tokens = embed_tokens
        self.embed_positions = TFPegasusSinusoidalPositionalEmbedding(
            config.max_position_embeddings,
            config.d_model,
            name="embed_positions",
        )
        # Layer names follow the "layers.{i}" pattern so TF variable names line up with the
        # PyTorch checkpoint structure.
        self.layers = [TFPegasusEncoderLayer(config, name=f"layers.{i}") for i in range(config.encoder_layers)]
        self.layer_norm = tf.keras.layers.LayerNormalization(epsilon=1e-5, name="layer_norm")
    def get_embed_tokens(self):
        # Return the (possibly shared) input token embedding layer.
        return self.embed_tokens
    def set_embed_tokens(self, embed_tokens):
        # Swap in a new token embedding layer (used when embeddings are resized or re-shared).
        self.embed_tokens = embed_tokens
    def call(
        self,
        input_ids=None,
        inputs_embeds=None,
        attention_mask=None,
        head_mask=None,
        output_attentions=None,
        output_hidden_states=None,
        return_dict=None,
        training=False,
        **kwargs,
    ):
        """
        Args:
            input_ids (:obj:`tf.Tensor` of shape :obj:`(batch_size, sequence_length)`):
                Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you
                provide it.
                Indices can be obtained using :class:`~transformers.PegasusTokenizer`. See
                :meth:`transformers.PreTrainedTokenizer.encode` and :meth:`transformers.PreTrainedTokenizer.__call__`
                for details.
                `What are input IDs? <../glossary.html#input-ids>`__
            attention_mask (:obj:`tf.Tensor` of shape :obj:`(batch_size, sequence_length)`, `optional`):
                Mask to avoid performing attention on padding token indices. Mask values selected in ``[0, 1]``:
                - 1 for tokens that are **not masked**,
                - 0 for tokens that are **masked**.
                `What are attention masks? <../glossary.html#attention-mask>`__
            head_mask (:obj:`tf.Tensor` of shape :obj:`(encoder_layers, encoder_attention_heads)`, `optional):
                Mask to nullify selected heads of the attention modules. Mask values selected in ``[0, 1]``:
                - 1 indicates the head is **not masked**,
                - 0 indicates the head is **masked**.
            inputs_embeds (:obj:`tf.Tensor` of shape :obj:`(batch_size, sequence_length, hidden_size)`, `optional`):
                Optionally, instead of passing :obj:`input_ids` you can choose to directly pass an embedded
                representation. This is useful if you want more control over how to convert :obj:`input_ids` indices
                into associated vectors than the model's internal embedding lookup matrix.
            output_attentions (:obj:`bool`, `optional`):
                Whether or not to return the attentions tensors of all attention layers. See ``attentions`` under
                returned tensors for more detail. This argument can be used only in eager mode, in graph mode the value
                in the config will be used instead.
            output_hidden_states (:obj:`bool`, `optional`):
                Whether or not to return the hidden states of all layers. See ``hidden_states`` under returned tensors
                for more detail. This argument can be used only in eager mode, in graph mode the value in the config
                will be used instead.
            return_dict (:obj:`bool`, `optional`):
                Whether or not to return a :class:`~transformers.file_utils.ModelOutput` instead of a plain tuple. This
                argument can be used in eager mode, in graph mode the value will always be set to True.
            training (:obj:`bool`, `optional`, defaults to :obj:`False`):
                Whether or not to use the model in training mode (some modules like dropout modules have different
                behaviors between training and evaluation).
        """
        # Normalize keyword arguments, filling unset options from the config.
        inputs = input_processing(
            func=self.call,
            config=self.config,
            input_ids=input_ids,
            attention_mask=attention_mask,
            head_mask=head_mask,
            inputs_embeds=inputs_embeds,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
            training=training,
            kwargs_call=kwargs,
        )
        # Exactly one of input_ids / inputs_embeds must be provided.
        if inputs["input_ids"] is not None and inputs["inputs_embeds"] is not None:
            raise ValueError("You cannot specify both input_ids and inputs_embeds at the same time")
        elif inputs["input_ids"] is not None:
            input_shape = shape_list(inputs["input_ids"])
        elif inputs["inputs_embeds"] is not None:
            input_shape = shape_list(inputs["inputs_embeds"])[:-1]
        else:
            raise ValueError("You have to specify either input_ids or inputs_embeds")
        if inputs["inputs_embeds"] is None:
            inputs["inputs_embeds"] = self.embed_tokens(inputs["input_ids"]) * self.embed_scale
        # Add sinusoidal position embeddings and apply embedding dropout.
        embed_pos = self.embed_positions(input_shape)
        hidden_states = inputs["inputs_embeds"] + embed_pos
        hidden_states = self.dropout(hidden_states, training=inputs["training"])
        # check attention mask and invert
        if inputs["attention_mask"] is not None:
            # [bsz, seq_len] -> [bsz, 1, tgt_seq_len, src_seq_len]
            attention_mask = _expand_mask(inputs["attention_mask"])
        else:
            attention_mask = None
        encoder_states = () if inputs["output_hidden_states"] else None
        all_attentions = () if inputs["output_attentions"] else None
        # check if head_mask has a correct number of layers specified if desired
        # The tf.debugging asserts are not compliant with XLA, so they
        # have to be disabled in other modes than eager.
        if inputs["head_mask"] is not None and tf.executing_eagerly():
            tf.debugging.assert_equal(
                shape_list(inputs["head_mask"])[0],
                len(self.layers),
                message=f"The head_mask should be specified for {len(self.layers)} layers, but it is for {shape_list(inputs['head_mask'])[0]}.",
            )
        # encoder layers
        for idx, encoder_layer in enumerate(self.layers):
            if inputs["output_hidden_states"]:
                encoder_states = encoder_states + (hidden_states,)
            # add LayerDrop (see https://arxiv.org/abs/1909.11556 for description)
            dropout_probability = random.uniform(0, 1)
            if inputs["training"] and (dropout_probability < self.layerdrop):  # skip the layer
                continue
            hidden_states, attn = encoder_layer(
                hidden_states,
                attention_mask,
                inputs["head_mask"][idx] if inputs["head_mask"] is not None else None,
            )
            if inputs["output_attentions"]:
                all_attentions += (attn,)
        # Pegasus applies a final layer norm after the last encoder layer (pre-norm architecture).
        hidden_states = self.layer_norm(hidden_states)
        if inputs["output_hidden_states"]:
            encoder_states = encoder_states + (hidden_states,)
        if not inputs["return_dict"]:
            return tuple(v for v in [hidden_states, encoder_states, all_attentions] if v is not None)
        return TFBaseModelOutput(
            last_hidden_state=hidden_states, hidden_states=encoder_states, attentions=all_attentions
        )
@keras_serializable
class TFPegasusDecoder(tf.keras.layers.Layer):
    config_class = PegasusConfig
    """
    Transformer decoder consisting of *config.decoder_layers* layers. Each layer is a :class:`TFPegasusDecoderLayer`
    Args:
        config: PegasusConfig
        embed_tokens: output embedding
    """
    def __init__(self, config: PegasusConfig, embed_tokens: Optional[TFSharedEmbeddings] = None, **kwargs):
        super().__init__(**kwargs)
        self.config = config
        self.padding_idx = config.pad_token_id
        self.embed_tokens = embed_tokens
        # Probability of dropping an entire decoder layer during training (LayerDrop).
        self.layerdrop = config.decoder_layerdrop
        self.embed_positions = TFPegasusSinusoidalPositionalEmbedding(
            config.max_position_embeddings,
            config.d_model,
            name="embed_positions",
        )
        # Scale token embeddings by sqrt(d_model) when configured, as in the original Transformer.
        self.embed_scale = tf.math.sqrt(float(config.d_model)) if config.scale_embedding else 1.0
        # Layer names follow "layers.{i}" so TF variable names match the PyTorch checkpoint layout.
        self.layers = [TFPegasusDecoderLayer(config, name=f"layers.{i}") for i in range(config.decoder_layers)]
        self.layer_norm = tf.keras.layers.LayerNormalization(epsilon=1e-5, name="layer_norm")
        self.dropout = tf.keras.layers.Dropout(config.dropout)
    def get_embed_tokens(self):
        # Return the (possibly shared) token embedding layer.
        return self.embed_tokens
    def set_embed_tokens(self, embed_tokens):
        # Swap in a new token embedding layer (used when embeddings are resized or re-shared).
        self.embed_tokens = embed_tokens
    def call(
        self,
        input_ids=None,
        inputs_embeds=None,
        attention_mask=None,
        encoder_hidden_states=None,
        encoder_attention_mask=None,
        head_mask=None,
        cross_attn_head_mask=None,
        past_key_values=None,
        use_cache=None,
        output_attentions=None,
        output_hidden_states=None,
        return_dict=None,
        training=False,
        **kwargs,
    ):
        r"""
        Args:
            input_ids (:obj:`tf.Tensor` of shape :obj:`(batch_size, sequence_length)`):
                Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you
                provide it.
                Indices can be obtained using :class:`~transformers.PegasusTokenizer`. See
                :meth:`transformers.PreTrainedTokenizer.encode` and :meth:`transformers.PreTrainedTokenizer.__call__`
                for details.
                `What are input IDs? <../glossary.html#input-ids>`__
            attention_mask (:obj:`tf.Tensor` of shape :obj:`(batch_size, sequence_length)`, `optional`):
                Mask to avoid performing attention on padding token indices. Mask values selected in ``[0, 1]``:
                - 1 for tokens that are **not masked**,
                - 0 for tokens that are **masked**.
                `What are attention masks? <../glossary.html#attention-mask>`__
            encoder_hidden_states (:obj:`tf.Tensor` of shape :obj:`(batch_size, encoder_sequence_length, hidden_size)`, `optional`):
                Sequence of hidden-states at the output of the last layer of the encoder. Used in the cross-attention
                of the decoder.
            encoder_attention_mask (:obj:`tf.Tensor` of shape :obj:`(batch_size, encoder_sequence_length)`, `optional`):
                Mask to avoid performing cross-attention on padding tokens indices of encoder input_ids. Mask values
                selected in ``[0, 1]``:
                - 1 for tokens that are **not masked**,
                - 0 for tokens that are **masked**.
                `What are attention masks? <../glossary.html#attention-mask>`__
            head_mask (:obj:`tf.Tensor` of shape :obj:`(decoder_layers, decoder_attention_heads)`, `optional`):
                Mask to nullify selected heads of the attention modules. Mask values selected in ``[0, 1]``:
                - 1 indicates the head is **not masked**,
                - 0 indicates the head is **masked**.
            cross_attn_head_mask (:obj:`tf.Tensor` of shape :obj:`(decoder_layers, decoder_attention_heads)`, `optional`):
                Mask to nullify selected heads of the cross-attention modules. Mask values selected in ``[0, 1]``:
                - 1 indicates the head is **not masked**,
                - 0 indicates the head is **masked**.
            past_key_values (:obj:`Tuple[Tuple[tf.Tensor]]` of length :obj:`config.n_layers` with each tuple having 2 tuples each of which has 2 tensors of shape :obj:`(batch_size, num_heads, sequence_length - 1, embed_size_per_head)`):
                Contains precomputed key and value hidden-states of the attention blocks. Can be used to speed up
                decoding.
                If :obj:`past_key_values` are used, the user can optionally input only the last
                :obj:`decoder_input_ids` (those that don't have their past key value states given to this model) of
                shape :obj:`(batch_size, 1)` instead of all :obj:`decoder_input_ids` of shape :obj:`(batch_size,
                sequence_length)`.
            inputs_embeds (:obj:`tf.Tensor` of shape :obj:`(batch_size, sequence_length, hidden_size)`, `optional`):
                Optionally, instead of passing :obj:`input_ids` you can choose to directly pass an embedded
                representation. This is useful if you want more control over how to convert :obj:`input_ids` indices
                into associated vectors than the model's internal embedding lookup matrix.
            output_attentions (:obj:`bool`, `optional`):
                Whether or not to return the attentions tensors of all attention layers. See ``attentions`` under
                returned tensors for more detail. This argument can be used only in eager mode, in graph mode the value
                in the config will be used instead.
            output_hidden_states (:obj:`bool`, `optional`):
                Whether or not to return the hidden states of all layers. See ``hidden_states`` under returned tensors
                for more detail. This argument can be used only in eager mode, in graph mode the value in the config
                will be used instead.
            return_dict (:obj:`bool`, `optional`):
                Whether or not to return a :class:`~transformers.file_utils.ModelOutput` instead of a plain tuple. This
                argument can be used in eager mode, in graph mode the value will always be set to True.
            training (:obj:`bool`, `optional`, defaults to :obj:`False`):
                Whether or not to use the model in training mode (some modules like dropout modules have different
                behaviors between training and evaluation).
        """
        # Normalize keyword arguments, filling unset options from the config.
        inputs = input_processing(
            func=self.call,
            config=self.config,
            input_ids=input_ids,
            attention_mask=attention_mask,
            encoder_hidden_states=encoder_hidden_states,
            encoder_attention_mask=encoder_attention_mask,
            head_mask=head_mask,
            cross_attn_head_mask=cross_attn_head_mask,
            inputs_embeds=inputs_embeds,
            past_key_values=past_key_values,
            use_cache=use_cache,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
            training=training,
            kwargs_call=kwargs,
        )
        # Exactly one of input_ids / inputs_embeds must be provided.
        if inputs["input_ids"] is not None and inputs["inputs_embeds"] is not None:
            raise ValueError("You cannot specify both decoder_input_ids and decoder_inputs_embeds at the same time")
        elif inputs["input_ids"] is not None:
            input_shape = shape_list(inputs["input_ids"])
        elif inputs["inputs_embeds"] is not None:
            input_shape = shape_list(inputs["inputs_embeds"])[:-1]
        else:
            raise ValueError("You have to specify either decoder_input_ids or decoder_inputs_embeds")
        # Number of tokens already cached; offsets the position embeddings and the causal mask.
        past_key_values_length = (
            shape_list(inputs["past_key_values"][0][0])[2] if inputs["past_key_values"] is not None else 0
        )
        # embed positions
        positions = self.embed_positions(input_shape, past_key_values_length)
        if inputs["inputs_embeds"] is None:
            inputs["inputs_embeds"] = self.embed_tokens(inputs["input_ids"]) * self.embed_scale
        hidden_states = inputs["inputs_embeds"]
        # [bsz, seq_len] -> [bsz, 1, tgt_seq_len, src_seq_len]
        if input_shape[-1] > 1:
            # More than one target token: build a causal (lower-triangular) mask.
            combined_attention_mask = _make_causal_mask(input_shape, past_key_values_length=past_key_values_length)
        else:
            # Single-token decoding step: attend to all cached positions, no causal mask needed.
            combined_attention_mask = _expand_mask(
                tf.ones((input_shape[0], input_shape[1] + past_key_values_length)), tgt_len=input_shape[-1]
            )
        if inputs["attention_mask"] is not None:
            # Combine (additively, in log-space) the user-provided padding mask with the causal mask.
            combined_attention_mask = combined_attention_mask + _expand_mask(
                inputs["attention_mask"], tgt_len=input_shape[-1]
            )
        if inputs["encoder_hidden_states"] is not None and inputs["encoder_attention_mask"] is not None:
            # [bsz, seq_len] -> [bsz, 1, tgt_seq_len, src_seq_len]
            inputs["encoder_attention_mask"] = _expand_mask(inputs["encoder_attention_mask"], tgt_len=input_shape[-1])
        hidden_states = self.dropout(hidden_states + positions, training=inputs["training"])
        # decoder layers
        all_hidden_states = () if inputs["output_hidden_states"] else None
        all_self_attns = () if inputs["output_attentions"] else None
        all_cross_attns = () if (inputs["output_attentions"] and inputs["encoder_hidden_states"] is not None) else None
        present_key_values = () if inputs["use_cache"] else None
        # check if head_mask and cross_attn_head_mask have a correct number of layers specified if desired
        # The tf.debugging asserts are not compliant with XLA, so they
        # have to be disabled in other modes than eager.
        for attn_mask in ["head_mask", "cross_attn_head_mask"]:
            if inputs[attn_mask] is not None and tf.executing_eagerly():
                tf.debugging.assert_equal(
                    shape_list(inputs[attn_mask])[0],
                    len(self.layers),
                    message=f"The {attn_mask} should be specified for {len(self.layers)} layers, but it is for {shape_list(inputs[attn_mask])[0]}.",
                )
        for idx, decoder_layer in enumerate(self.layers):
            # add LayerDrop (see https://arxiv.org/abs/1909.11556 for description)
            if inputs["output_hidden_states"]:
                all_hidden_states += (hidden_states,)
            dropout_probability = random.uniform(0, 1)
            if inputs["training"] and (dropout_probability < self.layerdrop):
                continue
            past_key_value = inputs["past_key_values"][idx] if inputs["past_key_values"] is not None else None
            hidden_states, layer_self_attn, layer_cross_attn, present_key_value = decoder_layer(
                hidden_states,
                attention_mask=combined_attention_mask,
                encoder_hidden_states=inputs["encoder_hidden_states"],
                encoder_attention_mask=inputs["encoder_attention_mask"],
                layer_head_mask=inputs["head_mask"][idx] if inputs["head_mask"] is not None else None,
                cross_attn_layer_head_mask=inputs["cross_attn_head_mask"][idx]
                if inputs["cross_attn_head_mask"] is not None
                else None,
                past_key_value=past_key_value,
            )
            if inputs["use_cache"]:
                present_key_values += (present_key_value,)
            if inputs["output_attentions"]:
                all_self_attns += (layer_self_attn,)
                if inputs["encoder_hidden_states"] is not None:
                    all_cross_attns += (layer_cross_attn,)
        # Pegasus applies a final layer norm after the last decoder layer (pre-norm architecture).
        hidden_states = self.layer_norm(hidden_states)
        if inputs["output_hidden_states"]:
            all_hidden_states += (hidden_states,)
        if inputs["output_attentions"]:
            all_self_attns = list(all_self_attns)
            if inputs["encoder_hidden_states"] is not None:
                all_cross_attns = list(all_cross_attns)
        if inputs["use_cache"]:
            # Cache layout: (encoder_hidden_states, per-layer key/value states) — consumed by
            # prepare_inputs_for_generation / _reorder_cache.
            present_key_values = (inputs["encoder_hidden_states"], present_key_values)
        if not inputs["return_dict"]:
            return hidden_states, present_key_values, all_hidden_states, all_self_attns, all_cross_attns
        else:
            return TFBaseModelOutputWithPastAndCrossAttentions(
                last_hidden_state=hidden_states,
                past_key_values=present_key_values,
                hidden_states=all_hidden_states,
                attentions=all_self_attns,
                cross_attentions=all_cross_attns,
            )
@keras_serializable
class TFPegasusMainLayer(tf.keras.layers.Layer):
    # Encoder-decoder backbone: shared token embeddings + TFPegasusEncoder + TFPegasusDecoder.
    config_class = PegasusConfig
    def __init__(self, config: PegasusConfig, **kwargs):
        super().__init__(**kwargs)
        self.config = config
        # Token embeddings are shared between encoder and decoder (and, at the model level,
        # with the LM output projection). Named "model.shared" to match checkpoint variable names.
        self.shared = TFSharedEmbeddings(config.vocab_size, config.d_model, config.pad_token_id, name="model.shared")
        # Capture the absolute variable scope of the shared embeddings so wrapped copies
        # resolve to the same underlying weights.
        with tf.compat.v1.variable_scope("model.shared") as shared_abs_scope_name:
            pass
        # Wraps layer to avoid problems with weight restoring and ensuring we're in the correct TF scope.
        embed_tokens = TFWrappedEmbeddings(self.shared, abs_scope_name=shared_abs_scope_name)
        embed_tokens.vocab_size = self.shared.vocab_size
        embed_tokens.hidden_size = self.shared.hidden_size
        self.encoder = TFPegasusEncoder(config, embed_tokens, name="encoder")
        self.decoder = TFPegasusDecoder(config, embed_tokens, name="decoder")
    def get_input_embeddings(self):
        # Return the shared token embedding layer.
        return self.shared
    def set_input_embeddings(self, new_embeddings):
        # Replace the shared embedding weights and propagate the wrapped layer to both
        # encoder and decoder so all three stay in sync.
        self.shared.weight = new_embeddings
        self.shared.vocab_size = self.shared.weight.shape[0]
        # retrieve correct absolute scope for embed token wrapper
        with tf.compat.v1.variable_scope("model.shared") as shared_abs_scope_name:
            pass
        # Wraps layer to avoid problems with weight restoring and ensuring we're in the correct TF scope.
        embed_tokens = TFWrappedEmbeddings(self.shared, abs_scope_name=shared_abs_scope_name)
        self.encoder.set_embed_tokens(embed_tokens)
        self.decoder.set_embed_tokens(embed_tokens)
    def call(
        self,
        input_ids=None,
        attention_mask=None,
        decoder_input_ids=None,
        decoder_attention_mask=None,
        head_mask=None,
        decoder_head_mask=None,
        cross_attn_head_mask=None,
        encoder_outputs: Optional[Union[Tuple, TFBaseModelOutput]] = None,
        past_key_values=None,
        inputs_embeds=None,
        decoder_inputs_embeds=None,
        use_cache=None,
        output_attentions=None,
        output_hidden_states=None,
        return_dict=None,
        training=False,
        **kwargs
    ):
        # Run the encoder (unless cached encoder_outputs were passed), then the decoder,
        # and merge both into a single seq2seq output.
        inputs = input_processing(
            func=self.call,
            config=self.config,
            input_ids=input_ids,
            attention_mask=attention_mask,
            decoder_input_ids=decoder_input_ids,
            decoder_attention_mask=decoder_attention_mask,
            head_mask=head_mask,
            decoder_head_mask=decoder_head_mask,
            cross_attn_head_mask=cross_attn_head_mask,
            encoder_outputs=encoder_outputs,
            past_key_values=past_key_values,
            inputs_embeds=inputs_embeds,
            decoder_inputs_embeds=decoder_inputs_embeds,
            use_cache=use_cache,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
            training=training,
            kwargs_call=kwargs,
        )
        # Without decoder inputs there is nothing to cache.
        if inputs["decoder_input_ids"] is None and inputs["decoder_inputs_embeds"] is None:
            inputs["use_cache"] = False
        inputs["output_hidden_states"] = (
            inputs["output_hidden_states"]
            if inputs["output_hidden_states"] is not None
            else self.config.output_hidden_states
        )
        if inputs["encoder_outputs"] is None:
            inputs["encoder_outputs"] = self.encoder(
                input_ids=inputs["input_ids"],
                attention_mask=inputs["attention_mask"],
                head_mask=inputs["head_mask"],
                inputs_embeds=inputs["inputs_embeds"],
                output_attentions=inputs["output_attentions"],
                output_hidden_states=inputs["output_hidden_states"],
                return_dict=inputs["return_dict"],
                training=inputs["training"],
            )
        # If the user passed a tuple for encoder_outputs, we wrap it in a TFBaseModelOutput when return_dict=True
        elif inputs["return_dict"] and not isinstance(inputs["encoder_outputs"], TFBaseModelOutput):
            inputs["encoder_outputs"] = TFBaseModelOutput(
                last_hidden_state=inputs["encoder_outputs"][0],
                hidden_states=inputs["encoder_outputs"][1] if len(inputs["encoder_outputs"]) > 1 else None,
                attentions=inputs["encoder_outputs"][2] if len(inputs["encoder_outputs"]) > 2 else None,
            )
        # If the user passed a TFBaseModelOutput for encoder_outputs, we wrap it in a tuple when return_dict=False
        elif not inputs["return_dict"] and not isinstance(inputs["encoder_outputs"], tuple):
            inputs["encoder_outputs"] = inputs["encoder_outputs"].to_tuple()
        decoder_outputs = self.decoder(
            inputs["decoder_input_ids"],
            attention_mask=inputs["decoder_attention_mask"],
            encoder_hidden_states=inputs["encoder_outputs"][0],
            encoder_attention_mask=inputs["attention_mask"],
            head_mask=inputs["decoder_head_mask"],
            cross_attn_head_mask=inputs["cross_attn_head_mask"],
            past_key_values=inputs["past_key_values"],
            inputs_embeds=inputs["decoder_inputs_embeds"],
            use_cache=inputs["use_cache"],
            output_attentions=inputs["output_attentions"],
            output_hidden_states=inputs["output_hidden_states"],
            return_dict=inputs["return_dict"],
            training=inputs["training"],
        )
        if not inputs["return_dict"]:
            return decoder_outputs + inputs["encoder_outputs"]
        return TFSeq2SeqModelOutput(
            last_hidden_state=decoder_outputs.last_hidden_state,
            past_key_values=decoder_outputs.past_key_values,
            decoder_hidden_states=decoder_outputs.hidden_states,
            decoder_attentions=decoder_outputs.attentions,
            cross_attentions=decoder_outputs.cross_attentions,
            encoder_last_hidden_state=inputs["encoder_outputs"].last_hidden_state,
            encoder_hidden_states=inputs["encoder_outputs"].hidden_states,
            encoder_attentions=inputs["encoder_outputs"].attentions,
        )
@add_start_docstrings(
    "The bare PEGASUS Model outputting raw hidden-states without any specific head on top.",
    PEGASUS_START_DOCSTRING,
)
class TFPegasusModel(TFPegasusPreTrainedModel):
    # Thin wrapper exposing TFPegasusMainLayer as a standalone model (no task head).
    def __init__(self, config: PegasusConfig, *inputs, **kwargs):
        super().__init__(config, *inputs, **kwargs)
        self.model = TFPegasusMainLayer(config, name="model")
    def get_encoder(self):
        # Return the underlying encoder (used by generation utilities).
        return self.model.encoder
    def get_decoder(self):
        # Return the underlying decoder.
        return self.model.decoder
    @add_start_docstrings_to_model_forward(PEGASUS_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
    @add_code_sample_docstrings(
        tokenizer_class=_TOKENIZER_FOR_DOC,
        checkpoint=_CHECKPOINT_FOR_DOC,
        output_type=TFSeq2SeqModelOutput,
        config_class=_CONFIG_FOR_DOC,
    )
    def call(
        self,
        input_ids=None,
        attention_mask=None,
        decoder_input_ids=None,
        decoder_attention_mask=None,
        head_mask=None,
        decoder_head_mask=None,
        cross_attn_head_mask=None,
        encoder_outputs: Optional[Union[Tuple, TFBaseModelOutput]] = None,
        past_key_values=None,
        inputs_embeds=None,
        decoder_inputs_embeds=None,
        use_cache=None,
        output_attentions=None,
        output_hidden_states=None,
        return_dict=None,
        training=False,
        **kwargs
    ):
        # Pure pass-through to the main layer; all argument semantics are documented in
        # PEGASUS_INPUTS_DOCSTRING attached via the decorators above.
        inputs = input_processing(
            func=self.call,
            config=self.config,
            input_ids=input_ids,
            attention_mask=attention_mask,
            decoder_input_ids=decoder_input_ids,
            decoder_attention_mask=decoder_attention_mask,
            head_mask=head_mask,
            decoder_head_mask=decoder_head_mask,
            cross_attn_head_mask=cross_attn_head_mask,
            encoder_outputs=encoder_outputs,
            past_key_values=past_key_values,
            inputs_embeds=inputs_embeds,
            decoder_inputs_embeds=decoder_inputs_embeds,
            use_cache=use_cache,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
            training=training,
            kwargs_call=kwargs,
        )
        outputs = self.model(
            input_ids=inputs["input_ids"],
            attention_mask=inputs["attention_mask"],
            decoder_input_ids=inputs["decoder_input_ids"],
            decoder_attention_mask=inputs["decoder_attention_mask"],
            head_mask=inputs["head_mask"],
            decoder_head_mask=inputs["decoder_head_mask"],
            cross_attn_head_mask=inputs["cross_attn_head_mask"],
            encoder_outputs=inputs["encoder_outputs"],
            past_key_values=inputs["past_key_values"],
            inputs_embeds=inputs["inputs_embeds"],
            decoder_inputs_embeds=inputs["decoder_inputs_embeds"],
            use_cache=inputs["use_cache"],
            output_attentions=inputs["output_attentions"],
            output_hidden_states=inputs["output_hidden_states"],
            return_dict=inputs["return_dict"],
            training=inputs["training"],
        )
        return outputs
    # Copied from transformers.models.bart.modeling_tf_bart.TFBartModel.serving_output
    def serving_output(self, output):
        # Convert optional tuple outputs into tensors (or drop them) so the result is a valid
        # SavedModel serving signature; gating on config mirrors what call() actually returned.
        pkv = tf.tuple(output.past_key_values)[1] if self.config.use_cache else None
        dec_hs = tf.convert_to_tensor(output.decoder_hidden_states) if self.config.output_hidden_states else None
        dec_attns = tf.convert_to_tensor(output.decoder_attentions) if self.config.output_attentions else None
        cross_attns = tf.convert_to_tensor(output.cross_attentions) if self.config.output_attentions else None
        enc_hs = tf.convert_to_tensor(output.encoder_hidden_states) if self.config.output_hidden_states else None
        enc_attns = tf.convert_to_tensor(output.encoder_attentions) if self.config.output_attentions else None
        return TFSeq2SeqModelOutput(
            last_hidden_state=output.last_hidden_state,
            past_key_values=pkv,
            decoder_hidden_states=dec_hs,
            decoder_attentions=dec_attns,
            cross_attentions=cross_attns,
            encoder_last_hidden_state=output.encoder_last_hidden_state,
            encoder_hidden_states=enc_hs,
            encoder_attentions=enc_attns,
        )
@add_start_docstrings(
    "The PEGASUS Model with a language modeling head. Can be used for summarization.",
    PEGASUS_START_DOCSTRING,
)
class TFPegasusForConditionalGeneration(TFPegasusPreTrainedModel, TFCausalLanguageModelingLoss):
    _keys_to_ignore_on_load_unexpected = [
        r"model.encoder.embed_tokens.weight",
        r"model.decoder.embed_tokens.weight",
    ]
    def __init__(self, config, *inputs, **kwargs):
        super().__init__(config, *inputs, **kwargs)
        self.model = TFPegasusMainLayer(config, name="model")
        self.use_cache = config.use_cache
        # final_bias_logits is registered as a buffer in pytorch, so not trainable for the sake of consistency.
        self.final_logits_bias = self.add_weight(
            name="final_logits_bias", shape=[1, config.vocab_size], initializer="zeros", trainable=False
        )
    def get_decoder(self):
        # Return the underlying decoder (used by generation utilities).
        return self.model.decoder
    def get_encoder(self):
        # Return the underlying encoder.
        return self.model.encoder
    def get_output_embeddings(self):
        # Output projection is weight-tied to the input embeddings.
        return self.get_input_embeddings()
    def set_output_embeddings(self, value):
        # Setting output embeddings updates the shared (tied) input embeddings.
        self.set_input_embeddings(value)
    def get_bias(self):
        # Expose the non-trainable logits bias for serialization utilities.
        return {"final_logits_bias": self.final_logits_bias}
    def set_bias(self, value):
        self.final_logits_bias = value["final_logits_bias"]
    @add_start_docstrings_to_model_forward(PEGASUS_INPUTS_DOCSTRING)
    @replace_return_docstrings(output_type=TFSeq2SeqLMOutput, config_class=_CONFIG_FOR_DOC)
    @add_end_docstrings(PEGASUS_GENERATION_EXAMPLE)
    def call(
        self,
        input_ids=None,
        attention_mask=None,
        decoder_input_ids=None,
        decoder_attention_mask=None,
        head_mask=None,
        decoder_head_mask=None,
        cross_attn_head_mask=None,
        encoder_outputs: Optional[TFBaseModelOutput] = None,
        past_key_values=None,
        inputs_embeds=None,
        decoder_inputs_embeds=None,
        use_cache=None,
        output_attentions=None,
        output_hidden_states=None,
        return_dict=None,
        labels=None,
        training=False,
        **kwargs,
    ):
        """
        labels (:obj:`tf.tensor` of shape :obj:`(batch_size, sequence_length)`, `optional`):
            Labels for computing the masked language modeling loss. Indices should either be in ``[0, ...,
            config.vocab_size]`` or -100 (see ``input_ids`` docstring). Tokens with indices set to ``-100`` are ignored
            (masked), the loss is only computed for the tokens with labels in ``[0, ..., config.vocab_size]``.
        Returns:
        """
        inputs = input_processing(
            func=self.call,
            config=self.config,
            input_ids=input_ids,
            attention_mask=attention_mask,
            decoder_input_ids=decoder_input_ids,
            decoder_attention_mask=decoder_attention_mask,
            head_mask=head_mask,
            decoder_head_mask=decoder_head_mask,
            cross_attn_head_mask=cross_attn_head_mask,
            encoder_outputs=encoder_outputs,
            past_key_values=past_key_values,
            inputs_embeds=inputs_embeds,
            decoder_inputs_embeds=decoder_inputs_embeds,
            use_cache=use_cache,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
            labels=labels,
            training=training,
            kwargs_call=kwargs,
        )
        if inputs["labels"] is not None:
            # Replace pad tokens in the labels by -100 so they are ignored by the loss.
            inputs["labels"] = tf.where(
                inputs["labels"] == self.config.pad_token_id,
                tf.fill(shape_list(inputs["labels"]), -100),
                inputs["labels"],
            )
            inputs["use_cache"] = False
            if inputs["decoder_input_ids"] is None:
                # Teacher forcing: derive decoder inputs by shifting the labels one position right.
                inputs["decoder_input_ids"] = shift_tokens_right(
                    inputs["labels"], self.config.pad_token_id, self.config.decoder_start_token_id
                )
        outputs = self.model(
            inputs["input_ids"],
            attention_mask=inputs["attention_mask"],
            decoder_input_ids=inputs["decoder_input_ids"],
            encoder_outputs=inputs["encoder_outputs"],
            decoder_attention_mask=inputs["decoder_attention_mask"],
            head_mask=inputs["head_mask"],
            decoder_head_mask=inputs["decoder_head_mask"],
            cross_attn_head_mask=inputs["cross_attn_head_mask"],
            past_key_values=inputs["past_key_values"],
            inputs_embeds=inputs["inputs_embeds"],
            decoder_inputs_embeds=inputs["decoder_inputs_embeds"],
            use_cache=inputs["use_cache"],
            output_attentions=inputs["output_attentions"],
            output_hidden_states=inputs["output_hidden_states"],
            return_dict=inputs["return_dict"],
            training=inputs["training"],
        )
        # Project decoder hidden states back to the vocabulary with the tied embedding matrix.
        lm_logits = self.model.shared(outputs[0], mode="linear")
        lm_logits = lm_logits + self.final_logits_bias
        masked_lm_loss = None if inputs["labels"] is None else self.compute_loss(inputs["labels"], lm_logits)
        if not inputs["return_dict"]:
            output = (lm_logits,) + outputs[1:]
            return ((masked_lm_loss,) + output) if masked_lm_loss is not None else output
        return TFSeq2SeqLMOutput(
            loss=masked_lm_loss,
            logits=lm_logits,
            past_key_values=outputs.past_key_values,  # index 1 of d outputs
            decoder_hidden_states=outputs.decoder_hidden_states,  # index 2 of d outputs
            decoder_attentions=outputs.decoder_attentions,  # index 3 of d outputs
            cross_attentions=outputs.cross_attentions,  # index 4 of d outputs
            encoder_last_hidden_state=outputs.encoder_last_hidden_state,  # index 0 of encoder outputs
            encoder_hidden_states=outputs.encoder_hidden_states,  # 1 of e out
            encoder_attentions=outputs.encoder_attentions,  # 2 of e out
        )
    # Copied from transformers.models.bart.modeling_tf_bart.TFBartForConditionalGeneration.serving_output
    def serving_output(self, output):
        # Convert optional tuple outputs into tensors (or drop them) so the result is a valid
        # SavedModel serving signature; gating on config mirrors what call() actually returned.
        pkv = tf.tuple(output.past_key_values)[1] if self.config.use_cache else None
        dec_hs = tf.convert_to_tensor(output.decoder_hidden_states) if self.config.output_hidden_states else None
        dec_attns = tf.convert_to_tensor(output.decoder_attentions) if self.config.output_attentions else None
        cross_attns = tf.convert_to_tensor(output.cross_attentions) if self.config.output_attentions else None
        enc_hs = tf.convert_to_tensor(output.encoder_hidden_states) if self.config.output_hidden_states else None
        enc_attns = tf.convert_to_tensor(output.encoder_attentions) if self.config.output_attentions else None
        return TFSeq2SeqLMOutput(
            logits=output.logits,
            past_key_values=pkv,
            decoder_hidden_states=dec_hs,
            decoder_attentions=dec_attns,
            cross_attentions=cross_attns,
            encoder_last_hidden_state=output.encoder_last_hidden_state,
            encoder_hidden_states=enc_hs,
            encoder_attentions=enc_attns,
        )
    # Copied from transformers.models.bart.modeling_tf_bart.TFBartForConditionalGeneration.prepare_inputs_for_generation
    def prepare_inputs_for_generation(
        self,
        decoder_input_ids,
        past,
        attention_mask,
        head_mask=None,
        decoder_head_mask=None,
        cross_attn_head_mask=None,
        use_cache=None,
        **kwargs,
    ) -> Dict:
        # `past` layout: length 1 -> (encoder_last_hidden_state,) on the first decoding step;
        # length 2 -> (encoder_outputs, past_key_values) on subsequent steps.
        assert past is not None and len(past) in {1, 2}, f"past has to be an iterable of length 1,2 got {past}"
        if len(past) == 1:
            assert isinstance(past[0], tf.Tensor), f"`past[0]` has to be of type `tf.Tensor`, but is {type(past[0])}"
            encoder_outputs = TFBaseModelOutput(last_hidden_state=past[0])
            past_key_values = None
        else:
            assert (
                len(past) == 2
            ), "`past` has to be of length 2 with the encoder_outputs at the first position and past_key_values at the second position."
            encoder_outputs, past_key_values = past
            if isinstance(encoder_outputs, tuple):
                assert isinstance(
                    encoder_outputs[0], tf.Tensor
                ), f"`encoder_outputs[0]` has to be of type `tf.Tensor`, but is {type(encoder_outputs[0])}"
                encoder_outputs = TFBaseModelOutput(last_hidden_state=encoder_outputs[0])
            elif isinstance(encoder_outputs, tf.Tensor):
                encoder_outputs = TFBaseModelOutput(last_hidden_state=encoder_outputs)
            assert (
                past_key_values
            ), f"decoder cached states must be truthy. got {past_key_values} from the 2nd element of past"
            # With cached states, only the last generated token is fed to the decoder.
            decoder_input_ids = decoder_input_ids[:, -1:]
        assert isinstance(
            encoder_outputs, TFBaseModelOutput
        ), f"encoder_outputs should be a TFBaseModelOutput, Instead got {type(encoder_outputs)}."
        return {
            "input_ids": None,  # encoder_outputs is defined. input_ids not needed
            "encoder_outputs": encoder_outputs,
            "past_key_values": past_key_values,
            "decoder_input_ids": decoder_input_ids,
            "attention_mask": attention_mask,
            "head_mask": head_mask,
            "decoder_head_mask": decoder_head_mask,
            "cross_attn_head_mask": cross_attn_head_mask,
            "use_cache": use_cache,  # change this to avoid caching (presumably for debugging)
        }
    def prepare_decoder_input_ids_from_labels(self, labels: tf.Tensor):
        # Shift labels right to build teacher-forced decoder inputs.
        return shift_tokens_right(labels, self.config.pad_token_id, self.config.decoder_start_token_id)
    @staticmethod
    # Copied from transformers.models.bart.modeling_tf_bart.TFBartForConditionalGeneration._reorder_cache
    def _reorder_cache(past, beam_idx):
        # Nothing to reorder before the first decoding step (past holds only encoder output).
        if len(past) == 1:
            return past
        past_key_values = past[1]
        reordered_past = ()
        for layer_past_key_values in past_key_values:
            # Reorder self-attention key/value states along the batch axis for beam search;
            # cross-attention states (entries [2:]) are batch-invariant and kept as-is.
            reordered_past += (
                tuple(tf.gather(layer_past_key_value, beam_idx) for layer_past_key_value in layer_past_key_values[:2])
                + layer_past_key_values[2:],
            )
        return (past[0], reordered_past)
| 47.328175
| 236
| 0.660682
|
import random
from typing import Dict, Optional, Tuple, Union
import numpy as np
import tensorflow as tf
from ...activations_tf import get_tf_activation
from ...file_utils import (
add_code_sample_docstrings,
add_end_docstrings,
add_start_docstrings,
add_start_docstrings_to_model_forward,
replace_return_docstrings,
)
from ...modeling_tf_outputs import (
TFBaseModelOutput,
TFBaseModelOutputWithPastAndCrossAttentions,
TFSeq2SeqLMOutput,
TFSeq2SeqModelOutput,
)
from ...modeling_tf_utils import (
DUMMY_INPUTS,
TFCausalLanguageModelingLoss,
TFPreTrainedModel,
TFSharedEmbeddings,
TFWrappedEmbeddings,
input_processing,
keras_serializable,
shape_list,
)
from ...utils import logging
from .configuration_pegasus import PegasusConfig
logger = logging.get_logger(__name__)

# Default checkpoint / config / tokenizer names injected into auto-generated docstrings.
_CHECKPOINT_FOR_DOC = "google/pegasus-large"
_CONFIG_FOR_DOC = "PegasusConfig"
_TOKENIZER_FOR_DOC = "PegasusTokenizer"

# Additive attention-mask fill value: large enough that masked logits vanish after softmax.
LARGE_NEGATIVE = -1e8
def shift_tokens_right(input_ids: tf.Tensor, pad_token_id: int, decoder_start_token_id: int):
    """Shift token ids one position to the right, prepending ``decoder_start_token_id``.

    Positions carrying the label-masking sentinel ``-100`` are replaced with
    ``pad_token_id`` so the decoder never sees the sentinel value.
    """
    batch_size = shape_list(input_ids)[0]
    bos_column = tf.fill((batch_size, 1), decoder_start_token_id)
    shifted = tf.concat([bos_column, input_ids[:, :-1]], -1)
    # Map the -100 label-ignore sentinel back to the pad token.
    pad_fill = tf.fill(shape_list(shifted), pad_token_id)
    shifted = tf.where(shifted == -100, pad_fill, shifted)

    if tf.executing_eagerly():
        # Sanity-check that every id is non-negative; the control dependency
        # guarantees the assertion op actually runs before the result is used.
        non_negative_check = tf.debugging.assert_greater_equal(shifted, tf.constant(0))
        with tf.control_dependencies([non_negative_check]):
            shifted = tf.identity(shifted)

    return shifted
def _make_causal_mask(input_ids_shape: tf.TensorShape, past_key_values_length: int = 0):
    """Build an additive causal mask of shape (bsz, 1, tgt_len, tgt_len + past).

    Visible (current and earlier) positions hold 0.0; future positions hold
    ``LARGE_NEGATIVE`` so they are suppressed after softmax.
    """
    bsz, tgt_len = input_ids_shape
    blocked = tf.ones((tgt_len, tgt_len)) * LARGE_NEGATIVE
    col_idx = tf.range(shape_list(blocked)[-1])
    row_limit = tf.reshape(col_idx + 1, (shape_list(blocked)[-1], 1))
    # Column j is visible from row i whenever j < i + 1 (lower triangle incl. diagonal).
    causal = tf.where(col_idx < row_limit, 0.0, blocked)

    if past_key_values_length > 0:
        # Cached (past) positions are always attendable.
        causal = tf.concat([tf.zeros((tgt_len, past_key_values_length)), causal], axis=-1)

    return tf.tile(causal[None, None, :, :], (bsz, 1, 1, 1))
def _expand_mask(mask: tf.Tensor, tgt_len: Optional[int] = None, past_key_values_length: int = 0):
    """Expand a (bsz, src_len) padding mask to an additive (bsz, 1, tgt_len, src_len) mask.

    NOTE(review): ``past_key_values_length`` is accepted for signature
    compatibility but is not used in this function.
    """
    src_len = shape_list(mask)[1]
    if tgt_len is None:
        tgt_len = src_len
    one_cst = tf.constant(1.0)
    mask = tf.cast(mask, dtype=one_cst.dtype)
    expanded = tf.tile(mask[:, None, None, :], (1, 1, tgt_len, 1))
    # mask==1 -> 0.0 added (keep); mask==0 -> LARGE_NEGATIVE added (drop).
    return (one_cst - expanded) * LARGE_NEGATIVE
class TFPegasusSinusoidalPositionalEmbedding(tf.keras.layers.Layer):
    """This module produces sinusoidal positional embeddings of any length."""

    def __init__(self, num_positions: int, embedding_dim: int, **kwargs):
        super().__init__(**kwargs)
        if embedding_dim % 2 != 0:
            # The sin/cos half-split in _init_weight requires an even dimension.
            raise NotImplementedError(f"odd embedding_dim {embedding_dim} not supported")
        self.embedding_dim = embedding_dim
        self.num_positions = num_positions

    def build(self, input_shape: tf.TensorShape):
        """Create the (num_positions, embedding_dim) weight and fill it with the sinusoidal table."""
        weight = self._init_weight(self.num_positions, self.embedding_dim)
        self.weight = self.add_weight(
            name="embeddings",
            shape=[self.num_positions, self.embedding_dim],
        )
        # add_weight picks its own dtype; cast the numpy-derived table to match before assigning.
        weight = tf.cast(weight, dtype=self.weight.dtype)
        self.weight.assign(weight)
        super().build(input_shape)

    @staticmethod
    def _init_weight(n_pos: int, dim: int):
        """Build the sinusoidal table.

        Features are NOT interleaved: the sin features fill the first half of the
        vector (columns 0..dim/2-1) and the cos features fill the second half.
        """
        position_enc = np.array(
            [[pos / np.power(10000, 2 * (j // 2) / dim) for j in range(dim)] for pos in range(n_pos)]
        )
        position_enc[:, 0 : dim // 2] = np.sin(position_enc[:, 0::2])
        position_enc[:, dim // 2 :] = np.cos(position_enc[:, 1::2])
        table = tf.convert_to_tensor(position_enc)
        # The table is a fixed function of position; block gradients through it.
        tf.stop_gradient(table)
        return table

    def call(self, input_shape: tf.TensorShape, past_key_values_length: int = 0):
        """Return embeddings for positions [past_key_values_length, past_key_values_length + seq_len)."""
        bsz, seq_len = input_shape[:2]
        positions = tf.range(past_key_values_length, seq_len + past_key_values_length, delta=1, name="range")
        return tf.gather(self.weight, positions)
class TFPegasusAttention(tf.keras.layers.Layer):
    """Multi-headed attention; serves as self-attention or (when ``key_value_states``
    is passed at call time) cross-attention."""

    def __init__(
        self,
        embed_dim: int,
        num_heads: int,
        dropout: float = 0.0,
        is_decoder: bool = False,
        bias: bool = True,
        **kwargs,
    ):
        super().__init__(**kwargs)
        self.embed_dim = embed_dim
        self.num_heads = num_heads
        self.dropout = tf.keras.layers.Dropout(dropout)
        self.head_dim = embed_dim // num_heads
        assert self.head_dim * num_heads == self.embed_dim, "embed_dim must be divisible by num_heads"
        # Queries are pre-scaled by 1/sqrt(head_dim) before the QK^T product.
        self.scaling = self.head_dim ** -0.5
        self.is_decoder = is_decoder
        self.k_proj = tf.keras.layers.Dense(embed_dim, use_bias=bias, name="k_proj")
        self.q_proj = tf.keras.layers.Dense(embed_dim, use_bias=bias, name="q_proj")
        self.v_proj = tf.keras.layers.Dense(embed_dim, use_bias=bias, name="v_proj")
        self.out_proj = tf.keras.layers.Dense(embed_dim, use_bias=bias, name="out_proj")

    def _shape(self, tensor: tf.Tensor, seq_len: int, bsz: int):
        # (bsz, seq_len, embed_dim) -> (bsz, num_heads, seq_len, head_dim)
        return tf.transpose(tf.reshape(tensor, (bsz, seq_len, self.num_heads, self.head_dim)), (0, 2, 1, 3))

    def call(
        self,
        hidden_states: tf.Tensor,
        key_value_states: Optional[tf.Tensor] = None,
        past_key_value: Optional[Tuple[Tuple[tf.Tensor]]] = None,
        attention_mask: Optional[tf.Tensor] = None,
        layer_head_mask: Optional[tf.Tensor] = None,
        training=False,
    ) -> Tuple[tf.Tensor, Optional[tf.Tensor]]:
        """Run attention over ``hidden_states``.

        Returns ``(attn_output, attn_weights, past_key_value)`` where
        ``past_key_value`` is ``(key_states, value_states)`` when ``is_decoder``
        is set (for incremental decoding), else the input value unchanged.
        """
        # If key_value_states are provided this layer is used as cross-attention.
        is_cross_attention = key_value_states is not None
        bsz, tgt_len, embed_dim = shape_list(hidden_states)

        query_states = self.q_proj(hidden_states) * self.scaling
        if is_cross_attention and past_key_value is not None:
            # Reuse cached cross-attention keys/values: the encoder output is static.
            key_states = past_key_value[0]
            value_states = past_key_value[1]
        elif is_cross_attention:
            # Cross-attention: project keys/values from the encoder states.
            key_states = self._shape(self.k_proj(key_value_states), -1, bsz)
            value_states = self._shape(self.v_proj(key_value_states), -1, bsz)
        elif past_key_value is not None:
            # Cached self-attention: append this step's keys/values to the cache.
            key_states = self._shape(self.k_proj(hidden_states), -1, bsz)
            value_states = self._shape(self.v_proj(hidden_states), -1, bsz)
            key_states = tf.concat([past_key_value[0], key_states], axis=2)
            value_states = tf.concat([past_key_value[1], value_states], axis=2)
        else:
            # Plain self-attention.
            key_states = self._shape(self.k_proj(hidden_states), -1, bsz)
            value_states = self._shape(self.v_proj(hidden_states), -1, bsz)

        if self.is_decoder:
            # Expose the (possibly extended) keys/values for the next decoding step.
            past_key_value = (key_states, value_states)

        # Flatten heads into the batch dimension for a single batched matmul.
        proj_shape = (bsz * self.num_heads, -1, self.head_dim)
        query_states = tf.reshape(self._shape(query_states, tgt_len, bsz), proj_shape)
        key_states = tf.reshape(key_states, proj_shape)
        value_states = tf.reshape(value_states, proj_shape)

        src_len = shape_list(key_states)[1]
        attn_weights = tf.matmul(query_states, key_states, transpose_b=True)

        # The tf.debugging asserts are not XLA-compatible, so they only run eagerly.
        if tf.executing_eagerly():
            tf.debugging.assert_equal(
                shape_list(attn_weights),
                [bsz * self.num_heads, tgt_len, src_len],
                message=f"Attention weights should be of size {(bsz * self.num_heads, tgt_len, src_len)}, but is {shape_list(attn_weights)}",
            )

        if attention_mask is not None:
            if tf.executing_eagerly():
                tf.debugging.assert_equal(
                    shape_list(attention_mask),
                    [bsz, 1, tgt_len, src_len],
                    message=f"Attention mask should be of size {(bsz, 1, tgt_len, src_len)}, but is {shape_list(attention_mask)}",
                )
            # Additive mask: masked positions carry a large negative value.
            attention_mask = tf.cast(attention_mask, dtype=attn_weights.dtype)
            attn_weights = tf.reshape(attn_weights, (bsz, self.num_heads, tgt_len, src_len)) + attention_mask
            attn_weights = tf.reshape(attn_weights, (bsz * self.num_heads, tgt_len, src_len))

        attn_weights = tf.nn.softmax(attn_weights, axis=-1)

        if layer_head_mask is not None:
            if tf.executing_eagerly():
                tf.debugging.assert_equal(
                    shape_list(layer_head_mask),
                    [self.num_heads],
                    message=f"Head mask for a single layer should be of size {(self.num_heads)}, but is {shape_list(layer_head_mask)}",
                )
            # Per-head scaling/zeroing of the attention probabilities.
            attn_weights = tf.reshape(layer_head_mask, (1, -1, 1, 1)) * tf.reshape(
                attn_weights, (bsz, self.num_heads, tgt_len, src_len)
            )
            attn_weights = tf.reshape(attn_weights, (bsz * self.num_heads, tgt_len, src_len))

        attn_probs = self.dropout(attn_weights, training=training)
        attn_output = tf.matmul(attn_probs, value_states)

        if tf.executing_eagerly():
            tf.debugging.assert_equal(
                shape_list(attn_output),
                [bsz * self.num_heads, tgt_len, self.head_dim],
                message=f"`attn_output` should be of size {(bsz, self.num_heads, tgt_len, self.head_dim)}, but is {shape_list(attn_output)}",
            )

        # Merge heads back: (bsz*heads, tgt, head_dim) -> (bsz, tgt, embed_dim).
        attn_output = tf.transpose(
            tf.reshape(attn_output, (bsz, self.num_heads, tgt_len, self.head_dim)), (0, 2, 1, 3)
        )
        attn_output = tf.reshape(attn_output, (bsz, tgt_len, embed_dim))
        attn_output = self.out_proj(attn_output)
        # Return pre-dropout weights reshaped per head for the attentions output.
        attn_weights: tf.Tensor = tf.reshape(attn_weights, (bsz, self.num_heads, tgt_len, src_len))
        return attn_output, attn_weights, past_key_value
class TFPegasusEncoderLayer(tf.keras.layers.Layer):
    """One pre-LayerNorm Transformer encoder block: self-attention then feed-forward,
    each wrapped with a residual connection."""

    def __init__(self, config: PegasusConfig, **kwargs):
        super().__init__(**kwargs)
        self.embed_dim = config.d_model
        # Self-attention sub-layer and its pre-norm.
        self.self_attn = TFPegasusAttention(
            self.embed_dim, config.encoder_attention_heads, dropout=config.attention_dropout, name="self_attn"
        )
        self.self_attn_layer_norm = tf.keras.layers.LayerNormalization(epsilon=1e-5, name="self_attn_layer_norm")
        self.dropout = tf.keras.layers.Dropout(config.dropout)
        # Position-wise feed-forward sub-layer and its pre-norm.
        self.activation_fn = get_tf_activation(config.activation_function)
        self.activation_dropout = tf.keras.layers.Dropout(config.activation_dropout)
        self.fc1 = tf.keras.layers.Dense(config.encoder_ffn_dim, name="fc1")
        self.fc2 = tf.keras.layers.Dense(self.embed_dim, name="fc2")
        self.final_layer_norm = tf.keras.layers.LayerNormalization(epsilon=1e-5, name="final_layer_norm")

    def call(self, hidden_states: tf.Tensor, attention_mask: tf.Tensor, layer_head_mask: tf.Tensor, training=False):
        """Apply the block; returns ``(hidden_states, self_attn_weights)``."""
        # --- self-attention sub-block (pre-norm + residual) ---
        attn_input = hidden_states
        normed = self.self_attn_layer_norm(hidden_states)
        attn_out, attn_weights, _ = self.self_attn(
            hidden_states=normed, attention_mask=attention_mask, layer_head_mask=layer_head_mask
        )

        if tf.executing_eagerly():
            tf.debugging.assert_equal(
                shape_list(attn_out),
                shape_list(attn_input),
                message=f"Self attn modified the shape of query {shape_list(attn_input)} to {shape_list(attn_out)}",
            )

        hidden_states = attn_input + self.dropout(attn_out, training=training)

        # --- feed-forward sub-block (pre-norm + residual) ---
        ffn_input = hidden_states
        ffn_out = self.final_layer_norm(hidden_states)
        ffn_out = self.activation_fn(self.fc1(ffn_out))
        ffn_out = self.activation_dropout(ffn_out, training=training)
        ffn_out = self.fc2(ffn_out)
        hidden_states = ffn_input + self.dropout(ffn_out, training=training)

        return hidden_states, attn_weights
class TFPegasusDecoderLayer(tf.keras.layers.Layer):
    """One pre-LayerNorm Transformer decoder block: causal self-attention,
    cross-attention over the encoder output, then feed-forward — each with a
    residual connection."""

    def __init__(self, config: PegasusConfig, **kwargs):
        super().__init__(**kwargs)
        self.embed_dim = config.d_model
        self.self_attn = TFPegasusAttention(
            embed_dim=self.embed_dim,
            num_heads=config.decoder_attention_heads,
            dropout=config.attention_dropout,
            name="self_attn",
            is_decoder=True,
        )
        self.dropout = tf.keras.layers.Dropout(config.dropout)
        self.activation_fn = get_tf_activation(config.activation_function)
        self.activation_dropout = tf.keras.layers.Dropout(config.activation_dropout)
        self.self_attn_layer_norm = tf.keras.layers.LayerNormalization(epsilon=1e-5, name="self_attn_layer_norm")
        self.encoder_attn = TFPegasusAttention(
            self.embed_dim,
            config.decoder_attention_heads,
            dropout=config.attention_dropout,
            name="encoder_attn",
            is_decoder=True,
        )
        self.encoder_attn_layer_norm = tf.keras.layers.LayerNormalization(epsilon=1e-5, name="encoder_attn_layer_norm")
        self.fc1 = tf.keras.layers.Dense(config.decoder_ffn_dim, name="fc1")
        self.fc2 = tf.keras.layers.Dense(self.embed_dim, name="fc2")
        self.final_layer_norm = tf.keras.layers.LayerNormalization(epsilon=1e-5, name="final_layer_norm")

    def call(
        self,
        hidden_states,
        attention_mask: Optional[tf.Tensor] = None,
        encoder_hidden_states: Optional[tf.Tensor] = None,
        encoder_attention_mask: Optional[tf.Tensor] = None,
        layer_head_mask: Optional[tf.Tensor] = None,
        cross_attn_layer_head_mask: Optional[tf.Tensor] = None,
        past_key_value: Optional[Tuple[tf.Tensor]] = None,
        training=False,
    ) -> Tuple[tf.Tensor, tf.Tensor, Tuple[Tuple[tf.Tensor]]]:
        """Apply the decoder block.

        ``past_key_value`` holds up to 4 cached tensors per layer: self-attention
        key/value at positions 0-1 and cross-attention key/value at positions 2-3.
        Returns ``(hidden_states, self_attn_weights, cross_attn_weights,
        present_key_value)``.
        """
        # --- self-attention sub-block (pre-norm + residual) ---
        residual = hidden_states
        hidden_states = self.self_attn_layer_norm(hidden_states)
        # Self-attention cache occupies the first two slots of past_key_value.
        self_attn_past_key_value = past_key_value[:2] if past_key_value is not None else None
        hidden_states, self_attn_weights, present_key_value = self.self_attn(
            hidden_states=hidden_states,
            past_key_value=self_attn_past_key_value,
            attention_mask=attention_mask,
            layer_head_mask=layer_head_mask,
        )
        hidden_states = self.dropout(hidden_states, training=training)
        hidden_states = residual + hidden_states

        # --- cross-attention sub-block (only when encoder states are given) ---
        cross_attn_present_key_value = None
        cross_attn_weights = None
        if encoder_hidden_states is not None:
            residual = hidden_states
            hidden_states = self.encoder_attn_layer_norm(hidden_states)
            # Cross-attention cache occupies the last two slots of past_key_value.
            cross_attn_past_key_value = past_key_value[-2:] if past_key_value is not None else None
            hidden_states, cross_attn_weights, cross_attn_present_key_value = self.encoder_attn(
                hidden_states=hidden_states,
                key_value_states=encoder_hidden_states,
                attention_mask=encoder_attention_mask,
                layer_head_mask=cross_attn_layer_head_mask,
                past_key_value=cross_attn_past_key_value,
            )
            hidden_states = self.dropout(hidden_states, training=training)
            hidden_states = residual + hidden_states
            # Append cross-attention key/value at positions 2-3 of the present cache.
            present_key_value = present_key_value + cross_attn_present_key_value

        # --- feed-forward sub-block (pre-norm + residual) ---
        residual = hidden_states
        hidden_states = self.final_layer_norm(hidden_states)
        hidden_states = self.activation_fn(self.fc1(hidden_states))
        hidden_states = self.activation_dropout(hidden_states, training=training)
        hidden_states = self.fc2(hidden_states)
        hidden_states = self.dropout(hidden_states, training=training)
        hidden_states = residual + hidden_states

        return (
            hidden_states,
            self_attn_weights,
            cross_attn_weights,
            present_key_value,
        )
class TFPegasusPreTrainedModel(TFPreTrainedModel):
    """Base class for Pegasus TF models: wires in the Pegasus config class and
    the prefix under which the base model's weights are stored."""

    config_class = PegasusConfig
    base_model_prefix = "model"

    @property
    def dummy_inputs(self):
        """Dummy inputs used to build the network's variables on first call."""
        pad_token = 1
        input_ids = tf.cast(tf.convert_to_tensor(DUMMY_INPUTS), tf.int32)
        decoder_input_ids = tf.cast(tf.convert_to_tensor(DUMMY_INPUTS), tf.int32)
        dummy_inputs = {
            "decoder_input_ids": decoder_input_ids,
            # Attend everywhere except pad positions.
            "attention_mask": tf.math.not_equal(input_ids, pad_token),
            "input_ids": input_ids,
        }
        return dummy_inputs

    @tf.function(
        input_signature=[
            {
                "input_ids": tf.TensorSpec((None, None), tf.int32, name="input_ids"),
                "attention_mask": tf.TensorSpec((None, None), tf.int32, name="attention_mask"),
                "decoder_input_ids": tf.TensorSpec((None, None), tf.int32, name="decoder_input_ids"),
                "decoder_attention_mask": tf.TensorSpec((None, None), tf.int32, name="decoder_attention_mask"),
            }
        ]
    )
    def serving(self, inputs):
        """SavedModel serving signature: run the model and normalize the output."""
        output = self.call(inputs)
        return self.serving_output(output)
PEGASUS_START_DOCSTRING = r"""
This model inherits from :class:`~transformers.TFPreTrainedModel`. Check the superclass documentation for the
generic methods the library implements for all its model (such as downloading or saving, resizing the input
embeddings, pruning heads etc.)
This model is also a `tf.keras.Model <https://www.tensorflow.org/api_docs/python/tf/keras/Model>`__ subclass. Use
it as a regular TF 2.0 Keras Model and refer to the TF 2.0 documentation for all matter related to general usage
and behavior.
.. note::
TF 2.0 models accepts two formats as inputs:
- having all inputs as keyword arguments (like PyTorch models), or
- having all inputs as a list, tuple or dict in the first positional arguments.
This second option is useful when using :meth:`tf.keras.Model.fit` method which currently requires having all
the tensors in the first argument of the model call function: :obj:`model(inputs)`.
If you choose this second option, there are three possibilities you can use to gather all the input Tensors in
the first positional argument :
- a single Tensor with :obj:`input_ids` only and nothing else: :obj:`model(input_ids)`
- a list of varying length with one or several input Tensors IN THE ORDER given in the docstring:
:obj:`model([input_ids, attention_mask])` or :obj:`model([input_ids, attention_mask, token_type_ids])`
- a dictionary with one or several input Tensors associated to the input names given in the docstring:
:obj:`model({"input_ids": input_ids, "token_type_ids": token_type_ids})`
Args:
config (:class:`~transformers.PegasusConfig`): Model configuration class with all the parameters of the model.
Initializing with a config file does not load the weights associated with the model, only the
configuration. Check out the :meth:`~transformers.TFPreTrainedModel.from_pretrained` method to load the
model weights.
"""
PEGASUS_GENERATION_EXAMPLE = r"""
Summarization example::
>>> from transformers import PegasusTokenizer, TFPegasusForConditionalGeneration
>>> model = TFPegasusForConditionalGeneration.from_pretrained('google/pegasus-xsum')
>>> tokenizer = PegasusTokenizer.from_pretrained('google/pegasus-xsum')
>>> ARTICLE_TO_SUMMARIZE = (
... "PG&E stated it scheduled the blackouts in response to forecasts for high winds "
... "amid dry conditions. The aim is to reduce the risk of wildfires. Nearly 800 thousand customers were "
... "scheduled to be affected by the shutoffs which were expected to last through at least midday tomorrow."
... )
>>> inputs = tokenizer([ARTICLE_TO_SUMMARIZE], max_length=1024, return_tensors='tf')
>>> # Generate Summary
>>> summary_ids = model.generate(inputs['input_ids'])
>>> print([tokenizer.decode(g, skip_special_tokens=True, clean_up_tokenization_spaces=False) for g in summary_ids])
"""
# Per-call docstring describing every input accepted by the Pegasus TF models.
# Fixes over the previous revision: the `encoder_outputs` sentence was garbled
# (shape clause detached from its sentence), `past_key_values` was missing the
# trailing colon on its header, and the `output_attentions` entry was duplicated.
PEGASUS_INPUTS_DOCSTRING = r"""
    Args:
        input_ids (:obj:`tf.Tensor` of shape :obj:`({0})`):
            Indices of input sequence tokens in the vocabulary.
            Indices can be obtained using :class:`~transformers.PegasusTokenizer`. See
            :meth:`transformers.PreTrainedTokenizer.encode` and :meth:`transformers.PreTrainedTokenizer.__call__` for
            details.
            `What are input IDs? <../glossary.html#input-ids>`__
        attention_mask (:obj:`tf.Tensor` of shape :obj:`({0})`, `optional`):
            Mask to avoid performing attention on padding token indices. Mask values selected in ``[0, 1]``:
            - 1 for tokens that are **not masked**,
            - 0 for tokens that are **masked**.
            `What are attention masks? <../glossary.html#attention-mask>`__
        decoder_input_ids (:obj:`tf.Tensor` of shape :obj:`(batch_size, target_sequence_length)`, `optional`):
            Indices of decoder input sequence tokens in the vocabulary.
            Indices can be obtained using :class:`~transformers.PegasusTokenizer`. See
            :meth:`transformers.PreTrainedTokenizer.encode` and :meth:`transformers.PreTrainedTokenizer.__call__` for
            details.
            `What are decoder input IDs? <../glossary.html#decoder-input-ids>`__
            Pegasus uses the :obj:`pad_token_id` as the starting token for :obj:`decoder_input_ids` generation. If
            :obj:`past_key_values` is used, optionally only the last :obj:`decoder_input_ids` have to be input (see
            :obj:`past_key_values`).
        decoder_attention_mask (:obj:`tf.Tensor` of shape :obj:`(batch_size, target_sequence_length)`, `optional`):
            will be made by default and ignore pad tokens. It is not recommended to set this for most use cases.
        head_mask (:obj:`tf.Tensor` of shape :obj:`(encoder_layers, encoder_attention_heads)`, `optional`):
            Mask to nullify selected heads of the attention modules in the encoder. Mask values selected in ``[0, 1]``:
            - 1 indicates the head is **not masked**,
            - 0 indicates the head is **masked**.
        decoder_head_mask (:obj:`tf.Tensor` of shape :obj:`(decoder_layers, decoder_attention_heads)`, `optional`):
            Mask to nullify selected heads of the attention modules in the decoder. Mask values selected in ``[0, 1]``:
            - 1 indicates the head is **not masked**,
            - 0 indicates the head is **masked**.
        cross_attn_head_mask (:obj:`tf.Tensor` of shape :obj:`(decoder_layers, decoder_attention_heads)`, `optional`):
            Mask to nullify selected heads of the cross-attention modules. Mask values selected in ``[0, 1]``:
            - 1 indicates the head is **not masked**,
            - 0 indicates the head is **masked**.
        encoder_outputs (:obj:`tf.FloatTensor`, `optional`):
            Sequence of hidden states at the output of the last layer of the encoder, of shape :obj:`(batch_size,
            sequence_length, hidden_size)`. Used in the cross-attention of the decoder.
        past_key_values (:obj:`Tuple[Tuple[tf.Tensor]]` of length :obj:`config.n_layers`):
            contains precomputed key and value hidden states of the attention blocks. Can be used to speed up decoding.
            If :obj:`past_key_values` are used, the user can optionally input only the last :obj:`decoder_input_ids`
            (those that don't have their past key value states given to this model) of shape :obj:`(batch_size, 1)`
            instead of all :obj:`decoder_input_ids` of shape :obj:`(batch_size, sequence_length)`.
        use_cache (:obj:`bool`, `optional`, defaults to :obj:`True`):
            If set to :obj:`True`, :obj:`past_key_values` key value states are returned and can be used to speed up
            decoding (see :obj:`past_key_values`). Set to :obj:`False` during training, :obj:`True` during generation
        output_attentions (:obj:`bool`, `optional`):
            Whether or not to return the attentions tensors of all attention layers. See ``attentions`` under returned
            tensors for more detail. This argument can be used only in eager mode, in graph mode the value in the
            config will be used instead.
        output_hidden_states (:obj:`bool`, `optional`):
            Whether or not to return the hidden states of all layers. See ``hidden_states`` under returned tensors for
            more detail. This argument can be used only in eager mode, in graph mode the value in the config will be
            used instead.
        return_dict (:obj:`bool`, `optional`):
            Whether or not to return a :class:`~transformers.file_utils.ModelOutput` instead of a plain tuple. This
            argument can be used in eager mode, in graph mode the value will always be set to True.
        training (:obj:`bool`, `optional`, defaults to :obj:`False`):
            Whether or not to use the model in training mode (some modules like dropout modules have different
            behaviors between training and evaluation).
"""
@keras_serializable
class TFPegasusEncoder(tf.keras.layers.Layer):
    """Transformer encoder: token + sinusoidal position embeddings followed by
    ``config.encoder_layers`` :class:`TFPegasusEncoderLayer` blocks and a final
    LayerNorm."""

    config_class = PegasusConfig

    def __init__(self, config: PegasusConfig, embed_tokens: Optional[TFSharedEmbeddings] = None, **kwargs):
        super().__init__(**kwargs)
        self.config = config
        self.dropout = tf.keras.layers.Dropout(config.dropout)
        # Probability of skipping a whole layer at train time (LayerDrop).
        self.layerdrop = config.encoder_layerdrop
        self.padding_idx = config.pad_token_id
        self.max_source_positions = config.max_position_embeddings
        # Embeddings are optionally scaled by sqrt(d_model).
        self.embed_scale = tf.math.sqrt(float(config.d_model)) if config.scale_embedding else 1.0
        self.embed_tokens = embed_tokens
        self.embed_positions = TFPegasusSinusoidalPositionalEmbedding(
            config.max_position_embeddings,
            config.d_model,
            name="embed_positions",
        )
        self.layers = [TFPegasusEncoderLayer(config, name=f"layers.{i}") for i in range(config.encoder_layers)]
        self.layer_norm = tf.keras.layers.LayerNormalization(epsilon=1e-5, name="layer_norm")

    def get_embed_tokens(self):
        """Return the (shared) token embedding layer."""
        return self.embed_tokens

    def set_embed_tokens(self, embed_tokens):
        """Replace the (shared) token embedding layer."""
        self.embed_tokens = embed_tokens

    def call(
        self,
        input_ids=None,
        inputs_embeds=None,
        attention_mask=None,
        head_mask=None,
        output_attentions=None,
        output_hidden_states=None,
        return_dict=None,
        training=False,
        **kwargs,
    ):
        """Encode the input sequence.

        Exactly one of ``input_ids`` / ``inputs_embeds`` must be provided.
        Returns a :class:`TFBaseModelOutput` (or a tuple when
        ``return_dict=False``) with the final hidden states and, optionally,
        per-layer hidden states and attentions.
        """
        # Normalize all call arguments (handles dict/list Keras input formats).
        inputs = input_processing(
            func=self.call,
            config=self.config,
            input_ids=input_ids,
            attention_mask=attention_mask,
            head_mask=head_mask,
            inputs_embeds=inputs_embeds,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
            training=training,
            kwargs_call=kwargs,
        )
        if inputs["input_ids"] is not None and inputs["inputs_embeds"] is not None:
            raise ValueError("You cannot specify both input_ids and inputs_embeds at the same time")
        elif inputs["input_ids"] is not None:
            input_shape = shape_list(inputs["input_ids"])
        elif inputs["inputs_embeds"] is not None:
            input_shape = shape_list(inputs["inputs_embeds"])[:-1]
        else:
            raise ValueError("You have to specify either input_ids or inputs_embeds")
        if inputs["inputs_embeds"] is None:
            inputs["inputs_embeds"] = self.embed_tokens(inputs["input_ids"]) * self.embed_scale
        embed_pos = self.embed_positions(input_shape)
        hidden_states = inputs["inputs_embeds"] + embed_pos
        hidden_states = self.dropout(hidden_states, training=inputs["training"])
        # check attention mask and invert
        if inputs["attention_mask"] is not None:
            # [bsz, seq_len] -> [bsz, 1, tgt_seq_len, src_seq_len]
            attention_mask = _expand_mask(inputs["attention_mask"])
        else:
            attention_mask = None
        encoder_states = () if inputs["output_hidden_states"] else None
        all_attentions = () if inputs["output_attentions"] else None
        # check if head_mask has a correct number of layers specified if desired
        # The tf.debugging asserts are not compliant with XLA then they
        # have to be disabled in other modes than eager.
        if inputs["head_mask"] is not None and tf.executing_eagerly():
            tf.debugging.assert_equal(
                shape_list(inputs["head_mask"])[0],
                len(self.layers),
                message=f"The head_mask should be specified for {len(self.layers)} layers, but it is for {shape_list(inputs['head_mask'])[0]}.",
            )
        # encoder layers
        for idx, encoder_layer in enumerate(self.layers):
            if inputs["output_hidden_states"]:
                encoder_states = encoder_states + (hidden_states,)
            # add LayerDrop (see https://arxiv.org/abs/1909.11556 for description)
            dropout_probability = random.uniform(0, 1)
            if inputs["training"] and (dropout_probability < self.layerdrop):  # skip the layer
                continue
            hidden_states, attn = encoder_layer(
                hidden_states,
                attention_mask,
                inputs["head_mask"][idx] if inputs["head_mask"] is not None else None,
            )
            if inputs["output_attentions"]:
                all_attentions += (attn,)
        # Final LayerNorm (pre-norm architecture normalizes once more at the end).
        hidden_states = self.layer_norm(hidden_states)
        if inputs["output_hidden_states"]:
            encoder_states = encoder_states + (hidden_states,)
        if not inputs["return_dict"]:
            return tuple(v for v in [hidden_states, encoder_states, all_attentions] if v is not None)
        return TFBaseModelOutput(
            last_hidden_state=hidden_states, hidden_states=encoder_states, attentions=all_attentions
        )
@keras_serializable
class TFPegasusDecoder(tf.keras.layers.Layer):
    """Transformer decoder: token + sinusoidal position embeddings followed by
    ``config.decoder_layers`` :class:`TFPegasusDecoderLayer` blocks and a final
    LayerNorm. Supports incremental decoding via ``past_key_values``."""

    config_class = PegasusConfig

    def __init__(self, config: PegasusConfig, embed_tokens: Optional[TFSharedEmbeddings] = None, **kwargs):
        super().__init__(**kwargs)
        self.config = config
        self.padding_idx = config.pad_token_id
        self.embed_tokens = embed_tokens
        # Probability of skipping a whole layer at train time (LayerDrop).
        self.layerdrop = config.decoder_layerdrop
        self.embed_positions = TFPegasusSinusoidalPositionalEmbedding(
            config.max_position_embeddings,
            config.d_model,
            name="embed_positions",
        )
        # Embeddings are optionally scaled by sqrt(d_model).
        self.embed_scale = tf.math.sqrt(float(config.d_model)) if config.scale_embedding else 1.0
        self.layers = [TFPegasusDecoderLayer(config, name=f"layers.{i}") for i in range(config.decoder_layers)]
        self.layer_norm = tf.keras.layers.LayerNormalization(epsilon=1e-5, name="layer_norm")
        self.dropout = tf.keras.layers.Dropout(config.dropout)

    def get_embed_tokens(self):
        """Return the (shared) token embedding layer."""
        return self.embed_tokens

    def set_embed_tokens(self, embed_tokens):
        """Replace the (shared) token embedding layer."""
        self.embed_tokens = embed_tokens

    def call(
        self,
        input_ids=None,
        inputs_embeds=None,
        attention_mask=None,
        encoder_hidden_states=None,
        encoder_attention_mask=None,
        head_mask=None,
        cross_attn_head_mask=None,
        past_key_values=None,
        use_cache=None,
        output_attentions=None,
        output_hidden_states=None,
        return_dict=None,
        training=False,
        **kwargs,
    ):
        """Run the decoder.

        Exactly one of ``input_ids`` / ``inputs_embeds`` must be provided. When
        ``past_key_values`` is given, only new positions are embedded and the
        cached key/value states are reused. Returns a
        :class:`TFBaseModelOutputWithPastAndCrossAttentions` (or a tuple when
        ``return_dict=False``).
        """
        # Normalize all call arguments (handles dict/list Keras input formats).
        inputs = input_processing(
            func=self.call,
            config=self.config,
            input_ids=input_ids,
            attention_mask=attention_mask,
            encoder_hidden_states=encoder_hidden_states,
            encoder_attention_mask=encoder_attention_mask,
            head_mask=head_mask,
            cross_attn_head_mask=cross_attn_head_mask,
            inputs_embeds=inputs_embeds,
            past_key_values=past_key_values,
            use_cache=use_cache,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
            training=training,
            kwargs_call=kwargs,
        )
        if inputs["input_ids"] is not None and inputs["inputs_embeds"] is not None:
            raise ValueError("You cannot specify both decoder_input_ids and decoder_inputs_embeds at the same time")
        elif inputs["input_ids"] is not None:
            input_shape = shape_list(inputs["input_ids"])
        elif inputs["inputs_embeds"] is not None:
            input_shape = shape_list(inputs["inputs_embeds"])[:-1]
        else:
            raise ValueError("You have to specify either decoder_input_ids or decoder_inputs_embeds")
        # Number of positions already cached; new positions start after them.
        past_key_values_length = (
            shape_list(inputs["past_key_values"][0][0])[2] if inputs["past_key_values"] is not None else 0
        )
        # embed positions
        positions = self.embed_positions(input_shape, past_key_values_length)
        if inputs["inputs_embeds"] is None:
            inputs["inputs_embeds"] = self.embed_tokens(inputs["input_ids"]) * self.embed_scale
        hidden_states = inputs["inputs_embeds"]
        # [bsz, seq_len] -> [bsz, 1, tgt_seq_len, src_seq_len]
        if input_shape[-1] > 1:
            combined_attention_mask = _make_causal_mask(input_shape, past_key_values_length=past_key_values_length)
        else:
            # Single-step decoding: every (cached + current) position is visible.
            combined_attention_mask = _expand_mask(
                tf.ones((input_shape[0], input_shape[1] + past_key_values_length)), tgt_len=input_shape[-1]
            )
        if inputs["attention_mask"] is not None:
            # Fold the padding mask into the causal mask (both are additive).
            combined_attention_mask = combined_attention_mask + _expand_mask(
                inputs["attention_mask"], tgt_len=input_shape[-1]
            )
        if inputs["encoder_hidden_states"] is not None and inputs["encoder_attention_mask"] is not None:
            # [bsz, seq_len] -> [bsz, 1, tgt_seq_len, src_seq_len]
            inputs["encoder_attention_mask"] = _expand_mask(inputs["encoder_attention_mask"], tgt_len=input_shape[-1])
        hidden_states = self.dropout(hidden_states + positions, training=inputs["training"])
        # decoder layers
        all_hidden_states = () if inputs["output_hidden_states"] else None
        all_self_attns = () if inputs["output_attentions"] else None
        all_cross_attns = () if (inputs["output_attentions"] and inputs["encoder_hidden_states"] is not None) else None
        present_key_values = () if inputs["use_cache"] else None
        # check if head_mask and cross_attn_head_mask have a correct number of layers specified if desired
        # The tf.debugging asserts are not compliant with XLA then they
        # have to be disabled in other modes than eager.
        for attn_mask in ["head_mask", "cross_attn_head_mask"]:
            if inputs[attn_mask] is not None and tf.executing_eagerly():
                tf.debugging.assert_equal(
                    shape_list(inputs[attn_mask])[0],
                    len(self.layers),
                    message=f"The {attn_mask} should be specified for {len(self.layers)} layers, but it is for {shape_list(inputs[attn_mask])[0]}.",
                )
        for idx, decoder_layer in enumerate(self.layers):
            # add LayerDrop (see https://arxiv.org/abs/1909.11556 for description)
            if inputs["output_hidden_states"]:
                all_hidden_states += (hidden_states,)
            dropout_probability = random.uniform(0, 1)
            if inputs["training"] and (dropout_probability < self.layerdrop):
                continue
            past_key_value = inputs["past_key_values"][idx] if inputs["past_key_values"] is not None else None
            hidden_states, layer_self_attn, layer_cross_attn, present_key_value = decoder_layer(
                hidden_states,
                attention_mask=combined_attention_mask,
                encoder_hidden_states=inputs["encoder_hidden_states"],
                encoder_attention_mask=inputs["encoder_attention_mask"],
                layer_head_mask=inputs["head_mask"][idx] if inputs["head_mask"] is not None else None,
                cross_attn_layer_head_mask=inputs["cross_attn_head_mask"][idx]
                if inputs["cross_attn_head_mask"] is not None
                else None,
                past_key_value=past_key_value,
            )
            if inputs["use_cache"]:
                present_key_values += (present_key_value,)
            if inputs["output_attentions"]:
                all_self_attns += (layer_self_attn,)
                if inputs["encoder_hidden_states"] is not None:
                    all_cross_attns += (layer_cross_attn,)
        # Final LayerNorm (pre-norm architecture normalizes once more at the end).
        hidden_states = self.layer_norm(hidden_states)
        if inputs["output_hidden_states"]:
            all_hidden_states += (hidden_states,)
        if inputs["output_attentions"]:
            all_self_attns = list(all_self_attns)
            if inputs["encoder_hidden_states"] is not None:
                all_cross_attns = list(all_cross_attns)
        if inputs["use_cache"]:
            # Bundle the encoder states with the per-layer caches, matching the
            # `(encoder_outputs, past_key_values)` layout read back in generation.
            present_key_values = (inputs["encoder_hidden_states"], present_key_values)
        if not inputs["return_dict"]:
            return hidden_states, present_key_values, all_hidden_states, all_self_attns, all_cross_attns
        else:
            return TFBaseModelOutputWithPastAndCrossAttentions(
                last_hidden_state=hidden_states,
                past_key_values=present_key_values,
                hidden_states=all_hidden_states,
                attentions=all_self_attns,
                cross_attentions=all_cross_attns,
            )
@keras_serializable
class TFPegasusMainLayer(tf.keras.layers.Layer):
    """Bare PEGASUS encoder-decoder stack without any task-specific head.

    Owns one token-embedding table (``self.shared``) that is reused by both
    the encoder and the decoder, and wires the two sub-modules together in
    ``call``.
    """
    # Used by @keras_serializable to rebuild the layer from its config.
    config_class = PegasusConfig
    def __init__(self, config: PegasusConfig, **kwargs):
        super().__init__(**kwargs)
        self.config = config
        # Single embedding table shared between encoder and decoder.
        self.shared = TFSharedEmbeddings(config.vocab_size, config.d_model, config.pad_token_id, name="model.shared")
        # Capture the absolute variable scope of the shared embedding so the
        # wrapper below creates its weights under the same name.
        with tf.compat.v1.variable_scope("model.shared") as shared_abs_scope_name:
            pass
        # Wraps layer to avoid problems with weight restoring and ensuring we're in the correct TF scope.
        embed_tokens = TFWrappedEmbeddings(self.shared, abs_scope_name=shared_abs_scope_name)
        embed_tokens.vocab_size = self.shared.vocab_size
        embed_tokens.hidden_size = self.shared.hidden_size
        self.encoder = TFPegasusEncoder(config, embed_tokens, name="encoder")
        self.decoder = TFPegasusDecoder(config, embed_tokens, name="decoder")
    def get_input_embeddings(self):
        """Return the shared input embedding layer."""
        return self.shared
    def set_input_embeddings(self, new_embeddings):
        """Replace the shared embedding weights and propagate the re-wrapped
        table to both the encoder and the decoder."""
        self.shared.weight = new_embeddings
        self.shared.vocab_size = self.shared.weight.shape[0]
        # Re-capture the absolute scope, then re-wrap (same trick as __init__).
        with tf.compat.v1.variable_scope("model.shared") as shared_abs_scope_name:
            pass
        embed_tokens = TFWrappedEmbeddings(self.shared, abs_scope_name=shared_abs_scope_name)
        self.encoder.set_embed_tokens(embed_tokens)
        self.decoder.set_embed_tokens(embed_tokens)
    def call(
        self,
        input_ids=None,
        attention_mask=None,
        decoder_input_ids=None,
        decoder_attention_mask=None,
        head_mask=None,
        decoder_head_mask=None,
        cross_attn_head_mask=None,
        encoder_outputs: Optional[Union[Tuple, TFBaseModelOutput]] = None,
        past_key_values=None,
        inputs_embeds=None,
        decoder_inputs_embeds=None,
        use_cache=None,
        output_attentions=None,
        output_hidden_states=None,
        return_dict=None,
        training=False,
        **kwargs
    ):
        """Run encoder (unless ``encoder_outputs`` is supplied) then decoder.

        Returns a ``TFSeq2SeqModelOutput`` when ``return_dict`` is truthy,
        otherwise the concatenation of decoder and encoder output tuples.
        """
        # Normalizes positional/keyword/dict-style arguments into one dict.
        inputs = input_processing(
            func=self.call,
            config=self.config,
            input_ids=input_ids,
            attention_mask=attention_mask,
            decoder_input_ids=decoder_input_ids,
            decoder_attention_mask=decoder_attention_mask,
            head_mask=head_mask,
            decoder_head_mask=decoder_head_mask,
            cross_attn_head_mask=cross_attn_head_mask,
            encoder_outputs=encoder_outputs,
            past_key_values=past_key_values,
            inputs_embeds=inputs_embeds,
            decoder_inputs_embeds=decoder_inputs_embeds,
            use_cache=use_cache,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
            training=training,
            kwargs_call=kwargs,
        )
        # Without any decoder inputs there is nothing to cache.
        if inputs["decoder_input_ids"] is None and inputs["decoder_inputs_embeds"] is None:
            inputs["use_cache"] = False
        inputs["output_hidden_states"] = (
            inputs["output_hidden_states"]
            if inputs["output_hidden_states"] is not None
            else self.config.output_hidden_states
        )
        if inputs["encoder_outputs"] is None:
            inputs["encoder_outputs"] = self.encoder(
                input_ids=inputs["input_ids"],
                attention_mask=inputs["attention_mask"],
                head_mask=inputs["head_mask"],
                inputs_embeds=inputs["inputs_embeds"],
                output_attentions=inputs["output_attentions"],
                output_hidden_states=inputs["output_hidden_states"],
                return_dict=inputs["return_dict"],
                training=inputs["training"],
            )
        # If the user passed a tuple for encoder_outputs, we wrap it in a TFBaseModelOutput when return_dict=True
        elif inputs["return_dict"] and not isinstance(inputs["encoder_outputs"], TFBaseModelOutput):
            inputs["encoder_outputs"] = TFBaseModelOutput(
                last_hidden_state=inputs["encoder_outputs"][0],
                hidden_states=inputs["encoder_outputs"][1] if len(inputs["encoder_outputs"]) > 1 else None,
                attentions=inputs["encoder_outputs"][2] if len(inputs["encoder_outputs"]) > 2 else None,
            )
        # If the user passed a TFBaseModelOutput for encoder_outputs, we wrap it in a tuple when return_dict=False
        elif not inputs["return_dict"] and not isinstance(inputs["encoder_outputs"], tuple):
            inputs["encoder_outputs"] = inputs["encoder_outputs"].to_tuple()
        decoder_outputs = self.decoder(
            inputs["decoder_input_ids"],
            attention_mask=inputs["decoder_attention_mask"],
            encoder_hidden_states=inputs["encoder_outputs"][0],
            encoder_attention_mask=inputs["attention_mask"],
            head_mask=inputs["decoder_head_mask"],
            cross_attn_head_mask=inputs["cross_attn_head_mask"],
            past_key_values=inputs["past_key_values"],
            inputs_embeds=inputs["decoder_inputs_embeds"],
            use_cache=inputs["use_cache"],
            output_attentions=inputs["output_attentions"],
            output_hidden_states=inputs["output_hidden_states"],
            return_dict=inputs["return_dict"],
            training=inputs["training"],
        )
        if not inputs["return_dict"]:
            return decoder_outputs + inputs["encoder_outputs"]
        return TFSeq2SeqModelOutput(
            last_hidden_state=decoder_outputs.last_hidden_state,
            past_key_values=decoder_outputs.past_key_values,
            decoder_hidden_states=decoder_outputs.hidden_states,
            decoder_attentions=decoder_outputs.attentions,
            cross_attentions=decoder_outputs.cross_attentions,
            encoder_last_hidden_state=inputs["encoder_outputs"].last_hidden_state,
            encoder_hidden_states=inputs["encoder_outputs"].hidden_states,
            encoder_attentions=inputs["encoder_outputs"].attentions,
        )
@add_start_docstrings(
    "The bare PEGASUS Model outputting raw hidden-states without any specific head on top.",
    PEGASUS_START_DOCSTRING,
)
class TFPegasusModel(TFPegasusPreTrainedModel):
    """Thin public wrapper around TFPegasusMainLayer; all work is delegated."""
    def __init__(self, config: PegasusConfig, *inputs, **kwargs):
        super().__init__(config, *inputs, **kwargs)
        self.model = TFPegasusMainLayer(config, name="model")
    def get_encoder(self):
        """Return the underlying encoder stack."""
        return self.model.encoder
    def get_decoder(self):
        """Return the underlying decoder stack."""
        return self.model.decoder
    @add_start_docstrings_to_model_forward(PEGASUS_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
    @add_code_sample_docstrings(
        tokenizer_class=_TOKENIZER_FOR_DOC,
        checkpoint=_CHECKPOINT_FOR_DOC,
        output_type=TFSeq2SeqModelOutput,
        config_class=_CONFIG_FOR_DOC,
    )
    def call(
        self,
        input_ids=None,
        attention_mask=None,
        decoder_input_ids=None,
        decoder_attention_mask=None,
        head_mask=None,
        decoder_head_mask=None,
        cross_attn_head_mask=None,
        encoder_outputs: Optional[Union[Tuple, TFBaseModelOutput]] = None,
        past_key_values=None,
        inputs_embeds=None,
        decoder_inputs_embeds=None,
        use_cache=None,
        output_attentions=None,
        output_hidden_states=None,
        return_dict=None,
        training=False,
        **kwargs
    ):
        """Normalize the arguments and forward them unchanged to the main layer."""
        inputs = input_processing(
            func=self.call,
            config=self.config,
            input_ids=input_ids,
            attention_mask=attention_mask,
            decoder_input_ids=decoder_input_ids,
            decoder_attention_mask=decoder_attention_mask,
            head_mask=head_mask,
            decoder_head_mask=decoder_head_mask,
            cross_attn_head_mask=cross_attn_head_mask,
            encoder_outputs=encoder_outputs,
            past_key_values=past_key_values,
            inputs_embeds=inputs_embeds,
            decoder_inputs_embeds=decoder_inputs_embeds,
            use_cache=use_cache,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
            training=training,
            kwargs_call=kwargs,
        )
        # Pure pass-through: no extra logic beyond input normalization.
        outputs = self.model(
            input_ids=inputs["input_ids"],
            attention_mask=inputs["attention_mask"],
            decoder_input_ids=inputs["decoder_input_ids"],
            decoder_attention_mask=inputs["decoder_attention_mask"],
            head_mask=inputs["head_mask"],
            decoder_head_mask=inputs["decoder_head_mask"],
            cross_attn_head_mask=inputs["cross_attn_head_mask"],
            encoder_outputs=inputs["encoder_outputs"],
            past_key_values=inputs["past_key_values"],
            inputs_embeds=inputs["inputs_embeds"],
            decoder_inputs_embeds=inputs["decoder_inputs_embeds"],
            use_cache=inputs["use_cache"],
            output_attentions=inputs["output_attentions"],
            output_hidden_states=inputs["output_hidden_states"],
            return_dict=inputs["return_dict"],
            training=inputs["training"],
        )
        return outputs
    # Copied from transformers.models.bart.modeling_tf_bart.TFBartModel.serving_output
    def serving_output(self, output):
        """Convert ``call`` output for SavedModel serving, dropping fields the
        config says should not be returned (each becomes None)."""
        pkv = tf.tuple(output.past_key_values)[1] if self.config.use_cache else None
        dec_hs = tf.convert_to_tensor(output.decoder_hidden_states) if self.config.output_hidden_states else None
        dec_attns = tf.convert_to_tensor(output.decoder_attentions) if self.config.output_attentions else None
        cross_attns = tf.convert_to_tensor(output.cross_attentions) if self.config.output_attentions else None
        enc_hs = tf.convert_to_tensor(output.encoder_hidden_states) if self.config.output_hidden_states else None
        enc_attns = tf.convert_to_tensor(output.encoder_attentions) if self.config.output_attentions else None
        return TFSeq2SeqModelOutput(
            last_hidden_state=output.last_hidden_state,
            past_key_values=pkv,
            decoder_hidden_states=dec_hs,
            decoder_attentions=dec_attns,
            cross_attentions=cross_attns,
            encoder_last_hidden_state=output.encoder_last_hidden_state,
            encoder_hidden_states=enc_hs,
            encoder_attentions=enc_attns,
        )
@add_start_docstrings(
    "The PEGASUS Model with a language modeling head. Can be used for summarization.",
    PEGASUS_START_DOCSTRING,
)
class TFPegasusForConditionalGeneration(TFPegasusPreTrainedModel, TFCausalLanguageModelingLoss):
    # Stand-alone encoder/decoder embedding weights in a checkpoint duplicate
    # the shared table, so they can be ignored without warning at load time.
    _keys_to_ignore_on_load_unexpected = [
        r"model.encoder.embed_tokens.weight",
        r"model.decoder.embed_tokens.weight",
    ]
    def __init__(self, config, *inputs, **kwargs):
        super().__init__(config, *inputs, **kwargs)
        self.model = TFPegasusMainLayer(config, name="model")
        self.use_cache = config.use_cache
        # final_bias_logits is registered as a buffer in pytorch, so it is kept non-trainable here for the sake of consistency.
        self.final_logits_bias = self.add_weight(
            name="final_logits_bias", shape=[1, config.vocab_size], initializer="zeros", trainable=False
        )
    def get_decoder(self):
        """Return the underlying decoder stack."""
        return self.model.decoder
    def get_encoder(self):
        """Return the underlying encoder stack."""
        return self.model.encoder
    def get_output_embeddings(self):
        # The output projection is tied to the shared input embedding matrix.
        return self.get_input_embeddings()
    def set_output_embeddings(self, value):
        self.set_input_embeddings(value)
    def get_bias(self):
        return {"final_logits_bias": self.final_logits_bias}
    def set_bias(self, value):
        self.final_logits_bias = value["final_logits_bias"]
    @add_start_docstrings_to_model_forward(PEGASUS_INPUTS_DOCSTRING)
    @replace_return_docstrings(output_type=TFSeq2SeqLMOutput, config_class=_CONFIG_FOR_DOC)
    @add_end_docstrings(PEGASUS_GENERATION_EXAMPLE)
    def call(
        self,
        input_ids=None,
        attention_mask=None,
        decoder_input_ids=None,
        decoder_attention_mask=None,
        head_mask=None,
        decoder_head_mask=None,
        cross_attn_head_mask=None,
        encoder_outputs: Optional[TFBaseModelOutput] = None,
        past_key_values=None,
        inputs_embeds=None,
        decoder_inputs_embeds=None,
        use_cache=None,
        output_attentions=None,
        output_hidden_states=None,
        return_dict=None,
        labels=None,
        training=False,
        **kwargs,
    ):
        """Run the seq2seq stack and project decoder states to vocab logits.

        When ``labels`` is given: pad positions are replaced by -100 (masked
        from the LM loss), caching is disabled, and ``decoder_input_ids``
        default to the labels shifted one position to the right (teacher
        forcing).
        """
        inputs = input_processing(
            func=self.call,
            config=self.config,
            input_ids=input_ids,
            attention_mask=attention_mask,
            decoder_input_ids=decoder_input_ids,
            decoder_attention_mask=decoder_attention_mask,
            head_mask=head_mask,
            decoder_head_mask=decoder_head_mask,
            cross_attn_head_mask=cross_attn_head_mask,
            encoder_outputs=encoder_outputs,
            past_key_values=past_key_values,
            inputs_embeds=inputs_embeds,
            decoder_inputs_embeds=decoder_inputs_embeds,
            use_cache=use_cache,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
            labels=labels,
            training=training,
            kwargs_call=kwargs,
        )
        if inputs["labels"] is not None:
            # Mask pad tokens with -100 so compute_loss ignores them.
            inputs["labels"] = tf.where(
                inputs["labels"] == self.config.pad_token_id,
                tf.fill(shape_list(inputs["labels"]), -100),
                inputs["labels"],
            )
            inputs["use_cache"] = False
            if inputs["decoder_input_ids"] is None:
                inputs["decoder_input_ids"] = shift_tokens_right(
                    inputs["labels"], self.config.pad_token_id, self.config.decoder_start_token_id
                )
        outputs = self.model(
            inputs["input_ids"],
            attention_mask=inputs["attention_mask"],
            decoder_input_ids=inputs["decoder_input_ids"],
            encoder_outputs=inputs["encoder_outputs"],
            decoder_attention_mask=inputs["decoder_attention_mask"],
            head_mask=inputs["head_mask"],
            decoder_head_mask=inputs["decoder_head_mask"],
            cross_attn_head_mask=inputs["cross_attn_head_mask"],
            past_key_values=inputs["past_key_values"],
            inputs_embeds=inputs["inputs_embeds"],
            decoder_inputs_embeds=inputs["decoder_inputs_embeds"],
            use_cache=inputs["use_cache"],
            output_attentions=inputs["output_attentions"],
            output_hidden_states=inputs["output_hidden_states"],
            return_dict=inputs["return_dict"],
            training=inputs["training"],
        )
        # Tied output projection ("linear" mode of the shared embedding) plus a
        # non-trainable per-token bias.
        lm_logits = self.model.shared(outputs[0], mode="linear")
        lm_logits = lm_logits + self.final_logits_bias
        masked_lm_loss = None if inputs["labels"] is None else self.compute_loss(inputs["labels"], lm_logits)
        if not inputs["return_dict"]:
            output = (lm_logits,) + outputs[1:]
            return ((masked_lm_loss,) + output) if masked_lm_loss is not None else output
        return TFSeq2SeqLMOutput(
            loss=masked_lm_loss,
            logits=lm_logits,
            past_key_values=outputs.past_key_values,  # index 1 of d outputs
            decoder_hidden_states=outputs.decoder_hidden_states,  # index 2 of d outputs
            decoder_attentions=outputs.decoder_attentions,  # index 3 of d outputs
            cross_attentions=outputs.cross_attentions,  # index 4 of d outputs
            encoder_last_hidden_state=outputs.encoder_last_hidden_state,  # index 0 of encoder outputs
            encoder_hidden_states=outputs.encoder_hidden_states,  # 1 of e out
            encoder_attentions=outputs.encoder_attentions,  # 2 of e out
        )
    # Copied from transformers.models.bart.modeling_tf_bart.TFBartForConditionalGeneration.serving_output
    def serving_output(self, output):
        """Convert ``call`` output for SavedModel serving, dropping fields the
        config says should not be returned (each becomes None)."""
        pkv = tf.tuple(output.past_key_values)[1] if self.config.use_cache else None
        dec_hs = tf.convert_to_tensor(output.decoder_hidden_states) if self.config.output_hidden_states else None
        dec_attns = tf.convert_to_tensor(output.decoder_attentions) if self.config.output_attentions else None
        cross_attns = tf.convert_to_tensor(output.cross_attentions) if self.config.output_attentions else None
        enc_hs = tf.convert_to_tensor(output.encoder_hidden_states) if self.config.output_hidden_states else None
        enc_attns = tf.convert_to_tensor(output.encoder_attentions) if self.config.output_attentions else None
        return TFSeq2SeqLMOutput(
            logits=output.logits,
            past_key_values=pkv,
            decoder_hidden_states=dec_hs,
            decoder_attentions=dec_attns,
            cross_attentions=cross_attns,
            encoder_last_hidden_state=output.encoder_last_hidden_state,
            encoder_hidden_states=enc_hs,
            encoder_attentions=enc_attns,
        )
    # Copied from transformers.models.bart.modeling_tf_bart.TFBartForConditionalGeneration.prepare_inputs_for_generation
    def prepare_inputs_for_generation(
        self,
        decoder_input_ids,
        past,
        attention_mask,
        head_mask=None,
        decoder_head_mask=None,
        cross_attn_head_mask=None,
        use_cache=None,
        **kwargs,
    ) -> Dict:
        """Re-pack generation state into ``call`` kwargs.

        ``past`` is either ``(encoder_last_hidden_state,)`` on the first step
        or ``(encoder_outputs, past_key_values)`` afterwards; with cached
        states only the last decoder token is fed.
        """
        assert past is not None and len(past) in {1, 2}, f"past has to be an iterable of length 1,2 got {past}"
        if len(past) == 1:
            assert isinstance(past[0], tf.Tensor), f"`past[0]` has to be of type `tf.Tensor`, but is {type(past[0])}"
            encoder_outputs = TFBaseModelOutput(last_hidden_state=past[0])
            past_key_values = None
        else:
            assert (
                len(past) == 2
            ), "`past` has to be of length 2 with the encoder_outputs at the first position and past_key_values at the second position."
            encoder_outputs, past_key_values = past
            if isinstance(encoder_outputs, tuple):
                assert isinstance(
                    encoder_outputs[0], tf.Tensor
                ), f"`encoder_outputs[0]` has to be of type `tf.Tensor`, but is {type(encoder_outputs[0])}"
                encoder_outputs = TFBaseModelOutput(last_hidden_state=encoder_outputs[0])
            elif isinstance(encoder_outputs, tf.Tensor):
                encoder_outputs = TFBaseModelOutput(last_hidden_state=encoder_outputs)
            assert (
                past_key_values
            ), f"decoder cached states must be truthy. got {past_key_values} from the 2nd element of past"
            # Only the newest token is needed; earlier ones live in the cache.
            decoder_input_ids = decoder_input_ids[:, -1:]
        assert isinstance(
            encoder_outputs, TFBaseModelOutput
        ), f"encoder_outputs should be a TFBaseModelOutput, Instead got {type(encoder_outputs)}."
        return {
            "input_ids": None,  # encoder_outputs is defined. input_ids not needed
            "encoder_outputs": encoder_outputs,
            "past_key_values": past_key_values,
            "decoder_input_ids": decoder_input_ids,
            "attention_mask": attention_mask,
            "head_mask": head_mask,
            "decoder_head_mask": decoder_head_mask,
            "cross_attn_head_mask": cross_attn_head_mask,
            "use_cache": use_cache,  # change this to avoid caching (presumably for debugging)
        }
    def prepare_decoder_input_ids_from_labels(self, labels: tf.Tensor):
        """Build decoder inputs by shifting labels one position to the right."""
        return shift_tokens_right(labels, self.config.pad_token_id, self.config.decoder_start_token_id)
    @staticmethod
    # Copied from transformers.models.bart.modeling_tf_bart.TFBartForConditionalGeneration._reorder_cache
    def _reorder_cache(past, beam_idx):
        """Reorder cached decoder states along the batch axis for beam search.

        Only the first two entries per layer are gathered by beam_idx; the
        remaining entries (``[2:]``) are passed through unchanged.
        """
        if len(past) == 1:
            return past
        past_key_values = past[1]
        reordered_past = ()
        for layer_past_key_values in past_key_values:
            reordered_past += (
                tuple(tf.gather(layer_past_key_value, beam_idx) for layer_past_key_value in layer_past_key_values[:2])
                + layer_past_key_values[2:],
            )
        return (past[0], reordered_past)
| true
| true
|
f7187934ae933740a1f4b6303e02e4e7822c4691
| 9,491
|
py
|
Python
|
scripts/compile_prosivic_results.py
|
mrksbrg/adas-pro-sivic
|
fb4bbd4f39b58e42c3d47494fb4116a3e7fced0d
|
[
"BSD-2-Clause"
] | 4
|
2020-04-05T01:49:24.000Z
|
2021-11-15T03:01:55.000Z
|
scripts/compile_prosivic_results.py
|
sukhvir-chauhan-1999/adas-pro-sivic
|
fb4bbd4f39b58e42c3d47494fb4116a3e7fced0d
|
[
"BSD-2-Clause"
] | null | null | null |
scripts/compile_prosivic_results.py
|
sukhvir-chauhan-1999/adas-pro-sivic
|
fb4bbd4f39b58e42c3d47494fb4116a3e7fced0d
|
[
"BSD-2-Clause"
] | 3
|
2020-04-05T01:49:26.000Z
|
2021-09-28T07:09:41.000Z
|
import os
import statistics
import csv
from collections import Counter
import pandas as pd
import numpy as np
class ExpSetup:
    """Aggregates every recorded simulation run of one scenario configuration.

    A scenario is identified by the pedestrian start pose/speed and the car
    speed; each repeated run contributes one (min_dist, min_ttc,
    min_dist_awa, detection, collision) outcome.
    """

    def __init__(self, ped_x, ped_y, ped_orient, ped_speed, car_speed, min_dist, min_ttc, min_dist_awa, det, col):
        # Scenario identity (compared by __eq__ / __lt__).
        self.ped_x = ped_x
        self.ped_y = ped_y
        self.ped_orient = ped_orient
        self.ped_speed = ped_speed
        self.car_speed = car_speed
        # Raw per-objective result lists plus frequency counters.
        self.min_dist_counter = Counter([min_dist])
        self.min_dist = [min_dist]
        self.min_ttc = [min_ttc]
        self.min_ttc_counter = Counter([min_ttc])
        self.min_dist_awa = [min_dist_awa]
        self.min_dist_awa_counter = Counter([min_dist_awa])
        self.detection = [det]
        self.collision = [col]
        self.nbr_results = 1
        # Frequency of complete outcomes (ExpResult is hashable).
        self.results = Counter([ExpResult(min_dist, min_ttc, min_dist_awa, det, col)])

    def __str__(self):
        return "### Scenario (x0P=" + str(self.ped_x) + ", y0P=" + str(self.ped_y) + ", Th0P=" + str(self.ped_orient) + ", v0P=" + str(self.ped_speed) + ", v0C=" + str(self.car_speed) + ") ###"

    def __eq__(self, other):
        # NOTE: defining __eq__ without __hash__ makes instances unhashable;
        # the script only keeps them in lists, so that is acceptable here.
        return self.ped_x == other.ped_x and self.ped_y == other.ped_y and self.ped_orient == other.ped_orient \
            and self.ped_speed == other.ped_speed and self.car_speed == other.car_speed

    def __lt__(self, other):
        return self.ped_x < other.ped_x

    def add_result(self, min_dist, min_ttc, min_dist_awa, det, col):
        """Record one additional run's outcome for this scenario."""
        self.min_dist.append(min_dist)
        self.min_dist_counter.update([min_dist])
        self.min_ttc.append(min_ttc)
        self.min_ttc_counter.update([min_ttc])
        self.min_dist_awa.append(min_dist_awa)
        self.min_dist_awa_counter.update([min_dist_awa])
        self.detection.append(det)
        self.collision.append(col)
        self.nbr_results += 1
        self.results.update([ExpResult(min_dist, min_ttc, min_dist_awa, det, col)])

    def get_nbr_results(self):
        """Total number of recorded runs."""
        return self.nbr_results

    def get_results(self):
        """Counter mapping each distinct ExpResult to its frequency."""
        return self.results

    def get_nbr_unique_results(self):
        """Return first-seen-ordered unique values per objective function.

        dict.fromkeys preserves insertion order and is O(n), unlike the
        original append-if-absent loops which were O(n^2).
        """
        return {
            'of1': list(dict.fromkeys(self.min_dist)),
            'of2': list(dict.fromkeys(self.min_ttc)),
            'of3': list(dict.fromkeys(self.min_dist_awa)),
        }

    # The averages use the builtin sum (the originals shadowed it with a
    # local accumulator also named `sum`); results are identical.
    def get_avg_min_dist(self):
        return sum(self.min_dist) / len(self.min_dist)

    def get_sd_min_dist(self):
        # stdev requires at least two samples.
        return 0 if len(self.min_dist) == 1 else statistics.stdev(self.min_dist)

    def get_avg_min_ttc(self):
        return sum(self.min_ttc) / len(self.min_ttc)

    def get_sd_min_ttc(self):
        return 0 if len(self.min_ttc) == 1 else statistics.stdev(self.min_ttc)

    def get_avg_min_dist_awa(self):
        return sum(self.min_dist_awa) / len(self.min_dist_awa)

    def get_sd_min_dist_awa(self):
        return 0 if len(self.min_dist_awa) == 1 else statistics.stdev(self.min_dist_awa)

    def get_nbr_detections(self):
        """Number of runs in which the pedestrian was detected."""
        return sum(self.detection)

    def get_nbr_collisions(self):
        """Number of runs that ended in a collision."""
        return sum(self.collision)

    # Legacy read-only aliases for the scenario attributes; kept for
    # backward compatibility with any external callers.
    @property
    def get_ped_x(self):
        return self.ped_x

    @property
    def get_ped_y(self):
        return self.ped_y

    @property
    def get_ped_orient(self):
        return self.ped_orient

    @property
    def get_ped_speed(self):
        return self.ped_speed

    @property
    def get_car_speed(self):
        return self.car_speed

    @property
    def get_of1_counter(self):
        return self.min_dist_counter
class ExpResult:
    """Outcome of a single simulation run: the three objective-function
    values plus the detection and collision flags. Hashable, so it can be
    used as a Counter key."""

    def __init__(self, min_dist, min_ttc, min_dist_awa, det, col):
        self.min_dist = min_dist
        self.min_ttc = min_ttc
        self.min_dist_awa = min_dist_awa
        self.detection = det
        self.collision = col

    def _key(self):
        # All five fields in declaration order; shared by __eq__ and __hash__.
        return (self.min_dist, self.min_ttc, self.min_dist_awa, self.detection, self.collision)

    # Read-only aliases kept for backward compatibility.
    @property
    def get_min_dist(self):
        return self.min_dist

    @property
    def get_min_ttc(self):
        return self.min_ttc

    @property
    def get_min_dist_awa(self):
        return self.min_dist_awa

    @property
    def get_detected(self):
        return self.detection

    @property
    def get_collision(self):
        return self.collision

    def __str__(self):
        return (f"\tOF1={self.min_dist}, OF2={self.min_ttc}, OF3={self.min_dist_awa}, "
                f"Detection={self.detection}, Collision={self.collision}")

    def __eq__(self, other):
        return self._key() == other._key()

    def __lt__(self, other):
        return self.min_dist < other.min_dist

    def __hash__(self):
        return hash(self._key())
# ---------------------------------------------------------------------------
# Script entry: aggregate every per-run CSV in dir_name into per-scenario
# statistics, print a human-readable summary, and write the most common
# (mode) result of each scenario plus a confidence value to
# mode_prosivic_results.csv.
# ---------------------------------------------------------------------------
dir_name = 'prosivic_results'
result_dataframes = []  # NOTE(review): never populated below; kept in case external code relies on it
scenario_results = []
for filename in os.listdir(dir_name):
    if filename.endswith(".csv"):
        # os.path.join instead of a hard-coded "\\" separator so the script
        # also runs on non-Windows systems.
        df = pd.read_csv(os.path.join(dir_name, filename))
        for index, row in df.iterrows():
            exp_setup = ExpSetup(row['ped_x'], row['ped_y'], row['ped_orient'], row['ped_speed'], row['car_speed'], row['of1'], row['of2'], row['of3'], row['detection'], row['collision'])
            # Single list scan (EAFP) instead of `not in` followed by .index().
            try:
                i = scenario_results.index(exp_setup)
            except ValueError:
                # First time this scenario configuration is seen.
                scenario_results.append(exp_setup)
            else:
                scenario_results[i].add_result(row['of1'], row['of2'], row['of3'], row['detection'], row['collision'])
# newline='' is required by the csv module to avoid blank rows on Windows.
with open('mode_prosivic_results.csv', mode='w', newline='') as merged_file:
    mode_writer = csv.writer(merged_file, delimiter=',')
    mode_writer.writerow(['x0P', 'y0P', 'Th0P', 'v0P', 'v0C', 'OF1', 'OF2', 'OF3', 'det', 'col', 'conf'])
    for exp_setup in scenario_results:
        # Human-readable summary of this scenario's aggregated results.
        print("\n" + str(exp_setup))
        print("\tNumber of results: " + str(exp_setup.get_nbr_results()))
        res = exp_setup.get_results()
        for result, count in res.most_common():
            print("\t" + str(count) + "x:" + str(result))
        unique_per_of = exp_setup.get_nbr_unique_results()
        print("\t\t# Result per objective function #")
        print("\t\tmin_dist:\t\tUnique = " + str(len(unique_per_of["of1"])) + "\tAvg = " + str(exp_setup.get_avg_min_dist()) + "\tSD = " + str(exp_setup.get_sd_min_dist()))
        print("\t\t\tCounter min_dist: " + str(exp_setup.min_dist_counter))
        print("\t\tmin_ttc:\t\tUnique = " + str(len(unique_per_of["of2"])) + "\tAvg = " + str(exp_setup.get_avg_min_ttc()) + "\tSD = " + str(exp_setup.get_sd_min_ttc()))
        print("\t\t\tCounter min_ttc: " + str(exp_setup.min_ttc_counter))
        print("\t\tmin_dist_awa:\tUnique = " + str(len(unique_per_of["of3"])) + "\tAvg = " + str(exp_setup.get_avg_min_dist_awa()) + "\tSD = " + str(exp_setup.get_sd_min_dist_awa()))
        print("\t\t\tCounter min_dist_awa: " + str(exp_setup.min_dist_awa_counter))
        print("\t\tNumber detections: " + str(exp_setup.get_nbr_detections()) + " (out of " + str(exp_setup.get_nbr_results()) + " = " + str(100 * (exp_setup.get_nbr_detections()/exp_setup.get_nbr_results())) + "%)")
        print("\t\tNumber collisions: " + str(exp_setup.get_nbr_collisions()) + " (out of " + str(exp_setup.get_nbr_results()) + " = " + str(100 * (exp_setup.get_nbr_collisions()/exp_setup.get_nbr_results())) + "%)")
        mode_result = res.most_common(1)[0][0]  # the most frequent ExpResult for this scenario
        conf = (res.most_common(1)[0][1]/exp_setup.get_nbr_results())  # share of runs that produced the mode result
        mode_writer.writerow([exp_setup.ped_x, exp_setup.ped_y, exp_setup.ped_orient, exp_setup.ped_speed, exp_setup.car_speed, mode_result.min_dist, mode_result.min_ttc, mode_result.min_dist_awa, mode_result.detection, mode_result.collision, conf])
| 40.909483
| 615
| 0.640185
|
import os
import statistics
import csv
from collections import Counter
import pandas as pd
import numpy as np
class ExpSetup:
    """One scenario configuration together with every result recorded for it.

    The scenario identity is the pedestrian start pose/speed plus the car
    speed; per-run outcomes are accumulated in parallel lists and Counters.
    """

    def __init__(self, ped_x, ped_y, ped_orient, ped_speed, car_speed, min_dist, min_ttc, min_dist_awa, det, col):
        self.ped_x = ped_x
        self.ped_y = ped_y
        self.ped_orient = ped_orient
        self.ped_speed = ped_speed
        self.car_speed = car_speed
        self.min_dist_counter = Counter([min_dist])
        self.min_dist = [min_dist]
        self.min_ttc = [min_ttc]
        self.min_ttc_counter = Counter([min_ttc])
        self.min_dist_awa = [min_dist_awa]
        self.min_dist_awa_counter = Counter([min_dist_awa])
        self.detection = [det]
        self.collision = [col]
        self.nbr_results = 1
        self.results = Counter([ExpResult(min_dist, min_ttc, min_dist_awa, det, col)])

    def __str__(self):
        # f-string form of the original concatenation; output is identical.
        return (f"### Scenario (x0P={self.ped_x}, y0P={self.ped_y}, Th0P={self.ped_orient}, "
                f"v0P={self.ped_speed}, v0C={self.car_speed}) ###")

    def __eq__(self, other):
        return (self.ped_x, self.ped_y, self.ped_orient, self.ped_speed, self.car_speed) == \
            (other.ped_x, other.ped_y, other.ped_orient, other.ped_speed, other.car_speed)

    def __lt__(self, other):
        return self.ped_x < other.ped_x

    def add_result(self, min_dist, min_ttc, min_dist_awa, det, col):
        """Append one more run's outcome to every per-objective collection."""
        self.min_dist.append(min_dist)
        self.min_dist_counter[min_dist] += 1
        self.min_ttc.append(min_ttc)
        self.min_ttc_counter[min_ttc] += 1
        self.min_dist_awa.append(min_dist_awa)
        self.min_dist_awa_counter[min_dist_awa] += 1
        self.detection.append(det)
        self.collision.append(col)
        self.nbr_results += 1
        self.results[ExpResult(min_dist, min_ttc, min_dist_awa, det, col)] += 1

    def get_nbr_results(self):
        return self.nbr_results

    def get_results(self):
        return self.results

    def get_nbr_unique_results(self):
        """First-seen-ordered unique values per objective function."""
        uniques = {}
        for key, values in (('of1', self.min_dist), ('of2', self.min_ttc), ('of3', self.min_dist_awa)):
            seen = []
            for value in values:
                if value not in seen:
                    seen.append(value)
            uniques[key] = seen
        return uniques

    def get_avg_min_dist(self):
        return sum(self.min_dist) / len(self.min_dist)

    def get_sd_min_dist(self):
        # A single sample has no spread (and stdev would raise).
        return 0 if len(self.min_dist) == 1 else statistics.stdev(self.min_dist)

    def get_avg_min_ttc(self):
        return sum(self.min_ttc) / len(self.min_ttc)

    def get_sd_min_ttc(self):
        return 0 if len(self.min_ttc) == 1 else statistics.stdev(self.min_ttc)

    def get_avg_min_dist_awa(self):
        return sum(self.min_dist_awa) / len(self.min_dist_awa)

    def get_sd_min_dist_awa(self):
        return 0 if len(self.min_dist_awa) == 1 else statistics.stdev(self.min_dist_awa)

    def get_nbr_detections(self):
        return sum(self.detection)

    def get_nbr_collisions(self):
        return sum(self.collision)

    # Read-only aliases kept for backward compatibility.
    @property
    def get_ped_x(self):
        return self.ped_x

    @property
    def get_ped_y(self):
        return self.ped_y

    @property
    def get_ped_orient(self):
        return self.ped_orient

    @property
    def get_ped_speed(self):
        return self.ped_speed

    @property
    def get_car_speed(self):
        return self.car_speed

    @property
    def get_of1_counter(self):
        return self.min_dist_counter
class ExpResult:
    """A single run's outcome (three objective values + detection/collision
    flags). Hashable so it can serve as a Counter key."""

    def __init__(self, min_dist, min_ttc, min_dist_awa, det, col):
        self.min_dist = min_dist
        self.min_ttc = min_ttc
        self.min_dist_awa = min_dist_awa
        self.detection = det
        self.collision = col

    def _key(self):
        # Field tuple in declaration order; backs __eq__ and __hash__.
        return (self.min_dist, self.min_ttc, self.min_dist_awa, self.detection, self.collision)

    # Read-only aliases kept for backward compatibility.
    @property
    def get_min_dist(self):
        return self.min_dist

    @property
    def get_min_ttc(self):
        return self.min_ttc

    @property
    def get_min_dist_awa(self):
        return self.min_dist_awa

    @property
    def get_detected(self):
        return self.detection

    @property
    def get_collision(self):
        return self.collision

    def __str__(self):
        return (f"\tOF1={self.min_dist}, OF2={self.min_ttc}, OF3={self.min_dist_awa}, "
                f"Detection={self.detection}, Collision={self.collision}")

    def __eq__(self, other):
        return self._key() == other._key()

    def __lt__(self, other):
        return self.min_dist < other.min_dist

    def __hash__(self):
        return hash(self._key())
# Merge per-scenario simulation results: read every CSV in dir_name, group
# rows by experiment setup (ExpSetup), then write the most common (mode)
# result per setup together with its confidence to a summary CSV.
dir_name = 'prosivic_results'
result_dataframes = []  # NOTE(review): never populated — candidate for removal
scenario_results = []
for filename in os.listdir(dir_name):
    if filename.endswith(".csv"):
        # os.path.join is portable; the original '\\' concatenation only
        # produced valid paths on Windows.
        df = pd.read_csv(os.path.join(dir_name, filename))
        for index, row in df.iterrows():
            exp_setup = ExpSetup(row['ped_x'], row['ped_y'], row['ped_orient'], row['ped_speed'], row['car_speed'], row['of1'], row['of2'], row['of3'], row['detection'], row['collision'])
            if exp_setup not in scenario_results:
                scenario_results.append(exp_setup)
            else:
                # Same scenario seen before: append this run's objective
                # values to the existing setup.
                i = scenario_results.index(exp_setup)
                scenario_results[i].add_result(row['of1'], row['of2'], row['of3'], row['detection'], row['collision'])
with open('mode_prosivic_results.csv', mode='w') as merged_file:
    mode_writer = csv.writer(merged_file, delimiter=',')
    mode_writer.writerow(['x0P', 'y0P', 'Th0P', 'v0P', 'v0C', 'OF1', 'OF2', 'OF3', 'det', 'col', 'conf'])
    for exp_setup in scenario_results:
        # Per-scenario console report, then one mode row in the output CSV.
        print("\n" + str(exp_setup))
        print("\tNumber of results: " + str(exp_setup.get_nbr_results()))
        res = exp_setup.get_results()
        for result, count in res.most_common():
            print("\t" + str(count) + "x:" + str(result))
        unique_per_of = exp_setup.get_nbr_unique_results()
        print("\t\t# Result per objective function #")
        print("\t\tmin_dist:\t\tUnique = " + str(len(unique_per_of["of1"])) + "\tAvg = " + str(exp_setup.get_avg_min_dist()) + "\tSD = " + str(exp_setup.get_sd_min_dist()))
        print("\t\t\tCounter min_dist: " + str(exp_setup.min_dist_counter))
        print("\t\tmin_ttc:\t\tUnique = " + str(len(unique_per_of["of2"])) + "\tAvg = " + str(exp_setup.get_avg_min_ttc()) + "\tSD = " + str(exp_setup.get_sd_min_ttc()))
        print("\t\t\tCounter min_ttc: " + str(exp_setup.min_ttc_counter))
        print("\t\tmin_dist_awa:\tUnique = " + str(len(unique_per_of["of3"])) + "\tAvg = " + str(exp_setup.get_avg_min_dist_awa()) + "\tSD = " + str(exp_setup.get_sd_min_dist_awa()))
        print("\t\t\tCounter min_dist_awa: " + str(exp_setup.min_dist_awa_counter))
        print("\t\tNumber detections: " + str(exp_setup.get_nbr_detections()) + " (out of " + str(exp_setup.get_nbr_results()) + " = " + str(100 * (exp_setup.get_nbr_detections()/exp_setup.get_nbr_results())) + "%)")
        print("\t\tNumber collisions: " + str(exp_setup.get_nbr_collisions()) + " (out of " + str(exp_setup.get_nbr_results()) + " = " + str(100 * (exp_setup.get_nbr_collisions()/exp_setup.get_nbr_results())) + "%)")
        # mode_result = most frequent outcome; conf = its share of all runs
        mode_result = res.most_common(1)[0][0]
        conf = (res.most_common(1)[0][1]/exp_setup.get_nbr_results())
        mode_writer.writerow([exp_setup.ped_x, exp_setup.ped_y, exp_setup.ped_orient, exp_setup.ped_speed, exp_setup.car_speed, mode_result.min_dist, mode_result.min_ttc, mode_result.min_dist_awa, mode_result.detection, mode_result.collision, conf])
| true
| true
|
f7187a6e99e39c6f2ebf4e337d526183e1dbb006
| 521
|
py
|
Python
|
alveo/neptune/service.py
|
asirasa-xilinx/Vitis-AI
|
2ea756d2946d66266c111b09b85f4bcf7fc60764
|
[
"Apache-2.0"
] | null | null | null |
alveo/neptune/service.py
|
asirasa-xilinx/Vitis-AI
|
2ea756d2946d66266c111b09b85f4bcf7fc60764
|
[
"Apache-2.0"
] | null | null | null |
alveo/neptune/service.py
|
asirasa-xilinx/Vitis-AI
|
2ea756d2946d66266c111b09b85f4bcf7fc60764
|
[
"Apache-2.0"
] | null | null | null |
import os
class Service(object):
    """Base class for all services; concrete services inherit from it.

    A service wraps a graph object and delegates lifecycle control
    (start/stop) to it.
    """
    def __init__(self, prefix, artifacts, graph):
        """
        :param prefix: service prefix identifier
        :param artifacts: service artifacts (stored, not interpreted here)
        :param graph: graph object exposing serve()/stop()
        """
        self._artifacts = artifacts
        self._prefix = prefix
        self._proc = None  # reserved for a subprocess handle; unused here
        self._graph = graph
    def start(self, args):
        """Start serving the wrapped graph in the background."""
        # os.environ["XDNN_VERBOSE"] = "1"
        # os.environ["XBLAS_EMIT_PROFILING_INFO"] = "1"
        self._graph.serve(args, background=True)
    def stop(self):
        """Stop the wrapped graph."""
        self._graph.stop()
| 22.652174
| 73
| 0.608445
|
import os
class Service(object):
    """Base class for all services; wraps a graph and delegates start/stop."""
    def __init__(self, prefix, artifacts, graph):
        self._artifacts = artifacts
        self._prefix = prefix
        self._proc = None  # reserved for a subprocess handle; unused here
        self._graph = graph
    def start(self, args):
        # Serve the wrapped graph in the background.
        self._graph.serve(args, background=True)
    def stop(self):
        # Stop the wrapped graph.
        self._graph.stop()
| true
| true
|
f7187a7d00539bb05c8029543eca7c0694b8593d
| 1,063
|
py
|
Python
|
ExecutingEntryCallingScriptHash/contractB_compiler2.0.py
|
ONT-Avocados/python-template
|
0acb5032adf8f4968c5d46cf53681f31ac917650
|
[
"Apache-2.0"
] | 17
|
2018-09-26T07:09:16.000Z
|
2020-05-28T06:16:47.000Z
|
ExecutingEntryCallingScriptHash/contractB_compiler2.0.py
|
zhangxiaocong/python-template
|
0acb5032adf8f4968c5d46cf53681f31ac917650
|
[
"Apache-2.0"
] | 8
|
2018-09-26T02:08:04.000Z
|
2021-12-14T02:53:26.000Z
|
ExecutingEntryCallingScriptHash/contractB_compiler2.0.py
|
zhangxiaocong/python-template
|
0acb5032adf8f4968c5d46cf53681f31ac917650
|
[
"Apache-2.0"
] | 30
|
2018-09-25T08:27:42.000Z
|
2020-12-08T09:02:33.000Z
|
OntCversion = '2.0.0'
from ontology.interop.System.ExecutionEngine import GetExecutingScriptHash, GetCallingScriptHash, GetEntryScriptHash
from ontology.interop.System.Runtime import CheckWitness, GetTime, Notify, Serialize, Deserialize
ContractAddress = GetExecutingScriptHash()
def Main(opration, args):
    # Contract dispatcher: routes the requested operation name to a handler.
    # NOTE(review): 'opration' is a typo for 'operation', but it is part of
    # the deployed contract's interface, so it is kept as-is.
    if opration == "invokeB":
        # args[0] is forwarded as invokeB's single parameter
        return invokeB(args[0])
    if opration == "avoidToBeInvokedByContract":
        return avoidToBeInvokedByContract()
    # unknown operation
    return False
def invokeB(param):
    # Demo entry point: echoes the parameter, then emits the three script
    # hashes (caller, entry, executing) for inspection.
    Notify(["111_invokeB", param])
    # to prevent hack from other contract: log who is calling us.
    # callerHash = immediate invoker; entryHash = transaction entry point.
    callerHash = GetCallingScriptHash()
    entryHash = GetEntryScriptHash()
    Notify([callerHash, entryHash, ContractAddress])
    return True
def avoidToBeInvokedByContract():
    # Reject the call when the immediate caller differs from the entry
    # script — i.e. when another contract (rather than the transaction
    # itself) is invoking this method.
    callerHash = GetCallingScriptHash()
    entryHash = GetEntryScriptHash()
    if callerHash != entryHash:
        Notify(["You are not allowed to invoke this method through contract"])
        return False
    else:
        Notify(["You can implement what you need to do here!"])
        return True
| 33.21875
| 116
| 0.728128
|
OntCversion = '2.0.0'
from ontology.interop.System.ExecutionEngine import GetExecutingScriptHash, GetCallingScriptHash, GetEntryScriptHash
from ontology.interop.System.Runtime import CheckWitness, GetTime, Notify, Serialize, Deserialize
ContractAddress = GetExecutingScriptHash()
def Main(opration, args):
    # Contract dispatcher: routes the requested operation name to a handler.
    # NOTE(review): 'opration' is a typo for 'operation' but is the deployed
    # interface's parameter name, so it is kept.
    if opration == "invokeB":
        return invokeB(args[0])
    if opration == "avoidToBeInvokedByContract":
        return avoidToBeInvokedByContract()
    # unknown operation
    return False
def invokeB(param):
    # Demo entry point: echoes the parameter, then emits the caller/entry/
    # executing script hashes for inspection.
    Notify(["111_invokeB", param])
    callerHash = GetCallingScriptHash()
    entryHash = GetEntryScriptHash()
    Notify([callerHash, entryHash, ContractAddress])
    return True
def avoidToBeInvokedByContract():
    # Reject the call when the immediate caller differs from the entry
    # script, i.e. when another contract (not the transaction itself) calls.
    callerHash = GetCallingScriptHash()
    entryHash = GetEntryScriptHash()
    if callerHash != entryHash:
        Notify(["You are not allowed to invoke this method through contract"])
        return False
    else:
        Notify(["You can implement what you need to do here!"])
        return True
| true
| true
|
f7187bb4fead8b89d8048f1b523ec3b567c0a9ea
| 25,260
|
py
|
Python
|
src/rosdep2/sources_list.py
|
tianbot/rosdep
|
24c8c76a8cb99b08285192013a165f30af0f5232
|
[
"BSD-3-Clause"
] | 2
|
2021-11-16T10:49:18.000Z
|
2021-11-16T23:38:11.000Z
|
src/rosdep2/sources_list.py
|
tianbot/rosdep
|
24c8c76a8cb99b08285192013a165f30af0f5232
|
[
"BSD-3-Clause"
] | null | null | null |
src/rosdep2/sources_list.py
|
tianbot/rosdep
|
24c8c76a8cb99b08285192013a165f30af0f5232
|
[
"BSD-3-Clause"
] | null | null | null |
# Copyright (c) 2012, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# Author Ken Conley/kwc@willowgarage.com
from __future__ import print_function
import os
import sys
import yaml
from rosdep2.shell_utils import FakeURLOpener as urlopen
try:
from urllib.error import URLError
import urllib.request as request
except ImportError:
from urllib2 import URLError
import urllib2 as request
try:
import cPickle as pickle
except ImportError:
import pickle
from .cache_tools import compute_filename_hash, PICKLE_CACHE_EXT, write_atomic, write_cache_file
from .core import InvalidData, DownloadFailure, CachePermissionError
from .gbpdistro_support import get_gbprepo_as_rosdep_data, download_gbpdistro_as_rosdep_data
from .meta import MetaDatabase
from ._version import __version__
try:
import urlparse
except ImportError:
import urllib.parse as urlparse # py3k
try:
import httplib
except ImportError:
import http.client as httplib # py3k
import rospkg
import rospkg.distro
from .loader import RosdepLoader
from .rosdistrohelper import get_index, get_index_url
# default file to download with 'init' command in order to bootstrap
# rosdep
DEFAULT_SOURCES_LIST_URL = 'https://raw.githubusercontent.com/ros/rosdistro/master/rosdep/sources.list.d/20-default.list'
# seconds to wait before aborting download of rosdep data
DOWNLOAD_TIMEOUT = 15.0
SOURCES_LIST_DIR = 'sources.list.d'
SOURCES_CACHE_DIR = 'sources.cache'
# name of index file for sources cache
CACHE_INDEX = 'index'
# extension for binary cache
SOURCE_PATH_ENV = 'ROSDEP_SOURCE_PATH'
def get_sources_list_dirs(source_list_dir):
    """Return the sources.list directories that exist on disk.

    The ROSDEP_SOURCE_PATH environment variable (os.pathsep-separated)
    overrides *source_list_dir*; paths that do not exist are dropped.
    """
    if SOURCE_PATH_ENV in os.environ:
        candidates = os.environ[SOURCE_PATH_ENV].split(os.pathsep)
    else:
        candidates = [source_list_dir]
    # keep only directories that actually exist
    return [p for p in candidates if os.path.exists(p)]
def get_sources_list_dir():
    """Return the first existing system sources.list directory.

    Falls back to the default path under /etc/ros even when it does not
    exist yet.  TODO: windows support.
    """
    # /etc/ros is hard-coded: per the original note, rospkg.get_etc_ros_dir()
    # is not used because environment config does not carry over under sudo.
    etc_ros = '/etc/ros'
    # default system-wide sources directory
    sys_sources_list_dir = os.path.join(etc_ros, 'rosdep', SOURCES_LIST_DIR)
    existing = get_sources_list_dirs(sys_sources_list_dir)
    return existing[0] if existing else sys_sources_list_dir
def get_default_sources_list_file():
    """Return the full path of the default '20-default.list' sources file."""
    sources_dir = get_sources_list_dir()
    return os.path.join(sources_dir, '20-default.list')
def get_sources_cache_dir():
    """Return the rosdep sources cache directory under ROS_HOME."""
    return os.path.join(rospkg.get_ros_home(), 'rosdep', SOURCES_CACHE_DIR)
# Default rosdep.yaml format. For now this is the only valid type and
# is specified for future compatibility.
TYPE_YAML = 'yaml'
# git-buildpackage repo list
TYPE_GBPDISTRO = 'gbpdistro'
VALID_TYPES = [TYPE_YAML, TYPE_GBPDISTRO]
class DataSource(object):
    """A single rosdep data source: type, URL and matching tags."""
    def __init__(self, type_, url, tags, origin=None):
        """
        :param type_: data source type, e.g. TYPE_YAML, TYPE_GBPDISTRO
        :param url: URL of data location.  For file resources, must
          start with the file:// scheme.  For remote resources, URL
          must include a path.
        :param tags: tags for matching data source to configurations
        :param origin: filename or other indicator of where data came from for debugging.
        :raises: :exc:`ValueError` if parameters do not validate
        """
        # validate inputs
        if type_ not in VALID_TYPES:
            raise ValueError('type must be one of [%s]' % (','.join(VALID_TYPES)))
        parsed = urlparse.urlparse(url)
        if not parsed.scheme or (parsed.scheme != 'file' and not parsed.netloc) or parsed.path in ('', '/'):
            raise ValueError('url must be a fully-specified URL with scheme, hostname, and path: %s' % (str(url)))
        # isinstance instead of exact type comparison so list subclasses
        # are accepted as well (idiomatic type check)
        if not isinstance(tags, list):
            raise ValueError('tags must be a list: %s' % (str(tags)))
        self.type = type_
        self.tags = tags
        self.url = url
        self.origin = origin
    def __eq__(self, other):
        # equal only when every identifying field matches; non-DataSource
        # operands compare unequal
        return isinstance(other, DataSource) and \
            self.type == other.type and \
            self.tags == other.tags and \
            self.url == other.url and \
            self.origin == other.origin
    def __str__(self):
        if self.origin:
            return '[%s]:\n%s %s %s' % (self.origin, self.type, self.url, ' '.join(self.tags))
        else:
            return '%s %s %s' % (self.type, self.url, ' '.join(self.tags))
    def __repr__(self):
        return repr((self.type, self.url, self.tags, self.origin))
class RosDistroSource(DataSource):
    # NOTE(review): deliberately does NOT call DataSource.__init__ — the URL
    # may be a list (REP-143), which DataSource's validation would reject.
    def __init__(self, distro):
        self.type = TYPE_GBPDISTRO
        self.tags = [distro]
        # In this case self.url is a list if REP-143 is being used
        self.url = get_index().distributions[distro]['distribution']
        self.origin = None
# create function we can pass in as model to parse_source_data. The
# function emulates the CachedDataSource constructor but does the
# necessary full filepath calculation and loading of data.
def cache_data_source_loader(sources_cache_dir, verbose=False):
    """Return a factory usable as the *model* argument of parse_sources_data.

    The factory emulates the CachedDataSource constructor but additionally
    loads each source's cached rosdep data from *sources_cache_dir*
    (pickle cache preferred, legacy YAML cache as fallback, empty dict if
    neither exists).
    """
    def create_model(type_, uri, tags, origin=None):
        # compute the filename hash from the URL
        filename = compute_filename_hash(uri)
        filepath = os.path.join(sources_cache_dir, filename)
        pickle_filepath = filepath + PICKLE_CACHE_EXT
        if os.path.exists(pickle_filepath):
            if verbose:
                print('loading cached data source:\n\t%s\n\t%s' % (uri, pickle_filepath), file=sys.stderr)
            # NOTE(review): pickle.loads on a cache file is unsafe if the
            # cache dir is writable by others — assumed trusted here.
            with open(pickle_filepath, 'rb') as f:
                rosdep_data = pickle.loads(f.read())
        elif os.path.exists(filepath):
            # legacy YAML cache fallback
            if verbose:
                print('loading cached data source:\n\t%s\n\t%s' % (uri, filepath), file=sys.stderr)
            with open(filepath) as f:
                rosdep_data = yaml.safe_load(f.read())
        else:
            # no cache on disk for this URI
            rosdep_data = {}
        return CachedDataSource(type_, uri, tags, rosdep_data, origin=filepath)
    return create_model
class CachedDataSource(object):
    def __init__(self, type_, url, tags, rosdep_data, origin=None):
        """
        Stores data source and loaded rosdep data for that source.

        NOTE: this is not a subclass of DataSource, though it's API is
        duck-type compatible with the DataSource API.
        """
        # delegate field storage/validation to a wrapped DataSource
        self.source = DataSource(type_, url, tags, origin=origin)
        self.rosdep_data = rosdep_data
    # NOTE(review): __eq__ without __hash__ makes instances unhashable in
    # Python 3 — confirm none are ever stored in sets or used as dict keys.
    def __eq__(self, other):
        try:
            return self.source == other.source and \
                self.rosdep_data == other.rosdep_data
        except AttributeError:
            # other lacks the expected attributes: not equal
            return False
    def __str__(self):
        return '%s\n%s' % (self.source, self.rosdep_data)
    def __repr__(self):
        return repr((self.type, self.url, self.tags, self.rosdep_data, self.origin))
    @property
    def type(self):
        """
        :returns: data source type
        """
        return self.source.type
    @property
    def url(self):
        """
        :returns: data source URL
        """
        return self.source.url
    @property
    def tags(self):
        """
        :returns: data source tags
        """
        return self.source.tags
    @property
    def origin(self):
        """
        :returns: data source origin, if set, or ``None``
        """
        return self.source.origin
class DataSourceMatcher(object):
    """Matches data sources against a set of configuration tags."""
    def __init__(self, tags):
        self.tags = tags
    def matches(self, rosdep_data_source):
        """
        Check if the datasource matches this configuration.

        :param rosdep_data_source: :class:`DataSource`
        """
        # a source matches when every one of its tags is in our tag set
        return set(rosdep_data_source.tags).issubset(self.tags)
    @staticmethod
    def create_default(os_override=None):
        """
        Create a :class:`DataSourceMatcher` to match the current
        configuration.

        :param os_override: (os_name, os_codename) tuple to override
          OS detection
        :returns: :class:`DataSourceMatcher`
        """
        distro_name = rospkg.distro.current_distro_codename()
        if os_override is None:
            detector = rospkg.os_detect.OsDetect()
            os_name, os_version, os_codename = detector.detect_os()
        else:
            os_name, os_codename = os_override
        # drop empty/None components before building the matcher
        return DataSourceMatcher([t for t in (distro_name, os_name, os_codename) if t])
def download_rosdep_data(url):
    """
    Download and YAML-parse a rosdep data dictionary from *url*.

    :raises: :exc:`DownloadFailure` If data cannot be
      retrieved (e.g. 404, bad YAML format, server down).
    """
    try:
        # http/https URLs need custom requests to specify the user-agent, since some repositories reject
        # requests from the default user-agent.
        if url.startswith("http://") or url.startswith("https://"):
            url_request = request.Request(url, headers={'User-Agent': 'rosdep/{version}'.format(version=__version__)})
        else:
            url_request = url
        # NOTE(review): 'urlopen' is rosdep2.shell_utils.FakeURLOpener
        # (aliased at import time), not urllib's urlopen — confirm it
        # honors the timeout keyword.
        f = urlopen(url_request, timeout=DOWNLOAD_TIMEOUT)
        text = f.read()
        f.close()
        data = yaml.safe_load(text)
        # a valid rosdep source must be a mapping at the top level
        if type(data) != dict:
            raise DownloadFailure('rosdep data from [%s] is not a YAML dictionary' % (url))
        return data
    except (URLError, httplib.HTTPException) as e:
        raise DownloadFailure(str(e) + ' (%s)' % url)
    except yaml.YAMLError as e:
        raise DownloadFailure(str(e))
def download_default_sources_list(url=DEFAULT_SOURCES_LIST_URL):
    """
    Download (and validate) contents of default sources list.

    :param url: override URL of default sources list file
    :return: raw sources list data, ``str``
    :raises: :exc:`DownloadFailure` If data cannot be
      retrieved (e.g. 404, bad YAML format, server down).
    :raises: :exc:`urllib2.URLError` If data cannot be
      retrieved (e.g. 404, server down).
    """
    try:
        f = urlopen(url, timeout=DOWNLOAD_TIMEOUT)
    except (URLError, httplib.HTTPException) as e:
        # normalize transport errors to URLError, appending the URL for context
        raise URLError(str(e) + ' (%s)' % url)
    data = f.read().decode()
    f.close()
    if not data:
        raise DownloadFailure('cannot download defaults file from %s : empty contents' % url)
    # parse just for validation
    try:
        parse_sources_data(data)
    except InvalidData as e:
        raise DownloadFailure(
            'The content downloaded from %s failed to pass validation.'
            ' It is likely that the source is invalid unless the data was corrupted during the download.'
            ' The contents were:{{{%s}}} The error raised was: %s' % (url, data, e))
    return data
def parse_sources_data(data, origin='<string>', model=None):
    """
    Parse sources file format (tags optional)::

        # comments and empty lines allowed
        <type> <uri> [tags]

    e.g.::

        yaml http://foo/rosdep.yaml fuerte lucid ubuntu

    If tags are specified, *all* tags must match the current
    configuration for the sources data to be used.

    :param data: data in sources file format
    :param model: model to load data into. Defaults to :class:`DataSource`
    :returns: List of data sources, [:class:`DataSource`]
    :raises: :exc:`InvalidData`
    """
    if model is None:
        model = DataSource
    sources = []
    for line in data.split('\n'):
        line = line.strip()
        # ignore empty lines or comments
        if not line or line.startswith('#'):
            continue
        # split() (not split(' ')) tolerates runs of spaces/tabs between
        # fields; split(' ') injected empty strings into the tag list when
        # fields were separated by more than one space
        splits = line.split()
        if len(splits) < 2:
            raise InvalidData('invalid line:\n%s' % (line), origin=origin)
        type_ = splits[0]
        url = splits[1]
        tags = splits[2:]
        try:
            sources.append(model(type_, url, tags, origin=origin))
        except ValueError as e:
            raise InvalidData('line:\n\t%s\n%s' % (line, e), origin=origin)
    return sources
def parse_sources_file(filepath):
    """
    Parse file on disk

    :returns: List of data sources, [:class:`DataSource`]
    :raises: :exc:`InvalidData` If any error occurs reading
      file, so an I/O error, non-existent file, or invalid format.
    """
    try:
        with open(filepath, 'r') as f:
            contents = f.read()
    except IOError as e:
        # wrap I/O problems so callers only have to handle InvalidData
        raise InvalidData('I/O error reading sources file: %s' % (str(e)), origin=filepath)
    return parse_sources_data(contents, origin=filepath)
def parse_sources_list(sources_list_dir=None):
    """
    Parse data stored in on-disk sources list directory into a list of
    :class:`DataSource` for processing.

    :returns: List of data sources, [:class:`DataSource`]. If there is
      no sources list dir, this returns an empty list.
    :raises: :exc:`InvalidData`
    :raises: :exc:`OSError` if *sources_list_dir* cannot be read.
    :raises: :exc:`IOError` if *sources_list_dir* cannot be read.
    """
    if sources_list_dir is None:
        sources_list_dir = get_sources_list_dir()
    # collect *.list files, sorted within each existing directory
    filelist = []
    for sdir in get_sources_list_dirs(sources_list_dir):
        entries = [os.path.join(sdir, f) for f in os.listdir(sdir) if f.endswith('.list')]
        filelist.extend(sorted(entries))
    sources = []
    for path in filelist:
        sources.extend(parse_sources_file(path))
    return sources
def _generate_key_from_urls(urls):
# urls may be a list of urls or a single string
try:
assert isinstance(urls, (list, basestring))
except NameError:
assert isinstance(urls, (list, str))
# We join the urls by the '^' character because it is not allowed in urls
return '^'.join(urls if isinstance(urls, list) else [urls])
def update_sources_list(sources_list_dir=None, sources_cache_dir=None,
                        success_handler=None, error_handler=None,
                        skip_eol_distros=False, ros_distro=None):
    """
    Re-downloaded data from remote sources and store in cache. Also
    update the cache index based on current sources.

    :param sources_list_dir: override source list directory
    :param sources_cache_dir: override sources cache directory
    :param success_handler: fn(DataSource) to call if a particular
        source loads successfully. This hook is mainly for printing
        errors to console.
    :param error_handler: fn(DataSource, DownloadFailure) to call
        if a particular source fails. This hook is mainly for
        printing errors to console.
    :param skip_eol_distros: skip downloading sources for EOL distros
    :param ros_distro: only download sources for this ROS distro;
        ``None`` downloads all distros in the index
    :returns: list of (`DataSource`, cache_file_path) pairs for cache
        files that were updated, ``[str]``
    :raises: :exc:`InvalidData` If any of the sources list files is invalid
    :raises: :exc:`OSError` if *sources_list_dir* cannot be read.
    :raises: :exc:`IOError` If *sources_list_dir* cannot be read or cache data cannot be written
    """
    if sources_cache_dir is None:
        sources_cache_dir = get_sources_cache_dir()
    sources = parse_sources_list(sources_list_dir=sources_list_dir)
    retval = []
    # download and cache each configured source; failures are reported
    # through error_handler but do not abort the whole update
    for source in list(sources):
        try:
            if source.type == TYPE_YAML:
                rosdep_data = download_rosdep_data(source.url)
            elif source.type == TYPE_GBPDISTRO:  # DEPRECATED, do not use this file. See REP137
                if not source.tags[0] in ['electric', 'fuerte']:
                    print('Ignore legacy gbpdistro "%s"' % source.tags[0])
                    sources.remove(source)
                    continue  # do not store this entry in the cache
                rosdep_data = download_gbpdistro_as_rosdep_data(source.url)
            retval.append((source, write_cache_file(sources_cache_dir, source.url, rosdep_data)))
            if success_handler is not None:
                success_handler(source)
        except DownloadFailure as e:
            if error_handler is not None:
                error_handler(source, e)
    # Additional sources for ros distros
    # In compliance with REP137 and REP143
    python_versions = {}
    print('Query rosdistro index %s' % get_index_url())
    distribution_names = get_index().distributions.keys()
    if ros_distro is not None and ros_distro not in distribution_names:
        raise ValueError(
            'Requested distribution "%s" is not in the index.' % ros_distro)
    for dist_name in sorted(distribution_names):
        distribution = get_index().distributions[dist_name]
        if dist_name != ros_distro:
            if ros_distro is not None:
                print('Skip distro "%s" different from requested "%s"' % (dist_name, ros_distro))
                continue
            if skip_eol_distros:
                if distribution.get('distribution_status') == 'end-of-life':
                    print('Skip end-of-life distro "%s"' % dist_name)
                    continue
        print('Add distro "%s"' % dist_name)
        rds = RosDistroSource(dist_name)
        rosdep_data = get_gbprepo_as_rosdep_data(dist_name)
        # Store Python version from REP153
        if distribution.get('python_version'):
            python_versions[dist_name] = distribution.get('python_version')
        # dist_files can either be a string (single filename) or a list (list of filenames)
        dist_files = distribution['distribution']
        key = _generate_key_from_urls(dist_files)
        retval.append((rds, write_cache_file(sources_cache_dir, key, rosdep_data)))
        sources.append(rds)
    # cache metadata that isn't a source list
    MetaDatabase().set('ROS_PYTHON_VERSION', python_versions)
    # Create a combined index of *all* the sources.  We do all the
    # sources regardless of failures because a cache from a previous
    # attempt may still exist.  We have to do this cache index so that
    # loads() see consistent data.
    if not os.path.exists(sources_cache_dir):
        os.makedirs(sources_cache_dir)
    cache_index = os.path.join(sources_cache_dir, CACHE_INDEX)
    data = "#autogenerated by rosdep, do not edit. use 'rosdep update' instead\n"
    for source in sources:
        url = _generate_key_from_urls(source.url)
        data += 'yaml %s %s\n' % (url, ' '.join(source.tags))
    write_atomic(cache_index, data)
    # mainly for debugging and testing
    return retval
def load_cached_sources_list(sources_cache_dir=None, verbose=False):
    """
    Load cached data based on the sources list.

    :returns: list of :class:`CachedDataSource` instance with raw
      rosdep data loaded.

    :raises: :exc:`OSError` if cache cannot be read
    :raises: :exc:`IOError` if cache cannot be read
    """
    if sources_cache_dir is None:
        sources_cache_dir = get_sources_cache_dir()
    # use the shared CACHE_INDEX constant (the same name update_sources_list
    # writes) instead of a hard-coded 'index' literal, so the two stay in sync
    cache_index = os.path.join(sources_cache_dir, CACHE_INDEX)
    if not os.path.exists(cache_index):
        if verbose:
            print('no cache index present, not loading cached sources', file=sys.stderr)
        return []
    try:
        with open(cache_index, 'r') as f:
            cache_data = f.read()
    except IOError as e:
        if e.strerror == 'Permission denied':
            raise CachePermissionError('Failed to write cache file: ' + str(e))
        else:
            raise
    # the loader does all the work
    model = cache_data_source_loader(sources_cache_dir, verbose=verbose)
    return parse_sources_data(cache_data, origin=cache_index, model=model)
class SourcesListLoader(RosdepLoader):
    """
    SourcesList loader implements the general RosdepLoader API. This
    implementation is fairly simple as there is only one view the
    source list loader can create. It is also a bit degenerate as it
    is not capable of mapping resource names to views, thus any
    resource-name-based API fails or returns nothing interesting.

    This loader should not be used directly; instead, it is more
    useful composed with other higher-level implementations, like the
    :class:`rosdep2.rospkg_loader.RospkgLoader`. The general intent
    is to compose it with another loader by making all of the other
    loader's views depends on all the views in this loader.
    """
    # name of the single synthetic view aggregating every source
    ALL_VIEW_KEY = 'sources.list'
    def __init__(self, sources):
        """
        :param sources: cached sources list entries, [:class:`CachedDataSource`]
        """
        self.sources = sources
    @staticmethod
    def create_default(matcher=None, sources_cache_dir=None, os_override=None, verbose=False):
        """
        :param matcher: override DataSourceMatcher. Defaults to
          DataSourceMatcher.create_default().
        :param sources_cache_dir: override location of sources cache
        """
        if matcher is None:
            matcher = DataSourceMatcher.create_default(os_override=os_override)
        if verbose:
            print('using matcher with tags [%s]' % (', '.join(matcher.tags)), file=sys.stderr)
        sources = load_cached_sources_list(sources_cache_dir=sources_cache_dir, verbose=verbose)
        if verbose:
            print('loaded %s sources' % (len(sources)), file=sys.stderr)
        # keep only sources whose tags match the current configuration
        sources = [x for x in sources if matcher.matches(x)]
        if verbose:
            print('%s sources match current tags' % (len(sources)), file=sys.stderr)
        return SourcesListLoader(sources)
    def load_view(self, view_name, rosdep_db, verbose=False):
        """
        Load view data into rosdep_db. If the view has already been
        loaded into rosdep_db, this method does nothing.

        :param view_name: name of ROS stack to load, ``str``
        :param rosdep_db: database to load stack data into, :class:`RosdepDatabase`

        :raises: :exc:`InvalidData`
        """
        if rosdep_db.is_loaded(view_name):
            return
        source = self.get_source(view_name)
        if verbose:
            print('loading view [%s] with sources.list loader' % (view_name), file=sys.stderr)
        view_dependencies = self.get_view_dependencies(view_name)
        rosdep_db.set_view_data(view_name, source.rosdep_data, view_dependencies, view_name)
    def get_loadable_resources(self):
        # this loader defines no concrete resources
        return []
    def get_loadable_views(self):
        return [x.url for x in self.sources]
    def get_view_dependencies(self, view_name):
        # use dependencies to implement precedence
        if view_name != SourcesListLoader.ALL_VIEW_KEY:
            # if the view_name matches one of our sources, return
            # empty list as none of our sources has deps.
            if any([x for x in self.sources if view_name == x.url]):
                return []
        # not one of our views, so it depends on everything we provide
        return [x.url for x in self.sources]
    def get_source(self, view_name):
        # look up the cached source whose URL equals the view name
        matches = [x for x in self.sources if x.url == view_name]
        if matches:
            return matches[0]
        else:
            raise rospkg.ResourceNotFound(view_name)
    def get_rosdeps(self, resource_name, implicit=True):
        """
        Always raises as SourceListLoader defines no concrete resources with rosdeps.

        :raises: :exc:`rospkg.ResourceNotFound`
        """
        raise rospkg.ResourceNotFound(resource_name)
    def get_view_key(self, resource_name):
        """
        Always raises as SourceListLoader defines no concrete resources with rosdeps.

        :returns: Name of view that *resource_name* is in, ``None`` if no associated view.
        :raises: :exc:`rospkg.ResourceNotFound` if *resource_name* cannot be found.
        """
        raise rospkg.ResourceNotFound(resource_name)
| 37.422222
| 121
| 0.662074
|
from __future__ import print_function
import os
import sys
import yaml
from rosdep2.shell_utils import FakeURLOpener as urlopen
try:
from urllib.error import URLError
import urllib.request as request
except ImportError:
from urllib2 import URLError
import urllib2 as request
try:
import cPickle as pickle
except ImportError:
import pickle
from .cache_tools import compute_filename_hash, PICKLE_CACHE_EXT, write_atomic, write_cache_file
from .core import InvalidData, DownloadFailure, CachePermissionError
from .gbpdistro_support import get_gbprepo_as_rosdep_data, download_gbpdistro_as_rosdep_data
from .meta import MetaDatabase
from ._version import __version__
try:
import urlparse
except ImportError:
import urllib.parse as urlparse
try:
import httplib
except ImportError:
import http.client as httplib
import rospkg
import rospkg.distro
from .loader import RosdepLoader
from .rosdistrohelper import get_index, get_index_url
DEFAULT_SOURCES_LIST_URL = 'https://raw.githubusercontent.com/ros/rosdistro/master/rosdep/sources.list.d/20-default.list'
DOWNLOAD_TIMEOUT = 15.0
SOURCES_LIST_DIR = 'sources.list.d'
SOURCES_CACHE_DIR = 'sources.cache'
CACHE_INDEX = 'index'
SOURCE_PATH_ENV = 'ROSDEP_SOURCE_PATH'
def get_sources_list_dirs(source_list_dir):
    """Return existing sources.list dirs; ROSDEP_SOURCE_PATH overrides."""
    if SOURCE_PATH_ENV in os.environ:
        sdirs = os.environ[SOURCE_PATH_ENV].split(os.pathsep)
    else:
        sdirs = [source_list_dir]
    # prune entries that do not exist on disk (iterate a copy while removing)
    for p in list(sdirs):
        if not os.path.exists(p):
            sdirs.remove(p)
    return sdirs
def get_sources_list_dir():
    """Return the first existing system sources.list dir (default under /etc/ros)."""
    # the rospkg.get_etc_ros_dir() branch is intentionally disabled
    if 0:
        etc_ros = rospkg.get_etc_ros_dir()
    else:
        etc_ros = '/etc/ros'
    # compute default system wide sources directory
    sys_sources_list_dir = os.path.join(etc_ros, 'rosdep', SOURCES_LIST_DIR)
    sources_list_dirs = get_sources_list_dirs(sys_sources_list_dir)
    if sources_list_dirs:
        return sources_list_dirs[0]
    else:
        # fall back to the default path even if it does not exist yet
        return sys_sources_list_dir
def get_default_sources_list_file():
    """Return the path of the default '20-default.list' sources file."""
    return os.path.join(get_sources_list_dir(), '20-default.list')
def get_sources_cache_dir():
    """Return the rosdep sources cache directory under ROS_HOME."""
    ros_home = rospkg.get_ros_home()
    return os.path.join(ros_home, 'rosdep', SOURCES_CACHE_DIR)
# Default rosdep.yaml format. For now this is the only valid type and
# is specified for future compatibility.
TYPE_YAML = 'yaml'
# git-buildpackage repo list
TYPE_GBPDISTRO = 'gbpdistro'
VALID_TYPES = [TYPE_YAML, TYPE_GBPDISTRO]
class DataSource(object):
    def __init__(self, type_, url, tags, origin=None):
        """
        :param type_: data source type, e.g. TYPE_YAML, TYPE_GBPDISTRO
        :param url: URL of data location; must be a fully-specified URL
        :param tags: list of tags for matching data source to configurations
        :param origin: filename or other indicator of where data came from, for debugging
        :raises: :exc:`ValueError` if parameters do not validate
        """
        # validate inputs
        if type_ not in VALID_TYPES:
            raise ValueError('type must be one of [%s]' % (','.join(VALID_TYPES)))
        parsed = urlparse.urlparse(url)
        if not parsed.scheme or (parsed.scheme != 'file' and not parsed.netloc) or parsed.path in ('', '/'):
            raise ValueError('url must be a fully-specified URL with scheme, hostname, and path: %s' % (str(url)))
        if not type(tags) == list:
            raise ValueError('tags must be a list: %s' % (str(tags)))
        self.type = type_
        self.tags = tags
        self.url = url
        self.origin = origin
    def __eq__(self, other):
        # equal only when every identifying field matches; non-DataSource
        # operands compare unequal
        return isinstance(other, DataSource) and \
            self.type == other.type and \
            self.tags == other.tags and \
            self.url == other.url and \
            self.origin == other.origin
    def __str__(self):
        if self.origin:
            return '[%s]:\n%s %s %s' % (self.origin, self.type, self.url, ' '.join(self.tags))
        else:
            return '%s %s %s' % (self.type, self.url, ' '.join(self.tags))
    def __repr__(self):
        return repr((self.type, self.url, self.tags, self.origin))
class RosDistroSource(DataSource):
    # DataSource variant built from the rosdistro index rather than a
    # sources.list line; deliberately bypasses DataSource.__init__
    # validation by assigning fields directly.
    def __init__(self, distro):
        self.type = TYPE_GBPDISTRO
        self.tags = [distro]
        # In this case self.url is a list if REP-143 is being used
        self.url = get_index().distributions[distro]['distribution']
        self.origin = None
# create function we can pass in as model to parse_source_data. The
# function emulates the CachedDataSource constructor but does the
# necessary full filepath calculation and loading of data.
def cache_data_source_loader(sources_cache_dir, verbose=False):
    """Return a factory usable as the ``model`` argument of
    parse_source_data: it builds CachedDataSource instances, loading any
    cached rosdep data found under ``sources_cache_dir``."""
    def create_model(type_, uri, tags, origin=None):
        # the cache file name is a hash of the source URL
        base = os.path.join(sources_cache_dir, compute_filename_hash(uri))
        pickled = base + PICKLE_CACHE_EXT
        if os.path.exists(pickled):
            # prefer the pickled cache when present
            if verbose:
                print('loading cached data source:\n\t%s\n\t%s' % (uri, pickled), file=sys.stderr)
            with open(pickled, 'rb') as cache_file:
                data = pickle.loads(cache_file.read())
        elif os.path.exists(base):
            # fall back to the legacy YAML cache file
            if verbose:
                print('loading cached data source:\n\t%s\n\t%s' % (uri, base), file=sys.stderr)
            with open(base) as cache_file:
                data = yaml.safe_load(cache_file.read())
        else:
            # no cache on disk for this source
            data = {}
        return CachedDataSource(type_, uri, tags, data, origin=base)
    return create_model
class CachedDataSource(object):
    """A DataSource together with the rosdep data loaded from its cache."""

    def __init__(self, type_, url, tags, rosdep_data, origin=None):
        # field validation is delegated to DataSource
        self.source = DataSource(type_, url, tags, origin=origin)
        self.rosdep_data = rosdep_data

    def __eq__(self, other):
        try:
            same_source = self.source == other.source
            same_data = self.rosdep_data == other.rosdep_data
        except AttributeError:
            # other is not a CachedDataSource-like object
            return False
        return same_source and same_data

    def __str__(self):
        return '%s\n%s' % (self.source, self.rosdep_data)

    def __repr__(self):
        return repr((self.type, self.url, self.tags, self.rosdep_data, self.origin))

    # read-only views onto the wrapped DataSource fields
    @property
    def type(self):
        return self.source.type

    @property
    def url(self):
        return self.source.url

    @property
    def tags(self):
        return self.source.tags

    @property
    def origin(self):
        return self.source.origin
class DataSourceMatcher(object):
    """Selects data sources whose tags are all present locally."""

    def __init__(self, tags):
        self.tags = tags

    def matches(self, rosdep_data_source):
        # every tag on the source must also be one of our tags
        unmatched = set(rosdep_data_source.tags) - set(self.tags)
        return not any(unmatched)

    @staticmethod
    def create_default(os_override=None):
        """Create a matcher tagged with the current ROS distro codename,
        OS name, and OS codename (auto-detected unless overridden)."""
        distro_name = rospkg.distro.current_distro_codename()
        if os_override is not None:
            os_name, os_codename = os_override
        else:
            detector = rospkg.os_detect.OsDetect()
            os_name, os_version, os_codename = detector.detect_os()
        # drop empty/None entries before building the matcher
        tags = [t for t in (distro_name, os_name, os_codename) if t]
        return DataSourceMatcher(tags)
def download_rosdep_data(url):
    """Download rosdep data from a URL and parse it as YAML.

    :param url: URL of the rosdep data file
    :return: parsed rosdep data dictionary
    :raises: :exc:`DownloadFailure` if the download fails, the content is
      not valid YAML, or it does not parse to a dictionary
    """
    try:
        # http/https URLs need custom requests to specify the user-agent, since some repositories reject
        # requests from the default user-agent.
        if url.startswith("http://") or url.startswith("https://"):
            url_request = request.Request(url, headers={'User-Agent': 'rosdep/{version}'.format(version=__version__)})
        else:
            url_request = url
        f = urlopen(url_request, timeout=DOWNLOAD_TIMEOUT)
        try:
            text = f.read()
        finally:
            # ensure the handle is closed even if read() raises
            f.close()
        data = yaml.safe_load(text)
        # isinstance instead of type() comparison; rosdep data must be a mapping
        if not isinstance(data, dict):
            raise DownloadFailure('rosdep data from [%s] is not a YAML dictionary' % (url))
        return data
    except (URLError, httplib.HTTPException) as e:
        raise DownloadFailure(str(e) + ' (%s)' % url)
    except yaml.YAMLError as e:
        raise DownloadFailure(str(e))
def download_default_sources_list(url=DEFAULT_SOURCES_LIST_URL):
    """Download the default sources list file and validate its contents.

    :raises: :exc:`URLError` if the download itself fails
    :raises: :exc:`DownloadFailure` if the content is empty or invalid
    """
    try:
        response = urlopen(url, timeout=DOWNLOAD_TIMEOUT)
    except (URLError, httplib.HTTPException) as e:
        raise URLError(str(e) + ' (%s)' % url)
    data = response.read().decode()
    response.close()
    if not data:
        raise DownloadFailure('cannot download defaults file from %s : empty contents' % url)
    # parse just for validation
    try:
        parse_sources_data(data)
    except InvalidData as e:
        raise DownloadFailure(
            'The content downloaded from %s failed to pass validation.'
            ' It is likely that the source is invalid unless the data was corrupted during the download.'
            ' The contents were:{{{%s}}} The error raised was: %s' % (url, data, e))
    return data
def parse_sources_data(data, origin='<string>', model=None):
    """Parse sources.list-style text into data source instances.

    Each non-comment line has the form ``<type> <url> [tags...]``.

    :param data: contents of a sources list file
    :param origin: debug name for where the data came from
    :param model: factory ``(type_, url, tags, origin=...)`` used to build
      each entry; defaults to DataSource
    :raises: :exc:`InvalidData` on malformed lines or invalid fields
    """
    if model is None:
        model = DataSource
    sources = []
    for line in data.split('\n'):
        line = line.strip()
        # ignore empty lines or comments
        # (restored the '#' literal, which was truncated in the source)
        if not line or line.startswith('#'):
            continue
        splits = line.split(' ')
        if len(splits) < 2:
            raise InvalidData('invalid line:\n%s' % (line), origin=origin)
        type_ = splits[0]
        url = splits[1]
        tags = splits[2:]
        try:
            sources.append(model(type_, url, tags, origin=origin))
        except ValueError as e:
            raise InvalidData('line:\n\t%s\n%s' % (line, e), origin=origin)
    return sources
def parse_sources_file(filepath):
    """Parse a single sources list file into data source instances.

    :raises: :exc:`InvalidData` if the file cannot be read
    """
    try:
        with open(filepath, 'r') as f:
            contents = f.read()
    except IOError as e:
        raise InvalidData('I/O error reading sources file: %s' % (str(e)), origin=filepath)
    # parsing happens outside the narrow I/O try block
    return parse_sources_data(contents, origin=filepath)
def parse_sources_list(sources_list_dir=None):
    """Parse every '*.list' file in the sources list directories.

    Files are processed directory by directory, sorted by name within
    each directory, and the parsed entries are concatenated in order.
    """
    if sources_list_dir is None:
        sources_list_dir = get_sources_list_dir()
    sources = []
    for sdir in get_sources_list_dirs(sources_list_dir):
        list_files = sorted(name for name in os.listdir(sdir) if name.endswith('.list'))
        for name in list_files:
            sources.extend(parse_sources_file(os.path.join(sdir, name)))
    return sources
def _generate_key_from_urls(urls):
# urls may be a list of urls or a single string
try:
assert isinstance(urls, (list, basestring))
except NameError:
assert isinstance(urls, (list, str))
# We join the urls by the '^' character because it is not allowed in urls
return '^'.join(urls if isinstance(urls, list) else [urls])
def update_sources_list(sources_list_dir=None, sources_cache_dir=None,
                        success_handler=None, error_handler=None,
                        skip_eol_distros=False, ros_distro=None):
    """Re-download all rosdep data sources and rewrite the local cache.

    :param sources_list_dir: override directory containing *.list files
    :param sources_cache_dir: override directory to write the cache into
    :param success_handler: optional callable(DataSource) invoked after a
      source downloads successfully
    :param error_handler: optional callable(DataSource, DownloadFailure)
      invoked when a source fails to download
    :param skip_eol_distros: if True, skip distros whose
      'distribution_status' is 'end-of-life'
    :param ros_distro: restrict rosdistro-based sources to this distro;
      raises ValueError if it is not in the index
    :return: list of (source, cache_file_path) tuples that were written
    """
    if sources_cache_dir is None:
        sources_cache_dir = get_sources_cache_dir()
    sources = parse_sources_list(sources_list_dir=sources_list_dir)
    retval = []
    # iterate over a copy: legacy gbpdistro entries may be removed below
    for source in list(sources):
        try:
            if source.type == TYPE_YAML:
                rosdep_data = download_rosdep_data(source.url)
            elif source.type == TYPE_GBPDISTRO:  # DEPRECATED, do not use this file. See REP137
                if not source.tags[0] in ['electric', 'fuerte']:
                    print('Ignore legacy gbpdistro "%s"' % source.tags[0])
                    sources.remove(source)
                    continue  # do not store this entry in the cache
                rosdep_data = download_gbpdistro_as_rosdep_data(source.url)
            retval.append((source, write_cache_file(sources_cache_dir, source.url, rosdep_data)))
            if success_handler is not None:
                success_handler(source)
        except DownloadFailure as e:
            if error_handler is not None:
                error_handler(source, e)
    # Additional sources for ros distros
    # In compliance with REP137 and REP143
    python_versions = {}
    print('Query rosdistro index %s' % get_index_url())
    distribution_names = get_index().distributions.keys()
    if ros_distro is not None and ros_distro not in distribution_names:
        raise ValueError(
            'Requested distribution "%s" is not in the index.' % ros_distro)
    for dist_name in sorted(distribution_names):
        distribution = get_index().distributions[dist_name]
        # NOTE(review): when ros_distro is None this skips every distro
        # (dist_name != None is always true) -- confirm that is intended
        if dist_name != ros_distro:
            if ros_distro is not None:
                print('Skip distro "%s" different from requested "%s"' % (dist_name, ros_distro))
            continue
        if skip_eol_distros:
            if distribution.get('distribution_status') == 'end-of-life':
                print('Skip end-of-life distro "%s"' % dist_name)
                continue
        print('Add distro "%s"' % dist_name)
        rds = RosDistroSource(dist_name)
        rosdep_data = get_gbprepo_as_rosdep_data(dist_name)
        # Store Python version from REP153
        if distribution.get('python_version'):
            python_versions[dist_name] = distribution.get('python_version')
        # dist_files can either be a string (single filename) or a list (list of filenames)
        dist_files = distribution['distribution']
        key = _generate_key_from_urls(dist_files)
        retval.append((rds, write_cache_file(sources_cache_dir, key, rosdep_data)))
        sources.append(rds)
    # cache metadata that isn't a source list
    MetaDatabase().set('ROS_PYTHON_VERSION', python_versions)
    if not os.path.exists(sources_cache_dir):
        os.makedirs(sources_cache_dir)
    cache_index = os.path.join(sources_cache_dir, CACHE_INDEX)
    data = "#autogenerated by rosdep, do not edit. use 'rosdep update' instead\n"
    for source in sources:
        url = _generate_key_from_urls(source.url)
        data += 'yaml %s %s\n' % (url, ' '.join(source.tags))
    write_atomic(cache_index, data)
    return retval
def load_cached_sources_list(sources_cache_dir=None, verbose=False):
    """Load the cached sources list from the local rosdep cache.

    :param sources_cache_dir: override the default cache directory
    :param verbose: print diagnostics to stderr
    :return: list of cached data sources (empty if no cache index exists)
    :raises: :exc:`CachePermissionError` if the cache index cannot be
      read due to filesystem permissions
    """
    if sources_cache_dir is None:
        sources_cache_dir = get_sources_cache_dir()
    # use the shared CACHE_INDEX constant instead of a duplicated 'index'
    # literal, keeping the filename consistent with update_sources_list()
    cache_index = os.path.join(sources_cache_dir, CACHE_INDEX)
    if not os.path.exists(cache_index):
        if verbose:
            print('no cache index present, not loading cached sources', file=sys.stderr)
        return []
    try:
        with open(cache_index, 'r') as f:
            cache_data = f.read()
    except IOError as e:
        if e.strerror == 'Permission denied':
            # this path is a read failure; the previous message wrongly
            # claimed a write failure
            raise CachePermissionError('Failed to read cache file: ' + str(e))
        else:
            raise
    model = cache_data_source_loader(sources_cache_dir, verbose=verbose)
    return parse_sources_data(cache_data, origin=cache_index, model=model)
class SourcesListLoader(RosdepLoader):
    """RosdepLoader that serves rosdep data from cached sources list entries."""
    # view key representing the aggregate of all sources
    ALL_VIEW_KEY = 'sources.list'
    def __init__(self, sources):
        # sources: list of cached data source objects (url/tags/rosdep_data)
        self.sources = sources
    @staticmethod
    def create_default(matcher=None, sources_cache_dir=None, os_override=None, verbose=False):
        """Create a loader from the local cache, keeping only sources
        whose tags match the current platform (or the given matcher)."""
        if matcher is None:
            matcher = DataSourceMatcher.create_default(os_override=os_override)
        if verbose:
            print('using matcher with tags [%s]' % (', '.join(matcher.tags)), file=sys.stderr)
        sources = load_cached_sources_list(sources_cache_dir=sources_cache_dir, verbose=verbose)
        if verbose:
            print('loaded %s sources' % (len(sources)), file=sys.stderr)
        sources = [x for x in sources if matcher.matches(x)]
        if verbose:
            print('%s sources match current tags' % (len(sources)), file=sys.stderr)
        return SourcesListLoader(sources)
    def load_view(self, view_name, rosdep_db, verbose=False):
        """Load the named view's rosdep data into rosdep_db (no-op if the
        database already has it)."""
        if rosdep_db.is_loaded(view_name):
            return
        source = self.get_source(view_name)
        if verbose:
            print('loading view [%s] with sources.list loader' % (view_name), file=sys.stderr)
        view_dependencies = self.get_view_dependencies(view_name)
        rosdep_db.set_view_data(view_name, source.rosdep_data, view_dependencies, view_name)
    def get_loadable_resources(self):
        # a sources list defines no package-level resources
        return []
    def get_loadable_views(self):
        return [x.url for x in self.sources]
    def get_view_dependencies(self, view_name):
        # individual source views (matched by URL) have no dependencies;
        # any other name -- notably ALL_VIEW_KEY -- depends on all sources
        if view_name != SourcesListLoader.ALL_VIEW_KEY:
            if any([x for x in self.sources if view_name == x.url]):
                return []
        return [x.url for x in self.sources]
    def get_source(self, view_name):
        # sources are looked up by their URL
        matches = [x for x in self.sources if x.url == view_name]
        if matches:
            return matches[0]
        else:
            raise rospkg.ResourceNotFound(view_name)
    def get_rosdeps(self, resource_name, implicit=True):
        # sources lists never define per-resource rosdeps
        raise rospkg.ResourceNotFound(resource_name)
    def get_view_key(self, resource_name):
        # likewise, no resource-to-view mapping exists for sources lists
        raise rospkg.ResourceNotFound(resource_name)
| true
| true
|
f7187bd033077653af8175fd412b56d5fba443ce
| 30
|
py
|
Python
|
snakeai/gui/__init__.py
|
thankthemaker/snake-ai-reinforcement
|
e74964faf7eb893e35dc85ede10f5d794b740fff
|
[
"MIT"
] | 145
|
2017-04-08T17:48:50.000Z
|
2022-03-21T15:14:12.000Z
|
snakeai/gui/__init__.py
|
thankthemaker/snake-ai-reinforcement
|
e74964faf7eb893e35dc85ede10f5d794b740fff
|
[
"MIT"
] | 7
|
2017-08-10T04:43:30.000Z
|
2020-11-18T07:21:16.000Z
|
snakeai/gui/__init__.py
|
thankthemaker/snake-ai-reinforcement
|
e74964faf7eb893e35dc85ede10f5d794b740fff
|
[
"MIT"
] | 48
|
2017-06-01T07:29:01.000Z
|
2021-09-18T09:05:16.000Z
|
from .pygame import PyGameGUI
| 15
| 29
| 0.833333
|
from .pygame import PyGameGUI
| true
| true
|
f7187c1a413767abebf5c8ea371b99345ec2aceb
| 1,532
|
py
|
Python
|
web/forms.py
|
tusharbohara/simple-user-registration-and-functionality-webapp-using-django
|
f3ad7aef110f4f637955f39b93c066f54ebab231
|
[
"MIT"
] | null | null | null |
web/forms.py
|
tusharbohara/simple-user-registration-and-functionality-webapp-using-django
|
f3ad7aef110f4f637955f39b93c066f54ebab231
|
[
"MIT"
] | null | null | null |
web/forms.py
|
tusharbohara/simple-user-registration-and-functionality-webapp-using-django
|
f3ad7aef110f4f637955f39b93c066f54ebab231
|
[
"MIT"
] | null | null | null |
from django import forms
from tempus_dominus.widgets import DatePicker
class ConsumerRegistrationForm(forms.Form):
    """Registration form collecting personal, contact, and location details."""
    # (value, label) choice pairs for the gender dropdown
    GENDER = [
        ('Male', 'Male'),
        ('Female', 'Female'),
        ('Transgender', 'Transgender'),
        ('Not to Specify', 'Not to Specify'),
    ]
    # ABO/Rh blood group choices
    BLOOD_TYPE = [
        ('A+', 'A+'),
        ('B+', 'B+'),
        ('AB+', 'AB+'),
        ('O+', 'O+'),
        ('A-', 'A-'),
        ('B-', 'B-'),
        ('AB-', 'AB-'),
        ('O-', 'O-'),
    ]
    MARITAL_STATUS_TYPE = [
        ('Single', 'Single'),
        ('Married', 'Married'),
        ('Widowed', 'Widowed'),
        ('Divorced', 'Divorced'),
    ]
    first_name = forms.CharField(label='Enter First Name', required=True)
    last_name = forms.CharField(label='Enter Last Name', required=True)
    gender = forms.ChoiceField(choices=GENDER)
    # tempus_dominus DatePicker renders a calendar widget with a FA icon
    date_of_birth = forms.DateField(
        widget=DatePicker(
            options={
                'ignoreReadonly': True,
            },
            attrs={
                'append': 'fa fa-calendar',
            }
        )
    )
    blood_group = forms.ChoiceField(choices=BLOOD_TYPE)
    marital_status = forms.ChoiceField(choices=MARITAL_STATUS_TYPE)
    # country/city are mandatory while state/district are optional
    country = forms.CharField(label='Country', required=True)
    state = forms.CharField(label='State', required=False)
    district = forms.CharField(label='District', required=False)
    city = forms.CharField(label='City', required=True)
    # NOTE(review): IntegerField drops leading zeros and '+' prefixes from
    # phone numbers -- consider CharField with validation
    phone = forms.IntegerField(required=True)
    email = forms.EmailField(required=True)
| 30.64
| 73
| 0.555483
|
from django import forms
from tempus_dominus.widgets import DatePicker
class ConsumerRegistrationForm(forms.Form):
GENDER = [
('Male', 'Male'),
('Female', 'Female'),
('Transgender', 'Transgender'),
('Not to Specify', 'Not to Specify'),
]
BLOOD_TYPE = [
('A+', 'A+'),
('B+', 'B+'),
('AB+', 'AB+'),
('O+', 'O+'),
('A-', 'A-'),
('B-', 'B-'),
('AB-', 'AB-'),
('O-', 'O-'),
]
MARITAL_STATUS_TYPE = [
('Single', 'Single'),
('Married', 'Married'),
('Widowed', 'Widowed'),
('Divorced', 'Divorced'),
]
first_name = forms.CharField(label='Enter First Name', required=True)
last_name = forms.CharField(label='Enter Last Name', required=True)
gender = forms.ChoiceField(choices=GENDER)
date_of_birth = forms.DateField(
widget=DatePicker(
options={
'ignoreReadonly': True,
},
attrs={
'append': 'fa fa-calendar',
}
)
)
blood_group = forms.ChoiceField(choices=BLOOD_TYPE)
marital_status = forms.ChoiceField(choices=MARITAL_STATUS_TYPE)
country = forms.CharField(label='Country', required=True)
state = forms.CharField(label='State', required=False)
district = forms.CharField(label='District', required=False)
city = forms.CharField(label='City', required=True)
phone = forms.IntegerField(required=True)
email = forms.EmailField(required=True)
| true
| true
|
f7187cef3a6d64b15e1d52f337b3a219d8d5f4ed
| 7,702
|
py
|
Python
|
tests/cli/commands/test_celery_command.py
|
gtossou/airflow
|
0314a3a218f864f78ec260cc66134e7acae34bc5
|
[
"Apache-2.0"
] | 2
|
2020-10-23T18:55:03.000Z
|
2021-07-13T04:45:49.000Z
|
tests/cli/commands/test_celery_command.py
|
gtossou/airflow
|
0314a3a218f864f78ec260cc66134e7acae34bc5
|
[
"Apache-2.0"
] | 10
|
2021-09-08T21:27:07.000Z
|
2022-03-30T17:54:45.000Z
|
tests/cli/commands/test_celery_command.py
|
gtossou/airflow
|
0314a3a218f864f78ec260cc66134e7acae34bc5
|
[
"Apache-2.0"
] | 2
|
2020-10-23T18:55:05.000Z
|
2022-02-16T21:53:10.000Z
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
from argparse import Namespace
from tempfile import NamedTemporaryFile
from unittest import mock
import pytest
import sqlalchemy
import airflow
from airflow.cli import cli_parser
from airflow.cli.commands import celery_command
from airflow.configuration import conf
from tests.test_utils.config import conf_vars
class TestWorkerPrecheck(unittest.TestCase):
    # Tests for the DB-session validation performed before a Celery
    # worker is allowed to start.
    @mock.patch('airflow.settings.validate_session')
    def test_error(self, mock_validate_session):
        """
        Test to verify the exit mechanism of airflow-worker cli
        by mocking validate_session method
        """
        mock_validate_session.return_value = False
        with self.assertRaises(SystemExit) as cm:
            celery_command.worker(Namespace(queues=1, concurrency=1))
        # worker must exit with status 1 when session validation fails
        self.assertEqual(cm.exception.code, 1)
    @conf_vars({('core', 'worker_precheck'): 'False'})
    def test_worker_precheck_exception(self):
        """
        Test to check the behaviour of validate_session method
        when worker_precheck is absent in airflow configuration
        """
        # with precheck disabled, validate_session always reports success
        self.assertTrue(airflow.settings.validate_session())
    @mock.patch('sqlalchemy.orm.session.Session.execute')
    @conf_vars({('core', 'worker_precheck'): 'True'})
    def test_validate_session_dbapi_exception(self, mock_session):
        """
        Test to validate connection failure scenario on SELECT 1 query
        """
        mock_session.side_effect = sqlalchemy.exc.OperationalError("m1", "m2", "m3", "m4")
        self.assertEqual(airflow.settings.validate_session(), False)
@pytest.mark.integration("redis")
@pytest.mark.integration("rabbitmq")
@pytest.mark.backend("mysql", "postgres")
class TestWorkerServeLogs(unittest.TestCase):
    # Verifies that the serve-logs subprocess is (or is not) spawned
    # alongside the Celery worker depending on --skip-serve-logs.
    @classmethod
    def setUpClass(cls):
        cls.parser = cli_parser.get_parser()
    @mock.patch('airflow.cli.commands.celery_command.worker_bin')
    @conf_vars({("core", "executor"): "CeleryExecutor"})
    def test_serve_logs_on_worker_start(self, mock_worker):
        # by default a serve-logs Process is started with the worker
        with mock.patch('airflow.cli.commands.celery_command.Process') as mock_process:
            args = self.parser.parse_args(['celery', 'worker', '--concurrency', '1'])
            with mock.patch('celery.platforms.check_privileges') as mock_privil:
                mock_privil.return_value = 0
                celery_command.worker(args)
                mock_process.assert_called()
    @mock.patch('airflow.cli.commands.celery_command.worker_bin')
    @conf_vars({("core", "executor"): "CeleryExecutor"})
    def test_skip_serve_logs_on_worker_start(self, mock_worker):
        # --skip-serve-logs must suppress the serve-logs Process entirely
        with mock.patch('airflow.cli.commands.celery_command.Process') as mock_popen:
            args = self.parser.parse_args(['celery', 'worker', '--concurrency', '1', '--skip-serve-logs'])
            with mock.patch('celery.platforms.check_privileges') as mock_privil:
                mock_privil.return_value = 0
                celery_command.worker(args)
                mock_popen.assert_not_called()
@pytest.mark.backend("mysql", "postgres")
class TestCeleryStopCommand(unittest.TestCase):
    # Tests for 'airflow celery stop': pid-file handling and worker
    # termination.
    @classmethod
    def setUpClass(cls):
        cls.parser = cli_parser.get_parser()
    @mock.patch("airflow.cli.commands.celery_command.setup_locations")
    @mock.patch("airflow.cli.commands.celery_command.psutil.Process")
    @conf_vars({("core", "executor"): "CeleryExecutor"})
    def test_if_right_pid_is_read(self, mock_process, mock_setup_locations):
        args = self.parser.parse_args(['celery', 'stop'])
        pid = "123"
        # Calling stop_worker should delete the temporary pid file,
        # so NamedTemporaryFile's own cleanup raises FileNotFoundError
        with self.assertRaises(FileNotFoundError):
            with NamedTemporaryFile("w+") as f:
                # Create pid file
                f.write(pid)
                f.flush()
                # Setup mock
                mock_setup_locations.return_value = (f.name, None, None, None)
                # Check if works as expected
                celery_command.stop_worker(args)
                mock_process.assert_called_once_with(int(pid))
                mock_process.return_value.terminate.assert_called_once_with()
    @mock.patch("airflow.cli.commands.celery_command.read_pid_from_pidfile")
    @mock.patch("airflow.cli.commands.celery_command.worker_bin.worker")
    @mock.patch("airflow.cli.commands.celery_command.setup_locations")
    @conf_vars({("core", "executor"): "CeleryExecutor"})
    def test_same_pid_file_is_used_in_start_and_stop(
        self,
        mock_setup_locations,
        mock_celery_worker,
        mock_read_pid_from_pidfile
    ):
        pid_file = "test_pid_file"
        mock_setup_locations.return_value = (pid_file, None, None, None)
        mock_read_pid_from_pidfile.return_value = None
        # Call worker: it must pass the pid file through to celery
        worker_args = self.parser.parse_args(['celery', 'worker', '--skip-serve-logs'])
        celery_command.worker(worker_args)
        run_mock = mock_celery_worker.return_value.run
        assert run_mock.call_args
        _, kwargs = run_mock.call_args
        assert 'pidfile' in kwargs
        assert kwargs['pidfile'] == pid_file
        # Call stop: it must read back the very same pid file
        stop_args = self.parser.parse_args(['celery', 'stop'])
        celery_command.stop_worker(stop_args)
        mock_read_pid_from_pidfile.assert_called_once_with(pid_file)
@pytest.mark.backend("mysql", "postgres")
class TestWorkerStart(unittest.TestCase):
    # Verifies CLI arguments are forwarded to the celery worker runner.
    @classmethod
    def setUpClass(cls):
        cls.parser = cli_parser.get_parser()
    @mock.patch("airflow.cli.commands.celery_command.setup_locations")
    @mock.patch('airflow.cli.commands.celery_command.Process')
    @mock.patch('airflow.cli.commands.celery_command.worker_bin')
    @conf_vars({("core", "executor"): "CeleryExecutor"})
    def test_worker_started_with_required_arguments(self, mock_worker, mock_popen, mock_locations):
        pid_file = "pid_file"
        mock_locations.return_value = (pid_file, None, None, None)
        concurrency = '1'
        celery_hostname = "celery_hostname"
        queues = "queue"
        autoscale = "2,5"
        args = self.parser.parse_args([
            'celery',
            'worker',
            '--autoscale',
            autoscale,
            '--concurrency',
            concurrency,
            '--celery-hostname',
            celery_hostname,
            '--queues',
            queues
        ])
        with mock.patch('celery.platforms.check_privileges') as mock_privil:
            mock_privil.return_value = 0
            celery_command.worker(args)
        # every CLI flag must appear in the kwargs handed to celery's run()
        mock_worker.worker.return_value.run.assert_called_once_with(
            pool='prefork',
            optimization='fair',
            O='fair',  # noqa
            queues=queues,
            pidfile=pid_file,
            concurrency=int(concurrency),
            autoscale=autoscale,
            hostname=celery_hostname,
            loglevel=conf.get('logging', 'LOGGING_LEVEL'),
        )
| 39.497436
| 106
| 0.675019
|
import unittest
from argparse import Namespace
from tempfile import NamedTemporaryFile
from unittest import mock
import pytest
import sqlalchemy
import airflow
from airflow.cli import cli_parser
from airflow.cli.commands import celery_command
from airflow.configuration import conf
from tests.test_utils.config import conf_vars
class TestWorkerPrecheck(unittest.TestCase):
@mock.patch('airflow.settings.validate_session')
def test_error(self, mock_validate_session):
mock_validate_session.return_value = False
with self.assertRaises(SystemExit) as cm:
celery_command.worker(Namespace(queues=1, concurrency=1))
self.assertEqual(cm.exception.code, 1)
@conf_vars({('core', 'worker_precheck'): 'False'})
def test_worker_precheck_exception(self):
self.assertTrue(airflow.settings.validate_session())
@mock.patch('sqlalchemy.orm.session.Session.execute')
@conf_vars({('core', 'worker_precheck'): 'True'})
def test_validate_session_dbapi_exception(self, mock_session):
mock_session.side_effect = sqlalchemy.exc.OperationalError("m1", "m2", "m3", "m4")
self.assertEqual(airflow.settings.validate_session(), False)
@pytest.mark.integration("redis")
@pytest.mark.integration("rabbitmq")
@pytest.mark.backend("mysql", "postgres")
class TestWorkerServeLogs(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.parser = cli_parser.get_parser()
@mock.patch('airflow.cli.commands.celery_command.worker_bin')
@conf_vars({("core", "executor"): "CeleryExecutor"})
def test_serve_logs_on_worker_start(self, mock_worker):
with mock.patch('airflow.cli.commands.celery_command.Process') as mock_process:
args = self.parser.parse_args(['celery', 'worker', '--concurrency', '1'])
with mock.patch('celery.platforms.check_privileges') as mock_privil:
mock_privil.return_value = 0
celery_command.worker(args)
mock_process.assert_called()
@mock.patch('airflow.cli.commands.celery_command.worker_bin')
@conf_vars({("core", "executor"): "CeleryExecutor"})
def test_skip_serve_logs_on_worker_start(self, mock_worker):
with mock.patch('airflow.cli.commands.celery_command.Process') as mock_popen:
args = self.parser.parse_args(['celery', 'worker', '--concurrency', '1', '--skip-serve-logs'])
with mock.patch('celery.platforms.check_privileges') as mock_privil:
mock_privil.return_value = 0
celery_command.worker(args)
mock_popen.assert_not_called()
@pytest.mark.backend("mysql", "postgres")
class TestCeleryStopCommand(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.parser = cli_parser.get_parser()
@mock.patch("airflow.cli.commands.celery_command.setup_locations")
@mock.patch("airflow.cli.commands.celery_command.psutil.Process")
@conf_vars({("core", "executor"): "CeleryExecutor"})
def test_if_right_pid_is_read(self, mock_process, mock_setup_locations):
args = self.parser.parse_args(['celery', 'stop'])
pid = "123"
with self.assertRaises(FileNotFoundError):
with NamedTemporaryFile("w+") as f:
f.write(pid)
f.flush()
mock_setup_locations.return_value = (f.name, None, None, None)
celery_command.stop_worker(args)
mock_process.assert_called_once_with(int(pid))
mock_process.return_value.terminate.assert_called_once_with()
@mock.patch("airflow.cli.commands.celery_command.read_pid_from_pidfile")
@mock.patch("airflow.cli.commands.celery_command.worker_bin.worker")
@mock.patch("airflow.cli.commands.celery_command.setup_locations")
@conf_vars({("core", "executor"): "CeleryExecutor"})
def test_same_pid_file_is_used_in_start_and_stop(
self,
mock_setup_locations,
mock_celery_worker,
mock_read_pid_from_pidfile
):
pid_file = "test_pid_file"
mock_setup_locations.return_value = (pid_file, None, None, None)
mock_read_pid_from_pidfile.return_value = None
worker_args = self.parser.parse_args(['celery', 'worker', '--skip-serve-logs'])
celery_command.worker(worker_args)
run_mock = mock_celery_worker.return_value.run
assert run_mock.call_args
_, kwargs = run_mock.call_args
assert 'pidfile' in kwargs
assert kwargs['pidfile'] == pid_file
stop_args = self.parser.parse_args(['celery', 'stop'])
celery_command.stop_worker(stop_args)
mock_read_pid_from_pidfile.assert_called_once_with(pid_file)
@pytest.mark.backend("mysql", "postgres")
class TestWorkerStart(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.parser = cli_parser.get_parser()
@mock.patch("airflow.cli.commands.celery_command.setup_locations")
@mock.patch('airflow.cli.commands.celery_command.Process')
@mock.patch('airflow.cli.commands.celery_command.worker_bin')
@conf_vars({("core", "executor"): "CeleryExecutor"})
def test_worker_started_with_required_arguments(self, mock_worker, mock_popen, mock_locations):
pid_file = "pid_file"
mock_locations.return_value = (pid_file, None, None, None)
concurrency = '1'
celery_hostname = "celery_hostname"
queues = "queue"
autoscale = "2,5"
args = self.parser.parse_args([
'celery',
'worker',
'--autoscale',
autoscale,
'--concurrency',
concurrency,
'--celery-hostname',
celery_hostname,
'--queues',
queues
])
with mock.patch('celery.platforms.check_privileges') as mock_privil:
mock_privil.return_value = 0
celery_command.worker(args)
mock_worker.worker.return_value.run.assert_called_once_with(
pool='prefork',
optimization='fair',
O='fair',
queues=queues,
pidfile=pid_file,
concurrency=int(concurrency),
autoscale=autoscale,
hostname=celery_hostname,
loglevel=conf.get('logging', 'LOGGING_LEVEL'),
)
| true
| true
|
f7187e1c7404425320737222caf5e588e2f3c608
| 2,883
|
py
|
Python
|
subgroups/gamma_zero.py
|
kalinkinisaac/modular
|
301d26ad222a5ef3278aaf251908e0a8537bb58f
|
[
"MIT"
] | null | null | null |
subgroups/gamma_zero.py
|
kalinkinisaac/modular
|
301d26ad222a5ef3278aaf251908e0a8537bb58f
|
[
"MIT"
] | null | null | null |
subgroups/gamma_zero.py
|
kalinkinisaac/modular
|
301d26ad222a5ef3278aaf251908e0a8537bb58f
|
[
"MIT"
] | null | null | null |
from .base_gamma import BaseGamma
from .isomorphism import (one2many, many2one)
from .algo import (factor, get_xy, gcd, inv_element)
from math import log
from fimath import Matrix
import itertools
class GammaZero(BaseGamma):
    # Enumerates pair representatives (a, b) for a level-N congruence
    # subgroup prime-power-wise: one2many/many2one split a representative
    # modulo N into its prime-power components and recombine them
    # (CRT-style -- presumably; confirm against .isomorphism helpers).
    def __init__(self, *args, **kwargs):
        super(__class__, self).__init__(*args, **kwargs)
        self.pair_reprs = []
        # prime factorization of the level N as a list of (p_i, m_i)
        self.fact = factor(self.N)
        self.gen_pair_reprs()
    def gen_pair_reprs(self):
        # Build representatives for each prime power, then take the
        # cross-product and recombine each tuple modulo N.
        tmp_pair_reprs = []
        for (p_i, m_i) in self.fact:
            tmp_pair_reprs += [self._gen_pair_reprs_prime(p_i, m_i)]
        for combination in list(itertools.product(*tmp_pair_reprs)):
            self.pair_reprs.append(many2one(list(combination)))
    def _gen_pair_reprs_prime(self, p, m):
        # Representatives modulo p**m: [0, 1], all [1, i] for i < p**m,
        # and [p**i, b] for 0 < i < m with b coprime to p.
        reprs = []
        reprs.append([0, 1, [p, m]])
        reprs.extend([[1, i, [p, m]] for i in range(p ** m)])
        for i in range(1, m):
            bs = list(filter(lambda x: x % p != 0, range(1, p ** (m - i))))
            reprs.extend([[p ** i, b, [p, m]] for b in bs])
        return reprs
    def pair_reduced(self, a, b):
        # Reduce (a, b) componentwise over the prime-power factors of N.
        many = one2many([a, b, self.N], fact=self.fact)
        reduced = []
        for one in many:
            reduced.append(self._pair_reduced(one))
        return many2one(reduced)
    def _pair_reduced(self, one):
        # Canonicalize one prime-power component [a, b] mod p**m into the
        # representative forms produced by _gen_pair_reprs_prime.
        a, b, [p, m] = one
        N = p ** m
        if a % N == 0:
            return [0, 1, [p, m]]
        _gcd = gcd(a, N)
        c = a // _gcd
        # i is the exact power of p dividing a (within mod N)
        i = int(log(_gcd, p))
        bc = (b * inv_element(c, N)) % N
        if i == 0:
            return [1, bc % N, [p, m]]
        else:
            return [p ** i, (bc % p ** (m - i)) % N, [p, m]]
class GammaBotZero(GammaZero):
    # Representatives carry the reduced pair in the matrix's bottom row.
    def __init__(self, *args, **kwargs):
        super(__class__, self).__init__(*args, **kwargs)
        self.gen_reprs()
    def gen_reprs(self):
        # materialize matrix representatives from the (a, b) pair list
        self.reprs = []
        for a, b, N in self.pair_reprs:
            self.reprs.append(self.reduced(Matrix(0, 0, a, b)))
    def not_cached_reduced(self, mat):
        # Reduce using only the bottom row (c, d); get_xy supplies the
        # complementary entries mod N (presumably solving the Bezout
        # relation for determinant 1 -- confirm against .algo.get_xy).
        a, b = mat.c, mat.d
        a, b = self.pair_reduced(a, b)[0:2]
        d, c = list(map(lambda x: x % self.N, get_xy(a, b, self.N)))
        return Matrix(c, -d, a, b) % self.N
    @staticmethod
    def sort_key(m):
        # order by bottom row first, matching this subgroup's reduction
        return [m.c, m.d, m.a, m.b]
class GammaTopZero(GammaZero):
    # Mirror of GammaBotZero with the reduced pair in the top row.
    def __init__(self, *args, **kwargs):
        super(__class__, self).__init__(*args, **kwargs)
        self.gen_reprs()
    def gen_reprs(self):
        # NOTE(review): unlike GammaBotZero.gen_reprs, self.reprs is not
        # reset here -- assumes BaseGamma initializes it; verify.
        for a, b, N in self.pair_reprs:
            self.reprs.append(self.reduced(Matrix(a, b, 0, 0)))
    def not_cached_reduced(self, mat):
        # Reduce using only the top row (a, b); get_xy supplies the
        # complementary entries mod N.
        a, b = mat.a, mat.b
        a, b = self.pair_reduced(a, b)[0:2]
        d, c = list(map(lambda x : x % self.N, get_xy(a, b, self.N)))
        return Matrix(a, b, -c, d) % self.N
    @staticmethod
    def sort_key(m):
        # order by top row first, matching this subgroup's reduction
        return [m.a, m.b, m.c, m.d]
| 28.83
| 75
| 0.541797
|
from .base_gamma import BaseGamma
from .isomorphism import (one2many, many2one)
from .algo import (factor, get_xy, gcd, inv_element)
from math import log
from fimath import Matrix
import itertools
class GammaZero(BaseGamma):
def __init__(self, *args, **kwargs):
super(__class__, self).__init__(*args, **kwargs)
self.pair_reprs = []
self.fact = factor(self.N)
self.gen_pair_reprs()
def gen_pair_reprs(self):
tmp_pair_reprs = []
for (p_i, m_i) in self.fact:
tmp_pair_reprs += [self._gen_pair_reprs_prime(p_i, m_i)]
for combination in list(itertools.product(*tmp_pair_reprs)):
self.pair_reprs.append(many2one(list(combination)))
def _gen_pair_reprs_prime(self, p, m):
reprs = []
reprs.append([0, 1, [p, m]])
reprs.extend([[1, i, [p, m]] for i in range(p ** m)])
for i in range(1, m):
bs = list(filter(lambda x: x % p != 0, range(1, p ** (m - i))))
reprs.extend([[p ** i, b, [p, m]] for b in bs])
return reprs
def pair_reduced(self, a, b):
many = one2many([a, b, self.N], fact=self.fact)
reduced = []
for one in many:
reduced.append(self._pair_reduced(one))
return many2one(reduced)
def _pair_reduced(self, one):
a, b, [p, m] = one
N = p ** m
if a % N == 0:
return [0, 1, [p, m]]
_gcd = gcd(a, N)
c = a // _gcd
i = int(log(_gcd, p))
bc = (b * inv_element(c, N)) % N
if i == 0:
return [1, bc % N, [p, m]]
else:
return [p ** i, (bc % p ** (m - i)) % N, [p, m]]
class GammaBotZero(GammaZero):
def __init__(self, *args, **kwargs):
super(__class__, self).__init__(*args, **kwargs)
self.gen_reprs()
def gen_reprs(self):
self.reprs = []
for a, b, N in self.pair_reprs:
self.reprs.append(self.reduced(Matrix(0, 0, a, b)))
def not_cached_reduced(self, mat):
a, b = mat.c, mat.d
a, b = self.pair_reduced(a, b)[0:2]
d, c = list(map(lambda x: x % self.N, get_xy(a, b, self.N)))
return Matrix(c, -d, a, b) % self.N
@staticmethod
def sort_key(m):
return [m.c, m.d, m.a, m.b]
class GammaTopZero(GammaZero):
    """Coset representatives determined by the top row (a, b) of a matrix.

    Mirror image of ``GammaBotZero``, which keys on the bottom row.
    """

    def __init__(self, *args, **kwargs):
        super(__class__, self).__init__(*args, **kwargs)
        self.gen_reprs()

    def gen_reprs(self):
        """Build one reduced Matrix representative per pair representative.

        Bug fix: unlike ``GammaBotZero.gen_reprs``, ``self.reprs`` was never
        initialized before the appends, so this either raised AttributeError
        or extended state left over from elsewhere.
        """
        self.reprs = []
        for a, b, N in self.pair_reprs:
            self.reprs.append(self.reduced(Matrix(a, b, 0, 0)))

    def not_cached_reduced(self, mat):
        """Reduce ``mat`` from scratch using only its top row."""
        a, b = self.pair_reduced(mat.a, mat.b)[0:2]
        # Complete (a, b) to a full matrix via a Bezout-style completion.
        d, c = list(map(lambda x: x % self.N, get_xy(a, b, self.N)))
        return Matrix(a, b, -c, d) % self.N

    @staticmethod
    def sort_key(m):
        """Order matrices by top row first, then bottom row."""
        return [m.a, m.b, m.c, m.d]
| true
| true
|
f7187e6897bff9206930e55c479aeb8c17fd6cb9
| 11,160
|
py
|
Python
|
KGEAttack/ConvE/l2_del.py
|
PeruBhardwaj/AttributionAttack
|
0d5ca334c611c5e067029a3f8907f2d91255ddde
|
[
"MIT"
] | 5
|
2021-11-08T07:18:10.000Z
|
2022-03-10T09:06:11.000Z
|
KGEAttack/ConvE/l2_del.py
|
PeruBhardwaj/AttributionAttack
|
0d5ca334c611c5e067029a3f8907f2d91255ddde
|
[
"MIT"
] | null | null | null |
KGEAttack/ConvE/l2_del.py
|
PeruBhardwaj/AttributionAttack
|
0d5ca334c611c5e067029a3f8907f2d91255ddde
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# coding: utf-8
# In this notebook, I delete a triple from the neighbourhood of the target triple based on the **L2 metric = euclidean distance** between the candidate triple's embedding and the target triple's embedding
#
# - 'triple' embedding is computed by applying the model's scoring function to embeddings
# - neighbourhood refers to the triples that share the entities with target's entities
#
#
# In[1]:
import pickle
from typing import Dict, Tuple, List
import os
import numpy as np
import pandas as pd
from collections import defaultdict
import operator
import json
import logging
import argparse
import math
from pprint import pprint
import errno
import time
import torch
from torch.utils.data import DataLoader
import torch.backends.cudnn as cudnn
from torch import nn
from torch.nn import CrossEntropyLoss
from torch.nn import functional as F
import torch.autograd as autograd
from evaluation import evaluation
from model import Distmult, Complex, Conve, Transe
import utils
def generate_nghbrs(test_set, train_set):
    """Map each target triple to the training triples that neighbour it.

    A training triple is a neighbour when its subject or object matches
    the target's subject or object.

    :param test_set: array of target triples, shape (T, 3) as (s, r, o)
    :param train_set: array of training triples, shape (M, 3)
    :returns: dict {target index -> np.ndarray of training-set indices}
    """
    neighbours = {}
    for idx, triple in enumerate(test_set):
        entities = [triple[0], triple[2]]
        shares_entity = (np.isin(train_set[:, 0], entities)
                         | np.isin(train_set[:, 2], entities))
        neighbours[idx] = np.flatnonzero(shares_entity)
    return neighbours
def get_deletions(train_data, test_data, neighbours, model, attack_batch_size):
    """For each target triple, pick the neighbouring training triple whose
    embedding is closest (smallest L2 distance) to the target's embedding.

    :param train_data: np.ndarray of training triples, shape (M, 3)
    :param test_data: np.ndarray of target triples, shape (T, 3)
    :param neighbours: dict {target idx -> train indices} from generate_nghbrs
    :param model: KGE model exposing ``score_triples_vec``
    :param attack_batch_size: batch size for scoring neighbours; -1 means
        score all neighbours of a target in a single batch
    :returns: list with one training triple (array row) to delete per target

    NOTE(review): relies on module-level ``logger`` and ``device``.
    """
    logger.info('------ Generating edits per target triple ------')
    start_time = time.time()
    logger.info('Start time: {0}'.format(str(start_time)))
    triples_to_delete = []
    for test_idx, test_trip in enumerate(test_data):
        nghbr_trip = train_data[neighbours[test_idx]]
        test_trip = test_trip[None, :]  # add a batch dimension
        test_trip = torch.from_numpy(test_trip).to(device)
        test_s, test_r, test_o = test_trip[:, 0], test_trip[:, 1], test_trip[:, 2]
        test_vec = model.score_triples_vec(test_s, test_r, test_o)
        if attack_batch_size == -1:
            nghbr_batch = nghbr_trip.shape[0]
        else:
            # Bug fix: previously read the module-level ``args.attack_batch_size``
            # instead of the function parameter, silently ignoring the argument.
            nghbr_batch = attack_batch_size
        nghbr_dist = []
        b_begin = 0
        while b_begin < nghbr_trip.shape[0]:
            b_nghbr_trip = torch.from_numpy(
                nghbr_trip[b_begin: b_begin + nghbr_batch]).to(device)
            b_nghbr_s = b_nghbr_trip[:, 0]
            b_nghbr_r = b_nghbr_trip[:, 1]
            b_nghbr_o = b_nghbr_trip[:, 2]
            b_nghbr_vec = model.score_triples_vec(b_nghbr_s, b_nghbr_r, b_nghbr_o)
            # b_nghbr_vec is (batch, emb_dim), test_vec is (1, emb_dim);
            # broadcasting yields one negated L2 distance per neighbour.
            b_dist = -torch.norm((b_nghbr_vec - test_vec), p=2, dim=-1)
            nghbr_dist += b_dist.detach().cpu().numpy().tolist()
            b_begin += nghbr_batch
        nghbr_dist = torch.from_numpy(np.array(nghbr_dist)).to(device)
        # Highest (least negative) value = smallest distance = most similar.
        max_values, argsort = torch.sort(nghbr_dist, -1, descending=True)
        triples_to_delete.append(nghbr_trip[argsort[0]])
        if test_idx % 100 == 0 or test_idx == test_data.shape[0] - 1:
            logger.info('Processed test triple {0}'.format(str(test_idx)))
            logger.info('Time taken: {0}'.format(str(time.time() - start_time)))
    logger.info('Time taken to generate edits: {0}'.format(str(time.time() - start_time)))
    return triples_to_delete
if __name__ == '__main__':
# --- CLI: extend the shared parser with attack-specific options ---
parser = utils.get_argument_parser()
parser.add_argument('--target-split', type=str, default='0_100_1', help='Ranks to use for target set. Values are 0 for ranks==1; 1 for ranks <=10; 2 for ranks>10 and ranks<=100. Default: 1')
parser.add_argument('--budget', type=int, default=1, help='Budget for each target triple for each corruption side')
parser.add_argument('--rand-run', type=int, default=1, help='A number assigned to the random run of experiment')
parser.add_argument('--attack-batch-size', type=int, default=-1, help='Batch size for processing neighbours of target')
args = parser.parse_args()
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
args.device = device
# args.target_split = '0_100_1' # which target split to use
#Values are 1 for ranks <=10; 2 for ranks>10 and ranks<=100.
# args.budget = 1 #indicates the num of adversarial edits for each target triple for each corruption side
# args.rand_run = 1 # a number assigned to the random run of the experiment
# Offset the base seed by the run number so each random run is deterministic but distinct.
args.seed = args.seed + (args.rand_run - 1) # default seed is 17
# args.model = 'distmult'
# args.data = 'WN18RR'
if args.reproduce_results:
args = utils.set_hyperparams(args)
# Fixing random seeds for reproducibility -https://pytorch.org/docs/stable/notes/randomness.html
torch.manual_seed(args.seed)
cudnn.deterministic = True
cudnn.benchmark = False
np.random.seed(args.seed)
rng = np.random.default_rng(seed=args.seed)
args.epochs = -1 #no training here
# --- Paths: pre-trained model checkpoint and attack log file ---
model_name = '{0}_{1}_{2}_{3}_{4}'.format(args.model, args.embedding_dim, args.input_drop, args.hidden_drop, args.feat_drop)
model_path = 'saved_models/{0}_{1}.model'.format(args.data, model_name)
log_path = 'logs/attack_logs/l2_del_{0}_{1}_{2}_{3}_{4}'.format( args.model, args.data,
args.target_split, args.budget, args.rand_run)
logging.basicConfig(format = '%(asctime)s - %(levelname)s - %(name)s - %(message)s',
datefmt = '%m/%d/%Y %H:%M:%S',
level = logging.INFO,
filename = log_path
)
logger = logging.getLogger(__name__)
# --- Load the target split's dicts, data, and evaluation filters ---
data_path = 'data/target_{0}_{1}_{2}'.format(args.model, args.data, args.target_split)
n_ent, n_rel, ent_to_id, rel_to_id = utils.generate_dicts(data_path)
##### load data####
data = utils.load_data(data_path)
train_data, valid_data, test_data = data['train'], data['valid'], data['test']
inp_f = open(os.path.join(data_path, 'to_skip_eval.pickle'), 'rb')
to_skip_eval: Dict[str, Dict[Tuple[int, int], List[int]]] = pickle.load(inp_f)
inp_f.close()
# Keys may have been pickled as non-int types; normalize them to int tuples.
to_skip_eval['lhs'] = {(int(k[0]), int(k[1])): v for k,v in to_skip_eval['lhs'].items()}
to_skip_eval['rhs'] = {(int(k[0]), int(k[1])): v for k,v in to_skip_eval['rhs'].items()}
model = utils.load_model(model_path, args, n_ent, n_rel, device)
# --- Select one training triple to delete per target triple (L2 metric) ---
neighbours = generate_nghbrs(test_data, train_data)
# test set is the target set because we loaded data from target_...
triples_to_delete = get_deletions(train_data, test_data, neighbours,
model, args.attack_batch_size)
# Deduplicate selections (the same training triple can be chosen for several targets).
df = pd.DataFrame(data=triples_to_delete)
df = df.drop_duplicates()
# print(df.shape)
trips_to_delete = df.values
# print(trips_to_delete.shape)
num_duplicates = len(triples_to_delete) - trips_to_delete.shape[0]
# print(num_duplicates)
per_tr_1, n_ignored_edits = utils.perturb_data(train_data,
trips_to_delete)
# Perturbed dataset
logger.info('Shape of perturbed training set: {0}'.format(per_tr_1.shape))
logger.info('Number of adversarial deletions ignored (because of singleton nodes): {0}'.format(n_ignored_edits))
logger.info('Number of duplicate adversarial deletions : {0}'.format(num_duplicates))
logger.info ('Length of original training set: ' + str(train_data.shape[0]))
logger.info ('Length of new poisoned training set: ' + str(per_tr_1.shape[0]))
# --- Persist the poisoned dataset alongside unchanged valid/test splits ---
save_path = 'data/l2_del_{0}_{1}_{2}_{3}_{4}'.format( args.model, args.data,
args.target_split, args.budget, args.rand_run)
try :
os.makedirs(save_path)
except OSError as e:
if e.errno == errno.EEXIST:
logger.info(e)
logger.info('Using the existing folder {0} for processed data'.format(save_path))
else:
raise
new_train = per_tr_1
# Entity counts before/after poisoning (deletions can drop singleton entities).
num_en_or = np.unique(np.concatenate((train_data[:,0], train_data[:,2]))).shape[0]
num_en_pos = np.unique(np.concatenate((new_train[:,0], new_train[:,2]))).shape[0]
with open(os.path.join(save_path, 'train.txt'), 'w') as out:
for item in new_train:
out.write("%s\n" % "\t".join(map(str, item)))
out = open(os.path.join(save_path, 'train.pickle'), 'wb')
pickle.dump(new_train.astype('uint64'), out)
out.close()
with open(os.path.join(save_path, 'entities_dict.json'), 'w') as f:
f.write(json.dumps(ent_to_id) + '\n')
with open(os.path.join(save_path, 'relations_dict.json'), 'w') as f:
f.write(json.dumps(rel_to_id) + '\n')
with open(os.path.join(save_path, 'valid.txt'), 'w') as out:
for item in valid_data:
out.write("%s\n" % "\t".join(map(str, item)))
out = open(os.path.join(save_path, 'valid.pickle'), 'wb')
pickle.dump(valid_data.astype('uint64'), out)
out.close()
with open(os.path.join(save_path, 'test.txt'), 'w') as out:
for item in test_data:
out.write("%s\n" % "\t".join(map(str, item)))
out = open(os.path.join(save_path, 'test.pickle'), 'wb')
pickle.dump(test_data.astype('uint64'), out)
out.close()
# Human-readable summary of the attack's effect on the dataset.
with open(os.path.join(save_path, 'stats.txt'), 'w') as f:
f.write('Model: {0} \n'.format(args.model))
f.write('Data: {0} \n'.format(args.data))
f.write('Length of original training set: {0} \n'. format(train_data.shape[0]))
f.write('Length of new poisoned training set: {0} \n'. format(new_train.shape[0]))
f.write('Number of duplicate deletions: {0} \n'. format(num_duplicates))
f.write('Number of deletions ignored due to singleton nodes: {0} \n'. format(n_ignored_edits))
f.write('Number of entities in original training set: {0} \n'. format(num_en_or))
f.write('Number of entities in poisoned training set: {0} \n'. format(num_en_pos))
f.write('Length of original test set: {0} \n'. format(test_data.shape[0]))
f.write('---------------------------------------------------------------------- \n')
# Keep the raw (pre-dedup) selections and the final deduplicated deletions.
with open(os.path.join(save_path, 'influential_triples.txt'), 'w') as out:
for item in triples_to_delete:
out.write("%s\n" % "\t".join(map(str, item)))
with open(os.path.join(save_path, 'deletions.txt'), 'w') as out:
for item in trips_to_delete:
out.write("%s\n" % "\t".join(map(str, item)))
# In[ ]:
# In[ ]:
| 37.2
| 204
| 0.63629
|
# - neighbourhood refers to the triples that share the entities with target's entities
import pickle
from typing import Dict, Tuple, List
import os
import numpy as np
import pandas as pd
from collections import defaultdict
import operator
import json
import logging
import argparse
import math
from pprint import pprint
import errno
import time
import torch
from torch.utils.data import DataLoader
import torch.backends.cudnn as cudnn
from torch import nn
from torch.nn import CrossEntropyLoss
from torch.nn import functional as F
import torch.autograd as autograd
from evaluation import evaluation
from model import Distmult, Complex, Conve, Transe
import utils
def generate_nghbrs(test_set, train_set):
n_dict = {}
for t, triple in enumerate(test_set):
sub = triple[0]
obj = triple[2]
mask = (np.isin(train_set[:,0], [sub, obj]) | np.isin(train_set[:,2], [sub, obj]))
mask_idx = np.where(mask)[0]
n_dict[t] = mask_idx
return n_dict
def get_deletions(train_data, test_data, neighbours, model, attack_batch_size):
logger.info('------ Generating edits per target triple ------')
start_time = time.time()
logger.info('Start time: {0}'.format(str(start_time)))
triples_to_delete = []
for test_idx, test_trip in enumerate(test_data):
test_nghbrs = neighbours[test_idx]
nghbr_trip = train_data[test_nghbrs]
test_trip = test_trip[None, :]
test_trip = torch.from_numpy(test_trip).to(device)
test_s, test_r, test_o = test_trip[:,0], test_trip[:,1], test_trip[:,2]
test_vec = model.score_triples_vec(test_s, test_r, test_o)
b_begin = 0
nghbr_dist = []
if attack_batch_size == -1:
nghbr_batch = nghbr_trip.shape[0]
else:
nghbr_batch = args.attack_batch_size
while b_begin < nghbr_trip.shape[0]:
b_nghbr_trip = nghbr_trip[b_begin : b_begin+nghbr_batch]
b_nghbr_trip = torch.from_numpy(b_nghbr_trip).to(device)
b_nghbr_s, b_nghbr_r, b_nghbr_o = b_nghbr_trip[:,0], b_nghbr_trip[:,1], b_nghbr_trip[:,2]
b_nghbr_vec = model.score_triples_vec(b_nghbr_s, b_nghbr_r, b_nghbr_o)
b_dist = -torch.norm((b_nghbr_vec-test_vec), p=2, dim=-1)
b_dist = b_dist.detach().cpu().numpy().tolist()
nghbr_dist += b_dist
b_begin += nghbr_batch
nghbr_dist = np.array(nghbr_dist)
nghbr_dist = torch.from_numpy(nghbr_dist).to(device)
max_values, argsort = torch.sort(nghbr_dist, -1, descending=True)
del_idx = argsort[0]
triple_to_delete = nghbr_trip[del_idx]
triples_to_delete.append(triple_to_delete)
if test_idx%100 == 0 or test_idx == test_data.shape[0]-1:
logger.info('Processed test triple {0}'.format(str(test_idx)))
logger.info('Time taken: {0}'.format(str(time.time() - start_time)))
logger.info('Time taken to generate edits: {0}'.format(str(time.time() - start_time)))
return triples_to_delete
if __name__ == '__main__':
parser = utils.get_argument_parser()
parser.add_argument('--target-split', type=str, default='0_100_1', help='Ranks to use for target set. Values are 0 for ranks==1; 1 for ranks <=10; 2 for ranks>10 and ranks<=100. Default: 1')
parser.add_argument('--budget', type=int, default=1, help='Budget for each target triple for each corruption side')
parser.add_argument('--rand-run', type=int, default=1, help='A number assigned to the random run of experiment')
parser.add_argument('--attack-batch-size', type=int, default=-1, help='Batch size for processing neighbours of target')
args = parser.parse_args()
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
args.device = device
manual_seed(args.seed)
cudnn.deterministic = True
cudnn.benchmark = False
np.random.seed(args.seed)
rng = np.random.default_rng(seed=args.seed)
args.epochs = -1
model_name = '{0}_{1}_{2}_{3}_{4}'.format(args.model, args.embedding_dim, args.input_drop, args.hidden_drop, args.feat_drop)
model_path = 'saved_models/{0}_{1}.model'.format(args.data, model_name)
log_path = 'logs/attack_logs/l2_del_{0}_{1}_{2}_{3}_{4}'.format( args.model, args.data,
args.target_split, args.budget, args.rand_run)
logging.basicConfig(format = '%(asctime)s - %(levelname)s - %(name)s - %(message)s',
datefmt = '%m/%d/%Y %H:%M:%S',
level = logging.INFO,
filename = log_path
)
logger = logging.getLogger(__name__)
data_path = 'data/target_{0}_{1}_{2}'.format(args.model, args.data, args.target_split)
n_ent, n_rel, ent_to_id, rel_to_id = utils.generate_dicts(data_path)
a = data['train'], data['valid'], data['test']
inp_f = open(os.path.join(data_path, 'to_skip_eval.pickle'), 'rb')
to_skip_eval: Dict[str, Dict[Tuple[int, int], List[int]]] = pickle.load(inp_f)
inp_f.close()
to_skip_eval['lhs'] = {(int(k[0]), int(k[1])): v for k,v in to_skip_eval['lhs'].items()}
to_skip_eval['rhs'] = {(int(k[0]), int(k[1])): v for k,v in to_skip_eval['rhs'].items()}
model = utils.load_model(model_path, args, n_ent, n_rel, device)
neighbours = generate_nghbrs(test_data, train_data)
triples_to_delete = get_deletions(train_data, test_data, neighbours,
model, args.attack_batch_size)
df = pd.DataFrame(data=triples_to_delete)
df = df.drop_duplicates()
trips_to_delete = df.values
num_duplicates = len(triples_to_delete) - trips_to_delete.shape[0]
per_tr_1, n_ignored_edits = utils.perturb_data(train_data,
trips_to_delete)
logger.info('Shape of perturbed training set: {0}'.format(per_tr_1.shape))
logger.info('Number of adversarial deletions ignored (because of singleton nodes): {0}'.format(n_ignored_edits))
logger.info('Number of duplicate adversarial deletions : {0}'.format(num_duplicates))
logger.info ('Length of original training set: ' + str(train_data.shape[0]))
logger.info ('Length of new poisoned training set: ' + str(per_tr_1.shape[0]))
save_path = 'data/l2_del_{0}_{1}_{2}_{3}_{4}'.format( args.model, args.data,
args.target_split, args.budget, args.rand_run)
try :
os.makedirs(save_path)
except OSError as e:
if e.errno == errno.EEXIST:
logger.info(e)
logger.info('Using the existing folder {0} for processed data'.format(save_path))
else:
raise
new_train = per_tr_1
num_en_or = np.unique(np.concatenate((train_data[:,0], train_data[:,2]))).shape[0]
num_en_pos = np.unique(np.concatenate((new_train[:,0], new_train[:,2]))).shape[0]
with open(os.path.join(save_path, 'train.txt'), 'w') as out:
for item in new_train:
out.write("%s\n" % "\t".join(map(str, item)))
out = open(os.path.join(save_path, 'train.pickle'), 'wb')
pickle.dump(new_train.astype('uint64'), out)
out.close()
with open(os.path.join(save_path, 'entities_dict.json'), 'w') as f:
f.write(json.dumps(ent_to_id) + '\n')
with open(os.path.join(save_path, 'relations_dict.json'), 'w') as f:
f.write(json.dumps(rel_to_id) + '\n')
with open(os.path.join(save_path, 'valid.txt'), 'w') as out:
for item in valid_data:
out.write("%s\n" % "\t".join(map(str, item)))
out = open(os.path.join(save_path, 'valid.pickle'), 'wb')
pickle.dump(valid_data.astype('uint64'), out)
out.close()
with open(os.path.join(save_path, 'test.txt'), 'w') as out:
for item in test_data:
out.write("%s\n" % "\t".join(map(str, item)))
out = open(os.path.join(save_path, 'test.pickle'), 'wb')
pickle.dump(test_data.astype('uint64'), out)
out.close()
with open(os.path.join(save_path, 'stats.txt'), 'w') as f:
f.write('Model: {0} \n'.format(args.model))
f.write('Data: {0} \n'.format(args.data))
f.write('Length of original training set: {0} \n'. format(train_data.shape[0]))
f.write('Length of new poisoned training set: {0} \n'. format(new_train.shape[0]))
f.write('Number of duplicate deletions: {0} \n'. format(num_duplicates))
f.write('Number of deletions ignored due to singleton nodes: {0} \n'. format(n_ignored_edits))
f.write('Number of entities in original training set: {0} \n'. format(num_en_or))
f.write('Number of entities in poisoned training set: {0} \n'. format(num_en_pos))
f.write('Length of original test set: {0} \n'. format(test_data.shape[0]))
f.write('---------------------------------------------------------------------- \n')
with open(os.path.join(save_path, 'influential_triples.txt'), 'w') as out:
for item in triples_to_delete:
out.write("%s\n" % "\t".join(map(str, item)))
with open(os.path.join(save_path, 'deletions.txt'), 'w') as out:
for item in trips_to_delete:
out.write("%s\n" % "\t".join(map(str, item)))
| true
| true
|
f7187f1fc12ea1ed1b0c5a0ce33d00d4b8ac79c3
| 7,229
|
py
|
Python
|
blocklogic.py
|
rroctavian/blockchain_wdss
|
2d25cb83fac91404da8b7e2404b5668a5877318f
|
[
"MIT"
] | null | null | null |
blocklogic.py
|
rroctavian/blockchain_wdss
|
2d25cb83fac91404da8b7e2404b5668a5877318f
|
[
"MIT"
] | null | null | null |
blocklogic.py
|
rroctavian/blockchain_wdss
|
2d25cb83fac91404da8b7e2404b5668a5877318f
|
[
"MIT"
] | null | null | null |
from hashlib import sha256
import json
import time
import multiprocessing
import time
import numpy as np
class Block:
    """A single block: depth, transaction payload, timestamp, parent link."""

    def __init__(
        self, depth, transactions, timestamp,
        previous_hash, nonce=0
    ):
        self.depth = depth
        self.transactions = transactions
        self.timestamp = timestamp
        self.previous_hash = previous_hash
        self.nonce = nonce

    def compute_hash(self):
        """Return the SHA-256 hex digest of the block's JSON-serialized fields."""
        serialized = json.dumps(self.__dict__, sort_keys=True)
        return sha256(serialized.encode()).hexdigest()

    def __eq__(self, other):
        """Two blocks are equal exactly when all their stored fields match."""
        return self.__dict__ == other.__dict__
class Blockchain:
    """Blockchain with a consensus (longest) chain plus fork extensions.

    Inspired from IBM version at the moment. ``extensions`` stores forks
    (orphans / stale blocks): each entry is a list starting at the fork's
    base block — which is also stored in the longest chain — followed by
    the fork's own blocks.
    """

    # Number of leading zeros a valid proof hash must start with.
    difficulty = 4
    # Number of outstanding transactions consumed per mined block.
    block_capacity = 3

    def __init__(self):
        """Create an empty chain holding only the genesis block."""
        # Transactions to be mined
        self.outstanding_transactions = []
        # Consensus chain and fork extensions
        self.chain = []
        self.extensions = []
        # Create genesis block
        self.create_genesis_block()

    def create_genesis_block(self):
        """Append the genesis block (depth 0, previous_hash "0") with a valid hash."""
        genesis_block = Block(0, [], 0, "0")
        genesis_block.hash = genesis_block.compute_hash()
        self.chain.append(genesis_block)

    @property
    def last_block(self):
        """Tip of the longest (consensus) chain."""
        return self.chain[-1]

    def add_block_longest(self, block, proof):
        """Validate ``proof`` and append ``block`` to the longest chain.

        Returns False when the block does not link to the current tip or
        the proof is not a valid hash of the block.
        """
        # Reject if previous hash not accurate
        if self.last_block.hash != block.previous_hash:
            return False
        # Reject if proof is not valid hash
        if not Blockchain.is_valid_proof(block, proof):
            return False
        block.hash = proof
        self.chain.append(block)
        return True

    def add_block(
        self, block, proof, base_block
    ):
        """Attach ``block`` on top of ``base_block`` after validating ``proof``.

        :param base_block: the block receiving the potential new block.

        If ``base_block`` is the tip of the longest chain, delegates to
        :meth:`add_block_longest`. Otherwise the block is appended to
        whichever extension currently ends at ``base_block``; if none does,
        a new extension ``[base_block, block]`` is created (extensions can
        nest because of this, which is fine).
        """
        if base_block == self.last_block:
            return self.add_block_longest(block, proof)
        # Previous hash should be accurate, reject otherwise
        if base_block.hash != block.previous_hash:
            return False
        # Reject if proof is not valid hash of block
        if not Blockchain.is_valid_proof(block, proof):
            return False
        block.hash = proof
        # Bug fix: was ``range(self.extensions)``, which raises TypeError
        # (range over a list). Iterate over extension indices instead.
        for ext_idx in range(len(self.extensions)):
            # Check each extension's last block for the base block.
            if base_block == self.extensions[ext_idx][-1]:
                self.extensions[ext_idx].append(block)
                return True
        # No extension ends at base_block: start a new fork.
        self.extensions.append([base_block, block])
        return True

    def internal_consensus(self):
        """Adopt an extension that has grown longer than the current chain.

        On a switch, the displaced tail of the old chain itself becomes a
        new extension. Returns True when a switch happened, else False.
        Callers run this after every mine, so at most one switch per call
        is needed — changes are continuously incorporated.
        """
        for ext in self.extensions:
            if ext[-1].depth > self.last_block.depth:
                fork_depth = ext[0].depth
                # Preserve the chain tail that is about to be replaced.
                # NOTE(review): slicing by ``fork_depth`` assumes chain index
                # equals block depth — holds as long as depths are contiguous.
                self.extensions.append(
                    self.chain[fork_depth:]
                )
                # Drop the tail down to (and including) the fork depth, then
                # graft the longer extension, which starts at the fork block.
                while self.last_block.depth >= fork_depth:
                    self.chain.pop()
                self.chain = self.chain + ext
                return True
        # If no internal consensus update, return False
        return False

    @staticmethod
    def proof_of_work(block, work_time=None):
        """Search nonces (from 0) until the hash meets the difficulty target.

        :param work_time: optional wall-clock budget in seconds; when
            exceeded the search stops and None is returned, which lets a
            caller interleave mining with other work.
        :returns: the valid hash string, or None on timeout.
        """
        # Parse work_time None to inf
        if work_time is None:
            work_time = float('inf')
        start = time.time()
        # Start from 0, flexibility here to be debated
        block.nonce = 0
        computed_hash = block.compute_hash()
        while not computed_hash.startswith('0' * Blockchain.difficulty):
            block.nonce += 1
            computed_hash = block.compute_hash()
            # Return if out of time
            if (time.time() - start) > work_time:
                return
        return computed_hash

    def add_new_transaction(self, transaction):
        """Queue a transaction for inclusion in a future block."""
        self.outstanding_transactions.append(transaction)

    def remove_front_transactions(self):
        """Drop the transactions consumed by the most recently mined block."""
        self.outstanding_transactions = self.outstanding_transactions[Blockchain.block_capacity:]

    def get_outstanding_transactions(self):
        """Return the queue of not-yet-mined transactions."""
        return self.outstanding_transactions

    @classmethod
    def is_valid_proof(cls, block, block_hash):
        """True iff ``block_hash`` meets the difficulty target and matches the block."""
        return (block_hash.startswith('0' * Blockchain.difficulty) and
                block_hash == block.compute_hash())
| 35.787129
| 98
| 0.580302
|
from hashlib import sha256
import json
import time
import multiprocessing
import time
import numpy as np
class Block:
def __init__(
self, depth, transactions, timestamp,
previous_hash, nonce=0
):
self.depth = depth
self.transactions = transactions
self.timestamp = timestamp
self.previous_hash = previous_hash
self.nonce = nonce
def compute_hash(self):
block_str = json.dumps(self.__dict__, sort_keys=True)
return sha256(block_str.encode()).hexdigest()
def __eq__(self, other):
return self.__dict__ == other.__dict__
class Blockchain:
difficulty = 4
block_capacity = 3
def __init__(self):
self.outstanding_transactions = []
self.chain = []
self.extensions = []
self.create_genesis_block()
def create_genesis_block(self):
genesis_block = Block(0, [], 0, "0")
genesis_block.hash = genesis_block.compute_hash()
self.chain.append(genesis_block)
@property
def last_block(self):
return self.chain[-1]
def add_block_longest(self, block, proof):
if self.last_block.hash != block.previous_hash:
return False
if not Blockchain.is_valid_proof(block, proof):
return False
block.hash = proof
self.chain.append(block)
return True
def add_block(
self, block, proof, base_block
):
if base_block == self.last_block:
return self.add_block_longest(block, proof)
if base_block.hash != block.previous_hash:
return False
if not Blockchain.is_valid_proof(block, proof):
return False
block.hash = proof
# Check all extensions for the base block
# See add_block.[1]
for ext_idx in range(self.extensions):
# Check each last block in extensions
if base_block == self.extensions[ext_idx][-1]:
# If found, proceed there
self.extensions[ext_idx].append(block)
return True
# If not found there, create extension
self.extensions.append([base_block, block])
return True
def internal_consensus(self):
for ext in self.extensions:
if ext[-1].depth > self.last_block.depth:
fork_depth = ext[0].depth
# Create new extension with chain to be
# dumped
self.extensions.append(
self.chain[fork_depth:]
)
# Remove and store chain tail until
# depth of fork node, then add extension
# tail to now have longest chain
while self.last_block.depth >= fork_depth:
self.chain.pop()
self.chain = self.chain + ext
# See internal_consensus.[1]
return True
# If no internal consensus update, return False
return False
@staticmethod
def proof_of_work(block, work_time = None):
# Parse work_time None to inf
if work_time is None:
work_time = float('inf')
start = time.time()
# Start from 0, flexibility here to be debated
block.nonce = 0
# Do computational work
computed_hash = block.compute_hash()
while not computed_hash.startswith('0' * Blockchain.difficulty):
block.nonce += 1
computed_hash = block.compute_hash()
# Return if out of time
if (time.time() - start) > work_time:
return
# Return good hash
return computed_hash
def add_new_transaction(self, transaction):
self.outstanding_transactions.append(transaction)
def remove_front_transactions(self):
self.outstanding_transactions = self.outstanding_transactions[Blockchain.block_capacity:]
def get_outstanding_transactions(self):
return self.outstanding_transactions
@classmethod
def is_valid_proof(cls, block, block_hash):
return (block_hash.startswith('0' * Blockchain.difficulty) and
block_hash == block.compute_hash())
| true
| true
|
f7187f5b5ab4da7e46565bfd415acf79f33f3db2
| 25,583
|
py
|
Python
|
test/with_dummyserver/test_poolmanager.py
|
pquentin/hip
|
89c766d0782f016baeda236149f29477f7237eed
|
[
"MIT"
] | null | null | null |
test/with_dummyserver/test_poolmanager.py
|
pquentin/hip
|
89c766d0782f016baeda236149f29477f7237eed
|
[
"MIT"
] | 1
|
2020-01-21T06:48:37.000Z
|
2020-01-21T06:48:37.000Z
|
test/with_dummyserver/test_poolmanager.py
|
pquentin/hip
|
89c766d0782f016baeda236149f29477f7237eed
|
[
"MIT"
] | null | null | null |
import io
import json
import time
import pytest
from dummyserver.server import HAS_IPV6
from dummyserver.testcase import HTTPDummyServerTestCase, IPv6HTTPDummyServerTestCase
from hip.base import DEFAULT_PORTS
from hip.poolmanager import PoolManager
from hip.exceptions import MaxRetryError, NewConnectionError, UnrewindableBodyError
from hip.util.retry import Retry, RequestHistory
from test import LONG_TIMEOUT
# Retry failed tests
pytestmark = pytest.mark.flaky
class TestPoolManager(HTTPDummyServerTestCase):
@classmethod
def setup_class(cls):
    """Build base URLs for the primary and alternate dummy-server hosts."""
    super(TestPoolManager, cls).setup_class()
    cls.base_url = "http://%s:%d" % (cls.host, cls.port)
    cls.base_url_alt = "http://%s:%d" % (cls.host_alt, cls.port)
def test_redirect(self):
    """A 303 is surfaced when redirect=False, and followed by default."""
    redirect_url = "%s/redirect" % self.base_url
    with PoolManager() as http:
        resp = http.request(
            "GET",
            redirect_url,
            fields={"target": "%s/" % self.base_url},
            redirect=False,
        )
        assert resp.status == 303

        resp = http.request(
            "GET",
            redirect_url,
            fields={"target": "%s/" % self.base_url},
        )
        assert resp.status == 200
        assert resp.data == b"Dummy server!"
def test_redirect_twice(self):
    """Two chained redirects are followed when redirects are enabled."""
    redirect_url = "%s/redirect" % self.base_url
    with PoolManager() as http:
        resp = http.request(
            "GET",
            redirect_url,
            fields={"target": "%s/redirect" % self.base_url},
            redirect=False,
        )
        assert resp.status == 303

        resp = http.request(
            "GET",
            redirect_url,
            fields={
                "target": "%s/redirect?target=%s/" % (self.base_url, self.base_url)
            },
        )
        assert resp.status == 200
        assert resp.data == b"Dummy server!"
def test_redirect_to_relative_url(self):
    """Relative-URL redirect targets resolve against the original host."""
    redirect_url = "%s/redirect" % self.base_url
    with PoolManager() as http:
        resp = http.request(
            "GET",
            redirect_url,
            fields={"target": "/redirect"},
            redirect=False,
        )
        assert resp.status == 303

        resp = http.request(
            "GET", redirect_url, fields={"target": "/redirect"}
        )
        assert resp.status == 200
        assert resp.data == b"Dummy server!"
def test_cross_host_redirect(self):
    """Cross-host redirects fail with retries=0 but succeed with retries=1."""
    with PoolManager() as http:
        cross_host_location = "%s/echo?a=b" % self.base_url_alt
        # Without a redirect retry budget the cross-host hop must raise.
        with pytest.raises(MaxRetryError):
            http.request(
                "GET",
                "%s/redirect" % self.base_url,
                fields={"target": cross_host_location},
                timeout=LONG_TIMEOUT,
                retries=0,
            )

        # One retry allows the hop; the response is served by the alt
        # host's connection pool.
        resp = http.request(
            "GET",
            "%s/redirect" % self.base_url,
            fields={"target": "%s/echo?a=b" % self.base_url_alt},
            timeout=LONG_TIMEOUT,
            retries=1,
        )
        assert resp._pool.host == self.host_alt
def test_too_many_redirects(self):
    """Exhausting the redirect budget raises MaxRetryError (int and Retry forms)."""
    loop_fields = {
        "target": "%s/redirect?target=%s/" % (self.base_url, self.base_url)
    }
    with PoolManager() as http:
        # Budget expressed as a plain integer.
        with pytest.raises(MaxRetryError):
            http.request(
                "GET",
                "%s/redirect" % self.base_url,
                fields=loop_fields,
                retries=1,
            )

        # Budget expressed via a Retry object's redirect counter.
        with pytest.raises(MaxRetryError):
            http.request(
                "GET",
                "%s/redirect" % self.base_url,
                fields=loop_fields,
                retries=Retry(total=None, redirect=1),
            )
def test_redirect_cross_host_remove_headers(self):
    """Authorization is stripped case-insensitively on cross-host redirects."""
    redirect_url = "%s/redirect" % self.base_url
    alt_headers_target = {"target": "%s/headers" % self.base_url_alt}
    with PoolManager() as http:
        resp = http.request(
            "GET",
            redirect_url,
            fields=alt_headers_target,
            headers={"Authorization": "foo"},
        )
        assert resp.status == 200
        sent_headers = json.loads(resp.data.decode("utf-8"))
        assert "Authorization" not in sent_headers

        # Lower-case spelling must be stripped just the same.
        resp = http.request(
            "GET",
            redirect_url,
            fields=alt_headers_target,
            headers={"authorization": "foo"},
        )
        assert resp.status == 200
        sent_headers = json.loads(resp.data.decode("utf-8"))
        assert "authorization" not in sent_headers
        assert "Authorization" not in sent_headers
def test_redirect_cross_host_no_remove_headers(self):
    """An empty remove_headers_on_redirect list keeps Authorization intact."""
    with PoolManager() as http:
        resp = http.request(
            "GET",
            "%s/redirect" % self.base_url,
            fields={"target": "%s/headers" % self.base_url_alt},
            headers={"Authorization": "foo"},
            retries=Retry(remove_headers_on_redirect=[]),
        )
        assert resp.status == 200
        sent_headers = json.loads(resp.data.decode("utf-8"))
        assert sent_headers["Authorization"] == "foo"
    def test_redirect_cross_host_set_removed_headers(self):
        """A custom remove_headers_on_redirect list replaces the default set."""
        with PoolManager() as http:
            r = http.request(
                "GET",
                "%s/redirect" % self.base_url,
                fields={"target": "%s/headers" % self.base_url_alt},
                headers={"X-API-Secret": "foo", "Authorization": "bar"},
                retries=Retry(remove_headers_on_redirect=["X-API-Secret"]),
            )
            assert r.status == 200
            data = json.loads(r.data.decode("utf-8"))
            # Only the listed header is stripped; Authorization survives
            # because it is no longer in the removal list.
            assert "X-API-Secret" not in data
            assert data["Authorization"] == "bar"
            # The custom removal list must also match case-insensitively.
            r = http.request(
                "GET",
                "%s/redirect" % self.base_url,
                fields={"target": "%s/headers" % self.base_url_alt},
                headers={"x-api-secret": "foo", "authorization": "bar"},
                retries=Retry(remove_headers_on_redirect=["X-API-Secret"]),
            )
            assert r.status == 200
            data = json.loads(r.data.decode("utf-8"))
            assert "x-api-secret" not in data
            assert "X-API-Secret" not in data
            assert data["Authorization"] == "bar"
    def test_raise_on_redirect(self):
        """raise_on_redirect=False returns the last redirect response instead of raising."""
        with PoolManager() as http:
            r = http.request(
                "GET",
                "%s/redirect" % self.base_url,
                fields={
                    "target": "%s/redirect?target=%s/" % (self.base_url, self.base_url)
                },
                # Redirect budget (1) is exhausted by the two-hop chain, but
                # raise_on_redirect=False suppresses the MaxRetryError.
                retries=Retry(total=None, redirect=1, raise_on_redirect=False),
            )
            assert r.status == 303
def test_raise_on_status(self):
with PoolManager() as http:
with pytest.raises(MaxRetryError):
# the default is to raise
r = http.request(
"GET",
"%s/status" % self.base_url,
fields={"status": "500 Internal Server Error"},
retries=Retry(total=1, status_forcelist=range(500, 600)),
)
with pytest.raises(MaxRetryError):
# raise explicitly
r = http.request(
"GET",
"%s/status" % self.base_url,
fields={"status": "500 Internal Server Error"},
retries=Retry(
total=1, status_forcelist=range(500, 600), raise_on_status=True
),
)
# don't raise
r = http.request(
"GET",
"%s/status" % self.base_url,
fields={"status": "500 Internal Server Error"},
retries=Retry(
total=1, status_forcelist=range(500, 600), raise_on_status=False
),
)
assert r.status == 500
    def test_missing_port(self):
        """A URL without an explicit port resolves via DEFAULT_PORTS."""
        # Can a URL that lacks an explicit port like ':80' succeed, or
        # will all such URLs fail with an error?
        with PoolManager() as http:
            # By globally adjusting `DEFAULT_PORTS` we pretend for a moment
            # that HTTP's default port is not 80, but is the port at which
            # our test server happens to be listening.
            DEFAULT_PORTS["http"] = self.port
            try:
                r = http.request("GET", "http://%s/" % self.host, retries=0)
            finally:
                # Always restore the global default so other tests are unaffected.
                DEFAULT_PORTS["http"] = 80
            assert r.status == 200
            assert r.data == b"Dummy server!"
    def test_headers(self):
        """Pool-level default headers are sent by every request helper and are
        replaced (not merged) by per-request headers."""
        with PoolManager(headers={"Foo": "bar"}) as http:
            r = http.request("GET", "%s/headers" % self.base_url)
            returned_headers = json.loads(r.data.decode())
            assert returned_headers.get("Foo") == "bar"
            r = http.request("POST", "%s/headers" % self.base_url)
            returned_headers = json.loads(r.data.decode())
            assert returned_headers.get("Foo") == "bar"
            r = http.request_encode_url("GET", "%s/headers" % self.base_url)
            returned_headers = json.loads(r.data.decode())
            assert returned_headers.get("Foo") == "bar"
            r = http.request_encode_body("POST", "%s/headers" % self.base_url)
            returned_headers = json.loads(r.data.decode())
            assert returned_headers.get("Foo") == "bar"
            # Explicit per-request headers override the pool defaults entirely:
            # "Foo" is absent, only "Baz" is sent.
            r = http.request_encode_url(
                "GET", "%s/headers" % self.base_url, headers={"Baz": "quux"}
            )
            returned_headers = json.loads(r.data.decode())
            assert returned_headers.get("Foo") is None
            assert returned_headers.get("Baz") == "quux"
            r = http.request_encode_body(
                "GET", "%s/headers" % self.base_url, headers={"Baz": "quux"}
            )
            returned_headers = json.loads(r.data.decode())
            assert returned_headers.get("Foo") is None
            assert returned_headers.get("Baz") == "quux"
    def test_http_with_ssl_keywords(self):
        """SSL-only keywords (ca_certs) must not break plain-HTTP requests."""
        with PoolManager(ca_certs="REQUIRED") as http:
            r = http.request("GET", "http://%s:%s/" % (self.host, self.port))
            assert r.status == 200
    def test_http_with_ca_cert_dir(self):
        """A bogus ca_cert_dir is harmless over plain HTTP (TLS args unused)."""
        with PoolManager(ca_certs="REQUIRED", ca_cert_dir="/nosuchdir") as http:
            r = http.request("GET", "http://%s:%s/" % (self.host, self.port))
            assert r.status == 200
    def test_cleanup_on_connection_error(self):
        """
        Test that connections are recycled to the pool on
        connection errors where no http response is received.
        """
        poolsize = 3
        with PoolManager(maxsize=poolsize, block=True) as http:
            pool = http.connection_from_host(self.host, self.port)
            assert pool.pool.qsize() == poolsize
            # force a connection error by supplying a non-existent
            # url. We won't get a response for this and so the
            # conn won't be implicitly returned to the pool.
            url = "%s/redirect" % self.base_url
            with pytest.raises(MaxRetryError):
                http.request("GET", url, fields={"target": "/"}, retries=0)
            r = http.request("GET", url, fields={"target": "/"}, retries=1)
            # Explicitly hand the connection back before measuring pool size.
            r.release_conn()
            # the pool should still contain poolsize elements
            assert pool.pool.qsize() == poolsize
class TestRetry(HTTPDummyServerTestCase):
    """Retry behavior of PoolManager: budgets, whitelists, and history."""
    @classmethod
    def setup_class(self):
        # NOTE(review): parameter is conventionally named 'cls' for a
        # classmethod; here 'self' is bound to the class object.
        super(TestRetry, self).setup_class()
        self.base_url = "http://%s:%d" % (self.host, self.port)
        self.base_url_alt = "http://%s:%d" % (self.host_alt, self.port)
    def test_max_retry(self):
        """retries=0 means even a single redirect exhausts the budget."""
        with PoolManager() as http:
            with pytest.raises(MaxRetryError):
                http.request(
                    "GET",
                    "%s/redirect" % self.base_url,
                    fields={"target": "/"},
                    retries=0,
                )
    def test_disabled_retry(self):
        """ Disabled retries should disable redirect handling. """
        with PoolManager() as http:
            r = http.request(
                "GET",
                "%s/redirect" % self.base_url,
                fields={"target": "/"},
                retries=False,
            )
            assert r.status == 303
            r = http.request(
                "GET",
                "%s/redirect" % self.base_url,
                fields={"target": "/"},
                retries=Retry(redirect=False),
            )
            assert r.status == 303
            # With retries disabled, connection failures surface directly
            # as NewConnectionError instead of being wrapped/retried.
            with pytest.raises(NewConnectionError):
                http.request(
                    "GET",
                    "http://thishostdoesnotexist.invalid/",
                    timeout=0.001,
                    retries=False,
                )
    def test_read_retries(self):
        """ Should retry for status codes in the whitelist """
        retry = Retry(read=1, status_forcelist=[418])
        with PoolManager() as http:
            resp = http.request(
                "GET",
                "%s/successful_retry" % self.base_url,
                headers={"test-name": "test_read_retries"},
                retries=retry,
            )
            assert resp.status == 200
    def test_read_total_retries(self):
        """ HTTP response w/ status code in the whitelist should be retried """
        headers = {"test-name": "test_read_total_retries"}
        retry = Retry(total=1, status_forcelist=[418])
        with PoolManager() as http:
            resp = http.request(
                "GET",
                "%s/successful_retry" % self.base_url,
                headers=headers,
                retries=retry,
            )
            assert resp.status == 200
    def test_retries_wrong_whitelist(self):
        """HTTP response w/ status code not in whitelist shouldn't be retried"""
        retry = Retry(total=1, status_forcelist=[202])
        with PoolManager() as http:
            resp = http.request(
                "GET",
                "%s/successful_retry" % self.base_url,
                headers={"test-name": "test_wrong_whitelist"},
                retries=retry,
            )
            # 418 is not in the forcelist, so the first response is returned.
            assert resp.status == 418
    def test_default_method_whitelist_retried(self):
        """Hip should retry methods in the default method whitelist"""
        retry = Retry(total=1, status_forcelist=[418])
        with PoolManager() as http:
            resp = http.request(
                "OPTIONS",
                "%s/successful_retry" % self.base_url,
                headers={"test-name": "test_default_whitelist"},
                retries=retry,
            )
            assert resp.status == 200
    def test_retries_wrong_method_list(self):
        """Method not in our whitelist should not be retried, even if code matches"""
        headers = {"test-name": "test_wrong_method_whitelist"}
        retry = Retry(total=1, status_forcelist=[418], method_whitelist=["POST"])
        with PoolManager() as http:
            resp = http.request(
                "GET",
                "%s/successful_retry" % self.base_url,
                headers=headers,
                retries=retry,
            )
            assert resp.status == 418
    def test_read_retries_unsuccessful(self):
        """An integer retries value does not retry on status codes."""
        headers = {"test-name": "test_read_retries_unsuccessful"}
        with PoolManager() as http:
            resp = http.request(
                "GET", "%s/successful_retry" % self.base_url, headers=headers, retries=1
            )
            assert resp.status == 418
    def test_retry_reuse_safe(self):
        """ It should be possible to reuse a Retry object across requests """
        headers = {"test-name": "test_retry_safe"}
        retry = Retry(total=1, status_forcelist=[418])
        with PoolManager() as http:
            resp = http.request(
                "GET",
                "%s/successful_retry" % self.base_url,
                headers=headers,
                retries=retry,
            )
            assert resp.status == 200
            # Second use of the same Retry object must behave identically
            # (Retry instances are immutable; retries produce new copies).
            resp = http.request(
                "GET",
                "%s/successful_retry" % self.base_url,
                headers=headers,
                retries=retry,
            )
            assert resp.status == 200
    def test_retry_return_in_response(self):
        """The response exposes the effective Retry state and its history."""
        headers = {"test-name": "test_retry_return_in_response"}
        retry = Retry(total=2, status_forcelist=[418])
        with PoolManager() as http:
            resp = http.request(
                "GET",
                "%s/successful_retry" % self.base_url,
                headers=headers,
                retries=retry,
            )
            assert resp.status == 200
            # One retry was consumed out of total=2.
            assert resp.retries.total == 1
            assert resp.retries.history == (
                RequestHistory("GET", "/successful_retry", None, 418, None),
            )
    def test_retry_redirect_history(self):
        """A followed redirect is recorded in retries.history."""
        with PoolManager() as http:
            resp = http.request(
                "GET", "%s/redirect" % self.base_url, fields={"target": "/"}
            )
            assert resp.status == 200
            assert resp.retries.history == (
                RequestHistory(
                    "GET", self.base_url + "/redirect?target=%2F", None, 303, "/"
                ),
            )
    def test_multi_redirect_history(self):
        """Every hop of a multi-step redirect chain appears in history, in order."""
        with PoolManager() as http:
            r = http.request(
                "GET",
                "%s/multi_redirect" % self.base_url,
                fields={"redirect_codes": "303,302,200"},
                redirect=False,
            )
            # redirect=False: no hops followed, so history stays empty.
            assert r.status == 303
            assert r.retries.history == tuple()
            r = http.request(
                "GET",
                "%s/multi_redirect" % self.base_url,
                retries=10,
                fields={"redirect_codes": "303,302,301,307,302,200"},
            )
            assert r.status == 200
            assert r.data == b"Done redirecting"
            # Each entry pairs the hop's status code with the location it
            # redirected to; the final 200 is not a redirect and is absent.
            expected = [
                (303, "/multi_redirect?redirect_codes=302,301,307,302,200"),
                (302, "/multi_redirect?redirect_codes=301,307,302,200"),
                (301, "/multi_redirect?redirect_codes=307,302,200"),
                (307, "/multi_redirect?redirect_codes=302,200"),
                (302, "/multi_redirect?redirect_codes=200"),
            ]
            actual = [
                (history.status, history.redirect_location)
                for history in r.retries.history
            ]
            assert actual == expected
    def test_redirect_put_file(self):
        """PUT with file object should work with a redirection response"""
        retry = Retry(total=3, status_forcelist=[418])
        # httplib reads in 8k chunks; use a larger content length
        content_length = 65535
        data = b"A" * content_length
        uploaded_file = io.BytesIO(data)
        headers = {
            "test-name": "test_redirect_put_file",
            "Content-Length": str(content_length),
        }
        url = "%s/redirect?target=/echo&status=307" % self.base_url
        with PoolManager() as http:
            # 307 preserves the method, so the body must be rewound and
            # re-sent in full after the redirect.
            resp = http.urlopen(
                "PUT", url, headers=headers, retries=retry, body=uploaded_file
            )
            assert resp.status == 200
            assert resp.data == data
class TestRetryAfter(HTTPDummyServerTestCase):
    """Handling of the Retry-After header for 429/503 and redirects."""
    @classmethod
    def setup_class(self):
        # NOTE(review): parameter is conventionally named 'cls' for a
        # classmethod; here 'self' is bound to the class object.
        super(TestRetryAfter, self).setup_class()
        self.base_url = "http://%s:%d" % (self.host, self.port)
        self.base_url_alt = "http://%s:%d" % (self.host_alt, self.port)
    def test_retry_after(self):
        """With retries enabled, Retry-After on 429/503 is honored and retried."""
        url = "%s/retry_after" % self.base_url
        with PoolManager() as http:
            # Request twice in a second to get a 429 response.
            r = http.request(
                "GET", url, fields={"status": "429 Too Many Requests"}, retries=False
            )
            r = http.request(
                "GET", url, fields={"status": "429 Too Many Requests"}, retries=False
            )
            assert r.status == 429
            r = http.request(
                "GET", url, fields={"status": "429 Too Many Requests"}, retries=True
            )
            assert r.status == 200
            # Request twice in a second to get a 503 response.
            r = http.request(
                "GET", url, fields={"status": "503 Service Unavailable"}, retries=False
            )
            r = http.request(
                "GET", url, fields={"status": "503 Service Unavailable"}, retries=False
            )
            assert r.status == 503
            r = http.request(
                "GET", url, fields={"status": "503 Service Unavailable"}, retries=True
            )
            assert r.status == 200
            # Ignore Retry-After header on status which is not defined in
            # Retry.RETRY_AFTER_STATUS_CODES.
            r = http.request(
                "GET", url, fields={"status": "418 I'm a teapot"}, retries=True
            )
            assert r.status == 418
    def test_redirect_after(self):
        """Retry-After on a redirect delays following it (unless in the past)."""
        with PoolManager() as http:
            r = http.request("GET", "%s/redirect_after" % self.base_url, retries=False)
            assert r.status == 303
            t = time.time()
            r = http.request("GET", "%s/redirect_after" % self.base_url)
            assert r.status == 200
            delta = time.time() - t
            # The server's Retry-After forces at least a one-second wait.
            assert delta >= 1
            t = time.time()
            timestamp = t + 2
            r = http.request(
                "GET", self.base_url + "/redirect_after?date=" + str(timestamp)
            )
            assert r.status == 200
            delta = time.time() - t
            assert delta >= 1
            # Retry-After is past
            t = time.time()
            timestamp = t - 1
            r = http.request(
                "GET", self.base_url + "/redirect_after?date=" + str(timestamp)
            )
            delta = time.time() - t
            assert r.status == 200
            # A past date means no sleeping at all before following.
            assert delta < 1
class TestFileBodiesOnRetryOrRedirect(HTTPDummyServerTestCase):
    """Rewinding of file-like request bodies across retries and redirects."""
    # @classmethod added for consistency with the sibling test classes in this
    # file (TestRetry, TestRetryAfter); pytest invokes setup_class with the
    # class object either way, so behavior is unchanged.
    @classmethod
    def setup_class(cls):
        super(TestFileBodiesOnRetryOrRedirect, cls).setup_class()
        cls.base_url = "http://%s:%d" % (cls.host, cls.port)
        cls.base_url_alt = "http://%s:%d" % (cls.host_alt, cls.port)
    def test_retries_put_filehandle(self):
        """HTTP PUT retry with a file-like object should not timeout"""
        retry = Retry(total=3, status_forcelist=[418])
        # httplib reads in 8k chunks; use a larger content length
        content_length = 65535
        data = b"A" * content_length
        uploaded_file = io.BytesIO(data)
        headers = {
            "test-name": "test_retries_put_filehandle",
            "Content-Length": str(content_length),
        }
        with PoolManager() as http:
            # The body must be rewound to position 0 before each retry so the
            # full payload is re-sent.
            resp = http.urlopen(
                "PUT",
                "%s/successful_retry" % self.base_url,
                headers=headers,
                retries=retry,
                body=uploaded_file,
                redirect=False,
            )
            assert resp.status == 200
    def test_redirect_with_failed_tell(self):
        """Abort request if failed to get a position from tell()"""
        class BadTellObject(io.BytesIO):
            # A body whose position cannot be determined cannot be rewound.
            def tell(self):
                raise IOError
        body = BadTellObject(b"the data")
        url = "%s/redirect?target=/successful_retry" % self.base_url
        # httplib uses fileno if Content-Length isn't supplied,
        # which is unsupported by BytesIO.
        headers = {"Content-Length": "8"}
        with PoolManager() as http:
            with pytest.raises(UnrewindableBodyError) as e:
                http.urlopen("PUT", url, headers=headers, body=body)
            assert "Unable to record file position for" in str(e.value)
    @pytest.mark.parametrize(
        ["target", "expected_target"],
        [
            ("/echo_uri?q=1#fragment", b"/echo_uri?q=1"),
            ("/echo_uri?#", b"/echo_uri?"),
            ("/echo_uri#?", b"/echo_uri"),
            ("/echo_uri#?#", b"/echo_uri"),
            ("/echo_uri??#", b"/echo_uri??"),
            ("/echo_uri?%3f#", b"/echo_uri?%3F"),
            ("/echo_uri?%3F#", b"/echo_uri?%3F"),
            ("/echo_uri?[]", b"/echo_uri?%5B%5D"),
        ],
    )
    def test_encode_http_target(self, target, expected_target):
        """The request target is percent-normalized and the fragment dropped."""
        with PoolManager() as http:
            url = "http://%s:%d%s" % (self.host, self.port, target)
            r = http.request("GET", url)
            assert r.data == expected_target
@pytest.mark.skipif(not HAS_IPV6, reason="IPv6 is not supported on this system")
class TestIPv6PoolManager(IPv6HTTPDummyServerTestCase):
    """Smoke test: PoolManager can talk to a bracketed-IPv6-literal URL."""
    @classmethod
    def setup_class(cls):
        super(TestIPv6PoolManager, cls).setup_class()
        # IPv6 literals must be bracketed in URLs.
        cls.base_url = "http://[%s]:%d" % (cls.host, cls.port)
    def test_ipv6(self):
        """A plain GET against the IPv6 dummy server succeeds without error."""
        with PoolManager() as http:
            http.request("GET", self.base_url)
| 35.581363
| 88
| 0.517883
|
import io
import json
import time
import pytest
from dummyserver.server import HAS_IPV6
from dummyserver.testcase import HTTPDummyServerTestCase, IPv6HTTPDummyServerTestCase
from hip.base import DEFAULT_PORTS
from hip.poolmanager import PoolManager
from hip.exceptions import MaxRetryError, NewConnectionError, UnrewindableBodyError
from hip.util.retry import Retry, RequestHistory
from test import LONG_TIMEOUT
pytestmark = pytest.mark.flaky
class TestPoolManager(HTTPDummyServerTestCase):
@classmethod
def setup_class(cls):
super(TestPoolManager, cls).setup_class()
cls.base_url = "http://%s:%d" % (cls.host, cls.port)
cls.base_url_alt = "http://%s:%d" % (cls.host_alt, cls.port)
def test_redirect(self):
with PoolManager() as http:
r = http.request(
"GET",
"%s/redirect" % self.base_url,
fields={"target": "%s/" % self.base_url},
redirect=False,
)
assert r.status == 303
r = http.request(
"GET",
"%s/redirect" % self.base_url,
fields={"target": "%s/" % self.base_url},
)
assert r.status == 200
assert r.data == b"Dummy server!"
def test_redirect_twice(self):
with PoolManager() as http:
r = http.request(
"GET",
"%s/redirect" % self.base_url,
fields={"target": "%s/redirect" % self.base_url},
redirect=False,
)
assert r.status == 303
r = http.request(
"GET",
"%s/redirect" % self.base_url,
fields={
"target": "%s/redirect?target=%s/" % (self.base_url, self.base_url)
},
)
assert r.status == 200
assert r.data == b"Dummy server!"
def test_redirect_to_relative_url(self):
with PoolManager() as http:
r = http.request(
"GET",
"%s/redirect" % self.base_url,
fields={"target": "/redirect"},
redirect=False,
)
assert r.status == 303
r = http.request(
"GET", "%s/redirect" % self.base_url, fields={"target": "/redirect"}
)
assert r.status == 200
assert r.data == b"Dummy server!"
def test_cross_host_redirect(self):
with PoolManager() as http:
cross_host_location = "%s/echo?a=b" % self.base_url_alt
with pytest.raises(MaxRetryError):
http.request(
"GET",
"%s/redirect" % self.base_url,
fields={"target": cross_host_location},
timeout=LONG_TIMEOUT,
retries=0,
)
r = http.request(
"GET",
"%s/redirect" % self.base_url,
fields={"target": "%s/echo?a=b" % self.base_url_alt},
timeout=LONG_TIMEOUT,
retries=1,
)
assert r._pool.host == self.host_alt
def test_too_many_redirects(self):
with PoolManager() as http:
with pytest.raises(MaxRetryError):
http.request(
"GET",
"%s/redirect" % self.base_url,
fields={
"target": "%s/redirect?target=%s/"
% (self.base_url, self.base_url)
},
retries=1,
)
with pytest.raises(MaxRetryError):
http.request(
"GET",
"%s/redirect" % self.base_url,
fields={
"target": "%s/redirect?target=%s/"
% (self.base_url, self.base_url)
},
retries=Retry(total=None, redirect=1),
)
def test_redirect_cross_host_remove_headers(self):
with PoolManager() as http:
r = http.request(
"GET",
"%s/redirect" % self.base_url,
fields={"target": "%s/headers" % self.base_url_alt},
headers={"Authorization": "foo"},
)
assert r.status == 200
data = json.loads(r.data.decode("utf-8"))
assert "Authorization" not in data
r = http.request(
"GET",
"%s/redirect" % self.base_url,
fields={"target": "%s/headers" % self.base_url_alt},
headers={"authorization": "foo"},
)
assert r.status == 200
data = json.loads(r.data.decode("utf-8"))
assert "authorization" not in data
assert "Authorization" not in data
def test_redirect_cross_host_no_remove_headers(self):
with PoolManager() as http:
r = http.request(
"GET",
"%s/redirect" % self.base_url,
fields={"target": "%s/headers" % self.base_url_alt},
headers={"Authorization": "foo"},
retries=Retry(remove_headers_on_redirect=[]),
)
assert r.status == 200
data = json.loads(r.data.decode("utf-8"))
assert data["Authorization"] == "foo"
def test_redirect_cross_host_set_removed_headers(self):
with PoolManager() as http:
r = http.request(
"GET",
"%s/redirect" % self.base_url,
fields={"target": "%s/headers" % self.base_url_alt},
headers={"X-API-Secret": "foo", "Authorization": "bar"},
retries=Retry(remove_headers_on_redirect=["X-API-Secret"]),
)
assert r.status == 200
data = json.loads(r.data.decode("utf-8"))
assert "X-API-Secret" not in data
assert data["Authorization"] == "bar"
r = http.request(
"GET",
"%s/redirect" % self.base_url,
fields={"target": "%s/headers" % self.base_url_alt},
headers={"x-api-secret": "foo", "authorization": "bar"},
retries=Retry(remove_headers_on_redirect=["X-API-Secret"]),
)
assert r.status == 200
data = json.loads(r.data.decode("utf-8"))
assert "x-api-secret" not in data
assert "X-API-Secret" not in data
assert data["Authorization"] == "bar"
def test_raise_on_redirect(self):
with PoolManager() as http:
r = http.request(
"GET",
"%s/redirect" % self.base_url,
fields={
"target": "%s/redirect?target=%s/" % (self.base_url, self.base_url)
},
retries=Retry(total=None, redirect=1, raise_on_redirect=False),
)
assert r.status == 303
def test_raise_on_status(self):
with PoolManager() as http:
with pytest.raises(MaxRetryError):
r = http.request(
"GET",
"%s/status" % self.base_url,
fields={"status": "500 Internal Server Error"},
retries=Retry(total=1, status_forcelist=range(500, 600)),
)
with pytest.raises(MaxRetryError):
r = http.request(
"GET",
"%s/status" % self.base_url,
fields={"status": "500 Internal Server Error"},
retries=Retry(
total=1, status_forcelist=range(500, 600), raise_on_status=True
),
)
r = http.request(
"GET",
"%s/status" % self.base_url,
fields={"status": "500 Internal Server Error"},
retries=Retry(
total=1, status_forcelist=range(500, 600), raise_on_status=False
),
)
assert r.status == 500
def test_missing_port(self):
# Can a URL that lacks an explicit port like ':80' succeed, or
# will all such URLs fail with an error?
with PoolManager() as http:
# By globally adjusting `DEFAULT_PORTS` we pretend for a moment
# that HTTP's default port is not 80, but is the port at which
DEFAULT_PORTS["http"] = self.port
try:
r = http.request("GET", "http://%s/" % self.host, retries=0)
finally:
DEFAULT_PORTS["http"] = 80
assert r.status == 200
assert r.data == b"Dummy server!"
def test_headers(self):
with PoolManager(headers={"Foo": "bar"}) as http:
r = http.request("GET", "%s/headers" % self.base_url)
returned_headers = json.loads(r.data.decode())
assert returned_headers.get("Foo") == "bar"
r = http.request("POST", "%s/headers" % self.base_url)
returned_headers = json.loads(r.data.decode())
assert returned_headers.get("Foo") == "bar"
r = http.request_encode_url("GET", "%s/headers" % self.base_url)
returned_headers = json.loads(r.data.decode())
assert returned_headers.get("Foo") == "bar"
r = http.request_encode_body("POST", "%s/headers" % self.base_url)
returned_headers = json.loads(r.data.decode())
assert returned_headers.get("Foo") == "bar"
r = http.request_encode_url(
"GET", "%s/headers" % self.base_url, headers={"Baz": "quux"}
)
returned_headers = json.loads(r.data.decode())
assert returned_headers.get("Foo") is None
assert returned_headers.get("Baz") == "quux"
r = http.request_encode_body(
"GET", "%s/headers" % self.base_url, headers={"Baz": "quux"}
)
returned_headers = json.loads(r.data.decode())
assert returned_headers.get("Foo") is None
assert returned_headers.get("Baz") == "quux"
def test_http_with_ssl_keywords(self):
with PoolManager(ca_certs="REQUIRED") as http:
r = http.request("GET", "http://%s:%s/" % (self.host, self.port))
assert r.status == 200
def test_http_with_ca_cert_dir(self):
with PoolManager(ca_certs="REQUIRED", ca_cert_dir="/nosuchdir") as http:
r = http.request("GET", "http://%s:%s/" % (self.host, self.port))
assert r.status == 200
def test_cleanup_on_connection_error(self):
poolsize = 3
with PoolManager(maxsize=poolsize, block=True) as http:
pool = http.connection_from_host(self.host, self.port)
assert pool.pool.qsize() == poolsize
# conn won't be implicitly returned to the pool.
url = "%s/redirect" % self.base_url
with pytest.raises(MaxRetryError):
http.request("GET", url, fields={"target": "/"}, retries=0)
r = http.request("GET", url, fields={"target": "/"}, retries=1)
r.release_conn()
assert pool.pool.qsize() == poolsize
class TestRetry(HTTPDummyServerTestCase):
@classmethod
def setup_class(self):
super(TestRetry, self).setup_class()
self.base_url = "http://%s:%d" % (self.host, self.port)
self.base_url_alt = "http://%s:%d" % (self.host_alt, self.port)
def test_max_retry(self):
with PoolManager() as http:
with pytest.raises(MaxRetryError):
http.request(
"GET",
"%s/redirect" % self.base_url,
fields={"target": "/"},
retries=0,
)
def test_disabled_retry(self):
with PoolManager() as http:
r = http.request(
"GET",
"%s/redirect" % self.base_url,
fields={"target": "/"},
retries=False,
)
assert r.status == 303
r = http.request(
"GET",
"%s/redirect" % self.base_url,
fields={"target": "/"},
retries=Retry(redirect=False),
)
assert r.status == 303
with pytest.raises(NewConnectionError):
http.request(
"GET",
"http://thishostdoesnotexist.invalid/",
timeout=0.001,
retries=False,
)
def test_read_retries(self):
retry = Retry(read=1, status_forcelist=[418])
with PoolManager() as http:
resp = http.request(
"GET",
"%s/successful_retry" % self.base_url,
headers={"test-name": "test_read_retries"},
retries=retry,
)
assert resp.status == 200
def test_read_total_retries(self):
headers = {"test-name": "test_read_total_retries"}
retry = Retry(total=1, status_forcelist=[418])
with PoolManager() as http:
resp = http.request(
"GET",
"%s/successful_retry" % self.base_url,
headers=headers,
retries=retry,
)
assert resp.status == 200
def test_retries_wrong_whitelist(self):
retry = Retry(total=1, status_forcelist=[202])
with PoolManager() as http:
resp = http.request(
"GET",
"%s/successful_retry" % self.base_url,
headers={"test-name": "test_wrong_whitelist"},
retries=retry,
)
assert resp.status == 418
def test_default_method_whitelist_retried(self):
retry = Retry(total=1, status_forcelist=[418])
with PoolManager() as http:
resp = http.request(
"OPTIONS",
"%s/successful_retry" % self.base_url,
headers={"test-name": "test_default_whitelist"},
retries=retry,
)
assert resp.status == 200
def test_retries_wrong_method_list(self):
headers = {"test-name": "test_wrong_method_whitelist"}
retry = Retry(total=1, status_forcelist=[418], method_whitelist=["POST"])
with PoolManager() as http:
resp = http.request(
"GET",
"%s/successful_retry" % self.base_url,
headers=headers,
retries=retry,
)
assert resp.status == 418
def test_read_retries_unsuccessful(self):
headers = {"test-name": "test_read_retries_unsuccessful"}
with PoolManager() as http:
resp = http.request(
"GET", "%s/successful_retry" % self.base_url, headers=headers, retries=1
)
assert resp.status == 418
def test_retry_reuse_safe(self):
headers = {"test-name": "test_retry_safe"}
retry = Retry(total=1, status_forcelist=[418])
with PoolManager() as http:
resp = http.request(
"GET",
"%s/successful_retry" % self.base_url,
headers=headers,
retries=retry,
)
assert resp.status == 200
resp = http.request(
"GET",
"%s/successful_retry" % self.base_url,
headers=headers,
retries=retry,
)
assert resp.status == 200
def test_retry_return_in_response(self):
headers = {"test-name": "test_retry_return_in_response"}
retry = Retry(total=2, status_forcelist=[418])
with PoolManager() as http:
resp = http.request(
"GET",
"%s/successful_retry" % self.base_url,
headers=headers,
retries=retry,
)
assert resp.status == 200
assert resp.retries.total == 1
assert resp.retries.history == (
RequestHistory("GET", "/successful_retry", None, 418, None),
)
def test_retry_redirect_history(self):
with PoolManager() as http:
resp = http.request(
"GET", "%s/redirect" % self.base_url, fields={"target": "/"}
)
assert resp.status == 200
assert resp.retries.history == (
RequestHistory(
"GET", self.base_url + "/redirect?target=%2F", None, 303, "/"
),
)
def test_multi_redirect_history(self):
with PoolManager() as http:
r = http.request(
"GET",
"%s/multi_redirect" % self.base_url,
fields={"redirect_codes": "303,302,200"},
redirect=False,
)
assert r.status == 303
assert r.retries.history == tuple()
r = http.request(
"GET",
"%s/multi_redirect" % self.base_url,
retries=10,
fields={"redirect_codes": "303,302,301,307,302,200"},
)
assert r.status == 200
assert r.data == b"Done redirecting"
expected = [
(303, "/multi_redirect?redirect_codes=302,301,307,302,200"),
(302, "/multi_redirect?redirect_codes=301,307,302,200"),
(301, "/multi_redirect?redirect_codes=307,302,200"),
(307, "/multi_redirect?redirect_codes=302,200"),
(302, "/multi_redirect?redirect_codes=200"),
]
actual = [
(history.status, history.redirect_location)
for history in r.retries.history
]
assert actual == expected
def test_redirect_put_file(self):
retry = Retry(total=3, status_forcelist=[418])
content_length = 65535
data = b"A" * content_length
uploaded_file = io.BytesIO(data)
headers = {
"test-name": "test_redirect_put_file",
"Content-Length": str(content_length),
}
url = "%s/redirect?target=/echo&status=307" % self.base_url
with PoolManager() as http:
resp = http.urlopen(
"PUT", url, headers=headers, retries=retry, body=uploaded_file
)
assert resp.status == 200
assert resp.data == data
class TestRetryAfter(HTTPDummyServerTestCase):
@classmethod
def setup_class(self):
super(TestRetryAfter, self).setup_class()
self.base_url = "http://%s:%d" % (self.host, self.port)
self.base_url_alt = "http://%s:%d" % (self.host_alt, self.port)
def test_retry_after(self):
url = "%s/retry_after" % self.base_url
with PoolManager() as http:
r = http.request(
"GET", url, fields={"status": "429 Too Many Requests"}, retries=False
)
r = http.request(
"GET", url, fields={"status": "429 Too Many Requests"}, retries=False
)
assert r.status == 429
r = http.request(
"GET", url, fields={"status": "429 Too Many Requests"}, retries=True
)
assert r.status == 200
r = http.request(
"GET", url, fields={"status": "503 Service Unavailable"}, retries=False
)
r = http.request(
"GET", url, fields={"status": "503 Service Unavailable"}, retries=False
)
assert r.status == 503
r = http.request(
"GET", url, fields={"status": "503 Service Unavailable"}, retries=True
)
assert r.status == 200
r = http.request(
"GET", url, fields={"status": "418 I'm a teapot"}, retries=True
)
assert r.status == 418
def test_redirect_after(self):
with PoolManager() as http:
r = http.request("GET", "%s/redirect_after" % self.base_url, retries=False)
assert r.status == 303
t = time.time()
r = http.request("GET", "%s/redirect_after" % self.base_url)
assert r.status == 200
delta = time.time() - t
assert delta >= 1
t = time.time()
timestamp = t + 2
r = http.request(
"GET", self.base_url + "/redirect_after?date=" + str(timestamp)
)
assert r.status == 200
delta = time.time() - t
assert delta >= 1
# Retry-After is past
t = time.time()
timestamp = t - 1
r = http.request(
"GET", self.base_url + "/redirect_after?date=" + str(timestamp)
)
delta = time.time() - t
assert r.status == 200
assert delta < 1
class TestFileBodiesOnRetryOrRedirect(HTTPDummyServerTestCase):
def setup_class(self):
super(TestFileBodiesOnRetryOrRedirect, self).setup_class()
self.base_url = "http://%s:%d" % (self.host, self.port)
self.base_url_alt = "http://%s:%d" % (self.host_alt, self.port)
def test_retries_put_filehandle(self):
retry = Retry(total=3, status_forcelist=[418])
# httplib reads in 8k chunks; use a larger content length
content_length = 65535
data = b"A" * content_length
uploaded_file = io.BytesIO(data)
headers = {
"test-name": "test_retries_put_filehandle",
"Content-Length": str(content_length),
}
with PoolManager() as http:
resp = http.urlopen(
"PUT",
"%s/successful_retry" % self.base_url,
headers=headers,
retries=retry,
body=uploaded_file,
redirect=False,
)
assert resp.status == 200
def test_redirect_with_failed_tell(self):
class BadTellObject(io.BytesIO):
def tell(self):
raise IOError
body = BadTellObject(b"the data")
url = "%s/redirect?target=/successful_retry" % self.base_url
# httplib uses fileno if Content-Length isn't supplied,
headers = {"Content-Length": "8"}
with PoolManager() as http:
with pytest.raises(UnrewindableBodyError) as e:
http.urlopen("PUT", url, headers=headers, body=body)
assert "Unable to record file position for" in str(e.value)
@pytest.mark.parametrize(
["target", "expected_target"],
[
("/echo_uri?q=1#fragment", b"/echo_uri?q=1"),
("/echo_uri?#", b"/echo_uri?"),
("/echo_uri#?", b"/echo_uri"),
("/echo_uri#?#", b"/echo_uri"),
("/echo_uri??#", b"/echo_uri??"),
("/echo_uri?%3f#", b"/echo_uri?%3F"),
("/echo_uri?%3F#", b"/echo_uri?%3F"),
("/echo_uri?[]", b"/echo_uri?%5B%5D"),
],
)
def test_encode_http_target(self, target, expected_target):
with PoolManager() as http:
url = "http://%s:%d%s" % (self.host, self.port, target)
r = http.request("GET", url)
assert r.data == expected_target
@pytest.mark.skipif(not HAS_IPV6, reason="IPv6 is not supported on this system")
class TestIPv6PoolManager(IPv6HTTPDummyServerTestCase):
@classmethod
def setup_class(cls):
super(TestIPv6PoolManager, cls).setup_class()
cls.base_url = "http://[%s]:%d" % (cls.host, cls.port)
def test_ipv6(self):
with PoolManager() as http:
http.request("GET", self.base_url)
| true
| true
|
f7187fe8f56ad4cf2d4f4dd6df4dd33406b5cf84
| 13,872
|
py
|
Python
|
tools/my_runner.py
|
ydiller/NoMoreNMS
|
1c1557357e5312c287f0971c840060deb1bcd039
|
[
"Apache-2.0"
] | null | null | null |
tools/my_runner.py
|
ydiller/NoMoreNMS
|
1c1557357e5312c287f0971c840060deb1bcd039
|
[
"Apache-2.0"
] | null | null | null |
tools/my_runner.py
|
ydiller/NoMoreNMS
|
1c1557357e5312c287f0971c840060deb1bcd039
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) OpenMMLab. All rights reserved.
import os.path as osp
import platform
import shutil
import time
import warnings
import torch
import mmcv
import wandb
from mmcv.runner.hooks import HOOKS, Hook
from mmcv.runner.base_runner import BaseRunner
from mmcv.runner.builder import RUNNERS
from mmcv.runner.checkpoint import save_checkpoint
from mmcv.runner.utils import get_host_info
import copy
import logging
import os.path as osp
import warnings
from abc import ABCMeta, abstractmethod
import torch
from torch.optim import Optimizer
import mmcv
from mmcv.parallel import is_module_wrapper
from mmcv.runner.checkpoint import load_checkpoint
from mmcv.runner.dist_utils import get_dist_info
from mmcv.runner.hooks import HOOKS, Hook
from mmcv.runner.log_buffer import LogBuffer
from mmcv.runner.priority import Priority, get_priority
from mmcv.runner.utils import get_time_str
@RUNNERS.register_module()
class MyRunner(BaseRunner):
    """Epoch-based runner with optional Weights & Biases validation logging.

    This runner trains models epoch by epoch (mirroring mmcv's
    EpochBasedRunner — see the commented-out alias below in this file) and,
    when ``with_wandb`` is truthy, averages the buffered validation metrics
    and reports them to wandb after every validation epoch.
    """

    def __init__(self,
                 model,
                 batch_processor=None,
                 optimizer=None,
                 work_dir=None,
                 logger=None,
                 meta=None,
                 max_iters=None,
                 max_epochs=None,
                 with_wandb=None):
        """Validate the arguments and initialise runner state.

        Args:
            model: Model to run; must expose ``train_step`` (and usually
                ``val_step``) unless ``batch_processor`` is given.
            batch_processor (callable, optional): Deprecated alternative to
                ``model.train_step``/``val_step``.
            optimizer (Optimizer | dict | None): A single optimizer or a
                dict of named optimizers.
            work_dir (str | None): Directory for logs and checkpoints.
            logger (logging.Logger): Logger instance (required).
            meta (dict | None): Extra info to be stored into checkpoints.
            max_iters (int | None): Total iterations; mutually exclusive
                with ``max_epochs``.
            max_epochs (int | None): Total epochs; mutually exclusive with
                ``max_iters``.
            with_wandb (bool | None): When truthy, ``val()`` logs averaged
                metrics to wandb (rank 0 only under distributed training).
        """
        if batch_processor is not None:
            if not callable(batch_processor):
                raise TypeError('batch_processor must be callable, '
                                f'but got {type(batch_processor)}')
            warnings.warn(
                'batch_processor is deprecated, please implement '
                'train_step() and val_step() in the model instead.',
                DeprecationWarning)
            # raise an error is `batch_processor` is not None and
            # `model.train_step()` exists.
            if is_module_wrapper(model):
                _model = model.module
            else:
                _model = model
            if hasattr(_model, 'train_step') or hasattr(_model, 'val_step'):
                raise RuntimeError(
                    'batch_processor and model.train_step()/model.val_step() '
                    'cannot be both available.')
        else:
            assert hasattr(model, 'train_step')
        # check the type of `optimizer`
        if isinstance(optimizer, dict):
            for name, optim in optimizer.items():
                if not isinstance(optim, Optimizer):
                    raise TypeError(
                        f'optimizer must be a dict of torch.optim.Optimizers, '
                        f'but optimizer["{name}"] is a {type(optim)}')
        elif not isinstance(optimizer, Optimizer) and optimizer is not None:
            raise TypeError(
                f'optimizer must be a torch.optim.Optimizer object '
                f'or dict or None, but got {type(optimizer)}')
        # check the type of `logger`
        if not isinstance(logger, logging.Logger):
            raise TypeError(f'logger must be a logging.Logger object, '
                            f'but got {type(logger)}')
        # check the type of `meta`
        if meta is not None and not isinstance(meta, dict):
            raise TypeError(
                f'meta must be a dict or None, but got {type(meta)}')
        self.model = model
        self.batch_processor = batch_processor
        self.optimizer = optimizer
        self.logger = logger
        self.meta = meta
        self.with_wandb = with_wandb
        # create work_dir
        if mmcv.is_str(work_dir):
            self.work_dir = osp.abspath(work_dir)
            mmcv.mkdir_or_exist(self.work_dir)
        elif work_dir is None:
            self.work_dir = None
        else:
            raise TypeError('"work_dir" must be a str or None')
        # get model name from the model class
        if hasattr(self.model, 'module'):
            self._model_name = self.model.module.__class__.__name__
        else:
            self._model_name = self.model.__class__.__name__
        self._rank, self._world_size = get_dist_info()
        self.timestamp = get_time_str()
        self.mode = None
        self._hooks = []
        self._epoch = 0
        self._iter = 0
        self._inner_iter = 0
        if max_epochs is not None and max_iters is not None:
            raise ValueError(
                'Only one of `max_epochs` or `max_iters` can be set.')
        self._max_epochs = max_epochs
        self._max_iters = max_iters
        # TODO: Redesign LogBuffer, it is not flexible and elegant enough
        self.log_buffer = LogBuffer()

    def register_optimizer_hook(self, optimizer_config):
        """Register the optimizer hook, defaulting its type to 'MyHook'."""
        if optimizer_config is None:
            return
        if isinstance(optimizer_config, dict):
            optimizer_config.setdefault('type', 'MyHook')
            hook = mmcv.build_from_cfg(optimizer_config, HOOKS)
        else:
            hook = optimizer_config
        self.register_hook(hook, priority='ABOVE_NORMAL')

    def run_iter(self, data_batch, train_mode, **kwargs):
        """Run one forward step and stash its outputs on ``self.outputs``."""
        if self.batch_processor is not None:
            outputs = self.batch_processor(
                self.model, data_batch, train_mode=train_mode, **kwargs)
        elif train_mode:
            outputs = self.model.train_step(data_batch, self.optimizer,
                                            **kwargs)
        else:
            outputs = self.model.val_step(data_batch, self.optimizer, **kwargs)
        if not isinstance(outputs, dict):
            raise TypeError('"batch_processor()" or "model.train_step()"'
                            'and "model.val_step()" must return a dict')
        if 'log_vars' in outputs:
            self.log_buffer.update(outputs['log_vars'], outputs['num_samples'])
        self.outputs = outputs

    def train(self, data_loader, **kwargs):
        """Run one training epoch over ``data_loader``."""
        self.model.train()
        self.mode = 'train'
        self.data_loader = data_loader
        self._max_iters = self._max_epochs * len(self.data_loader)
        self.call_hook('before_train_epoch')
        time.sleep(2)  # Prevent possible deadlock during epoch transition
        for i, data_batch in enumerate(self.data_loader):
            self._inner_iter = i
            self.call_hook('before_train_iter')
            self.run_iter(data_batch, train_mode=True, **kwargs)
            self.call_hook('after_train_iter')
            self._iter += 1
        self.call_hook('after_train_epoch')
        self._epoch += 1

    def _wandb_log_val_metrics(self):
        """Average the buffered validation metrics and send them to wandb.

        Shared by the distributed (rank 0) and single-GPU paths, which
        previously duplicated this payload verbatim.
        """
        history = self.log_buffer.val_history

        def mean_of(key):
            # Plain arithmetic mean over all values buffered for `key`.
            values = history[key]
            return sum(values) / len(values)

        wandb.log({
            "CE val loss": mean_of('loss_deepsets_ce'),
            "val ds_acc": mean_of('ds_acc'),
            "val iou_error": mean_of('iou_error'),
            "val max score predictions": mean_of('ds_pred_on_max'),
        })

    @torch.no_grad()
    def val(self, data_loader, **kwargs):
        """Run one validation epoch; optionally report metrics to wandb."""
        self.model.eval()
        self.mode = 'val'
        self.data_loader = data_loader
        self.call_hook('before_val_epoch')
        time.sleep(2)  # Prevent possible deadlock during epoch transition
        for i, data_batch in enumerate(self.data_loader):
            self._inner_iter = i
            self.call_hook('before_val_iter')
            self.run_iter(data_batch, train_mode=False)
            self.call_hook('after_val_iter')
        self.call_hook('after_val_epoch')
        # Under distributed training only rank 0 logs, so the metrics are
        # not reported once per process; single-GPU runs always log.
        if self.with_wandb and (not torch.distributed.is_initialized()
                                or torch.distributed.get_rank() == 0):
            self._wandb_log_val_metrics()

    def run(self, data_loaders, workflow, max_epochs=None, **kwargs):
        """Start running.
        Args:
            data_loaders (list[:obj:`DataLoader`]): Dataloaders for training
                and validation.
            workflow (list[tuple]): A list of (phase, epochs) to specify the
                running order and epochs. E.g, [('train', 2), ('val', 1)] means
                running 2 epochs for training and 1 epoch for validation,
                iteratively.
        """
        assert isinstance(data_loaders, list)
        assert mmcv.is_list_of(workflow, tuple)
        assert len(data_loaders) == len(workflow)
        if max_epochs is not None:
            warnings.warn(
                'setting max_epochs in run is deprecated, '
                'please set max_epochs in runner_config', DeprecationWarning)
            self._max_epochs = max_epochs
        assert self._max_epochs is not None, (
            'max_epochs must be specified during instantiation')
        for i, flow in enumerate(workflow):
            mode, epochs = flow
            if mode == 'train':
                self._max_iters = self._max_epochs * len(data_loaders[i])
                break
        work_dir = self.work_dir if self.work_dir is not None else 'NONE'
        self.logger.info('Start running, host: %s, work_dir: %s',
                         get_host_info(), work_dir)
        self.logger.info('Hooks will be executed in the following order:\n%s',
                         self.get_hook_info())
        self.logger.info('workflow: %s, max: %d epochs', workflow,
                         self._max_epochs)
        self.call_hook('before_run')
        while self.epoch < self._max_epochs:
            for i, flow in enumerate(workflow):
                mode, epochs = flow
                if isinstance(mode, str):  # self.train()
                    if not hasattr(self, mode):
                        raise ValueError(
                            f'runner has no method named "{mode}" to run an '
                            'epoch')
                    epoch_runner = getattr(self, mode)
                else:
                    raise TypeError(
                        'mode in workflow must be a str, but got {}'.format(
                            type(mode)))
                for _ in range(epochs):
                    if mode == 'train' and self.epoch >= self._max_epochs:
                        break
                    epoch_runner(data_loaders[i], **kwargs)
        time.sleep(1)  # wait for some hooks like loggers to finish
        self.call_hook('after_run')

    def save_checkpoint(self,
                        out_dir,
                        filename_tmpl='end2end_epoch_{}.pth',
                        save_optimizer=True,
                        meta=None,
                        create_symlink=True):
        """Save the checkpoint.
        Args:
            out_dir (str): The directory that checkpoints are saved.
            filename_tmpl (str, optional): The checkpoint filename template,
                which contains a placeholder for the epoch number.
                Defaults to 'epoch_{}.pth'.
            save_optimizer (bool, optional): Whether to save the optimizer to
                the checkpoint. Defaults to True.
            meta (dict, optional): The meta information to be saved in the
                checkpoint. Defaults to None.
            create_symlink (bool, optional): Whether to create a symlink
                "latest.pth" to point to the latest checkpoint.
                Defaults to True.
        """
        if meta is None:
            meta = {}
        elif not isinstance(meta, dict):
            raise TypeError(
                f'meta should be a dict or None, but got {type(meta)}')
        if self.meta is not None:
            meta.update(self.meta)
            # Note: meta.update(self.meta) should be done before
            # meta.update(epoch=self.epoch + 1, iter=self.iter) otherwise
            # there will be problems with resumed checkpoints.
            # More details in https://github.com/open-mmlab/mmcv/pull/1108
        meta.update(epoch=self.epoch + 1, iter=self.iter)
        filename = filename_tmpl.format(self.epoch + 1)
        filepath = osp.join(out_dir, filename)
        optimizer = self.optimizer if save_optimizer else None
        save_checkpoint(self.model, filepath, optimizer=optimizer, meta=meta)
        # in some environments, `os.symlink` is not supported, you may need to
        # set `create_symlink` to False
        if create_symlink:
            dst_file = osp.join(out_dir, 'latest.pth')
            if platform.system() != 'Windows':
                mmcv.symlink(filename, dst_file)
            else:
                shutil.copy(filepath, dst_file)
# @RUNNERS.register_module()
# class Runner(MyRunner):
# """Deprecated name of EpochBasedRunner."""
#
# def __init__(self, *args, **kwargs):
# warnings.warn(
# 'Runner was deprecated, please use EpochBasedRunner instead',
# DeprecationWarning)
# super().__init__(*args, **kwargs)
| 42.292683
| 140
| 0.575115
|
import os.path as osp
import platform
import shutil
import time
import warnings
import torch
import mmcv
import wandb
from mmcv.runner.hooks import HOOKS, Hook
from mmcv.runner.base_runner import BaseRunner
from mmcv.runner.builder import RUNNERS
from mmcv.runner.checkpoint import save_checkpoint
from mmcv.runner.utils import get_host_info
import copy
import logging
import os.path as osp
import warnings
from abc import ABCMeta, abstractmethod
import torch
from torch.optim import Optimizer
import mmcv
from mmcv.parallel import is_module_wrapper
from mmcv.runner.checkpoint import load_checkpoint
from mmcv.runner.dist_utils import get_dist_info
from mmcv.runner.hooks import HOOKS, Hook
from mmcv.runner.log_buffer import LogBuffer
from mmcv.runner.priority import Priority, get_priority
from mmcv.runner.utils import get_time_str
@RUNNERS.register_module()
class MyRunner(BaseRunner):
def __init__(self,
model,
batch_processor=None,
optimizer=None,
work_dir=None,
logger=None,
meta=None,
max_iters=None,
max_epochs=None,
with_wandb=None):
if batch_processor is not None:
if not callable(batch_processor):
raise TypeError('batch_processor must be callable, '
f'but got {type(batch_processor)}')
warnings.warn(
'batch_processor is deprecated, please implement '
'train_step() and val_step() in the model instead.',
DeprecationWarning)
if is_module_wrapper(model):
_model = model.module
else:
_model = model
if hasattr(_model, 'train_step') or hasattr(_model, 'val_step'):
raise RuntimeError(
'batch_processor and model.train_step()/model.val_step() '
'cannot be both available.')
else:
assert hasattr(model, 'train_step')
if isinstance(optimizer, dict):
for name, optim in optimizer.items():
if not isinstance(optim, Optimizer):
raise TypeError(
f'optimizer must be a dict of torch.optim.Optimizers, '
f'but optimizer["{name}"] is a {type(optim)}')
elif not isinstance(optimizer, Optimizer) and optimizer is not None:
raise TypeError(
f'optimizer must be a torch.optim.Optimizer object '
f'or dict or None, but got {type(optimizer)}')
if not isinstance(logger, logging.Logger):
raise TypeError(f'logger must be a logging.Logger object, '
f'but got {type(logger)}')
if meta is not None and not isinstance(meta, dict):
raise TypeError(
f'meta must be a dict or None, but got {type(meta)}')
self.model = model
self.batch_processor = batch_processor
self.optimizer = optimizer
self.logger = logger
self.meta = meta
self.with_wandb = with_wandb
if mmcv.is_str(work_dir):
self.work_dir = osp.abspath(work_dir)
mmcv.mkdir_or_exist(self.work_dir)
elif work_dir is None:
self.work_dir = None
else:
raise TypeError('"work_dir" must be a str or None')
if hasattr(self.model, 'module'):
self._model_name = self.model.module.__class__.__name__
else:
self._model_name = self.model.__class__.__name__
self._rank, self._world_size = get_dist_info()
self.timestamp = get_time_str()
self.mode = None
self._hooks = []
self._epoch = 0
self._iter = 0
self._inner_iter = 0
if max_epochs is not None and max_iters is not None:
raise ValueError(
'Only one of `max_epochs` or `max_iters` can be set.')
self._max_epochs = max_epochs
self._max_iters = max_iters
self.log_buffer = LogBuffer()
def register_optimizer_hook(self, optimizer_config):
if optimizer_config is None:
return
if isinstance(optimizer_config, dict):
optimizer_config.setdefault('type', 'MyHook')
hook = mmcv.build_from_cfg(optimizer_config, HOOKS)
else:
hook = optimizer_config
self.register_hook(hook, priority='ABOVE_NORMAL')
def run_iter(self, data_batch, train_mode, **kwargs):
if self.batch_processor is not None:
outputs = self.batch_processor(
self.model, data_batch, train_mode=train_mode, **kwargs)
elif train_mode:
outputs = self.model.train_step(data_batch, self.optimizer,
**kwargs)
else:
outputs = self.model.val_step(data_batch, self.optimizer, **kwargs)
if not isinstance(outputs, dict):
raise TypeError('"batch_processor()" or "model.train_step()"'
'and "model.val_step()" must return a dict')
if 'log_vars' in outputs:
self.log_buffer.update(outputs['log_vars'], outputs['num_samples'])
self.outputs = outputs
def train(self, data_loader, **kwargs):
self.model.train()
self.mode = 'train'
self.data_loader = data_loader
self._max_iters = self._max_epochs * len(self.data_loader)
self.call_hook('before_train_epoch')
time.sleep(2)
for i, data_batch in enumerate(self.data_loader):
self._inner_iter = i
self.call_hook('before_train_iter')
self.run_iter(data_batch, train_mode=True, **kwargs)
self.call_hook('after_train_iter')
self._iter += 1
self.call_hook('after_train_epoch')
self._epoch += 1
@torch.no_grad()
def val(self, data_loader, **kwargs):
self.model.eval()
self.mode = 'val'
self.data_loader = data_loader
self.call_hook('before_val_epoch')
time.sleep(2)
for i, data_batch in enumerate(self.data_loader):
self._inner_iter = i
self.call_hook('before_val_iter')
self.run_iter(data_batch, train_mode=False)
self.call_hook('after_val_iter')
self.call_hook('after_val_epoch')
if torch.distributed.is_initialized():
if torch.distributed.get_rank() == 0:
if self.with_wandb:
wandb.log({"CE val loss": sum(self.log_buffer.val_history['loss_deepsets_ce'])/
len(self.log_buffer.val_history['loss_deepsets_ce']),
"val ds_acc": sum(self.log_buffer.val_history['ds_acc'])/
len(self.log_buffer.val_history['ds_acc']),
"val iou_error": sum(self.log_buffer.val_history['iou_error'])/len(self.log_buffer.val_history['iou_error']),
"val max score predictions": sum(self.log_buffer.val_history['ds_pred_on_max'])/
len(self.log_buffer.val_history['ds_pred_on_max'])
})
else:
if self.with_wandb:
wandb.log({"CE val loss": sum(self.log_buffer.val_history['loss_deepsets_ce']) /
len(self.log_buffer.val_history['loss_deepsets_ce']),
"val ds_acc": sum(self.log_buffer.val_history['ds_acc']) /
len(self.log_buffer.val_history['ds_acc']),
"val iou_error": sum(self.log_buffer.val_history['iou_error']) / len(
self.log_buffer.val_history['iou_error']),
"val max score predictions": sum(self.log_buffer.val_history['ds_pred_on_max']) /
len(self.log_buffer.val_history['ds_pred_on_max'])})
def run(self, data_loaders, workflow, max_epochs=None, **kwargs):
assert isinstance(data_loaders, list)
assert mmcv.is_list_of(workflow, tuple)
assert len(data_loaders) == len(workflow)
if max_epochs is not None:
warnings.warn(
'setting max_epochs in run is deprecated, '
'please set max_epochs in runner_config', DeprecationWarning)
self._max_epochs = max_epochs
assert self._max_epochs is not None, (
'max_epochs must be specified during instantiation')
for i, flow in enumerate(workflow):
mode, epochs = flow
if mode == 'train':
self._max_iters = self._max_epochs * len(data_loaders[i])
break
work_dir = self.work_dir if self.work_dir is not None else 'NONE'
self.logger.info('Start running, host: %s, work_dir: %s',
get_host_info(), work_dir)
self.logger.info('Hooks will be executed in the following order:\n%s',
self.get_hook_info())
self.logger.info('workflow: %s, max: %d epochs', workflow,
self._max_epochs)
self.call_hook('before_run')
while self.epoch < self._max_epochs:
for i, flow in enumerate(workflow):
mode, epochs = flow
if isinstance(mode, str):
if not hasattr(self, mode):
raise ValueError(
f'runner has no method named "{mode}" to run an '
'epoch')
epoch_runner = getattr(self, mode)
else:
raise TypeError(
'mode in workflow must be a str, but got {}'.format(
type(mode)))
for _ in range(epochs):
if mode == 'train' and self.epoch >= self._max_epochs:
break
epoch_runner(data_loaders[i], **kwargs)
time.sleep(1)
self.call_hook('after_run')
def save_checkpoint(self,
out_dir,
filename_tmpl='end2end_epoch_{}.pth',
save_optimizer=True,
meta=None,
create_symlink=True):
if meta is None:
meta = {}
elif not isinstance(meta, dict):
raise TypeError(
f'meta should be a dict or None, but got {type(meta)}')
if self.meta is not None:
meta.update(self.meta)
meta.update(epoch=self.epoch + 1, iter=self.iter)
filename = filename_tmpl.format(self.epoch + 1)
filepath = osp.join(out_dir, filename)
optimizer = self.optimizer if save_optimizer else None
save_checkpoint(self.model, filepath, optimizer=optimizer, meta=meta)
if create_symlink:
dst_file = osp.join(out_dir, 'latest.pth')
if platform.system() != 'Windows':
mmcv.symlink(filename, dst_file)
else:
shutil.copy(filepath, dst_file)
| true
| true
|
f7187ff435949165d6ffa9b6741462e446524819
| 6,374
|
py
|
Python
|
data_process/kdtree.py
|
MortonWang/geo_IF
|
4e27aeb9e005cdfb151777bc730de6d8372d1b7f
|
[
"MIT"
] | 5
|
2020-06-19T13:39:59.000Z
|
2022-03-04T13:05:58.000Z
|
data_process/kdtree.py
|
MortonWang/geo_IF
|
4e27aeb9e005cdfb151777bc730de6d8372d1b7f
|
[
"MIT"
] | null | null | null |
data_process/kdtree.py
|
MortonWang/geo_IF
|
4e27aeb9e005cdfb151777bc730de6d8372d1b7f
|
[
"MIT"
] | null | null | null |
# -*- coding:utf-8 -*-
import copy
import numpy as np
from scipy._lib.six import xrange
class KDTree:
    """Bucketed k-d tree over points in R^``dimensions``.

    Points are stored as ``(index, location)`` tuples.  A node keeps its
    points until :meth:`nodeSplit` partitions an over-full node along its
    widest axis at the median coordinate of that axis.
    """

    def __init__(self, bucket_size, dimensions, parent=None):
        self.bucket_size = bucket_size
        # Fix: the original discarded the `parent` argument and always
        # stored None; keep the back-link the callers in nodeSplit() pass.
        self.parent = parent
        self.left = None
        self.right = None
        self.split_dimension = None
        self.split_value = None
        # (index, location) tuples held by this node.
        self.index_locations = []
        self.location_count = 0
        # Bounding box of the points seen so far.  np.inf replaces the
        # np.Inf alias, which was removed in NumPy 2.0.
        self.min_limit = [np.inf] * dimensions
        self.max_limit = [-np.inf] * dimensions
        self.dimensions = dimensions

    def get_leaf(self, location):
        """Descend to the leaf whose region contains ``location``."""
        if not self.left and not self.right:
            return self
        elif location[self.split_dimension] <= self.split_value:
            return self.left.get_leaf(location)
        else:
            return self.right.get_leaf(location)

    def add_point(self, index_location_tuple):
        """Add one ``(index, location)`` tuple and grow the bounding box."""
        self.index_locations.append(index_location_tuple)
        self.location_count += 1
        self.extendBounds(index_location_tuple[1])
        self.min_boundary = copy.deepcopy(self.min_limit)
        self.max_boundary = copy.deepcopy(self.max_limit)

    def extendBounds(self, location):
        """Widen the min/max limits so they cover ``location``."""
        # empty — identity test (`is None`) instead of `== None`.
        if self.min_limit is None:
            self.min_limit = copy.deepcopy(location)
            self.max_limit = copy.deepcopy(location)
            return
        # builtin range() instead of xrange from the removed scipy._lib.six
        for i in range(self.dimensions):
            self.min_limit[i] = min(self.min_limit[i], location[i])
            self.max_limit[i] = max(self.max_limit[i], location[i])

    def findWidestAxis(self):
        """Return the index of the axis with the largest extent."""
        widths = [self.max_limit[i] - self.min_limit[i] for i in range(self.dimensions)]
        widest_axis = np.argmax(widths)
        return widest_axis

    def getNodes(self):
        """Return every node of the subtree rooted here (pre-order)."""
        nodes = []
        self.getNodesHelper(nodes)
        return nodes

    def getNodesHelper(self, nodes):
        nodes.append(self)
        if self.left:
            self.left.getNodesHelper(nodes)
        if self.right:
            self.right.getNodesHelper(nodes)

    def getLeaves(self):
        """Return every leaf of the subtree rooted here."""
        leaves = []
        self.getLeavesHelper(leaves)
        return leaves

    def getLeavesHelper(self, leaves):
        if not self.right and not self.left:
            leaves.append(self)
        else:
            if self.left:
                self.left.getLeavesHelper(leaves)
            if self.right:
                self.right.getLeavesHelper(leaves)

    def balance(self):
        """Recursively split this subtree until leaves fit the bucket size."""
        self.nodeSplit(self)

    def nodeSplit(self, cursor, empty_non_leaf=True):
        """Split ``cursor`` (and recurse) while it holds more than
        ``bucket_size`` points; when ``empty_non_leaf`` is True the points
        of interior nodes are cleared after being pushed down."""
        if cursor.location_count > cursor.bucket_size:
            cursor.split_dimension = cursor.findWidestAxis()
            # the partition method is the median of all values in the widest dimension
            cursor.split_value = np.median([cursor.index_locations[i][1][cursor.split_dimension] for i in range(cursor.location_count)])
            # if width is 0 (all the values are the same) don't partition
            if cursor.min_limit[cursor.split_dimension] == cursor.max_limit[cursor.split_dimension]:
                return
            # Don't let the split value be the same as the upper value as
            # can happen due to rounding errors!
            if cursor.split_value == cursor.max_limit[cursor.split_dimension]:
                cursor.split_value = cursor.min_limit[cursor.split_dimension]
            cursor.left = KDTree(bucket_size=cursor.bucket_size, dimensions=cursor.dimensions, parent=cursor)
            cursor.right = KDTree(bucket_size=cursor.bucket_size, dimensions=cursor.dimensions, parent=cursor)
            cursor.left.min_boundary = copy.deepcopy(cursor.min_boundary)
            cursor.left.max_boundary = copy.deepcopy(cursor.max_boundary)
            cursor.right.min_boundary = copy.deepcopy(cursor.min_boundary)
            cursor.right.max_boundary = copy.deepcopy(cursor.max_boundary)
            cursor.left.max_boundary[cursor.split_dimension] = cursor.split_value
            cursor.right.min_boundary[cursor.split_dimension] = cursor.split_value
            for index_loc in cursor.index_locations:
                if index_loc[1][cursor.split_dimension] > cursor.split_value:
                    cursor.right.index_locations.append(index_loc)
                    cursor.right.location_count += 1
                    cursor.right.extendBounds(index_loc[1])
                else:
                    cursor.left.index_locations.append(index_loc)
                    cursor.left.location_count += 1
                    cursor.left.extendBounds(index_loc[1])
            if empty_non_leaf:
                cursor.index_locations = []
            cursor.nodeSplit(cursor.left)
            cursor.nodeSplit(cursor.right)
class KDTreeClustering:
    """Clusters points by the leaves of a balanced :class:`KDTree`.

    Every sample is assigned the integer id of the leaf bucket it falls
    into; buckets hold at most ``bucket_size`` samples.
    """

    def __init__(self, bucket_size=10):
        self.bucket_size = bucket_size
        self.is_fitted = False

    def fit(self, X):
        """Build the tree over ``X`` and compute per-sample cluster ids.

        Args:
            X: An array with a ``.shape`` attribute, or a sequence of
                equal-length coordinate sequences.
        """
        if hasattr(X, 'shape'):
            n_samples = X.shape[0]
            dimensions = X.shape[1]
        else:
            n_samples = len(X)
            dimensions = len(X[0])
        self.kdtree = KDTree(bucket_size=self.bucket_size, dimensions=dimensions, parent=None)
        # builtin range() instead of xrange from the removed scipy._lib.six
        for i in range(n_samples):
            self.kdtree.add_point((i, X[i]))
        self.kdtree.nodeSplit(cursor=self.kdtree, empty_non_leaf=True)
        # Fix: the original assigned self.clusters twice; the first
        # assignment (the raw leaf contents) was dead code, immediately
        # overwritten by the per-sample id array below.
        clusters = [cluster.index_locations for cluster in self.kdtree.getLeaves()]
        results = np.zeros((n_samples,), dtype=int)
        for i, id_locs in enumerate(clusters):
            for id, l in id_locs:
                results[id] = i
        self.clusters = results
        self.num_clusters = len(clusters)
        self.is_fitted = True

    def get_clusters(self):
        """Return per-sample cluster ids; implicitly None before fit()."""
        if self.is_fitted:
            return self.clusters
if __name__ == '__main__':
    # tree = KDTree(300, 2)
    # Demo entry point: cluster the training users' coordinates.
    # `params` and `geolocate` are project-local modules not visible here;
    # presumably `params.trainUsers` maps user ids to location strings that
    # locationStr2Float parses into coordinate pairs — TODO confirm.
    import params
    import geolocate
    # Initialise geolocation data read-only (write=False, no reload).
    geolocate.initialize(granularity=params.BUCKET_SIZE, write=False, readText=True, reload_init=False, regression=False)
    locations = [geolocate.locationStr2Float(loc) for loc in params.trainUsers.values()]
    clusterer = KDTreeClustering(bucket_size=params.BUCKET_SIZE)
    clusterer.fit(locations)
    # Per-sample cluster ids, aligned with the order of `locations`.
    clusters = clusterer.get_clusters()
| 39.8375
| 136
| 0.623784
|
import copy
import numpy as np
from scipy._lib.six import xrange
class KDTree:
def __init__(self, bucket_size, dimensions, parent=None):
self.bucket_size = bucket_size
self.parent = None
self.left = None
self.right = None
self.split_dimension = None
self.split_value = None
self.index_locations = []
self.location_count = 0
self.min_limit = [np.Inf] * dimensions
self.max_limit = [-np.Inf] * dimensions
self.dimensions = dimensions
def get_leaf(self, location):
if not self.left and not self.right:
return self
elif location[self.split_dimension] <= self.split_value:
return self.left.get_leaf(location)
else:
return self.right.get_leaf(location)
def add_point(self, index_location_tuple):
self.index_locations.append(index_location_tuple)
self.location_count += 1
self.extendBounds(index_location_tuple[1])
self.min_boundary = copy.deepcopy(self.min_limit)
self.max_boundary = copy.deepcopy(self.max_limit)
def extendBounds(self, location):
if self.min_limit == None:
self.min_limit = copy.deepcopy(location)
self.max_limit = copy.deepcopy(location)
return
for i in xrange(self.dimensions):
self.min_limit[i] = min(self.min_limit[i], location[i])
self.max_limit[i] = max(self.max_limit[i], location[i])
def findWidestAxis(self):
widths = [self.max_limit[i] - self.min_limit[i] for i in range(self.dimensions)]
widest_axis = np.argmax(widths)
return widest_axis
def getNodes(self):
nodes = []
self.getNodesHelper(nodes)
return nodes
def getNodesHelper(self, nodes):
nodes.append(self)
if self.left:
self.left.getNodesHelper(nodes)
if self.right:
self.right.getNodesHelper(nodes)
def getLeaves(self):
leaves = []
self.getLeavesHelper(leaves)
return leaves
def getLeavesHelper(self, leaves):
if not self.right and not self.left:
leaves.append(self)
else:
if self.left:
self.left.getLeavesHelper(leaves)
if self.right:
self.right.getLeavesHelper(leaves)
def balance(self):
self.nodeSplit(self)
def nodeSplit(self, cursor, empty_non_leaf=True):
if cursor.location_count > cursor.bucket_size:
cursor.split_dimension = cursor.findWidestAxis()
cursor.split_value = np.median([cursor.index_locations[i][1][cursor.split_dimension] for i in range(cursor.location_count)])
if cursor.min_limit[cursor.split_dimension] == cursor.max_limit[cursor.split_dimension]:
return
# Don't let the split value be the same as the upper value as
if cursor.split_value == cursor.max_limit[cursor.split_dimension]:
cursor.split_value = cursor.min_limit[cursor.split_dimension]
cursor.left = KDTree(bucket_size=cursor.bucket_size, dimensions=cursor.dimensions, parent=cursor)
cursor.right = KDTree(bucket_size=cursor.bucket_size, dimensions=cursor.dimensions, parent=cursor)
cursor.left.min_boundary = copy.deepcopy(cursor.min_boundary)
cursor.left.max_boundary = copy.deepcopy(cursor.max_boundary)
cursor.right.min_boundary = copy.deepcopy(cursor.min_boundary)
cursor.right.max_boundary = copy.deepcopy(cursor.max_boundary)
cursor.left.max_boundary[cursor.split_dimension] = cursor.split_value
cursor.right.min_boundary[cursor.split_dimension] = cursor.split_value
for index_loc in cursor.index_locations:
if index_loc[1][cursor.split_dimension] > cursor.split_value:
cursor.right.index_locations.append(index_loc)
cursor.right.location_count += 1
cursor.right.extendBounds(index_loc[1])
else:
cursor.left.index_locations.append(index_loc)
cursor.left.location_count += 1
cursor.left.extendBounds(index_loc[1])
if empty_non_leaf:
cursor.index_locations = []
cursor.nodeSplit(cursor.left)
cursor.nodeSplit(cursor.right)
class KDTreeClustering:
def __init__(self, bucket_size=10):
self.bucket_size = bucket_size
self.is_fitted = False
def fit(self, X):
if hasattr(X, 'shape'):
n_samples = X.shape[0]
dimensions = X.shape[1]
else:
n_samples = len(X)
dimensions = len(X[0])
self.kdtree = KDTree(bucket_size=self.bucket_size, dimensions=dimensions, parent=None)
for i in xrange(n_samples):
self.kdtree.add_point((i, X[i]))
self.kdtree.nodeSplit(cursor=self.kdtree, empty_non_leaf=True)
self.clusters = [leave.index_locations for leave in self.kdtree.getLeaves()]
clusters = [cluster.index_locations for cluster in self.kdtree.getLeaves()]
results = np.zeros((n_samples,), dtype=int)
for i, id_locs in enumerate(clusters):
for id, l in id_locs:
results[id] = i
self.clusters = results
self.num_clusters = len(clusters)
self.is_fitted = True
def get_clusters(self):
if self.is_fitted:
return self.clusters
if __name__ == '__main__':
import params
import geolocate
geolocate.initialize(granularity=params.BUCKET_SIZE, write=False, readText=True, reload_init=False, regression=False)
locations = [geolocate.locationStr2Float(loc) for loc in params.trainUsers.values()]
clusterer = KDTreeClustering(bucket_size=params.BUCKET_SIZE)
clusterer.fit(locations)
clusters = clusterer.get_clusters()
| true
| true
|
f7188051fe659ac1411c3c3c3d773672836caf24
| 13,881
|
py
|
Python
|
tensorflow_probability/python/distributions/student_t_process_regression_model_test.py
|
jakee417/probability-1
|
ae7117f37ac441bc7a888167ea23e5e620c5bcde
|
[
"Apache-2.0"
] | 3,670
|
2018-02-14T03:29:40.000Z
|
2022-03-30T01:19:52.000Z
|
tensorflow_probability/python/distributions/student_t_process_regression_model_test.py
|
jakee417/probability-1
|
ae7117f37ac441bc7a888167ea23e5e620c5bcde
|
[
"Apache-2.0"
] | 1,395
|
2018-02-24T02:28:49.000Z
|
2022-03-31T16:12:06.000Z
|
tensorflow_probability/python/distributions/student_t_process_regression_model_test.py
|
jakee417/probability-1
|
ae7117f37ac441bc7a888167ea23e5e620c5bcde
|
[
"Apache-2.0"
] | 1,135
|
2018-02-14T01:51:10.000Z
|
2022-03-28T02:24:11.000Z
|
# Copyright 2021 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
# Dependency imports
import numpy as np
import tensorflow.compat.v2 as tf
from tensorflow_probability.python import distributions as tfd
from tensorflow_probability.python.internal import test_util
from tensorflow_probability.python.math import psd_kernels
@test_util.test_all_tf_execution_regimes
class StudentTProcessRegressionModelTest(test_util.TestCase):
  """Tests for `tfd.StudentTProcessRegressionModel` (STPRM)."""

  def testInstantiate(self):
    """Checks batch/event/sample shapes and that `cholesky_fn` is retained."""
    df = np.float64(1.)
    # 5x5 grid of index points in R^2 and flatten to 25x2
    index_points = np.linspace(-4., 4., 5, dtype=np.float64)
    index_points = np.stack(np.meshgrid(index_points, index_points), axis=-1)
    index_points = np.reshape(index_points, [-1, 2])
    # ==> shape = [25, 2]
    # Kernel with batch_shape [2, 4, 1, 3]
    amplitude = np.array([1., 2.], np.float64).reshape([2, 1, 1, 1])
    length_scale = np.array([.1, .2, .3, .4], np.float64).reshape(
        [1, 4, 1, 1])
    observation_noise_variance = np.array(
        [1e-5, 1e-6, 1e-9], np.float64).reshape([1, 1, 1, 3])
    observation_index_points = (
        np.random.uniform(-1., 1., (3, 7, 2)).astype(np.float64))
    observations = np.random.uniform(-1., 1., (3, 7)).astype(np.float64)
    def cholesky_fn(x):
      # Custom factorization (diagonal shifted by 1.) — only used to verify
      # that the exact callable is plumbed through to the distribution.
      return tf.linalg.cholesky(
          tf.linalg.set_diag(x, tf.linalg.diag_part(x) + 1.))
    kernel = psd_kernels.ExponentiatedQuadratic(amplitude, length_scale)
    stprm = tfd.StudentTProcessRegressionModel(
        df=df,
        kernel=kernel,
        index_points=index_points,
        observation_index_points=observation_index_points,
        observations=observations,
        observation_noise_variance=observation_noise_variance,
        cholesky_fn=cholesky_fn)
    batch_shape = [2, 4, 1, 3]
    event_shape = [25]
    sample_shape = [7, 2]
    # NOTE(review): debug prints left in from upstream; harmless under pytest.
    print(stprm.batch_shape)
    print(stprm.kernel.batch_shape)
    print(stprm.kernel.schur_complement.batch_shape)
    print(stprm.kernel.schur_complement.base_kernel.batch_shape)
    self.assertIs(cholesky_fn, stprm.cholesky_fn)
    samples = stprm.sample(sample_shape, seed=test_util.test_seed())
    self.assertAllEqual(stprm.batch_shape_tensor(), batch_shape)
    self.assertAllEqual(stprm.event_shape_tensor(), event_shape)
    self.assertAllEqual(self.evaluate(samples).shape,
                        sample_shape + batch_shape + event_shape)

  def testMeanSameAsGPRM(self):
    """Posterior mean of the STPRM should match the GP regression model."""
    df = np.float64(3.)
    index_points = np.linspace(-4., 4., 5, dtype=np.float64)
    index_points = np.stack(np.meshgrid(index_points, index_points), axis=-1)
    index_points = np.reshape(index_points, [-1, 2])
    # Kernel with batch_shape [5, 3]
    amplitude = np.array([1., 2., 3., 4., 5.], np.float64).reshape([5, 1])
    length_scale = np.array([.1, .2, .3], np.float64).reshape(
        [1, 3])
    observation_noise_variance = np.array(
        [1e-5, 1e-6, 1e-9], np.float64).reshape([1, 3])
    observation_index_points = (
        np.random.uniform(-1., 1., (3, 7, 2)).astype(np.float64))
    observations = np.random.uniform(-1., 1., (3, 7)).astype(np.float64)
    kernel = psd_kernels.ExponentiatedQuadratic(amplitude, length_scale)
    stprm = tfd.StudentTProcessRegressionModel(
        df=df,
        kernel=kernel,
        index_points=index_points,
        observation_index_points=observation_index_points,
        observations=observations,
        observation_noise_variance=observation_noise_variance)
    gprm = tfd.GaussianProcessRegressionModel(
        kernel=kernel,
        index_points=index_points,
        observation_index_points=observation_index_points,
        observations=observations,
        observation_noise_variance=observation_noise_variance)
    self.assertAllClose(self.evaluate(stprm.mean()), self.evaluate(gprm.mean()))

  def testLogProbNearGPRM(self):
    """As df -> inf the Student-T process converges to a GP."""
    # For large df, the log_prob calculations should be the same.
    df = np.float64(1e6)
    index_points = np.linspace(-4., 4., 5, dtype=np.float64)
    index_points = np.stack(np.meshgrid(index_points, index_points), axis=-1)
    index_points = np.reshape(index_points, [-1, 2])
    # Kernel with batch_shape [5, 3]
    amplitude = np.array([1., 2., 3., 4., 5.], np.float64).reshape([5, 1])
    length_scale = np.array([.1, .2, .3], np.float64).reshape(
        [1, 3])
    observation_noise_variance = np.array(
        [1e-5, 1e-6, 1e-9], np.float64).reshape([1, 3])
    observation_index_points = (
        np.random.uniform(-1., 1., (3, 7, 2)).astype(np.float64))
    observations = np.random.uniform(-1., 1., (3, 7)).astype(np.float64)
    kernel = psd_kernels.ExponentiatedQuadratic(amplitude, length_scale)
    stprm = tfd.StudentTProcessRegressionModel(
        df=df,
        kernel=kernel,
        index_points=index_points,
        observation_index_points=observation_index_points,
        observations=observations,
        observation_noise_variance=observation_noise_variance)
    gprm = tfd.GaussianProcessRegressionModel(
        kernel=kernel,
        index_points=index_points,
        observation_index_points=observation_index_points,
        observations=observations,
        observation_noise_variance=observation_noise_variance)
    x = np.linspace(-3., 3., 25)
    self.assertAllClose(
        self.evaluate(stprm.log_prob(x)),
        self.evaluate(gprm.log_prob(x)), rtol=2e-5)

  def testMeanVarianceAndCovariancePrecomputed(self):
    """Precomputed model must agree with the lazily-computed one."""
    amplitude = np.array([1., 2.], np.float64).reshape([2, 1])
    length_scale = np.array([.1, .2, .3], np.float64).reshape([1, 3])
    observation_noise_variance = np.array([1e-9], np.float64)
    df = np.float64(3.)
    observation_index_points = (
        np.random.uniform(-1., 1., (1, 1, 7, 2)).astype(np.float64))
    observations = np.random.uniform(-1., 1., (1, 1, 7)).astype(np.float64)
    index_points = np.random.uniform(-1., 1., (6, 2)).astype(np.float64)
    kernel = psd_kernels.ExponentiatedQuadratic(amplitude, length_scale)
    stprm = tfd.StudentTProcessRegressionModel(
        df=df,
        kernel=kernel,
        index_points=index_points,
        observation_index_points=observation_index_points,
        observations=observations,
        observation_noise_variance=observation_noise_variance,
        validate_args=True)
    precomputed_stprm = tfd.StudentTProcessRegressionModel.precompute_regression_model(
        df=df,
        kernel=kernel,
        index_points=index_points,
        observation_index_points=observation_index_points,
        observations=observations,
        observation_noise_variance=observation_noise_variance,
        validate_args=True)
    self.assertAllClose(self.evaluate(precomputed_stprm.covariance()),
                        self.evaluate(stprm.covariance()))
    self.assertAllClose(self.evaluate(precomputed_stprm.variance()),
                        self.evaluate(stprm.variance()))
    self.assertAllClose(self.evaluate(precomputed_stprm.mean()),
                        self.evaluate(stprm.mean()))

  @test_util.disable_test_for_backend(
      disable_numpy=True, disable_jax=True,
      reason='Numpy and JAX have no notion of CompositeTensor/saved_model')
  def testPrecomputedCompositeTensor(self):
    """Flatten/unflatten round-trip must not recompute cached state."""
    amplitude = np.array([1., 2.], np.float64).reshape([2, 1])
    length_scale = np.array([.1, .2, .3], np.float64).reshape([1, 3])
    observation_noise_variance = np.array([1e-9], np.float64)
    observation_index_points = (
        np.random.uniform(-1., 1., (1, 1, 7, 2)).astype(np.float64))
    observations = np.random.uniform(-1., 1., (1, 1, 7)).astype(np.float64)
    index_points = np.random.uniform(-1., 1., (6, 2)).astype(np.float64)
    kernel = psd_kernels.ExponentiatedQuadratic(amplitude, length_scale)
    precomputed_stprm = tfd.StudentTProcessRegressionModel.precompute_regression_model(
        df=3.,
        kernel=kernel,
        index_points=index_points,
        observation_index_points=observation_index_points,
        observations=observations,
        observation_noise_variance=observation_noise_variance,
        validate_args=True)
    flat = tf.nest.flatten(precomputed_stprm, expand_composites=True)
    unflat = tf.nest.pack_sequence_as(
        precomputed_stprm, flat, expand_composites=True)
    self.assertIsInstance(unflat, tfd.StudentTProcessRegressionModel)
    # Check that we don't recompute the divisor matrix on flattening /
    # unflattening.
    self.assertIs(
        precomputed_stprm.kernel.schur_complement._precomputed_divisor_matrix_cholesky,  # pylint:disable=line-too-long
        unflat.kernel.schur_complement._precomputed_divisor_matrix_cholesky)
    # TODO(b/196219597): Enable this test once STPRM works across TF function
    # boundaries.
    # index_observations = np.random.uniform(-1., 1., (6,)).astype(np.float64)
    # @tf.function
    # def log_prob(d):
    #   return d.log_prob(index_observations)
    # lp = self.evaluate(precomputed_stprm.log_prob(index_observations))
    # self.assertAllClose(lp, self.evaluate(log_prob(precomputed_stprm)))
    # self.assertAllClose(lp, self.evaluate(log_prob(unflat)))

  def testEmptyDataMatchesStPPrior(self):
    """With no observations the regression model equals the StP prior."""
    df = np.float64(3.5)
    amp = np.float64(.5)
    len_scale = np.float64(.2)
    index_points = np.random.uniform(-1., 1., (10, 1)).astype(np.float64)
    # k_xx - k_xn @ (k_nn + sigma^2) @ k_nx + sigma^2
    mean_fn = lambda x: x[:, 0]**2
    kernel = psd_kernels.ExponentiatedQuadratic(amp, len_scale)
    stp = tfd.StudentTProcess(
        df,
        kernel,
        index_points,
        mean_fn=mean_fn,
        validate_args=True)
    stprm_nones = tfd.StudentTProcessRegressionModel(
        df,
        kernel=kernel,
        index_points=index_points,
        mean_fn=mean_fn,
        validate_args=True)
    stprm_zero_shapes = tfd.StudentTProcessRegressionModel(
        df,
        kernel=kernel,
        index_points=index_points,
        observation_index_points=tf.ones([0, 1], tf.float64),
        observations=tf.ones([0], tf.float64),
        mean_fn=mean_fn,
        validate_args=True)
    # Both "no observations" spellings (None and zero-length tensors) must
    # reduce to the prior.
    for stprm in [stprm_nones, stprm_zero_shapes]:
      self.assertAllClose(
          self.evaluate(stp.mean()), self.evaluate(stprm.mean()))
      self.assertAllClose(self.evaluate(stp.covariance()),
                          self.evaluate(stprm.covariance()))
      self.assertAllClose(self.evaluate(stp.variance()),
                          self.evaluate(stprm.variance()))
      observations = np.random.uniform(-1., 1., 10).astype(np.float64)
      self.assertAllClose(self.evaluate(stp.log_prob(observations)),
                          self.evaluate(stprm.log_prob(observations)))

  def testCopy(self):
    """`copy` overrides selected parameters and keeps the rest shared."""
    # 5 random index points in R^2
    index_points_1 = np.random.uniform(-4., 4., (5, 2)).astype(np.float32)
    # 10 random index points in R^2
    index_points_2 = np.random.uniform(-4., 4., (10, 2)).astype(np.float32)
    observation_index_points_1 = (
        np.random.uniform(-4., 4., (7, 2)).astype(np.float32))
    observation_index_points_2 = (
        np.random.uniform(-4., 4., (9, 2)).astype(np.float32))
    observations_1 = np.random.uniform(-1., 1., 7).astype(np.float32)
    observations_2 = np.random.uniform(-1., 1., 9).astype(np.float32)
    # ==> shape = [6, 25, 2]
    mean_fn = lambda x: np.array([0.], np.float32)
    kernel_1 = psd_kernels.ExponentiatedQuadratic()
    kernel_2 = psd_kernels.ExpSinSquared()
    stprm1 = tfd.StudentTProcessRegressionModel(
        df=5.,
        kernel=kernel_1,
        index_points=index_points_1,
        observation_index_points=observation_index_points_1,
        observations=observations_1,
        mean_fn=mean_fn,
        validate_args=True)
    stprm2 = stprm1.copy(
        kernel=kernel_2,
        index_points=index_points_2,
        observation_index_points=observation_index_points_2,
        observations=observations_2)
    precomputed_stprm1 = (
        tfd.StudentTProcessRegressionModel.precompute_regression_model(
            df=5.,
            kernel=kernel_1,
            index_points=index_points_1,
            observation_index_points=observation_index_points_1,
            observations=observations_1,
            mean_fn=mean_fn,
            validate_args=True))
    precomputed_stprm2 = precomputed_stprm1.copy(index_points=index_points_2)
    self.assertIs(precomputed_stprm1.mean_fn, precomputed_stprm2.mean_fn)
    self.assertIs(precomputed_stprm1.kernel, precomputed_stprm2.kernel)
    event_shape_1 = [5]
    event_shape_2 = [10]
    self.assertIsInstance(stprm1.kernel.schur_complement.base_kernel,
                          psd_kernels.ExponentiatedQuadratic)
    self.assertIsInstance(stprm2.kernel.schur_complement.base_kernel,
                          psd_kernels.ExpSinSquared)
    self.assertAllEqual(self.evaluate(stprm1.batch_shape_tensor()),
                        self.evaluate(stprm2.batch_shape_tensor()))
    self.assertAllEqual(self.evaluate(stprm1.event_shape_tensor()),
                        event_shape_1)
    self.assertAllEqual(self.evaluate(stprm2.event_shape_tensor()),
                        event_shape_2)
    self.assertAllEqual(self.evaluate(stprm1.index_points), index_points_1)
    self.assertAllEqual(self.evaluate(stprm2.index_points), index_points_2)
# Script entry point: run the test suite when executed directly.
if __name__ == '__main__':
  test_util.main()
| 40.469388
| 119
| 0.680931
|
import numpy as np
import tensorflow.compat.v2 as tf
from tensorflow_probability.python import distributions as tfd
from tensorflow_probability.python.internal import test_util
from tensorflow_probability.python.math import psd_kernels
@test_util.test_all_tf_execution_regimes
class StudentTProcessRegressionModelTest(test_util.TestCase):
  """Tests for `tfd.StudentTProcessRegressionModel` (STPRM)."""

  def testInstantiate(self):
    """Checks batch/event/sample shapes and that `cholesky_fn` is retained."""
    df = np.float64(1.)
    # 5x5 grid of index points in R^2, flattened to shape [25, 2].
    index_points = np.linspace(-4., 4., 5, dtype=np.float64)
    index_points = np.stack(np.meshgrid(index_points, index_points), axis=-1)
    index_points = np.reshape(index_points, [-1, 2])
    # Kernel parameters with broadcast batch_shape [2, 4, 1, 3].
    amplitude = np.array([1., 2.], np.float64).reshape([2, 1, 1, 1])
    length_scale = np.array([.1, .2, .3, .4], np.float64).reshape(
        [1, 4, 1, 1])
    observation_noise_variance = np.array(
        [1e-5, 1e-6, 1e-9], np.float64).reshape([1, 1, 1, 3])
    observation_index_points = (
        np.random.uniform(-1., 1., (3, 7, 2)).astype(np.float64))
    observations = np.random.uniform(-1., 1., (3, 7)).astype(np.float64)
    def cholesky_fn(x):
      # Custom factorization (diagonal shifted by 1.) — only used to verify
      # that the exact callable is plumbed through to the distribution.
      return tf.linalg.cholesky(
          tf.linalg.set_diag(x, tf.linalg.diag_part(x) + 1.))
    kernel = psd_kernels.ExponentiatedQuadratic(amplitude, length_scale)
    stprm = tfd.StudentTProcessRegressionModel(
        df=df,
        kernel=kernel,
        index_points=index_points,
        observation_index_points=observation_index_points,
        observations=observations,
        observation_noise_variance=observation_noise_variance,
        cholesky_fn=cholesky_fn)
    batch_shape = [2, 4, 1, 3]
    event_shape = [25]
    sample_shape = [7, 2]
    # NOTE(review): debug prints left in from upstream; harmless under pytest.
    print(stprm.batch_shape)
    print(stprm.kernel.batch_shape)
    print(stprm.kernel.schur_complement.batch_shape)
    print(stprm.kernel.schur_complement.base_kernel.batch_shape)
    self.assertIs(cholesky_fn, stprm.cholesky_fn)
    samples = stprm.sample(sample_shape, seed=test_util.test_seed())
    self.assertAllEqual(stprm.batch_shape_tensor(), batch_shape)
    self.assertAllEqual(stprm.event_shape_tensor(), event_shape)
    self.assertAllEqual(self.evaluate(samples).shape,
                        sample_shape + batch_shape + event_shape)

  def testMeanSameAsGPRM(self):
    """Posterior mean of the STPRM should match the GP regression model."""
    df = np.float64(3.)
    index_points = np.linspace(-4., 4., 5, dtype=np.float64)
    index_points = np.stack(np.meshgrid(index_points, index_points), axis=-1)
    index_points = np.reshape(index_points, [-1, 2])
    amplitude = np.array([1., 2., 3., 4., 5.], np.float64).reshape([5, 1])
    length_scale = np.array([.1, .2, .3], np.float64).reshape(
        [1, 3])
    observation_noise_variance = np.array(
        [1e-5, 1e-6, 1e-9], np.float64).reshape([1, 3])
    observation_index_points = (
        np.random.uniform(-1., 1., (3, 7, 2)).astype(np.float64))
    observations = np.random.uniform(-1., 1., (3, 7)).astype(np.float64)
    kernel = psd_kernels.ExponentiatedQuadratic(amplitude, length_scale)
    stprm = tfd.StudentTProcessRegressionModel(
        df=df,
        kernel=kernel,
        index_points=index_points,
        observation_index_points=observation_index_points,
        observations=observations,
        observation_noise_variance=observation_noise_variance)
    gprm = tfd.GaussianProcessRegressionModel(
        kernel=kernel,
        index_points=index_points,
        observation_index_points=observation_index_points,
        observations=observations,
        observation_noise_variance=observation_noise_variance)
    self.assertAllClose(self.evaluate(stprm.mean()), self.evaluate(gprm.mean()))

  def testLogProbNearGPRM(self):
    """For very large df the Student-T process log_prob approaches the GP's."""
    df = np.float64(1e6)
    index_points = np.linspace(-4., 4., 5, dtype=np.float64)
    index_points = np.stack(np.meshgrid(index_points, index_points), axis=-1)
    index_points = np.reshape(index_points, [-1, 2])
    amplitude = np.array([1., 2., 3., 4., 5.], np.float64).reshape([5, 1])
    length_scale = np.array([.1, .2, .3], np.float64).reshape(
        [1, 3])
    observation_noise_variance = np.array(
        [1e-5, 1e-6, 1e-9], np.float64).reshape([1, 3])
    observation_index_points = (
        np.random.uniform(-1., 1., (3, 7, 2)).astype(np.float64))
    observations = np.random.uniform(-1., 1., (3, 7)).astype(np.float64)
    kernel = psd_kernels.ExponentiatedQuadratic(amplitude, length_scale)
    stprm = tfd.StudentTProcessRegressionModel(
        df=df,
        kernel=kernel,
        index_points=index_points,
        observation_index_points=observation_index_points,
        observations=observations,
        observation_noise_variance=observation_noise_variance)
    gprm = tfd.GaussianProcessRegressionModel(
        kernel=kernel,
        index_points=index_points,
        observation_index_points=observation_index_points,
        observations=observations,
        observation_noise_variance=observation_noise_variance)
    x = np.linspace(-3., 3., 25)
    self.assertAllClose(
        self.evaluate(stprm.log_prob(x)),
        self.evaluate(gprm.log_prob(x)), rtol=2e-5)

  def testMeanVarianceAndCovariancePrecomputed(self):
    """Precomputed model must agree with the lazily-computed one."""
    amplitude = np.array([1., 2.], np.float64).reshape([2, 1])
    length_scale = np.array([.1, .2, .3], np.float64).reshape([1, 3])
    observation_noise_variance = np.array([1e-9], np.float64)
    df = np.float64(3.)
    observation_index_points = (
        np.random.uniform(-1., 1., (1, 1, 7, 2)).astype(np.float64))
    observations = np.random.uniform(-1., 1., (1, 1, 7)).astype(np.float64)
    index_points = np.random.uniform(-1., 1., (6, 2)).astype(np.float64)
    kernel = psd_kernels.ExponentiatedQuadratic(amplitude, length_scale)
    stprm = tfd.StudentTProcessRegressionModel(
        df=df,
        kernel=kernel,
        index_points=index_points,
        observation_index_points=observation_index_points,
        observations=observations,
        observation_noise_variance=observation_noise_variance,
        validate_args=True)
    precomputed_stprm = tfd.StudentTProcessRegressionModel.precompute_regression_model(
        df=df,
        kernel=kernel,
        index_points=index_points,
        observation_index_points=observation_index_points,
        observations=observations,
        observation_noise_variance=observation_noise_variance,
        validate_args=True)
    self.assertAllClose(self.evaluate(precomputed_stprm.covariance()),
                        self.evaluate(stprm.covariance()))
    self.assertAllClose(self.evaluate(precomputed_stprm.variance()),
                        self.evaluate(stprm.variance()))
    self.assertAllClose(self.evaluate(precomputed_stprm.mean()),
                        self.evaluate(stprm.mean()))

  @test_util.disable_test_for_backend(
      disable_numpy=True, disable_jax=True,
      reason='Numpy and JAX have no notion of CompositeTensor/saved_model')
  def testPrecomputedCompositeTensor(self):
    """Flatten/unflatten round-trip must not recompute cached state."""
    amplitude = np.array([1., 2.], np.float64).reshape([2, 1])
    length_scale = np.array([.1, .2, .3], np.float64).reshape([1, 3])
    observation_noise_variance = np.array([1e-9], np.float64)
    observation_index_points = (
        np.random.uniform(-1., 1., (1, 1, 7, 2)).astype(np.float64))
    observations = np.random.uniform(-1., 1., (1, 1, 7)).astype(np.float64)
    index_points = np.random.uniform(-1., 1., (6, 2)).astype(np.float64)
    kernel = psd_kernels.ExponentiatedQuadratic(amplitude, length_scale)
    precomputed_stprm = tfd.StudentTProcessRegressionModel.precompute_regression_model(
        df=3.,
        kernel=kernel,
        index_points=index_points,
        observation_index_points=observation_index_points,
        observations=observations,
        observation_noise_variance=observation_noise_variance,
        validate_args=True)
    flat = tf.nest.flatten(precomputed_stprm, expand_composites=True)
    unflat = tf.nest.pack_sequence_as(
        precomputed_stprm, flat, expand_composites=True)
    self.assertIsInstance(unflat, tfd.StudentTProcessRegressionModel)
    # Check that we don't recompute the divisor matrix on flattening /
    # unflattening.
    self.assertIs(
        precomputed_stprm.kernel.schur_complement._precomputed_divisor_matrix_cholesky,  # pylint:disable=line-too-long
        unflat.kernel.schur_complement._precomputed_divisor_matrix_cholesky)
    # TODO(b/196219597): Enable this test once STPRM works across TF function
    # boundaries.
    # index_observations = np.random.uniform(-1., 1., (6,)).astype(np.float64)
    # @tf.function
    # def log_prob(d):
    #   return d.log_prob(index_observations)
    # lp = self.evaluate(precomputed_stprm.log_prob(index_observations))
    # self.assertAllClose(lp, self.evaluate(log_prob(precomputed_stprm)))
    # self.assertAllClose(lp, self.evaluate(log_prob(unflat)))

  def testEmptyDataMatchesStPPrior(self):
    """With no observations the regression model equals the StP prior."""
    df = np.float64(3.5)
    amp = np.float64(.5)
    len_scale = np.float64(.2)
    index_points = np.random.uniform(-1., 1., (10, 1)).astype(np.float64)
    # k_xx - k_xn @ (k_nn + sigma^2) @ k_nx + sigma^2
    mean_fn = lambda x: x[:, 0]**2
    kernel = psd_kernels.ExponentiatedQuadratic(amp, len_scale)
    stp = tfd.StudentTProcess(
        df,
        kernel,
        index_points,
        mean_fn=mean_fn,
        validate_args=True)
    stprm_nones = tfd.StudentTProcessRegressionModel(
        df,
        kernel=kernel,
        index_points=index_points,
        mean_fn=mean_fn,
        validate_args=True)
    stprm_zero_shapes = tfd.StudentTProcessRegressionModel(
        df,
        kernel=kernel,
        index_points=index_points,
        observation_index_points=tf.ones([0, 1], tf.float64),
        observations=tf.ones([0], tf.float64),
        mean_fn=mean_fn,
        validate_args=True)
    # Both "no observations" spellings (None and zero-length tensors) must
    # reduce to the prior.
    for stprm in [stprm_nones, stprm_zero_shapes]:
      self.assertAllClose(
          self.evaluate(stp.mean()), self.evaluate(stprm.mean()))
      self.assertAllClose(self.evaluate(stp.covariance()),
                          self.evaluate(stprm.covariance()))
      self.assertAllClose(self.evaluate(stp.variance()),
                          self.evaluate(stprm.variance()))
      observations = np.random.uniform(-1., 1., 10).astype(np.float64)
      self.assertAllClose(self.evaluate(stp.log_prob(observations)),
                          self.evaluate(stprm.log_prob(observations)))

  def testCopy(self):
    """`copy` overrides selected parameters and keeps the rest shared."""
    # 5 random index points in R^2
    index_points_1 = np.random.uniform(-4., 4., (5, 2)).astype(np.float32)
    # 10 random index points in R^2
    index_points_2 = np.random.uniform(-4., 4., (10, 2)).astype(np.float32)
    observation_index_points_1 = (
        np.random.uniform(-4., 4., (7, 2)).astype(np.float32))
    observation_index_points_2 = (
        np.random.uniform(-4., 4., (9, 2)).astype(np.float32))
    observations_1 = np.random.uniform(-1., 1., 7).astype(np.float32)
    observations_2 = np.random.uniform(-1., 1., 9).astype(np.float32)
    # ==> shape = [6, 25, 2]
    mean_fn = lambda x: np.array([0.], np.float32)
    kernel_1 = psd_kernels.ExponentiatedQuadratic()
    kernel_2 = psd_kernels.ExpSinSquared()
    stprm1 = tfd.StudentTProcessRegressionModel(
        df=5.,
        kernel=kernel_1,
        index_points=index_points_1,
        observation_index_points=observation_index_points_1,
        observations=observations_1,
        mean_fn=mean_fn,
        validate_args=True)
    stprm2 = stprm1.copy(
        kernel=kernel_2,
        index_points=index_points_2,
        observation_index_points=observation_index_points_2,
        observations=observations_2)
    precomputed_stprm1 = (
        tfd.StudentTProcessRegressionModel.precompute_regression_model(
            df=5.,
            kernel=kernel_1,
            index_points=index_points_1,
            observation_index_points=observation_index_points_1,
            observations=observations_1,
            mean_fn=mean_fn,
            validate_args=True))
    precomputed_stprm2 = precomputed_stprm1.copy(index_points=index_points_2)
    self.assertIs(precomputed_stprm1.mean_fn, precomputed_stprm2.mean_fn)
    self.assertIs(precomputed_stprm1.kernel, precomputed_stprm2.kernel)
    event_shape_1 = [5]
    event_shape_2 = [10]
    self.assertIsInstance(stprm1.kernel.schur_complement.base_kernel,
                          psd_kernels.ExponentiatedQuadratic)
    self.assertIsInstance(stprm2.kernel.schur_complement.base_kernel,
                          psd_kernels.ExpSinSquared)
    self.assertAllEqual(self.evaluate(stprm1.batch_shape_tensor()),
                        self.evaluate(stprm2.batch_shape_tensor()))
    self.assertAllEqual(self.evaluate(stprm1.event_shape_tensor()),
                        event_shape_1)
    self.assertAllEqual(self.evaluate(stprm2.event_shape_tensor()),
                        event_shape_2)
    self.assertAllEqual(self.evaluate(stprm1.index_points), index_points_1)
    self.assertAllEqual(self.evaluate(stprm2.index_points), index_points_2)
# Script entry point: run the test suite when executed directly.
if __name__ == '__main__':
  test_util.main()
| true
| true
|
f718815b2922106732719f8c9367ef276f71008f
| 24,159
|
py
|
Python
|
contents/tts/content/TensorflowTTS/tensorflow_tts/utils/group_conv.py
|
PIN-devel/inside-kids
|
554e4a0a5654c9a0f5237b904bb2ca6db88a55cb
|
[
"MIT"
] | 2
|
2020-07-03T05:47:47.000Z
|
2020-07-03T19:59:09.000Z
|
contents/tts/content/TensorflowTTS/tensorflow_tts/utils/group_conv.py
|
PIN-devel/inside-kids
|
554e4a0a5654c9a0f5237b904bb2ca6db88a55cb
|
[
"MIT"
] | 1
|
2021-02-26T04:10:19.000Z
|
2021-02-26T04:10:19.000Z
|
contents/tts/content/TensorflowTTS/tensorflow_tts/utils/group_conv.py
|
PIN-devel/inside-kids
|
554e4a0a5654c9a0f5237b904bb2ca6db88a55cb
|
[
"MIT"
] | 4
|
2021-02-23T13:05:59.000Z
|
2021-04-23T05:15:32.000Z
|
# -*- coding: utf-8 -*-
# This code is copy from https://github.com/tensorflow/tensorflow/pull/36773.
"""Group Convolution Modules."""
from tensorflow.python.framework import tensor_shape
from tensorflow.python.keras import activations
from tensorflow.python.keras import constraints
from tensorflow.python.keras import initializers
from tensorflow.python.keras import regularizers
from tensorflow.python.keras.engine.base_layer import Layer
from tensorflow.python.keras.engine.input_spec import InputSpec
from tensorflow.python.keras.utils import conv_utils
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import nn
from tensorflow.python.ops import nn_ops
from tensorflow.python.keras.layers import Conv1D
from tensorflow.python.keras.layers import SeparableConv1D
class Convolution(object):
    """Helper class for convolution.
    Note that this class assumes that shapes of input and filter passed to
    __call__ are compatible with input_shape and filter_shape passed to the
    constructor.
    Arguments
      input_shape: static shape of input. i.e. input.get_shape().
      filter_shape: static shape of the filter. i.e. filter.get_shape().
      padding: see convolution.
      strides: see convolution.
      dilation_rate: see convolution.
      name: see convolution.
      data_format: see convolution.
    """

    def __init__(
        self,
        input_shape,
        filter_shape,
        padding,
        strides=None,
        dilation_rate=None,
        name=None,
        data_format=None,
    ):
        """Helper function for convolution."""
        # Prefer the filter's rank; fall back to the input's if unknown.
        num_total_dims = filter_shape.ndims
        if num_total_dims is None:
            num_total_dims = input_shape.ndims
        if num_total_dims is None:
            raise ValueError("rank of input or filter must be known")
        num_spatial_dims = num_total_dims - 2
        try:
            input_shape.with_rank(num_spatial_dims + 2)
        except ValueError:
            raise ValueError("input tensor must have rank %d" % (num_spatial_dims + 2))
        try:
            filter_shape.with_rank(num_spatial_dims + 2)
        except ValueError:
            raise ValueError("filter tensor must have rank %d" % (num_spatial_dims + 2))
        # Channels-last (e.g. "NHWC") unless data_format starts with "NC".
        if data_format is None or not data_format.startswith("NC"):
            input_channels_dim = tensor_shape.dimension_at_index(
                input_shape, num_spatial_dims + 1
            )
            spatial_dims = range(1, num_spatial_dims + 1)
        else:
            input_channels_dim = tensor_shape.dimension_at_index(input_shape, 1)
            spatial_dims = range(2, num_spatial_dims + 2)
        # For grouped convolution the filter's in-channel dimension must
        # evenly divide the input channel count (divisibility, not equality).
        filter_dim = tensor_shape.dimension_at_index(filter_shape, num_spatial_dims)
        if not (input_channels_dim % filter_dim).is_compatible_with(0):
            raise ValueError(
                "number of input channels is not divisible by corresponding "
                "dimension of filter, {} % {} != 0".format(
                    input_channels_dim, filter_dim
                )
            )
        # NOTE(review): these are private TF APIs (copied from TF PR #36773);
        # they may break across TF versions.
        strides, dilation_rate = nn_ops._get_strides_and_dilation_rate(
            num_spatial_dims, strides, dilation_rate
        )
        self.input_shape = input_shape
        self.filter_shape = filter_shape
        self.data_format = data_format
        self.strides = strides
        self.padding = padding
        self.name = name
        self.dilation_rate = dilation_rate
        # Wraps the non-atrous conv op with space-to-batch to implement
        # dilation.
        self.conv_op = nn_ops._WithSpaceToBatch(
            input_shape,
            dilation_rate=dilation_rate,
            padding=padding,
            build_op=self._build_op,
            filter_shape=filter_shape,
            spatial_dims=spatial_dims,
            data_format=data_format,
        )

    def _build_op(self, _, padding):
        # Callback used by _WithSpaceToBatch to build the inner conv op.
        return nn_ops._NonAtrousConvolution(
            self.input_shape,
            filter_shape=self.filter_shape,
            padding=padding,
            data_format=self.data_format,
            strides=self.strides,
            name=self.name,
        )

    def __call__(self, inp, filter):
        # Applies the prepared convolution op to the given input and filter.
        return self.conv_op(inp, filter)
class Conv(Layer):
"""Abstract N-D convolution layer (private, used as implementation base).
This layer creates a convolution kernel that is convolved
(actually cross-correlated) with the layer input to produce a tensor of
outputs. If `use_bias` is True (and a `bias_initializer` is provided),
a bias vector is created and added to the outputs. Finally, if
`activation` is not `None`, it is applied to the outputs as well.
Note: layer attributes cannot be modified after the layer has been called
once (except the `trainable` attribute).
Arguments:
rank: An integer, the rank of the convolution, e.g. "2" for 2D convolution.
filters: Integer, the dimensionality of the output space (i.e. the number
of filters in the convolution).
kernel_size: An integer or tuple/list of n integers, specifying the
length of the convolution window.
strides: An integer or tuple/list of n integers,
specifying the stride length of the convolution.
Specifying any stride value != 1 is incompatible with specifying
any `dilation_rate` value != 1.
padding: One of `"valid"`, `"same"`, or `"causal"` (case-insensitive).
data_format: A string, one of `channels_last` (default) or `channels_first`.
The ordering of the dimensions in the inputs.
`channels_last` corresponds to inputs with shape
`(batch_size, ..., channels)` while `channels_first` corresponds to
inputs with shape `(batch_size, channels, ...)`.
dilation_rate: An integer or tuple/list of n integers, specifying
the dilation rate to use for dilated convolution.
Currently, specifying any `dilation_rate` value != 1 is
incompatible with specifying any `strides` value != 1.
groups: Integer, the number of channel groups controlling the connections
between inputs and outputs. Input channels and `filters` must both be
divisible by `groups`. For example,
- At `groups=1`, all inputs are convolved to all outputs.
- At `groups=2`, the operation becomes equivalent to having two
convolutional layers side by side, each seeing half the input
channels, and producing half the output channels, and both
subsequently concatenated.
- At `groups=input_channels`, each input channel is convolved with its
own set of filters, of size `input_channels / filters`
activation: Activation function to use.
If you don't specify anything, no activation is applied.
use_bias: Boolean, whether the layer uses a bias.
kernel_initializer: An initializer for the convolution kernel.
bias_initializer: An initializer for the bias vector. If None, the default
initializer will be used.
kernel_regularizer: Optional regularizer for the convolution kernel.
bias_regularizer: Optional regularizer for the bias vector.
activity_regularizer: Optional regularizer function for the output.
kernel_constraint: Optional projection function to be applied to the
kernel after being updated by an `Optimizer` (e.g. used to implement
norm constraints or value constraints for layer weights). The function
must take as input the unprojected variable and must return the
projected variable (which must have the same shape). Constraints are
not safe to use when doing asynchronous distributed training.
bias_constraint: Optional projection function to be applied to the
bias after being updated by an `Optimizer`.
trainable: Boolean, if `True` the weights of this layer will be marked as
trainable (and listed in `layer.trainable_weights`).
name: A string, the name of the layer.
"""
def __init__(
self,
rank,
filters,
kernel_size,
strides=1,
padding="valid",
data_format=None,
dilation_rate=1,
groups=1,
activation=None,
use_bias=True,
kernel_initializer="glorot_uniform",
bias_initializer="zeros",
kernel_regularizer=None,
bias_regularizer=None,
activity_regularizer=None,
kernel_constraint=None,
bias_constraint=None,
trainable=True,
name=None,
**kwargs
):
super(Conv, self).__init__(
trainable=trainable,
name=name,
activity_regularizer=regularizers.get(activity_regularizer),
**kwargs
)
self.rank = rank
if filters is not None and not isinstance(filters, int):
filters = int(filters)
self.filters = filters
self.groups = groups or 1
if filters is not None and filters % self.groups != 0:
raise ValueError(
"The number of filters must be evenly divisible by the number of "
"groups. Received: groups={}, filters={}".format(groups, filters)
)
self.kernel_size = conv_utils.normalize_tuple(kernel_size, rank, "kernel_size")
if not all(self.kernel_size):
raise ValueError(
"The argument `kernel_size` cannot contain 0(s). "
"Received: %s" % (kernel_size,)
)
self.strides = conv_utils.normalize_tuple(strides, rank, "strides")
self.padding = conv_utils.normalize_padding(padding)
if self.padding == "causal" and not isinstance(self, (Conv1D, SeparableConv1D)):
raise ValueError(
"Causal padding is only supported for `Conv1D`"
"and ``SeparableConv1D`."
)
self.data_format = conv_utils.normalize_data_format(data_format)
self.dilation_rate = conv_utils.normalize_tuple(
dilation_rate, rank, "dilation_rate"
)
self.activation = activations.get(activation)
self.use_bias = use_bias
self.kernel_initializer = initializers.get(kernel_initializer)
self.bias_initializer = initializers.get(bias_initializer)
self.kernel_regularizer = regularizers.get(kernel_regularizer)
self.bias_regularizer = regularizers.get(bias_regularizer)
self.kernel_constraint = constraints.get(kernel_constraint)
self.bias_constraint = constraints.get(bias_constraint)
self.input_spec = InputSpec(ndim=self.rank + 2)
    def build(self, input_shape):
        """Creates the layer's weights given the (static) input shape.

        Validates that the input channel count is divisible by `groups`,
        creates the (grouped) `kernel` and optional `bias` variables, and
        prepares the cached `Convolution` op used by `call`.
        """
        input_shape = tensor_shape.TensorShape(input_shape)
        input_channel = self._get_input_channel(input_shape)
        if input_channel % self.groups != 0:
            raise ValueError(
                "The number of input channels must be evenly divisible by the number "
                "of groups. Received groups={}, but the input has {} channels "
                "(full input shape is {}).".format(
                    self.groups, input_channel, input_shape
                )
            )
        # Grouped kernel: each group convolves input_channel // groups
        # channels; the last dimension still spans all output filters.
        kernel_shape = self.kernel_size + (input_channel // self.groups, self.filters)
        self.kernel = self.add_weight(
            name="kernel",
            shape=kernel_shape,
            initializer=self.kernel_initializer,
            regularizer=self.kernel_regularizer,
            constraint=self.kernel_constraint,
            trainable=True,
            dtype=self.dtype,
        )
        if self.use_bias:
            self.bias = self.add_weight(
                name="bias",
                shape=(self.filters,),
                initializer=self.bias_initializer,
                regularizer=self.bias_regularizer,
                constraint=self.bias_constraint,
                trainable=True,
                dtype=self.dtype,
            )
        else:
            self.bias = None
        # Pin the channel axis so future inputs must match this channel count.
        channel_axis = self._get_channel_axis()
        self.input_spec = InputSpec(
            ndim=self.rank + 2, axes={channel_axis: input_channel}
        )
        # Cache shape/format info so `call` can detect when the conv op must
        # be rebuilt for a different input shape.
        self._build_conv_op_input_shape = input_shape
        self._build_input_channel = input_channel
        self._padding_op = self._get_padding_op()
        self._conv_op_data_format = conv_utils.convert_data_format(
            self.data_format, self.rank + 2
        )
        self._convolution_op = Convolution(
            input_shape,
            filter_shape=self.kernel.shape,
            dilation_rate=self.dilation_rate,
            strides=self.strides,
            padding=self._padding_op,
            data_format=self._conv_op_data_format,
        )
        self.built = True
    def call(self, inputs):
        """Applies the convolution (plus optional bias and activation)."""
        # Rebuild the cached conv op if the input shape has changed since the
        # op was last constructed (predicate defined elsewhere in this class).
        if self._recreate_conv_op(inputs):
            self._convolution_op = Convolution(
                inputs.get_shape(),
                filter_shape=self.kernel.shape,
                dilation_rate=self.dilation_rate,
                strides=self.strides,
                padding=self._padding_op,
                data_format=self._conv_op_data_format,
            )
            self._build_conv_op_input_shape = inputs.get_shape()
        # Apply causal padding to inputs for Conv1D.
        if self.padding == "causal" and self.__class__.__name__ == "Conv1D":
            inputs = array_ops.pad(inputs, self._compute_causal_padding())
        outputs = self._convolution_op(inputs, self.kernel)
        if self.use_bias:
            if self.data_format == "channels_first":
                if self.rank == 1:
                    # nn.bias_add does not accept a 1D input tensor.
                    bias = array_ops.reshape(self.bias, (1, self.filters, 1))
                    outputs += bias
                else:
                    outputs = nn.bias_add(outputs, self.bias, data_format="NCHW")
            else:
                outputs = nn.bias_add(outputs, self.bias, data_format="NHWC")
        if self.activation is not None:
            return self.activation(outputs)
        return outputs
def compute_output_shape(self, input_shape):
input_shape = tensor_shape.TensorShape(input_shape).as_list()
if self.data_format == "channels_last":
space = input_shape[1:-1]
new_space = []
for i in range(len(space)):
new_dim = conv_utils.conv_output_length(
space[i],
self.kernel_size[i],
padding=self.padding,
stride=self.strides[i],
dilation=self.dilation_rate[i],
)
new_space.append(new_dim)
return tensor_shape.TensorShape(
[input_shape[0]] + new_space + [self.filters]
)
else:
space = input_shape[2:]
new_space = []
for i in range(len(space)):
new_dim = conv_utils.conv_output_length(
space[i],
self.kernel_size[i],
padding=self.padding,
stride=self.strides[i],
dilation=self.dilation_rate[i],
)
new_space.append(new_dim)
return tensor_shape.TensorShape([input_shape[0], self.filters] + new_space)
def get_config(self):
config = {
"filters": self.filters,
"kernel_size": self.kernel_size,
"strides": self.strides,
"padding": self.padding,
"data_format": self.data_format,
"dilation_rate": self.dilation_rate,
"groups": self.groups,
"activation": activations.serialize(self.activation),
"use_bias": self.use_bias,
"kernel_initializer": initializers.serialize(self.kernel_initializer),
"bias_initializer": initializers.serialize(self.bias_initializer),
"kernel_regularizer": regularizers.serialize(self.kernel_regularizer),
"bias_regularizer": regularizers.serialize(self.bias_regularizer),
"activity_regularizer": regularizers.serialize(self.activity_regularizer),
"kernel_constraint": constraints.serialize(self.kernel_constraint),
"bias_constraint": constraints.serialize(self.bias_constraint),
}
base_config = super(Conv, self).get_config()
return dict(list(base_config.items()) + list(config.items()))
def _compute_causal_padding(self):
"""Calculates padding for 'causal' option for 1-d conv layers."""
left_pad = self.dilation_rate[0] * (self.kernel_size[0] - 1)
if self.data_format == "channels_last":
causal_padding = [[0, 0], [left_pad, 0], [0, 0]]
else:
causal_padding = [[0, 0], [0, 0], [left_pad, 0]]
return causal_padding
def _get_channel_axis(self):
if self.data_format == "channels_first":
return 1
else:
return -1
def _get_input_channel(self, input_shape):
channel_axis = self._get_channel_axis()
if input_shape.dims[channel_axis].value is None:
raise ValueError(
"The channel dimension of the inputs "
"should be defined. Found `None`."
)
return int(input_shape[channel_axis])
def _get_padding_op(self):
if self.padding == "causal":
op_padding = "valid"
else:
op_padding = self.padding
if not isinstance(op_padding, (list, tuple)):
op_padding = op_padding.upper()
return op_padding
def _recreate_conv_op(self, inputs):
"""Recreate conv_op if necessary.
Check if the input_shape in call() is different from that in build().
For the values that are not None, if they are different, recreate
the _convolution_op to avoid the stateful behavior.
Args:
inputs: The input data to call() method.
Returns:
`True` or `False` to indicate whether to recreate the conv_op.
"""
call_input_shape = inputs.get_shape()
for axis in range(1, len(call_input_shape)):
if (
call_input_shape[axis] is not None
and self._build_conv_op_input_shape[axis] is not None
and call_input_shape[axis] != self._build_conv_op_input_shape[axis]
):
return True
return False
class GroupConv1D(Conv):
    """1D (temporal) convolution layer with channel groups.

    Thin wrapper over the generic `Conv` base fixed at `rank=1`. The `groups`
    argument splits the channel connections: input channels and `filters`
    must both be divisible by `groups`; at `groups=1` this behaves like a
    plain `Conv1D`, while at `groups=input_channels` each input channel gets
    its own filter set. `"causal"` padding yields dilated causal convolutions
    where `output[t]` does not depend on `input[t+1:]`.

    Input shape:
        3D tensor `(batch_size, steps, input_dim)`.
    Output shape:
        3D tensor `(batch_size, new_steps, filters)`; `steps` may change due
        to padding or strides.

    Raises:
        ValueError: when both `strides` > 1 and `dilation_rate` > 1 (raised
            by the `Conv` base — TODO confirm, check happens outside this view).
    """
    def __init__(
        self,
        filters,
        kernel_size,
        strides=1,
        padding="valid",
        data_format="channels_last",
        dilation_rate=1,
        groups=1,
        activation=None,
        use_bias=True,
        kernel_initializer="glorot_uniform",
        bias_initializer="zeros",
        kernel_regularizer=None,
        bias_regularizer=None,
        activity_regularizer=None,
        kernel_constraint=None,
        bias_constraint=None,
        **kwargs
    ):
        # Delegate everything to Conv with rank fixed to 1; string arguments
        # (activation/initializers/regularizers/constraints) are resolved to
        # objects here via the respective `get()` helpers.
        super().__init__(
            rank=1,
            filters=filters,
            kernel_size=kernel_size,
            strides=strides,
            padding=padding,
            data_format=data_format,
            dilation_rate=dilation_rate,
            groups=groups,
            activation=activations.get(activation),
            use_bias=use_bias,
            kernel_initializer=initializers.get(kernel_initializer),
            bias_initializer=initializers.get(bias_initializer),
            kernel_regularizer=regularizers.get(kernel_regularizer),
            bias_regularizer=regularizers.get(bias_regularizer),
            activity_regularizer=regularizers.get(activity_regularizer),
            kernel_constraint=constraints.get(kernel_constraint),
            bias_constraint=constraints.get(bias_constraint),
            **kwargs
        )
| 42.759292
| 88
| 0.633263
|
from tensorflow.python.framework import tensor_shape
from tensorflow.python.keras import activations
from tensorflow.python.keras import constraints
from tensorflow.python.keras import initializers
from tensorflow.python.keras import regularizers
from tensorflow.python.keras.engine.base_layer import Layer
from tensorflow.python.keras.engine.input_spec import InputSpec
from tensorflow.python.keras.utils import conv_utils
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import nn
from tensorflow.python.ops import nn_ops
from tensorflow.python.keras.layers import Conv1D
from tensorflow.python.keras.layers import SeparableConv1D
class Convolution(object):
    """Helper wrapping a dilation-aware N-D convolution op.

    Validates input/filter ranks and channel divisibility, then builds a
    `nn_ops._WithSpaceToBatch` op so dilation is handled via space-to-batch
    around a non-atrous convolution. Instances are callable:
    `conv(inp, filter)`.
    """
    def __init__(
        self,
        input_shape,
        filter_shape,
        padding,
        strides=None,
        dilation_rate=None,
        name=None,
        data_format=None,
    ):
        """Validate shapes and construct the underlying convolution op.

        Raises:
            ValueError: if neither input nor filter rank is known, if either
                rank is inconsistent, or if the input channel count is not
                divisible by the filter's channel dimension (grouped conv).
        """
        # Prefer the filter's rank; fall back to the input's.
        num_total_dims = filter_shape.ndims
        if num_total_dims is None:
            num_total_dims = input_shape.ndims
        if num_total_dims is None:
            raise ValueError("rank of input or filter must be known")
        num_spatial_dims = num_total_dims - 2
        try:
            input_shape.with_rank(num_spatial_dims + 2)
        except ValueError:
            raise ValueError("input tensor must have rank %d" % (num_spatial_dims + 2))
        try:
            filter_shape.with_rank(num_spatial_dims + 2)
        except ValueError:
            raise ValueError("filter tensor must have rank %d" % (num_spatial_dims + 2))
        # Channels-last (default) keeps channels at the end; "NC*" formats
        # keep channels at axis 1.
        if data_format is None or not data_format.startswith("NC"):
            input_channels_dim = tensor_shape.dimension_at_index(
                input_shape, num_spatial_dims + 1
            )
            spatial_dims = range(1, num_spatial_dims + 1)
        else:
            input_channels_dim = tensor_shape.dimension_at_index(input_shape, 1)
            spatial_dims = range(2, num_spatial_dims + 2)
        filter_dim = tensor_shape.dimension_at_index(filter_shape, num_spatial_dims)
        # Grouped convolution: input channels must split evenly over the
        # filter's per-group channel dimension.
        if not (input_channels_dim % filter_dim).is_compatible_with(0):
            raise ValueError(
                "number of input channels is not divisible by corresponding "
                "dimension of filter, {} % {} != 0".format(
                    input_channels_dim, filter_dim
                )
            )
        strides, dilation_rate = nn_ops._get_strides_and_dilation_rate(
            num_spatial_dims, strides, dilation_rate
        )
        self.input_shape = input_shape
        self.filter_shape = filter_shape
        self.data_format = data_format
        self.strides = strides
        self.padding = padding
        self.name = name
        self.dilation_rate = dilation_rate
        # Dilation is implemented by space-to-batch around a plain conv.
        self.conv_op = nn_ops._WithSpaceToBatch(
            input_shape,
            dilation_rate=dilation_rate,
            padding=padding,
            build_op=self._build_op,
            filter_shape=filter_shape,
            spatial_dims=spatial_dims,
            data_format=data_format,
        )
    def _build_op(self, _, padding):
        """Build the inner non-atrous convolution for `_WithSpaceToBatch`."""
        return nn_ops._NonAtrousConvolution(
            self.input_shape,
            filter_shape=self.filter_shape,
            padding=padding,
            data_format=self.data_format,
            strides=self.strides,
            name=self.name,
        )
    def __call__(self, inp, filter):
        """Apply the convolution op to `inp` with the given `filter`."""
        return self.conv_op(inp, filter)
class Conv(Layer):
    """Abstract N-D convolution layer (shared base for Conv1D/2D/3D variants).

    Creates a convolution kernel (and optional bias), supports channel
    `groups`, dilation, and `"causal"` padding for 1-D layers. Subclasses
    set `rank` to the number of spatial dimensions.
    """
    def __init__(
        self,
        rank,
        filters,
        kernel_size,
        strides=1,
        padding="valid",
        data_format=None,
        dilation_rate=1,
        groups=1,
        activation=None,
        use_bias=True,
        kernel_initializer="glorot_uniform",
        bias_initializer="zeros",
        kernel_regularizer=None,
        bias_regularizer=None,
        activity_regularizer=None,
        kernel_constraint=None,
        bias_constraint=None,
        trainable=True,
        name=None,
        **kwargs
    ):
        """Normalize and validate all constructor arguments.

        Raises:
            ValueError: if `filters` is not divisible by `groups`, if
                `kernel_size` contains 0, or if `"causal"` padding is used
                on a layer other than `Conv1D`/`SeparableConv1D`.
        """
        super(Conv, self).__init__(
            trainable=trainable,
            name=name,
            activity_regularizer=regularizers.get(activity_regularizer),
            **kwargs
        )
        self.rank = rank
        # Coerce e.g. float filter counts to int.
        if filters is not None and not isinstance(filters, int):
            filters = int(filters)
        self.filters = filters
        self.groups = groups or 1
        if filters is not None and filters % self.groups != 0:
            raise ValueError(
                "The number of filters must be evenly divisible by the number of "
                "groups. Received: groups={}, filters={}".format(groups, filters)
            )
        self.kernel_size = conv_utils.normalize_tuple(kernel_size, rank, "kernel_size")
        if not all(self.kernel_size):
            raise ValueError(
                "The argument `kernel_size` cannot contain 0(s). "
                "Received: %s" % (kernel_size,)
            )
        self.strides = conv_utils.normalize_tuple(strides, rank, "strides")
        self.padding = conv_utils.normalize_padding(padding)
        if self.padding == "causal" and not isinstance(self, (Conv1D, SeparableConv1D)):
            raise ValueError(
                "Causal padding is only supported for `Conv1D`"
                "and ``SeparableConv1D`."
            )
        self.data_format = conv_utils.normalize_data_format(data_format)
        self.dilation_rate = conv_utils.normalize_tuple(
            dilation_rate, rank, "dilation_rate"
        )
        self.activation = activations.get(activation)
        self.use_bias = use_bias
        self.kernel_initializer = initializers.get(kernel_initializer)
        self.bias_initializer = initializers.get(bias_initializer)
        self.kernel_regularizer = regularizers.get(kernel_regularizer)
        self.bias_regularizer = regularizers.get(bias_regularizer)
        self.kernel_constraint = constraints.get(kernel_constraint)
        self.bias_constraint = constraints.get(bias_constraint)
        self.input_spec = InputSpec(ndim=self.rank + 2)
    def build(self, input_shape):
        """Create the kernel/bias weights and the cached convolution op.

        Raises:
            ValueError: if the input channel count is unknown or not
                divisible by `groups`.
        """
        input_shape = tensor_shape.TensorShape(input_shape)
        input_channel = self._get_input_channel(input_shape)
        if input_channel % self.groups != 0:
            raise ValueError(
                "The number of input channels must be evenly divisible by the number "
                "of groups. Received groups={}, but the input has {} channels "
                "(full input shape is {}).".format(
                    self.groups, input_channel, input_shape
                )
            )
        # Grouped conv: each group sees input_channel // groups channels.
        kernel_shape = self.kernel_size + (input_channel // self.groups, self.filters)
        self.kernel = self.add_weight(
            name="kernel",
            shape=kernel_shape,
            initializer=self.kernel_initializer,
            regularizer=self.kernel_regularizer,
            constraint=self.kernel_constraint,
            trainable=True,
            dtype=self.dtype,
        )
        if self.use_bias:
            self.bias = self.add_weight(
                name="bias",
                shape=(self.filters,),
                initializer=self.bias_initializer,
                regularizer=self.bias_regularizer,
                constraint=self.bias_constraint,
                trainable=True,
                dtype=self.dtype,
            )
        else:
            self.bias = None
        channel_axis = self._get_channel_axis()
        # Pin the channel axis so future calls must match this channel count.
        self.input_spec = InputSpec(
            ndim=self.rank + 2, axes={channel_axis: input_channel}
        )
        self._build_conv_op_input_shape = input_shape
        self._build_input_channel = input_channel
        self._padding_op = self._get_padding_op()
        self._conv_op_data_format = conv_utils.convert_data_format(
            self.data_format, self.rank + 2
        )
        self._convolution_op = Convolution(
            input_shape,
            filter_shape=self.kernel.shape,
            dilation_rate=self.dilation_rate,
            strides=self.strides,
            padding=self._padding_op,
            data_format=self._conv_op_data_format,
        )
        self.built = True
    def call(self, inputs):
        """Apply convolution, optional bias and activation to `inputs`.

        Rebuilds the cached conv op if the runtime shape no longer matches
        the shape seen in build() (see `_recreate_conv_op`).
        """
        if self._recreate_conv_op(inputs):
            self._convolution_op = Convolution(
                inputs.get_shape(),
                filter_shape=self.kernel.shape,
                dilation_rate=self.dilation_rate,
                strides=self.strides,
                padding=self._padding_op,
                data_format=self._conv_op_data_format,
            )
            self._build_conv_op_input_shape = inputs.get_shape()
        # Causal padding is applied manually; the conv op runs 'valid'.
        if self.padding == "causal" and self.__class__.__name__ == "Conv1D":
            inputs = array_ops.pad(inputs, self._compute_causal_padding())
        outputs = self._convolution_op(inputs, self.kernel)
        if self.use_bias:
            if self.data_format == "channels_first":
                if self.rank == 1:
                    # nn.bias_add does not accept a 1D input tensor.
                    bias = array_ops.reshape(self.bias, (1, self.filters, 1))
                    outputs += bias
                else:
                    outputs = nn.bias_add(outputs, self.bias, data_format="NCHW")
            else:
                outputs = nn.bias_add(outputs, self.bias, data_format="NHWC")
        if self.activation is not None:
            return self.activation(outputs)
        return outputs
    def compute_output_shape(self, input_shape):
        """Compute the layer output shape for the given input shape."""
        input_shape = tensor_shape.TensorShape(input_shape).as_list()
        if self.data_format == "channels_last":
            space = input_shape[1:-1]
            new_space = []
            for i in range(len(space)):
                new_dim = conv_utils.conv_output_length(
                    space[i],
                    self.kernel_size[i],
                    padding=self.padding,
                    stride=self.strides[i],
                    dilation=self.dilation_rate[i],
                )
                new_space.append(new_dim)
            return tensor_shape.TensorShape(
                [input_shape[0]] + new_space + [self.filters]
            )
        else:
            space = input_shape[2:]
            new_space = []
            for i in range(len(space)):
                new_dim = conv_utils.conv_output_length(
                    space[i],
                    self.kernel_size[i],
                    padding=self.padding,
                    stride=self.strides[i],
                    dilation=self.dilation_rate[i],
                )
                new_space.append(new_dim)
            return tensor_shape.TensorShape([input_shape[0], self.filters] + new_space)
    def get_config(self):
        """Return the JSON-serializable layer configuration."""
        config = {
            "filters": self.filters,
            "kernel_size": self.kernel_size,
            "strides": self.strides,
            "padding": self.padding,
            "data_format": self.data_format,
            "dilation_rate": self.dilation_rate,
            "groups": self.groups,
            "activation": activations.serialize(self.activation),
            "use_bias": self.use_bias,
            "kernel_initializer": initializers.serialize(self.kernel_initializer),
            "bias_initializer": initializers.serialize(self.bias_initializer),
            "kernel_regularizer": regularizers.serialize(self.kernel_regularizer),
            "bias_regularizer": regularizers.serialize(self.bias_regularizer),
            "activity_regularizer": regularizers.serialize(self.activity_regularizer),
            "kernel_constraint": constraints.serialize(self.kernel_constraint),
            "bias_constraint": constraints.serialize(self.bias_constraint),
        }
        base_config = super(Conv, self).get_config()
        # `config` entries override base-layer entries on key collision.
        return dict(list(base_config.items()) + list(config.items()))
    def _compute_causal_padding(self):
        """Calculates padding for 'causal' option for 1-d conv layers."""
        left_pad = self.dilation_rate[0] * (self.kernel_size[0] - 1)
        if self.data_format == "channels_last":
            causal_padding = [[0, 0], [left_pad, 0], [0, 0]]
        else:
            causal_padding = [[0, 0], [0, 0], [left_pad, 0]]
        return causal_padding
    def _get_channel_axis(self):
        """Return the channel axis index for the configured data format."""
        if self.data_format == "channels_first":
            return 1
        else:
            return -1
    def _get_input_channel(self, input_shape):
        """Return the statically-known input channel count, or raise."""
        channel_axis = self._get_channel_axis()
        if input_shape.dims[channel_axis].value is None:
            raise ValueError(
                "The channel dimension of the inputs "
                "should be defined. Found `None`."
            )
        return int(input_shape[channel_axis])
    def _get_padding_op(self):
        """Translate the user-facing padding mode to the op-level argument."""
        # 'causal' is handled in call(); the op itself uses 'valid'.
        if self.padding == "causal":
            op_padding = "valid"
        else:
            op_padding = self.padding
        if not isinstance(op_padding, (list, tuple)):
            op_padding = op_padding.upper()
        return op_padding
    def _recreate_conv_op(self, inputs):
        """Return True if the cached conv op must be rebuilt.

        Any non-batch axis known both at call time and at build time that
        differs between the two forces a rebuild.
        """
        call_input_shape = inputs.get_shape()
        for axis in range(1, len(call_input_shape)):
            if (
                call_input_shape[axis] is not None
                and self._build_conv_op_input_shape[axis] is not None
                and call_input_shape[axis] != self._build_conv_op_input_shape[axis]
            ):
                return True
        return False
class GroupConv1D(Conv):
    """1D (temporal) grouped convolution layer.

    Thin wrapper over the generic `Conv` base fixed at `rank=1`; `groups`
    splits the channel connections (input channels and `filters` must both
    be divisible by `groups`). All string-valued arguments are resolved to
    objects via the respective `get()` helpers before delegation.
    """
    def __init__(
        self,
        filters,
        kernel_size,
        strides=1,
        padding="valid",
        data_format="channels_last",
        dilation_rate=1,
        groups=1,
        activation=None,
        use_bias=True,
        kernel_initializer="glorot_uniform",
        bias_initializer="zeros",
        kernel_regularizer=None,
        bias_regularizer=None,
        activity_regularizer=None,
        kernel_constraint=None,
        bias_constraint=None,
        **kwargs
    ):
        super().__init__(
            rank=1,
            filters=filters,
            kernel_size=kernel_size,
            strides=strides,
            padding=padding,
            data_format=data_format,
            dilation_rate=dilation_rate,
            groups=groups,
            activation=activations.get(activation),
            use_bias=use_bias,
            kernel_initializer=initializers.get(kernel_initializer),
            bias_initializer=initializers.get(bias_initializer),
            kernel_regularizer=regularizers.get(kernel_regularizer),
            bias_regularizer=regularizers.get(bias_regularizer),
            activity_regularizer=regularizers.get(activity_regularizer),
            kernel_constraint=constraints.get(kernel_constraint),
            bias_constraint=constraints.get(bias_constraint),
            **kwargs
        )
| true
| true
|
f71882478dfe74c0a4592f297f1ac49bf7457bb9
| 531
|
py
|
Python
|
tests/test_stuff.py
|
alexanderrichards/ProductionSystem
|
ea9b80f13d76be293c8e2a3387d4cb3abc56e314
|
[
"MIT"
] | null | null | null |
tests/test_stuff.py
|
alexanderrichards/ProductionSystem
|
ea9b80f13d76be293c8e2a3387d4cb3abc56e314
|
[
"MIT"
] | 43
|
2018-04-23T08:39:17.000Z
|
2019-11-26T12:17:14.000Z
|
tests/test_stuff.py
|
alexanderrichards/ProductionSystem
|
ea9b80f13d76be293c8e2a3387d4cb3abc56e314
|
[
"MIT"
] | 1
|
2019-02-05T04:17:07.000Z
|
2019-02-05T04:17:07.000Z
|
"""Test Stuff."""
from unittest import TestCase
import pkg_resources
from productionsystem.config import ConfigSystem
# def setup_module(module):
# """ setup any state specific to the execution of the given module."""
# config_instance = ConfigSystem.setup(None) # pylint: disable=no-member
# config_instance.entry_point_map = pkg_resources.get_entry_map('productionsystem')
# return config_instance
class TestStuff(TestCase):
    """Placeholder test case (no real assertions yet)."""
    def test_bob(self):
        """Placeholder test; intentionally does nothing."""
        pass
| 26.55
| 87
| 0.708098
|
from unittest import TestCase
import pkg_resources
from productionsystem.config import ConfigSystem
se):
def test_bob(self):
pass
| true
| true
|
f718825de43905bfc5be6bacd294d07ee49fda2d
| 10,286
|
py
|
Python
|
src/python/zquantum/core/utils.py
|
FredericSauv/z-quantum-core
|
f285b292159fe272d7401ba05baac7bab28475d7
|
[
"Apache-2.0"
] | null | null | null |
src/python/zquantum/core/utils.py
|
FredericSauv/z-quantum-core
|
f285b292159fe272d7401ba05baac7bab28475d7
|
[
"Apache-2.0"
] | null | null | null |
src/python/zquantum/core/utils.py
|
FredericSauv/z-quantum-core
|
f285b292159fe272d7401ba05baac7bab28475d7
|
[
"Apache-2.0"
] | null | null | null |
"""General-purpose utilities."""
import numpy as np
from scipy.linalg import expm
import random
import math
import operator
import sys
import json
import openfermion
from openfermion import hermitian_conjugated
from openfermion.ops import SymbolicOperator
from networkx.readwrite import json_graph
import lea
import collections
import scipy
from typing import List
import importlib
# Schema prefix stamped into every serialized artifact produced by this module.
SCHEMA_VERSION = 'zapata-v1'
# Default random seed shared across the package.
RNDSEED = 12345
def convert_dict_to_array(dictionary: dict) -> np.ndarray:
    """Reconstruct a numpy array from its dict representation.

    Args:
        dictionary (dict): dict with a 'real' key and, for complex data,
            an 'imag' key holding nested lists of values.

    Returns:
        numpy.ndarray: the reconstructed (possibly complex) array.
    """
    result = np.array(dictionary['real'])
    imag_part = dictionary.get('imag')
    if imag_part:
        result = result + 1j * np.array(imag_part)
    return result
def convert_array_to_dict(array: np.ndarray) -> dict:
    """Serialize a numpy array into a JSON-friendly dict.

    Args:
        array (numpy.ndarray): the array to serialize.

    Returns:
        dict: {'real': ...} for real input, plus an 'imag' key for complex input.
    """
    if np.iscomplexobj(array):
        return {'real': array.real.tolist(), 'imag': array.imag.tolist()}
    return {'real': array.tolist()}
def dec2bin(number: int, length: int) -> List[int]:
    """Convert a non-negative decimal number into a fixed-width binary vector.

    Args:
        number: (int) the input decimal number
        length: (int) number of bits in the output list

    Returns:
        A list of ints (MSB first) of exactly `length` bits.
    """
    # Bug fix: `length` bits can represent at most 2**length - 1, so a number
    # equal to 2**length does not fit either (original check used `<`, which
    # let number == 2**length through and produced a too-long result).
    if pow(2, length) <= number:
        sys.exit('Insufficient number of bits for representing the number {}'.format(number))
    bit_string = [int(x) for x in bin(number)[2:]]  # strip the '0b' prefix
    if len(bit_string) < length:
        # Left-pad with zeros up to the requested width.
        bit_string = [0] * (length - len(bit_string)) + bit_string
    return bit_string
def bin2dec(x: List[int]) -> int:
    """Convert a binary vector to an integer (0th element is the MSB).

    Args:
        x: (list) a binary vector

    Returns:
        The corresponding integer (0 for an empty list).
    """
    result = 0
    for bit in x:
        result = result * 2 + bit
    return result
"""
The functions PAULI_X, PAULI_Y, PAULI_Z and IDENTITY below are used for
generating the generators of the Pauli group, which include Pauli X, Y, Z
operators as well as identity operator
"""
pauli_x = np.array([[0.0,1.0],[1.0,0.0]])
pauli_y = np.array([[0.0,-1.0j],[1.0j,0.0]])
pauli_z = np.array([[1.0,0.0],[0.0,-1.0]])
identity = np.array([[1.0,0.0],[0.0,1.0]])
def is_identity(u, tol=1e-15):
    """Test whether a square matrix is the identity.

    Args:
        u: matrix to be checked.
        tol: absolute tolerance for element-wise comparison.

    Raises:
        Exception: if the matrix is not square.
    """
    shape = np.array(u).shape
    if shape[0] != shape[1]:
        raise Exception('Input matrix is not square.')
    reference = np.eye(shape[0])
    return np.allclose(u, reference, atol=tol)
def is_unitary(u, tol = 1e-15):
    """Test if a matrix is unitary.

    Args:
        u: array
            Matrix to be checked.
        tol: float
            Threshold below which two matrix elements are considered equal.

    Raises:
        Exception: if the matrix is not square.
    """
    u_arr = np.array(u)
    dims = u_arr.shape
    if dims[0] != dims[1]:
        raise Exception('Input matrix is not square.')
    # Bug fix: openfermion.hermitian_conjugated operates on symbolic operator
    # objects, not numpy arrays; the intended operation here is the conjugate
    # transpose of the matrix.
    test_matrix = np.dot(u_arr.conj().T, u_arr)
    return is_identity(test_matrix, tol)
def compare_unitary(u1: np.ndarray,
                    u2: np.ndarray,
                    tol: float = 1e-15) -> bool:
    """Check whether two unitaries are equal up to a global phase.

    Args:
        u1 (numpy.ndarray): first unitary operator.
        u2 (numpy.ndarray): second unitary operator.
        tol (float): threshold below which two matrix elements are
            considered equal.

    Returns:
        bool: True if `u1` and `u2` agree within `tol`, ignoring any
        difference in global phase.

    Raises:
        Exception: if either input fails the unitarity check.
    """
    if not is_unitary(u1, tol):
        raise Exception('The first input matrix is not unitary.')
    if not is_unitary(u2, tol):
        raise Exception('The second input matrix is not unitary.')
    product = np.dot(u1.conj().T, u2)
    # Divide out the global phase read off the (0, 0) entry.
    phase = product.item((0, 0)) ** -1
    return is_identity(phase * product, tol)
def sample_from_probability_distribution(probability_distribution: dict, n_samples: int) -> collections.Counter:
    '''
    Samples events from a discrete probability distribution.

    Args:
        probability_distribution: dict mapping each outcome to its
            probability; converted to a `lea` pmf for sampling.
        n_samples (int): the number of samples to draw.

    Returns:
        collections.Counter: outcome -> number of times it was sampled.

    Raises:
        RuntimeError: if `probability_distribution` is not a dict.
    '''
    if isinstance(probability_distribution, dict):
        prob_pmf = lea.pmf(probability_distribution)
        sampled_dict = collections.Counter(prob_pmf.random(n_samples))
        return sampled_dict
    else:
        raise RuntimeError("Probability distribution should be a dictionary with key value \
            being the thing being sampled and the value being probability of getting \
            sampled ")
def convert_bitstrings_to_tuples(bitstrings):
    '''Convert each measured bitstring into a tuple of ints.

    Args:
        bitstrings (list of strings): the measured bitstrings

    Returns:
        A list of tuples of ints, one per input bitstring.
    '''
    return [tuple(int(char) for char in bitstring) for bitstring in bitstrings]
def convert_tuples_to_bitstrings(tuples):
    '''Convert each measurement tuple into a bitstring.

    Args:
        tuples (list of tuples): the measurement tuples

    Returns:
        A list of bitstrings, one per input tuple.
    '''
    return ["".join(str(bit) for bit in tuple_item) for tuple_item in tuples]
class ValueEstimate:
    """A numerical value and its precision, e.g. for an observable or an
    objective function.

    Args:
        value (np.float): the numerical value
        precision (np.float): its precision (optional)

    Attributes:
        value (np.float): the numerical value
        precision (np.float): its precision, or None if not given
    """
    def __init__(self, value, precision=None):
        self.value = value
        self.precision = precision
    def to_dict(self):
        """Convert to a JSON-serializable dictionary, unboxing numpy scalars."""
        data = {'schema' : SCHEMA_VERSION + '-value_estimate'}
        if type(self.value).__module__ == np.__name__:
            data['value'] = self.value.item()
        else:
            data['value'] = self.value
        if type(self.precision).__module__ == np.__name__:
            data['precision'] = self.precision.item()
        else:
            data['precision'] = self.precision
        return data
    @classmethod
    def from_dict(cls, dictionary):
        """Create a ValueEstimate object from a dictionary."""
        value = dictionary['value']
        if 'precision' in dictionary:
            precision = dictionary['precision']
            return cls(value, precision)
        else:
            return cls(value)
def load_value_estimate(file):
    """Loads a value estimate from a file.

    Args:
        file (str or file-like object): the name of the file, or a file-like object.

    Returns:
        core.utils.ValueEstimate: the deserialized value estimate.
    """
    if isinstance(file, str):
        with open(file, 'r') as f:
            data = json.load(f)
    else:
        data = json.load(file)
    return ValueEstimate.from_dict(data)
def save_value_estimate(value_estimate, filename):
    """Saves value estimate to a file.

    Args:
        value_estimate (core.utils.ValueEstimate): the value estimate
        filename (str): the name of the file to write.
    """
    dictionary = value_estimate.to_dict()
    # NOTE(review): to_dict() already sets 'schema'; this re-assignment is
    # redundant but harmless.
    dictionary['schema'] = SCHEMA_VERSION + '-value_estimate'
    with open(filename, 'w') as f:
        f.write(json.dumps(dictionary, indent=2))
def load_list(file):
    """Load a list from a JSON file.

    Args:
        file (str or file-like object): the name of the file, or a
            file-like object containing JSON with a 'list' key.

    Returns:
        list: the deserialized list.
    """
    if isinstance(file, str):
        with open(file, 'r') as handle:
            return json.load(handle)['list']
    return json.load(file)['list']
def save_list(array, filename):
    """Save a list to a JSON file.

    Args:
        array (list): the list to be saved
        filename (str): the name of the file to write.
    """
    payload = {'schema': SCHEMA_VERSION + '-list', 'list': array}
    with open(filename, 'w') as handle:
        handle.write(json.dumps(payload, indent=2))
def create_object(specs, **kwargs):
    """
    Creates an object based on given specs.
    Specs include information about module and function necessary to create the object,
    as well as any additional input parameters for it.

    Args:
        specs (dict): dictionary containing the following keys:
            module_name: specifies from which module an object comes.
            function_name: specifies the name of the function used to create object.
            Any remaining keys are forwarded as keyword arguments to the creator.
        **kwargs: additional keyword arguments forwarded to the creator.

    Returns:
        object: object of any type
    """
    # Bug fix: work on a copy so the caller's dict is not mutated by pop().
    specs = dict(specs)
    module_name = specs.pop("module_name")
    module = importlib.import_module(module_name)
    creator_name = specs.pop("function_name")
    creator = getattr(module, creator_name)
    return creator(**specs, **kwargs)
| 28.258242
| 112
| 0.637857
|
import numpy as np
from scipy.linalg import expm
import random
import math
import operator
import sys
import json
import openfermion
from openfermion import hermitian_conjugated
from openfermion.ops import SymbolicOperator
from networkx.readwrite import json_graph
import lea
import collections
import scipy
from typing import List
import importlib
SCHEMA_VERSION = 'zapata-v1'
RNDSEED = 12345
def convert_dict_to_array(dictionary: dict) -> np.ndarray:
    """Reconstruct a (possibly complex) numpy array from its dict form."""
    result = np.array(dictionary['real'])
    imag_part = dictionary.get('imag')
    if imag_part:
        result = result + 1j * np.array(imag_part)
    return result
def convert_array_to_dict(array: np.ndarray) -> dict:
    """Serialize a numpy array into a JSON-friendly dict."""
    if np.iscomplexobj(array):
        return {'real': array.real.tolist(), 'imag': array.imag.tolist()}
    return {'real': array.tolist()}
def dec2bin(number: int, length: int) -> List[int]:
    """Convert a non-negative integer to a fixed-width binary list (MSB first).

    Bug fix: `length` bits represent at most 2**length - 1, so a number equal
    to 2**length does not fit either (the original check used `<`).
    """
    if pow(2, length) <= number:
        sys.exit('Insufficient number of bits for representing the number {}'.format(number))
    bit_string = [int(x) for x in bin(number)[2:]]  # strip the '0b' prefix
    if len(bit_string) < length:
        bit_string = [0] * (length - len(bit_string)) + bit_string
    return bit_string
def bin2dec(x: List[int]) -> int:
    """Convert a binary vector (MSB first) to an integer; [] -> 0."""
    result = 0
    for bit in x:
        result = result * 2 + bit
    return result
# Single-qubit Pauli matrices and the 2x2 identity as dense numpy arrays.
pauli_x = np.array([[0.0,1.0],[1.0,0.0]])
pauli_y = np.array([[0.0,-1.0j],[1.0j,0.0]])
pauli_z = np.array([[1.0,0.0],[0.0,-1.0]])
identity = np.array([[1.0,0.0],[0.0,1.0]])
def is_identity(u, tol=1e-15):
    """Return True if square matrix `u` equals the identity within `tol`."""
    shape = np.array(u).shape
    if shape[0] != shape[1]:
        raise Exception('Input matrix is not square.')
    reference = np.eye(shape[0])
    return np.allclose(u, reference, atol=tol)
def is_unitary(u, tol = 1e-15):
    """Return True if square matrix `u` is unitary within `tol`."""
    u_arr = np.array(u)
    dims = u_arr.shape
    if dims[0] != dims[1]:
        raise Exception('Input matrix is not square.')
    # Bug fix: openfermion.hermitian_conjugated expects symbolic operators,
    # not numpy arrays; use the conjugate transpose directly.
    test_matrix = np.dot(u_arr.conj().T, u_arr)
    return is_identity(test_matrix, tol)
def compare_unitary(u1: np.ndarray,
                    u2: np.ndarray,
                    tol: float = 1e-15) -> bool:
    """Return True if unitaries `u1` and `u2` agree up to a global phase."""
    if not is_unitary(u1, tol):
        raise Exception('The first input matrix is not unitary.')
    if not is_unitary(u2, tol):
        raise Exception('The second input matrix is not unitary.')
    product = np.dot(u1.conj().T, u2)
    # Divide out the global phase read off the (0, 0) entry.
    phase = product.item((0, 0)) ** -1
    return is_identity(phase * product, tol)
def sample_from_probability_distribution(probability_distribution: dict, n_samples: int) -> collections.Counter:
    """Draw ``n_samples`` samples from an {outcome: probability} dict.

    Sampling is delegated to the third-party ``lea`` library's pmf sampler;
    the result is a Counter of outcome -> occurrence count.  Raises
    RuntimeError for non-dict input.
    """
    if isinstance(probability_distribution, dict):
        prob_pmf = lea.pmf(probability_distribution)
        sampled_dict = collections.Counter(prob_pmf.random(n_samples))
        return sampled_dict
    else:
        raise RuntimeError("Probability distribution should be a dictionary with key value \
            being the thing being sampled and the value being probability of getting \
            sampled ")
def convert_bitstrings_to_tuples(bitstrings):
    """Turn a list of bitstrings like '010' into a list of int tuples like (0, 1, 0)."""
    return [tuple(int(char) for char in bitstring) for bitstring in bitstrings]
def convert_tuples_to_bitstrings(tuples):
    """Turn a list of int tuples like (0, 1, 0) into bitstrings like '010'."""
    return ["".join(str(bit) for bit in tuple_item) for tuple_item in tuples]
class ValueEstimate:
    """A numerical estimate paired with an optional precision."""

    def __init__(self, value, precision=None):
        # value: the estimated quantity; precision: its uncertainty, or None.
        self.value = value
        self.precision = precision

    def to_dict(self):
        """Serialize to a schema-tagged dict, unwrapping numpy scalars."""
        def _plain(quantity):
            # numpy scalar types expose .item() for native-Python conversion.
            if type(quantity).__module__ == np.__name__:
                return quantity.item()
            return quantity

        return {
            'schema': SCHEMA_VERSION + '-value_estimate',
            'value': _plain(self.value),
            'precision': _plain(self.precision),
        }

    @classmethod
    def from_dict(cls, dictionary):
        """Build a ValueEstimate from a dict produced by ``to_dict``."""
        if 'precision' in dictionary:
            return cls(dictionary['value'], dictionary['precision'])
        return cls(dictionary['value'])
def load_value_estimate(file):
    """Read a ValueEstimate from a JSON file path or an open file object."""
    if isinstance(file, str):
        with open(file, 'r') as handle:
            return ValueEstimate.from_dict(json.load(handle))
    return ValueEstimate.from_dict(json.load(file))
def save_value_estimate(value_estimate, filename):
    """Serialize a ValueEstimate to ``filename`` as schema-tagged JSON."""
    payload = value_estimate.to_dict()
    # Re-stamp the schema tag (to_dict already sets it; kept for safety).
    payload['schema'] = SCHEMA_VERSION + '-value_estimate'
    with open(filename, 'w') as handle:
        handle.write(json.dumps(payload, indent=2))
def load_list(file):
    """Read the 'list' entry of a JSON document from a path or open file."""
    if isinstance(file, str):
        with open(file, 'r') as handle:
            return json.load(handle)['list']
    return json.load(file)['list']
def save_list(array, filename):
    """Write a plain list to ``filename`` as schema-tagged JSON."""
    payload = {'schema': SCHEMA_VERSION + '-list', 'list': array}
    with open(filename, 'w') as handle:
        handle.write(json.dumps(payload, indent=2))
def create_object(specs, **kwargs):
    """Instantiate an object described by a specs dictionary.

    Pops 'module_name' and 'function_name' from ``specs``; every remaining
    entry, plus ``kwargs``, is forwarded to the named factory callable.
    Note: mutates ``specs`` in place (as the original did).
    """
    target_module = importlib.import_module(specs.pop("module_name"))
    factory = getattr(target_module, specs.pop("function_name"))
    return factory(**specs, **kwargs)
| true
| true
|
f71882825d3f22da41da2d51951295858305f25f
| 128
|
py
|
Python
|
cactus/types/coin_solution.py
|
grayfallstown/cactus-blockchain
|
680d68d0bb7694bd4b99e4906b356e014bca7734
|
[
"Apache-2.0"
] | 11,902
|
2019-12-05T00:14:29.000Z
|
2022-03-31T23:25:37.000Z
|
chia/types/coin_solution.py
|
jcteng/ext9-blockchain
|
46506bc5778e14cbc373de39438b0c6f794a49c5
|
[
"Apache-2.0"
] | 5,246
|
2019-12-05T04:00:03.000Z
|
2022-03-31T21:33:30.000Z
|
chia/types/coin_solution.py
|
jcteng/ext9-blockchain
|
46506bc5778e14cbc373de39438b0c6f794a49c5
|
[
"Apache-2.0"
] | 2,149
|
2019-12-05T11:12:53.000Z
|
2022-03-31T06:08:34.000Z
|
# Backward-compatibility shim: `CoinSolution` was renamed to `CoinSpend`.
# Importing this module re-exports the new class under the old name and
# emits a deprecation-style warning at import time.
import warnings
from .coin_spend import CoinSpend as CoinSolution  # noqa
warnings.warn("`CoinSolution` is now `CoinSpend`")
| 18.285714
| 57
| 0.773438
|
import warnings
from .coin_spend import CoinSpend as CoinSolution
warnings.warn("`CoinSolution` is now `CoinSpend`")
| true
| true
|
f71882b27108d3771cc3ddae3f362ee0e8b76a4b
| 6,457
|
py
|
Python
|
examples/ale/train_nsq_ale.py
|
yuishihara/chainerrl
|
74901712a8ed8207b9d526d3f45b04bf22996b8d
|
[
"MIT"
] | 18
|
2018-08-07T07:27:41.000Z
|
2018-08-20T01:51:21.000Z
|
examples/ale/train_nsq_ale.py
|
yuishihara/chainerrl
|
74901712a8ed8207b9d526d3f45b04bf22996b8d
|
[
"MIT"
] | null | null | null |
examples/ale/train_nsq_ale.py
|
yuishihara/chainerrl
|
74901712a8ed8207b9d526d3f45b04bf22996b8d
|
[
"MIT"
] | 2
|
2018-08-16T06:47:26.000Z
|
2018-08-20T01:51:22.000Z
|
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
from __future__ import absolute_import
from builtins import * # NOQA
from future import standard_library
standard_library.install_aliases() # NOQA
import argparse
import os
import random
# This prevents numpy from using multiple threads
os.environ['OMP_NUM_THREADS'] = '1' # NOQA
import gym
gym.undo_logger_setup() # NOQA
from chainer import links as L
import numpy as np
from chainerrl.action_value import DiscreteActionValue
from chainerrl.agents import nsq
from chainerrl import experiments
from chainerrl import explorers
from chainerrl import links
from chainerrl import misc
from chainerrl.optimizers import rmsprop_async
from chainerrl import spaces
import atari_wrappers
def main():
    """Train or evaluate an asynchronous N-step Q-learning (NSQ) Atari agent.

    Parses command-line options, builds per-process environments and agents
    (each subprocess gets its own exploration epsilon), then either evaluates
    an agent (--demo) or launches chainerrl's asynchronous training loop.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('processes', type=int)
    parser.add_argument('--env', type=str, default='BreakoutNoFrameskip-v4')
    parser.add_argument('--seed', type=int, default=0,
                        help='Random seed [0, 2 ** 31)')
    parser.add_argument('--lr', type=float, default=7e-4)
    parser.add_argument('--steps', type=int, default=8 * 10 ** 7)
    parser.add_argument('--max-episode-len', type=int,
                        default=5 * 60 * 60 // 4,  # 5 minutes with 60/4 fps
                        help='Maximum number of steps for each episode.')
    parser.add_argument('--final-exploration-frames',
                        type=int, default=4 * 10 ** 6)
    parser.add_argument('--outdir', type=str, default='results',
                        help='Directory path to save output files.'
                        ' If it does not exist, it will be created.')
    parser.add_argument('--profile', action='store_true')
    parser.add_argument('--eval-interval', type=int, default=10 ** 6)
    parser.add_argument('--eval-n-runs', type=int, default=10)
    parser.add_argument('--demo', action='store_true', default=False)
    parser.add_argument('--load', type=str, default=None)
    parser.add_argument('--logging-level', type=int, default=20,
                        help='Logging level. 10:DEBUG, 20:INFO etc.')
    parser.add_argument('--render', action='store_true', default=False,
                        help='Render env states in a GUI window.')
    parser.add_argument('--monitor', action='store_true', default=False,
                        help='Monitor env. Videos and additional information'
                        ' are saved as output files.')
    args = parser.parse_args()
    import logging
    logging.basicConfig(level=args.logging_level)
    # Set a random seed used in ChainerRL.
    # If you use more than one processes, the results will be no longer
    # deterministic even with the same random seed.
    misc.set_random_seed(args.seed)
    # Set different random seeds for different subprocesses.
    # If seed=0 and processes=4, subprocess seeds are [0, 1, 2, 3].
    # If seed=1 and processes=4, subprocess seeds are [4, 5, 6, 7].
    process_seeds = np.arange(args.processes) + args.seed * args.processes
    assert process_seeds.max() < 2 ** 31
    args.outdir = experiments.prepare_output_dir(args, args.outdir)
    print('Output files are saved in {}'.format(args.outdir))
    def make_env(process_idx, test):
        # Use different random seeds for train and test envs
        process_seed = process_seeds[process_idx]
        env_seed = 2 ** 31 - 1 - process_seed if test else process_seed
        env = atari_wrappers.wrap_deepmind(
            atari_wrappers.make_atari(args.env),
            episode_life=not test,
            clip_rewards=not test)
        env.seed(int(env_seed))
        if args.monitor:
            env = gym.wrappers.Monitor(
                env, args.outdir,
                mode='evaluation' if test else 'training')
        if args.render:
            misc.env_modifiers.make_rendered(env)
        return env
    sample_env = make_env(0, test=False)
    action_space = sample_env.action_space
    assert isinstance(action_space, spaces.Discrete)
    # Define a model and its optimizer
    q_func = links.Sequence(
        links.NIPSDQNHead(),
        L.Linear(256, action_space.n),
        DiscreteActionValue)
    opt = rmsprop_async.RMSpropAsync(lr=args.lr, eps=1e-1, alpha=0.99)
    opt.setup(q_func)
    def phi(x):
        # Feature extractor: scale uint8 pixels to [0, 1] floats
        return np.asarray(x, dtype=np.float32) / 255
    # Make process-specific agents to diversify exploration
    def make_agent(process_idx):
        # Random epsilon assignment described in the original paper
        rand = random.random()
        if rand < 0.4:
            epsilon_target = 0.1
        elif rand < 0.7:
            epsilon_target = 0.01
        else:
            epsilon_target = 0.5
        explorer = explorers.LinearDecayEpsilonGreedy(
            1, epsilon_target, args.final_exploration_frames,
            action_space.sample)
        # Suppress the explorer logger
        explorer.logger.setLevel(logging.INFO)
        return nsq.NSQ(q_func, opt, t_max=5, gamma=0.99,
                       i_target=40000,
                       explorer=explorer, phi=phi)
    if args.demo:
        # Evaluation-only path: run eval_n_runs episodes and report stats.
        # NOTE(review): args.load is parsed but never used here — a saved
        # model does not appear to be loaded before the demo; confirm.
        env = make_env(0, True)
        agent = make_agent(0)
        eval_stats = experiments.eval_performance(
            env=env,
            agent=agent,
            n_runs=args.eval_n_runs)
        print('n_runs: {} mean: {} median: {} stdev {}'.format(
            args.eval_n_runs, eval_stats['mean'], eval_stats['median'],
            eval_stats['stdev']))
    else:
        explorer = explorers.ConstantEpsilonGreedy(0.05, action_space.sample)
        # Linearly decay the learning rate to zero
        def lr_setter(env, agent, value):
            agent.optimizer.lr = value
        lr_decay_hook = experiments.LinearInterpolationHook(
            args.steps, args.lr, 0, lr_setter)
        experiments.train_agent_async(
            outdir=args.outdir,
            processes=args.processes,
            make_env=make_env,
            make_agent=make_agent,
            profile=args.profile,
            steps=args.steps,
            eval_n_runs=args.eval_n_runs,
            eval_interval=args.eval_interval,
            eval_explorer=explorer,
            max_episode_len=args.max_episode_len,
            global_step_hooks=[lr_decay_hook],
            save_best_so_far_agent=False,
        )
# Script entry point.
if __name__ == '__main__':
    main()
| 37.760234
| 77
| 0.641474
|
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
from __future__ import absolute_import
from builtins import *
from future import standard_library
standard_library.install_aliases()
import argparse
import os
import random
os.environ['OMP_NUM_THREADS'] = '1'
import gym
gym.undo_logger_setup()
from chainer import links as L
import numpy as np
from chainerrl.action_value import DiscreteActionValue
from chainerrl.agents import nsq
from chainerrl import experiments
from chainerrl import explorers
from chainerrl import links
from chainerrl import misc
from chainerrl.optimizers import rmsprop_async
from chainerrl import spaces
import atari_wrappers
def main():
parser = argparse.ArgumentParser()
parser.add_argument('processes', type=int)
parser.add_argument('--env', type=str, default='BreakoutNoFrameskip-v4')
parser.add_argument('--seed', type=int, default=0,
help='Random seed [0, 2 ** 31)')
parser.add_argument('--lr', type=float, default=7e-4)
parser.add_argument('--steps', type=int, default=8 * 10 ** 7)
parser.add_argument('--max-episode-len', type=int,
default=5 * 60 * 60 // 4,
help='Maximum number of steps for each episode.')
parser.add_argument('--final-exploration-frames',
type=int, default=4 * 10 ** 6)
parser.add_argument('--outdir', type=str, default='results',
help='Directory path to save output files.'
' If it does not exist, it will be created.')
parser.add_argument('--profile', action='store_true')
parser.add_argument('--eval-interval', type=int, default=10 ** 6)
parser.add_argument('--eval-n-runs', type=int, default=10)
parser.add_argument('--demo', action='store_true', default=False)
parser.add_argument('--load', type=str, default=None)
parser.add_argument('--logging-level', type=int, default=20,
help='Logging level. 10:DEBUG, 20:INFO etc.')
parser.add_argument('--render', action='store_true', default=False,
help='Render env states in a GUI window.')
parser.add_argument('--monitor', action='store_true', default=False,
help='Monitor env. Videos and additional information'
' are saved as output files.')
args = parser.parse_args()
import logging
logging.basicConfig(level=args.logging_level)
misc.set_random_seed(args.seed)
process_seeds = np.arange(args.processes) + args.seed * args.processes
assert process_seeds.max() < 2 ** 31
args.outdir = experiments.prepare_output_dir(args, args.outdir)
print('Output files are saved in {}'.format(args.outdir))
def make_env(process_idx, test):
process_seed = process_seeds[process_idx]
env_seed = 2 ** 31 - 1 - process_seed if test else process_seed
env = atari_wrappers.wrap_deepmind(
atari_wrappers.make_atari(args.env),
episode_life=not test,
clip_rewards=not test)
env.seed(int(env_seed))
if args.monitor:
env = gym.wrappers.Monitor(
env, args.outdir,
mode='evaluation' if test else 'training')
if args.render:
misc.env_modifiers.make_rendered(env)
return env
sample_env = make_env(0, test=False)
action_space = sample_env.action_space
assert isinstance(action_space, spaces.Discrete)
q_func = links.Sequence(
links.NIPSDQNHead(),
L.Linear(256, action_space.n),
DiscreteActionValue)
opt = rmsprop_async.RMSpropAsync(lr=args.lr, eps=1e-1, alpha=0.99)
opt.setup(q_func)
def phi(x):
return np.asarray(x, dtype=np.float32) / 255
def make_agent(process_idx):
rand = random.random()
if rand < 0.4:
epsilon_target = 0.1
elif rand < 0.7:
epsilon_target = 0.01
else:
epsilon_target = 0.5
explorer = explorers.LinearDecayEpsilonGreedy(
1, epsilon_target, args.final_exploration_frames,
action_space.sample)
explorer.logger.setLevel(logging.INFO)
return nsq.NSQ(q_func, opt, t_max=5, gamma=0.99,
i_target=40000,
explorer=explorer, phi=phi)
if args.demo:
env = make_env(0, True)
agent = make_agent(0)
eval_stats = experiments.eval_performance(
env=env,
agent=agent,
n_runs=args.eval_n_runs)
print('n_runs: {} mean: {} median: {} stdev {}'.format(
args.eval_n_runs, eval_stats['mean'], eval_stats['median'],
eval_stats['stdev']))
else:
explorer = explorers.ConstantEpsilonGreedy(0.05, action_space.sample)
def lr_setter(env, agent, value):
agent.optimizer.lr = value
lr_decay_hook = experiments.LinearInterpolationHook(
args.steps, args.lr, 0, lr_setter)
experiments.train_agent_async(
outdir=args.outdir,
processes=args.processes,
make_env=make_env,
make_agent=make_agent,
profile=args.profile,
steps=args.steps,
eval_n_runs=args.eval_n_runs,
eval_interval=args.eval_interval,
eval_explorer=explorer,
max_episode_len=args.max_episode_len,
global_step_hooks=[lr_decay_hook],
save_best_so_far_agent=False,
)
if __name__ == '__main__':
main()
| true
| true
|
f71882cab39cf5d57a226bf9a24b1aec3387c49a
| 1,466
|
py
|
Python
|
aliyun-python-sdk-ons/aliyunsdkons/request/v20190214/OnsTopicListRequest.py
|
liumihust/aliyun-openapi-python-sdk
|
c7b5dd4befae4b9c59181654289f9272531207ef
|
[
"Apache-2.0"
] | null | null | null |
aliyun-python-sdk-ons/aliyunsdkons/request/v20190214/OnsTopicListRequest.py
|
liumihust/aliyun-openapi-python-sdk
|
c7b5dd4befae4b9c59181654289f9272531207ef
|
[
"Apache-2.0"
] | null | null | null |
aliyun-python-sdk-ons/aliyunsdkons/request/v20190214/OnsTopicListRequest.py
|
liumihust/aliyun-openapi-python-sdk
|
c7b5dd4befae4b9c59181654289f9272531207ef
|
[
"Apache-2.0"
] | null | null | null |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
class OnsTopicListRequest(RpcRequest):
    """Query the topic list of an Ons instance.

    Wraps the ``OnsTopicList`` action of the Ons API, version 2019-02-14.
    Accessors below read/write the request's query parameters.
    """

    def __init__(self):
        RpcRequest.__init__(self, 'Ons', '2019-02-14', 'OnsTopicList', 'ons')

    def get_PreventCache(self):
        """Return the ``PreventCache`` query parameter."""
        return self.get_query_params().get('PreventCache')

    def set_PreventCache(self, PreventCache):
        """Set the ``PreventCache`` query parameter."""
        self.add_query_param('PreventCache', PreventCache)

    def get_InstanceId(self):
        """Return the ``InstanceId`` query parameter."""
        return self.get_query_params().get('InstanceId')

    def set_InstanceId(self, InstanceId):
        """Set the ``InstanceId`` query parameter."""
        self.add_query_param('InstanceId', InstanceId)

    def get_Topic(self):
        """Return the ``Topic`` query parameter."""
        return self.get_query_params().get('Topic')

    def set_Topic(self, Topic):
        """Set the ``Topic`` query parameter."""
        self.add_query_param('Topic', Topic)
| 34.904762
| 71
| 0.761255
|
from aliyunsdkcore.request import RpcRequest
class OnsTopicListRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Ons', '2019-02-14', 'OnsTopicList','ons')
def get_PreventCache(self):
return self.get_query_params().get('PreventCache')
def set_PreventCache(self,PreventCache):
self.add_query_param('PreventCache',PreventCache)
def get_InstanceId(self):
return self.get_query_params().get('InstanceId')
def set_InstanceId(self,InstanceId):
self.add_query_param('InstanceId',InstanceId)
def get_Topic(self):
return self.get_query_params().get('Topic')
def set_Topic(self,Topic):
self.add_query_param('Topic',Topic)
| true
| true
|
f7188338fbe6feb8c2970ca81bf60c30b86db76c
| 1,709
|
py
|
Python
|
landmark.py
|
gavincangan/alvin
|
4e1945a3f5bb061842f0e35633f254863f8923c8
|
[
"MIT"
] | null | null | null |
landmark.py
|
gavincangan/alvin
|
4e1945a3f5bb061842f0e35633f254863f8923c8
|
[
"MIT"
] | null | null | null |
landmark.py
|
gavincangan/alvin
|
4e1945a3f5bb061842f0e35633f254863f8923c8
|
[
"MIT"
] | null | null | null |
import sys
from pymunk import Body, Circle, ShapeFilter
from configsingleton import ConfigSingleton
from common import *
from common.drawing import draw_circle
class Landmark(object):
    """A static circular landmark with a category mask and debug-draw radii."""

    def __init__(self, mask, radius):
        # Static pymunk body pinned at the origin with no motion.
        self.body = Body(0, 0, Body.STATIC)
        self.body.position = 0, 0
        self.body.angle = 0
        self.body.velocity = 0, 0
        self.body.angular_velocity = 0

        self.shape = Circle(self.body, radius)
        self.mask = mask
        self.shape.filter = ShapeFilter(categories=mask)

        # Debug colour keyed by landmark category.
        colour_by_mask = {
            ARC_LANDMARK_MASK: (0, 255, 0),
            POLE_LANDMARK_MASK: (0, 0, 255),
            BLAST_LANDMARK_MASK: (255, 0, 0),
        }
        if mask not in colour_by_mask:
            sys.exit("Unknown landmark mask: " + str(mask))
        self.shape.color = colour_by_mask[mask]

        # Radii used only for visualization (see visualize_params below).
        config = ConfigSingleton.get_instance()
        self.vis_range_max = (
            config.getfloat("RangeScan:landmarks", "range_max") + radius)
        self.vis_inside_radius = (
            config.getfloat("LandmarkCircleController", "inside_radius") + radius)
        self.vis_outside_radius = (
            config.getfloat("LandmarkCircleController", "outside_radius") + radius)

    def visualize_params(self):
        """Draw the sensing range plus the controller's inside/outside circles."""
        centre = (self.body.position.x, self.body.position.y)
        draw_circle(centre, self.vis_range_max, (255, 255, 255))
        if self.mask == ARC_LANDMARK_MASK:
            draw_circle(centre, self.vis_inside_radius, (0, 255, 0))
        draw_circle(centre, self.vis_outside_radius, (255, 0, 0))
| 37.152174
| 80
| 0.622586
|
import sys
from pymunk import Body, Circle, ShapeFilter
from configsingleton import ConfigSingleton
from common import *
from common.drawing import draw_circle
class Landmark(object):
def __init__(self, mask, radius):
self.body = Body(0, 0, Body.STATIC)
self.body.position = 0, 0
self.body.angle = 0
self.body.velocity = 0, 0
self.body.angular_velocity = 0
self.shape = Circle(self.body, radius)
self.mask = mask
self.shape.filter = ShapeFilter(categories = mask)
if mask == ARC_LANDMARK_MASK:
self.shape.color = 0, 255, 0
elif mask == POLE_LANDMARK_MASK:
self.shape.color = 0, 0, 255
elif mask == BLAST_LANDMARK_MASK:
self.shape.color = 255, 0, 0
else:
sys.exit("Unknown landmark mask: " + str(mask))
config = ConfigSingleton.get_instance()
self.vis_range_max = \
config.getfloat("RangeScan:landmarks", "range_max") \
+ radius
self.vis_inside_radius = \
config.getfloat("LandmarkCircleController", "inside_radius") \
+ radius
self.vis_outside_radius = \
config.getfloat("LandmarkCircleController", "outside_radius") \
+ radius
def visualize_params(self):
centre = (self.body.position.x, self.body.position.y)
draw_circle(centre, self.vis_range_max, (255, 255, 255))
if self.mask == ARC_LANDMARK_MASK:
draw_circle(centre, self.vis_inside_radius, (0, 255, 0))
draw_circle(centre, self.vis_outside_radius, (255, 0, 0))
| true
| true
|
f71883436f0b25e5b91c4b47d39b55ee34c3a2ba
| 353
|
py
|
Python
|
Chapter 02/scape_v1.py
|
mujib2953/Python_Practice
|
39da23190196f050ea5834358907db723053da27
|
[
"MIT"
] | null | null | null |
Chapter 02/scape_v1.py
|
mujib2953/Python_Practice
|
39da23190196f050ea5834358907db723053da27
|
[
"MIT"
] | null | null | null |
Chapter 02/scape_v1.py
|
mujib2953/Python_Practice
|
39da23190196f050ea5834358907db723053da27
|
[
"MIT"
] | null | null | null |
import urllib.request
# Fetch a price page and print its raw HTML.
# Alternate/related URLs kept for reference:
# http://beans-r-us.appspot.com/prices.html
# http://www.beans-r-us.biz/prices.html
# http://www.moneycontrol.com/india/stockpricequote/computers-software/tataconsultancyservices/TCS
baseUrl = 'http://beans-r-us.appspot.com/prices.html'
# NOTE(review): no timeout or error handling — any network failure raises.
page = urllib.request.urlopen( baseUrl )
text = page.read().decode( 'utf8' )
print( text )
| 29.416667
| 98
| 0.750708
|
import urllib.request
baseUrl = 'http://beans-r-us.appspot.com/prices.html'
page = urllib.request.urlopen( baseUrl )
text = page.read().decode( 'utf8' )
print( text )
| true
| true
|
f71883577f39ef70d1dc2c52c00b5ff024d4497c
| 1,400
|
py
|
Python
|
likes/urls.py
|
CMPUT404-stev-sand-pant-ashw-mehr/CMPUT404-stev-sand-pant-ashw-mehr-repo
|
0f96d938e9e3ec51103f2b20cb9673bd0b145343
|
[
"MIT"
] | null | null | null |
likes/urls.py
|
CMPUT404-stev-sand-pant-ashw-mehr/CMPUT404-stev-sand-pant-ashw-mehr-repo
|
0f96d938e9e3ec51103f2b20cb9673bd0b145343
|
[
"MIT"
] | 50
|
2021-10-08T00:01:43.000Z
|
2021-12-06T06:34:29.000Z
|
likes/urls.py
|
CMPUT404-stev-sand-pant-ashw-mehr/CMPUT404-stev-sand-pant-ashw-mehr-repo
|
0f96d938e9e3ec51103f2b20cb9673bd0b145343
|
[
"MIT"
] | null | null | null |
from django.urls import re_path
from .api import PostLikeViewSet, CommentLikeViewSet, AuthorLikeViewSet
# URL routes for like-related endpoints.  Each post/comment route appears
# twice — once under 'post/' and once under 'posts/' — mapping to the same
# viewset actions (apparently for path-spelling compatibility).
urlpatterns = [
    # Likes on a post (singular 'post' spelling): GET lists, POST adds.
    re_path(r'^author/(?P<author_id>[a-z0-9-\.-]+)/post/(?P<post_id>[a-z0-9-:\.-]+)/likes/?$', PostLikeViewSet.as_view({
        "get": "get_post_likes",
        "post": "add_post_like",
    })),
    # Likes on a comment of a post (singular 'post' spelling).
    re_path(r'^author/(?P<author_id>[a-z0-9-/:\.-]+)/post/(?P<post_id>[a-z0-9-:\.-]+)/comments/(?P<comment_id>[a-z0-9-:\.-]+)/likes/?$', CommentLikeViewSet.as_view({
        "get": "get_comment_likes",
        "post": "add_comment_like",
    })),
    # Same two routes with the plural 'posts' spelling.
    re_path(r'^author/(?P<author_id>[a-z0-9-\.-]+)/posts/(?P<post_id>[a-z0-9-:\.-]+)/likes/?$', PostLikeViewSet.as_view({
        "get": "get_post_likes",
        "post": "add_post_like",
    })),
    re_path(r'^author/(?P<author_id>[a-z0-9-/:\.-]+)/posts/(?P<post_id>[a-z0-9-:\.-]+)/comments/(?P<comment_id>[a-z0-9-:\.-]+)/likes/?$', CommentLikeViewSet.as_view({
        "get": "get_comment_likes",
        "post": "add_comment_like",
    })),
    # Everything an author has liked — addressed by a plain author id ...
    re_path(r'^author/(?P<author_id>[a-z0-9-\.-]+)/likes/?$', AuthorLikeViewSet.as_view({
        "get": "get_likes",
    })),
    # ... or by the author's full http(s) URL.
    re_path(r'^author/(?P<author_id>(http://|https://)[a-z0-9\.-:]+(/author/)[a-z0-9\.-]+)/likes/?$', AuthorLikeViewSet.as_view({
        "get": "get_likes",
    }))
]
| 46.666667
| 166
| 0.531429
|
from django.urls import re_path
from .api import PostLikeViewSet, CommentLikeViewSet, AuthorLikeViewSet
urlpatterns = [
re_path(r'^author/(?P<author_id>[a-z0-9-\.-]+)/post/(?P<post_id>[a-z0-9-:\.-]+)/likes/?$', PostLikeViewSet.as_view({
"get": "get_post_likes",
"post": "add_post_like",
})),
re_path(r'^author/(?P<author_id>[a-z0-9-/:\.-]+)/post/(?P<post_id>[a-z0-9-:\.-]+)/comments/(?P<comment_id>[a-z0-9-:\.-]+)/likes/?$', CommentLikeViewSet.as_view({
"get": "get_comment_likes",
"post": "add_comment_like",
})),
re_path(r'^author/(?P<author_id>[a-z0-9-\.-]+)/posts/(?P<post_id>[a-z0-9-:\.-]+)/likes/?$', PostLikeViewSet.as_view({
"get": "get_post_likes",
"post": "add_post_like",
})),
re_path(r'^author/(?P<author_id>[a-z0-9-/:\.-]+)/posts/(?P<post_id>[a-z0-9-:\.-]+)/comments/(?P<comment_id>[a-z0-9-:\.-]+)/likes/?$', CommentLikeViewSet.as_view({
"get": "get_comment_likes",
"post": "add_comment_like",
})),
re_path(r'^author/(?P<author_id>[a-z0-9-\.-]+)/likes/?$', AuthorLikeViewSet.as_view({
"get": "get_likes",
})),
re_path(r'^author/(?P<author_id>(http://|https://)[a-z0-9\.-:]+(/author/)[a-z0-9\.-]+)/likes/?$', AuthorLikeViewSet.as_view({
"get": "get_likes",
}))
]
| true
| true
|
f7188362b6c3cf336bbf4135433d127a1a5e5491
| 469
|
py
|
Python
|
search/selectionsort.py
|
jsz1/algorithms-and-data-structures
|
fbf71f290d55c4f3f7b2123c4bd6df6396e3ede4
|
[
"MIT"
] | null | null | null |
search/selectionsort.py
|
jsz1/algorithms-and-data-structures
|
fbf71f290d55c4f3f7b2123c4bd6df6396e3ede4
|
[
"MIT"
] | null | null | null |
search/selectionsort.py
|
jsz1/algorithms-and-data-structures
|
fbf71f290d55c4f3f7b2123c4bd6df6396e3ede4
|
[
"MIT"
] | null | null | null |
def selection_sort(alist):
    """Sort ``alist`` in place using selection sort (O(n^2) comparisons).

    On each pass the largest remaining element is located and swapped into
    the last unsorted slot.
    """
    for fill_slot in range(len(alist) - 1, 0, -1):
        position_of_max = 0
        for location in range(1, fill_slot + 1):
            if alist[location] > alist[position_of_max]:
                position_of_max = location
        # Swap the pass maximum into its final position.
        alist[fill_slot], alist[position_of_max] = \
            alist[position_of_max], alist[fill_slot]


# Demo: sort a sample list and show the result.
alist = [54, 23, 65, 87, 43, 32, 5, 7, 3423, 44, 6, 23, 35, 6, 68, 76, 53, 3]
selection_sort(alist)
# Fix: `print alist` was Python-2-only syntax; the call form works on 2 and 3.
print(alist)
| 33.5
| 60
| 0.635394
|
def selection_sort(alist):
for fill_slot in range(len(alist) - 1,0,-1):
position_of_max = 0
for location in range(1, fill_slot + 1):
if alist[location] > alist[position_of_max]:
position_of_max = location
temp = alist[fill_slot]
alist[fill_slot] = alist[position_of_max]
alist[position_of_max] = temp
alist = [54,23,65,87,43,32,5,7,3423,44,6,23,35,6,68,76,53,3]
selection_sort(alist)
print alist
| false
| true
|
f718836b8c0b46908cdf57f0144d22ccc800514c
| 363,980
|
py
|
Python
|
pynos/versions/ver_6/ver_6_0_1/yang/brocade_interface_ext.py
|
bdeetz/pynos
|
bd8a34e98f322de3fc06750827d8bbc3a0c00380
|
[
"Apache-2.0"
] | 12
|
2015-09-21T23:56:09.000Z
|
2018-03-30T04:35:32.000Z
|
pynos/versions/ver_6/ver_6_0_1/yang/brocade_interface_ext.py
|
bdeetz/pynos
|
bd8a34e98f322de3fc06750827d8bbc3a0c00380
|
[
"Apache-2.0"
] | 10
|
2016-09-15T19:03:27.000Z
|
2017-07-17T23:38:01.000Z
|
pynos/versions/ver_6/ver_6_0_1/yang/brocade_interface_ext.py
|
bdeetz/pynos
|
bd8a34e98f322de3fc06750827d8bbc3a0c00380
|
[
"Apache-2.0"
] | 6
|
2015-08-14T08:05:23.000Z
|
2022-02-03T15:33:54.000Z
|
#!/usr/bin/env python
import xml.etree.ElementTree as ET
class brocade_interface_ext(object):
    """Auto generated class.

    Builders for Brocade ``get_vlan_brief`` request/response XML payloads
    (presumably NETCONF/YANG-derived — generated code; confirm against the
    YANG model).  Each method assembles an ElementTree document and passes
    it to the configured callback.
    """
    def __init__(self, **kwargs):
        # Required kwarg 'callback': invoked with each assembled XML config.
        self._callback = kwargs.pop('callback')
    def get_vlan_brief_input_request_type_get_request_vlan_id(self, **kwargs):
        """Auto Generated Code

        Build a get_vlan_brief request filtered by 'vlan_id' and hand it to
        the callback.  Required kwarg: 'vlan_id'; optional: 'callback'
        (defaults to the instance callback).
        """
        config = ET.Element("config")
        get_vlan_brief = ET.Element("get_vlan_brief")
        # NOTE(review): `config` is immediately rebound to the payload root,
        # discarding the first "config" element — a generated-code quirk.
        config = get_vlan_brief
        input = ET.SubElement(get_vlan_brief, "input")
        request_type = ET.SubElement(input, "request-type")
        get_request = ET.SubElement(request_type, "get-request")
        vlan_id = ET.SubElement(get_request, "vlan-id")
        vlan_id.text = kwargs.pop('vlan_id')
        callback = kwargs.pop('callback', self._callback)
        return callback(config)
def get_vlan_brief_input_request_type_get_next_request_last_rcvd_vlan_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
input = ET.SubElement(get_vlan_brief, "input")
request_type = ET.SubElement(input, "request-type")
get_next_request = ET.SubElement(request_type, "get-next-request")
last_rcvd_vlan_id = ET.SubElement(get_next_request, "last-rcvd-vlan-id")
last_rcvd_vlan_id.text = kwargs.pop('last_rcvd_vlan_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vlan_brief_output_configured_vlans_count(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
output = ET.SubElement(get_vlan_brief, "output")
configured_vlans_count = ET.SubElement(output, "configured-vlans-count")
configured_vlans_count.text = kwargs.pop('configured_vlans_count')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vlan_brief_output_provisioned_vlans_count(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
output = ET.SubElement(get_vlan_brief, "output")
provisioned_vlans_count = ET.SubElement(output, "provisioned-vlans-count")
provisioned_vlans_count.text = kwargs.pop('provisioned_vlans_count')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vlan_brief_output_unprovisioned_vlans_count(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
output = ET.SubElement(get_vlan_brief, "output")
unprovisioned_vlans_count = ET.SubElement(output, "unprovisioned-vlans-count")
unprovisioned_vlans_count.text = kwargs.pop('unprovisioned_vlans_count')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vlan_brief_output_vlan_vlan_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
output = ET.SubElement(get_vlan_brief, "output")
vlan = ET.SubElement(output, "vlan")
vlan_id = ET.SubElement(vlan, "vlan-id")
vlan_id.text = kwargs.pop('vlan_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vlan_brief_output_vlan_vlan_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
output = ET.SubElement(get_vlan_brief, "output")
vlan = ET.SubElement(output, "vlan")
vlan_id_key = ET.SubElement(vlan, "vlan-id")
vlan_id_key.text = kwargs.pop('vlan_id')
vlan_type = ET.SubElement(vlan, "vlan-type")
vlan_type.text = kwargs.pop('vlan_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vlan_brief_output_vlan_vlan_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
output = ET.SubElement(get_vlan_brief, "output")
vlan = ET.SubElement(output, "vlan")
vlan_id_key = ET.SubElement(vlan, "vlan-id")
vlan_id_key.text = kwargs.pop('vlan_id')
vlan_name = ET.SubElement(vlan, "vlan-name")
vlan_name.text = kwargs.pop('vlan_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vlan_brief_output_vlan_vlan_state(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
output = ET.SubElement(get_vlan_brief, "output")
vlan = ET.SubElement(output, "vlan")
vlan_id_key = ET.SubElement(vlan, "vlan-id")
vlan_id_key.text = kwargs.pop('vlan_id')
vlan_state = ET.SubElement(vlan, "vlan-state")
vlan_state.text = kwargs.pop('vlan_state')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vlan_brief_output_vlan_interface_interface_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
output = ET.SubElement(get_vlan_brief, "output")
vlan = ET.SubElement(output, "vlan")
vlan_id_key = ET.SubElement(vlan, "vlan-id")
vlan_id_key.text = kwargs.pop('vlan_id')
interface = ET.SubElement(vlan, "interface")
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_type = ET.SubElement(interface, "interface-type")
interface_type.text = kwargs.pop('interface_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vlan_brief_output_vlan_interface_interface_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
output = ET.SubElement(get_vlan_brief, "output")
vlan = ET.SubElement(output, "vlan")
vlan_id_key = ET.SubElement(vlan, "vlan-id")
vlan_id_key.text = kwargs.pop('vlan_id')
interface = ET.SubElement(vlan, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name = ET.SubElement(interface, "interface-name")
interface_name.text = kwargs.pop('interface_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vlan_brief_output_vlan_interface_tag(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
output = ET.SubElement(get_vlan_brief, "output")
vlan = ET.SubElement(output, "vlan")
vlan_id_key = ET.SubElement(vlan, "vlan-id")
vlan_id_key.text = kwargs.pop('vlan_id')
interface = ET.SubElement(vlan, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
tag = ET.SubElement(interface, "tag")
tag.text = kwargs.pop('tag')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vlan_brief_output_vlan_interface_classification_classification_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
output = ET.SubElement(get_vlan_brief, "output")
vlan = ET.SubElement(output, "vlan")
vlan_id_key = ET.SubElement(vlan, "vlan-id")
vlan_id_key.text = kwargs.pop('vlan_id')
interface = ET.SubElement(vlan, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
classification = ET.SubElement(interface, "classification")
classification_value_key = ET.SubElement(classification, "classification-value")
classification_value_key.text = kwargs.pop('classification_value')
classification_type = ET.SubElement(classification, "classification-type")
classification_type.text = kwargs.pop('classification_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vlan_brief_output_vlan_interface_classification_classification_value(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
output = ET.SubElement(get_vlan_brief, "output")
vlan = ET.SubElement(output, "vlan")
vlan_id_key = ET.SubElement(vlan, "vlan-id")
vlan_id_key.text = kwargs.pop('vlan_id')
interface = ET.SubElement(vlan, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
classification = ET.SubElement(interface, "classification")
classification_type_key = ET.SubElement(classification, "classification-type")
classification_type_key.text = kwargs.pop('classification_type')
classification_value = ET.SubElement(classification, "classification-value")
classification_value.text = kwargs.pop('classification_value')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vlan_brief_output_last_vlan_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
output = ET.SubElement(get_vlan_brief, "output")
last_vlan_id = ET.SubElement(output, "last-vlan-id")
last_vlan_id.text = kwargs.pop('last_vlan_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vlan_brief_output_has_more(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
output = ET.SubElement(get_vlan_brief, "output")
has_more = ET.SubElement(output, "has-more")
has_more.text = kwargs.pop('has_more')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_switchport_output_switchport_interface_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_switchport = ET.Element("get_interface_switchport")
config = get_interface_switchport
output = ET.SubElement(get_interface_switchport, "output")
switchport = ET.SubElement(output, "switchport")
interface_name_key = ET.SubElement(switchport, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_type = ET.SubElement(switchport, "interface-type")
interface_type.text = kwargs.pop('interface_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_switchport_output_switchport_interface_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_switchport = ET.Element("get_interface_switchport")
config = get_interface_switchport
output = ET.SubElement(get_interface_switchport, "output")
switchport = ET.SubElement(output, "switchport")
interface_type_key = ET.SubElement(switchport, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name = ET.SubElement(switchport, "interface-name")
interface_name.text = kwargs.pop('interface_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_switchport_output_switchport_mode(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_switchport = ET.Element("get_interface_switchport")
config = get_interface_switchport
output = ET.SubElement(get_interface_switchport, "output")
switchport = ET.SubElement(output, "switchport")
interface_type_key = ET.SubElement(switchport, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(switchport, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
mode = ET.SubElement(switchport, "mode")
mode.text = kwargs.pop('mode')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_switchport_output_switchport_fcoe_port_enabled(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_switchport = ET.Element("get_interface_switchport")
config = get_interface_switchport
output = ET.SubElement(get_interface_switchport, "output")
switchport = ET.SubElement(output, "switchport")
interface_type_key = ET.SubElement(switchport, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(switchport, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
fcoe_port_enabled = ET.SubElement(switchport, "fcoe-port-enabled")
fcoe_port_enabled.text = kwargs.pop('fcoe_port_enabled')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_switchport_output_switchport_ingress_filter_enabled(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_switchport = ET.Element("get_interface_switchport")
config = get_interface_switchport
output = ET.SubElement(get_interface_switchport, "output")
switchport = ET.SubElement(output, "switchport")
interface_type_key = ET.SubElement(switchport, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(switchport, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ingress_filter_enabled = ET.SubElement(switchport, "ingress-filter-enabled")
ingress_filter_enabled.text = kwargs.pop('ingress_filter_enabled')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_switchport_output_switchport_acceptable_frame_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_switchport = ET.Element("get_interface_switchport")
config = get_interface_switchport
output = ET.SubElement(get_interface_switchport, "output")
switchport = ET.SubElement(output, "switchport")
interface_type_key = ET.SubElement(switchport, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(switchport, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
acceptable_frame_type = ET.SubElement(switchport, "acceptable-frame-type")
acceptable_frame_type.text = kwargs.pop('acceptable_frame_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_switchport_output_switchport_default_vlan(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_switchport = ET.Element("get_interface_switchport")
config = get_interface_switchport
output = ET.SubElement(get_interface_switchport, "output")
switchport = ET.SubElement(output, "switchport")
interface_type_key = ET.SubElement(switchport, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(switchport, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
default_vlan = ET.SubElement(switchport, "default-vlan")
default_vlan.text = kwargs.pop('default_vlan')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_input_request_type_get_request_interface_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
input = ET.SubElement(get_ip_interface, "input")
request_type = ET.SubElement(input, "request-type")
get_request = ET.SubElement(request_type, "get-request")
interface_type = ET.SubElement(get_request, "interface-type")
interface_type.text = kwargs.pop('interface_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_input_request_type_get_request_interface_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
input = ET.SubElement(get_ip_interface, "input")
request_type = ET.SubElement(input, "request-type")
get_request = ET.SubElement(request_type, "get-request")
interface_name = ET.SubElement(get_request, "interface-name")
interface_name.text = kwargs.pop('interface_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_input_request_type_get_request_rbridge_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
input = ET.SubElement(get_ip_interface, "input")
request_type = ET.SubElement(input, "request-type")
get_request = ET.SubElement(request_type, "get-request")
rbridge_id = ET.SubElement(get_request, "rbridge-id")
rbridge_id.text = kwargs.pop('rbridge_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_output_interface_interface_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
output = ET.SubElement(get_ip_interface, "output")
interface = ET.SubElement(output, "interface")
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_type = ET.SubElement(interface, "interface-type")
interface_type.text = kwargs.pop('interface_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_output_interface_interface_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
output = ET.SubElement(get_ip_interface, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name = ET.SubElement(interface, "interface-name")
interface_name.text = kwargs.pop('interface_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_output_interface_if_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
output = ET.SubElement(get_ip_interface, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
if_name = ET.SubElement(interface, "if-name")
if_name.text = kwargs.pop('if_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_output_interface_ip_address_ipv4(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
output = ET.SubElement(get_ip_interface, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ip_address = ET.SubElement(interface, "ip-address")
ipv4 = ET.SubElement(ip_address, "ipv4")
ipv4.text = kwargs.pop('ipv4')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_output_interface_ip_address_ipv4_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
output = ET.SubElement(get_ip_interface, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ip_address = ET.SubElement(interface, "ip-address")
ipv4_key = ET.SubElement(ip_address, "ipv4")
ipv4_key.text = kwargs.pop('ipv4')
ipv4_type = ET.SubElement(ip_address, "ipv4-type")
ipv4_type.text = kwargs.pop('ipv4_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_output_interface_ip_address_broadcast(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
output = ET.SubElement(get_ip_interface, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ip_address = ET.SubElement(interface, "ip-address")
ipv4_key = ET.SubElement(ip_address, "ipv4")
ipv4_key.text = kwargs.pop('ipv4')
broadcast = ET.SubElement(ip_address, "broadcast")
broadcast.text = kwargs.pop('broadcast')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_output_interface_ip_address_ip_mtu(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
output = ET.SubElement(get_ip_interface, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ip_address = ET.SubElement(interface, "ip-address")
ipv4_key = ET.SubElement(ip_address, "ipv4")
ipv4_key.text = kwargs.pop('ipv4')
ip_mtu = ET.SubElement(ip_address, "ip-mtu")
ip_mtu.text = kwargs.pop('ip_mtu')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_output_interface_if_state(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
output = ET.SubElement(get_ip_interface, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
if_state = ET.SubElement(interface, "if-state")
if_state.text = kwargs.pop('if_state')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_output_interface_line_protocol_state(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
output = ET.SubElement(get_ip_interface, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
line_protocol_state = ET.SubElement(interface, "line-protocol-state")
line_protocol_state.text = kwargs.pop('line_protocol_state')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_output_interface_proxy_arp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
output = ET.SubElement(get_ip_interface, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
proxy_arp = ET.SubElement(interface, "proxy-arp")
proxy_arp.text = kwargs.pop('proxy_arp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_output_interface_vrf(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
output = ET.SubElement(get_ip_interface, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
vrf = ET.SubElement(interface, "vrf")
vrf.text = kwargs.pop('vrf')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_output_has_more(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
output = ET.SubElement(get_ip_interface, "output")
has_more = ET.SubElement(output, "has-more")
has_more.text = kwargs.pop('has_more')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_input_request_type_get_request_interface_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
input = ET.SubElement(get_interface_detail, "input")
request_type = ET.SubElement(input, "request-type")
get_request = ET.SubElement(request_type, "get-request")
interface_type = ET.SubElement(get_request, "interface-type")
interface_type.text = kwargs.pop('interface_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_input_request_type_get_request_interface_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
input = ET.SubElement(get_interface_detail, "input")
request_type = ET.SubElement(input, "request-type")
get_request = ET.SubElement(request_type, "get-request")
interface_name = ET.SubElement(get_request, "interface-name")
interface_name.text = kwargs.pop('interface_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_input_request_type_get_next_request_last_rcvd_interface_interface_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
input = ET.SubElement(get_interface_detail, "input")
request_type = ET.SubElement(input, "request-type")
get_next_request = ET.SubElement(request_type, "get-next-request")
last_rcvd_interface = ET.SubElement(get_next_request, "last-rcvd-interface")
interface_type = ET.SubElement(last_rcvd_interface, "interface-type")
interface_type.text = kwargs.pop('interface_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_input_request_type_get_next_request_last_rcvd_interface_interface_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
input = ET.SubElement(get_interface_detail, "input")
request_type = ET.SubElement(input, "request-type")
get_next_request = ET.SubElement(request_type, "get-next-request")
last_rcvd_interface = ET.SubElement(get_next_request, "last-rcvd-interface")
interface_name = ET.SubElement(last_rcvd_interface, "interface-name")
interface_name.text = kwargs.pop('interface_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_interface_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_type = ET.SubElement(interface, "interface-type")
interface_type.text = kwargs.pop('interface_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_interface_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name = ET.SubElement(interface, "interface-name")
interface_name.text = kwargs.pop('interface_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_ifindex(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ifindex = ET.SubElement(interface, "ifindex")
ifindex.text = kwargs.pop('ifindex')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_mtu(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
mtu = ET.SubElement(interface, "mtu")
mtu.text = kwargs.pop('mtu')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_ip_mtu(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ip_mtu = ET.SubElement(interface, "ip-mtu")
ip_mtu.text = kwargs.pop('ip_mtu')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_if_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
if_name = ET.SubElement(interface, "if-name")
if_name.text = kwargs.pop('if_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_if_state(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
if_state = ET.SubElement(interface, "if-state")
if_state.text = kwargs.pop('if_state')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_line_protocol_state(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
line_protocol_state = ET.SubElement(interface, "line-protocol-state")
line_protocol_state.text = kwargs.pop('line_protocol_state')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_line_protocol_state_info(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
line_protocol_state_info = ET.SubElement(interface, "line-protocol-state-info")
line_protocol_state_info.text = kwargs.pop('line_protocol_state_info')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_line_protocol_exception_info(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
line_protocol_exception_info = ET.SubElement(interface, "line-protocol-exception-info")
line_protocol_exception_info.text = kwargs.pop('line_protocol_exception_info')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_hardware_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
hardware_type = ET.SubElement(interface, "hardware-type")
hardware_type.text = kwargs.pop('hardware_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_logical_hardware_address(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
logical_hardware_address = ET.SubElement(interface, "logical-hardware-address")
logical_hardware_address.text = kwargs.pop('logical_hardware_address')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_current_hardware_address(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
current_hardware_address = ET.SubElement(interface, "current-hardware-address")
current_hardware_address.text = kwargs.pop('current_hardware_address')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_media_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
media_type = ET.SubElement(interface, "media-type")
media_type.text = kwargs.pop('media_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_wavelength(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
wavelength = ET.SubElement(interface, "wavelength")
wavelength.text = kwargs.pop('wavelength')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_if_description(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
if_description = ET.SubElement(interface, "if-description")
if_description.text = kwargs.pop('if_description')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_actual_line_speed(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
actual_line_speed = ET.SubElement(interface, "actual-line-speed")
actual_line_speed.text = kwargs.pop('actual_line_speed')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_configured_line_speed(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
configured_line_speed = ET.SubElement(interface, "configured-line-speed")
configured_line_speed.text = kwargs.pop('configured_line_speed')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_line_duplex_state(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
line_duplex_state = ET.SubElement(interface, "line-duplex-state")
line_duplex_state.text = kwargs.pop('line_duplex_state')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_flow_control(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
flow_control = ET.SubElement(interface, "flow-control")
flow_control.text = kwargs.pop('flow_control')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_queuing_strategy(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
queuing_strategy = ET.SubElement(interface, "queuing-strategy")
queuing_strategy.text = kwargs.pop('queuing_strategy')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_port_role(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
port_role = ET.SubElement(interface, "port-role")
port_role.text = kwargs.pop('port_role')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_port_mode(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
port_mode = ET.SubElement(interface, "port-mode")
port_mode.text = kwargs.pop('port_mode')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_ifHCInOctets(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ifHCInOctets = ET.SubElement(interface, "ifHCInOctets")
ifHCInOctets.text = kwargs.pop('ifHCInOctets')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_ifHCInUcastPkts(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ifHCInUcastPkts = ET.SubElement(interface, "ifHCInUcastPkts")
ifHCInUcastPkts.text = kwargs.pop('ifHCInUcastPkts')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_ifHCInMulticastPkts(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ifHCInMulticastPkts = ET.SubElement(interface, "ifHCInMulticastPkts")
ifHCInMulticastPkts.text = kwargs.pop('ifHCInMulticastPkts')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_ifHCInBroadcastPkts(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ifHCInBroadcastPkts = ET.SubElement(interface, "ifHCInBroadcastPkts")
ifHCInBroadcastPkts.text = kwargs.pop('ifHCInBroadcastPkts')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_ifHCInErrors(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ifHCInErrors = ET.SubElement(interface, "ifHCInErrors")
ifHCInErrors.text = kwargs.pop('ifHCInErrors')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_ifHCOutOctets(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ifHCOutOctets = ET.SubElement(interface, "ifHCOutOctets")
ifHCOutOctets.text = kwargs.pop('ifHCOutOctets')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_ifHCOutUcastPkts(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ifHCOutUcastPkts = ET.SubElement(interface, "ifHCOutUcastPkts")
ifHCOutUcastPkts.text = kwargs.pop('ifHCOutUcastPkts')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_ifHCOutMulticastPkts(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ifHCOutMulticastPkts = ET.SubElement(interface, "ifHCOutMulticastPkts")
ifHCOutMulticastPkts.text = kwargs.pop('ifHCOutMulticastPkts')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_ifHCOutBroadcastPkts(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ifHCOutBroadcastPkts = ET.SubElement(interface, "ifHCOutBroadcastPkts")
ifHCOutBroadcastPkts.text = kwargs.pop('ifHCOutBroadcastPkts')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_ifHCOutErrors(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ifHCOutErrors = ET.SubElement(interface, "ifHCOutErrors")
ifHCOutErrors.text = kwargs.pop('ifHCOutErrors')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_has_more(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
has_more = ET.SubElement(output, "has-more")
has_more.text = kwargs.pop('has_more')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_input_interface_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
input = ET.SubElement(get_media_detail, "input")
interface_type = ET.SubElement(input, "interface-type")
interface_type.text = kwargs.pop('interface_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_input_interface_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
input = ET.SubElement(get_media_detail, "input")
interface_name = ET.SubElement(input, "interface-name")
interface_name.text = kwargs.pop('interface_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_input_rbridge_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
input = ET.SubElement(get_media_detail, "input")
rbridge_id = ET.SubElement(input, "rbridge-id")
rbridge_id.text = kwargs.pop('rbridge_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_type = ET.SubElement(interface, "interface-type")
interface_type.text = kwargs.pop('interface_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name = ET.SubElement(interface, "interface-name")
interface_name.text = kwargs.pop('interface_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_speed(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
sfp = ET.SubElement(interface_identifier, "sfp")
sfp = ET.SubElement(sfp, "sfp")
speed = ET.SubElement(sfp, "speed")
speed.text = kwargs.pop('speed')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_connector(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
sfp = ET.SubElement(interface_identifier, "sfp")
sfp = ET.SubElement(sfp, "sfp")
connector = ET.SubElement(sfp, "connector")
connector.text = kwargs.pop('connector')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_encoding(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
sfp = ET.SubElement(interface_identifier, "sfp")
sfp = ET.SubElement(sfp, "sfp")
encoding = ET.SubElement(sfp, "encoding")
encoding.text = kwargs.pop('encoding')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_vendor_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
sfp = ET.SubElement(interface_identifier, "sfp")
sfp = ET.SubElement(sfp, "sfp")
vendor_name = ET.SubElement(sfp, "vendor-name")
vendor_name.text = kwargs.pop('vendor_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_vendor_oui(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
sfp = ET.SubElement(interface_identifier, "sfp")
sfp = ET.SubElement(sfp, "sfp")
vendor_oui = ET.SubElement(sfp, "vendor-oui")
vendor_oui.text = kwargs.pop('vendor_oui')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_vendor_pn(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
sfp = ET.SubElement(interface_identifier, "sfp")
sfp = ET.SubElement(sfp, "sfp")
vendor_pn = ET.SubElement(sfp, "vendor-pn")
vendor_pn.text = kwargs.pop('vendor_pn')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_vendor_rev(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
sfp = ET.SubElement(interface_identifier, "sfp")
sfp = ET.SubElement(sfp, "sfp")
vendor_rev = ET.SubElement(sfp, "vendor-rev")
vendor_rev.text = kwargs.pop('vendor_rev')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_distance(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
sfp = ET.SubElement(interface_identifier, "sfp")
sfp = ET.SubElement(sfp, "sfp")
distance = ET.SubElement(sfp, "distance")
distance.text = kwargs.pop('distance')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_media_form_factor(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
sfp = ET.SubElement(interface_identifier, "sfp")
sfp = ET.SubElement(sfp, "sfp")
media_form_factor = ET.SubElement(sfp, "media-form-factor")
media_form_factor.text = kwargs.pop('media_form_factor')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_wavelength(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
sfp = ET.SubElement(interface_identifier, "sfp")
sfp = ET.SubElement(sfp, "sfp")
wavelength = ET.SubElement(sfp, "wavelength")
wavelength.text = kwargs.pop('wavelength')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_serial_no(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
sfp = ET.SubElement(interface_identifier, "sfp")
sfp = ET.SubElement(sfp, "sfp")
serial_no = ET.SubElement(sfp, "serial-no")
serial_no.text = kwargs.pop('serial_no')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_date_code(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
sfp = ET.SubElement(interface_identifier, "sfp")
sfp = ET.SubElement(sfp, "sfp")
date_code = ET.SubElement(sfp, "date-code")
date_code.text = kwargs.pop('date_code')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_temperature(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
sfp = ET.SubElement(interface_identifier, "sfp")
sfp = ET.SubElement(sfp, "sfp")
temperature = ET.SubElement(sfp, "temperature")
temperature.text = kwargs.pop('temperature')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_voltage(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
sfp = ET.SubElement(interface_identifier, "sfp")
sfp = ET.SubElement(sfp, "sfp")
voltage = ET.SubElement(sfp, "voltage")
voltage.text = kwargs.pop('voltage')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_current(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
sfp = ET.SubElement(interface_identifier, "sfp")
sfp = ET.SubElement(sfp, "sfp")
current = ET.SubElement(sfp, "current")
current.text = kwargs.pop('current')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_tx_power(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
sfp = ET.SubElement(interface_identifier, "sfp")
sfp = ET.SubElement(sfp, "sfp")
tx_power = ET.SubElement(sfp, "tx-power")
tx_power.text = kwargs.pop('tx_power')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_rx_power(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
sfp = ET.SubElement(interface_identifier, "sfp")
sfp = ET.SubElement(sfp, "sfp")
rx_power = ET.SubElement(sfp, "rx-power")
rx_power.text = kwargs.pop('rx_power')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_on_board_on_board_speed(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
on_board = ET.SubElement(interface_identifier, "on-board")
on_board = ET.SubElement(on_board, "on-board")
speed = ET.SubElement(on_board, "speed")
speed.text = kwargs.pop('speed')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_on_board_on_board_connector(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
on_board = ET.SubElement(interface_identifier, "on-board")
on_board = ET.SubElement(on_board, "on-board")
connector = ET.SubElement(on_board, "connector")
connector.text = kwargs.pop('connector')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_on_board_on_board_encoding(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
on_board = ET.SubElement(interface_identifier, "on-board")
on_board = ET.SubElement(on_board, "on-board")
encoding = ET.SubElement(on_board, "encoding")
encoding.text = kwargs.pop('encoding')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_on_board_on_board_vendor_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
on_board = ET.SubElement(interface_identifier, "on-board")
on_board = ET.SubElement(on_board, "on-board")
vendor_name = ET.SubElement(on_board, "vendor-name")
vendor_name.text = kwargs.pop('vendor_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_on_board_on_board_vendor_oui(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
on_board = ET.SubElement(interface_identifier, "on-board")
on_board = ET.SubElement(on_board, "on-board")
vendor_oui = ET.SubElement(on_board, "vendor-oui")
vendor_oui.text = kwargs.pop('vendor_oui')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_on_board_on_board_vendor_pn(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
on_board = ET.SubElement(interface_identifier, "on-board")
on_board = ET.SubElement(on_board, "on-board")
vendor_pn = ET.SubElement(on_board, "vendor-pn")
vendor_pn.text = kwargs.pop('vendor_pn')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_on_board_on_board_vendor_rev(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
on_board = ET.SubElement(interface_identifier, "on-board")
on_board = ET.SubElement(on_board, "on-board")
vendor_rev = ET.SubElement(on_board, "vendor-rev")
vendor_rev.text = kwargs.pop('vendor_rev')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_gbic_gbc_vendor_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
gbic = ET.SubElement(interface_identifier, "gbic")
gbc = ET.SubElement(gbic, "gbc")
vendor_name = ET.SubElement(gbc, "vendor-name")
vendor_name.text = kwargs.pop('vendor_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_gbic_gbc_vendor_oui(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
gbic = ET.SubElement(interface_identifier, "gbic")
gbc = ET.SubElement(gbic, "gbc")
vendor_oui = ET.SubElement(gbc, "vendor-oui")
vendor_oui.text = kwargs.pop('vendor_oui')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_gbic_gbc_vendor_pn(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
gbic = ET.SubElement(interface_identifier, "gbic")
gbc = ET.SubElement(gbic, "gbc")
vendor_pn = ET.SubElement(gbc, "vendor-pn")
vendor_pn.text = kwargs.pop('vendor_pn')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_gbic_gbc_vendor_rev(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
gbic = ET.SubElement(interface_identifier, "gbic")
gbc = ET.SubElement(gbic, "gbc")
vendor_rev = ET.SubElement(gbc, "vendor-rev")
vendor_rev.text = kwargs.pop('vendor_rev')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_xfp_xfp_vendor_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
xfp = ET.SubElement(interface_identifier, "xfp")
xfp = ET.SubElement(xfp, "xfp")
vendor_name = ET.SubElement(xfp, "vendor-name")
vendor_name.text = kwargs.pop('vendor_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_xfp_xfp_vendor_oui(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
xfp = ET.SubElement(interface_identifier, "xfp")
xfp = ET.SubElement(xfp, "xfp")
vendor_oui = ET.SubElement(xfp, "vendor-oui")
vendor_oui.text = kwargs.pop('vendor_oui')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_xfp_xfp_vendor_pn(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
xfp = ET.SubElement(interface_identifier, "xfp")
xfp = ET.SubElement(xfp, "xfp")
vendor_pn = ET.SubElement(xfp, "vendor-pn")
vendor_pn.text = kwargs.pop('vendor_pn')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_xfp_xfp_vendor_rev(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
xfp = ET.SubElement(interface_identifier, "xfp")
xfp = ET.SubElement(xfp, "xfp")
vendor_rev = ET.SubElement(xfp, "vendor-rev")
vendor_rev.text = kwargs.pop('vendor_rev')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_xff_xff_vendor_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
xff = ET.SubElement(interface_identifier, "xff")
xff = ET.SubElement(xff, "xff")
vendor_name = ET.SubElement(xff, "vendor-name")
vendor_name.text = kwargs.pop('vendor_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_xff_xff_vendor_oui(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
xff = ET.SubElement(interface_identifier, "xff")
xff = ET.SubElement(xff, "xff")
vendor_oui = ET.SubElement(xff, "vendor-oui")
vendor_oui.text = kwargs.pop('vendor_oui')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_xff_xff_vendor_pn(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
xff = ET.SubElement(interface_identifier, "xff")
xff = ET.SubElement(xff, "xff")
vendor_pn = ET.SubElement(xff, "vendor-pn")
vendor_pn.text = kwargs.pop('vendor_pn')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_xff_xff_vendor_rev(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
xff = ET.SubElement(interface_identifier, "xff")
xff = ET.SubElement(xff, "xff")
vendor_rev = ET.SubElement(xff, "vendor-rev")
vendor_rev.text = kwargs.pop('vendor_rev')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_xfpe_xfpe_vendor_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
xfpe = ET.SubElement(interface_identifier, "xfpe")
xfpe = ET.SubElement(xfpe, "xfpe")
vendor_name = ET.SubElement(xfpe, "vendor-name")
vendor_name.text = kwargs.pop('vendor_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_xfpe_xfpe_vendor_oui(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
xfpe = ET.SubElement(interface_identifier, "xfpe")
xfpe = ET.SubElement(xfpe, "xfpe")
vendor_oui = ET.SubElement(xfpe, "vendor-oui")
vendor_oui.text = kwargs.pop('vendor_oui')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_xfpe_xfpe_vendor_pn(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
xfpe = ET.SubElement(interface_identifier, "xfpe")
xfpe = ET.SubElement(xfpe, "xfpe")
vendor_pn = ET.SubElement(xfpe, "vendor-pn")
vendor_pn.text = kwargs.pop('vendor_pn')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_xfpe_xfpe_vendor_rev(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
xfpe = ET.SubElement(interface_identifier, "xfpe")
xfpe = ET.SubElement(xfpe, "xfpe")
vendor_rev = ET.SubElement(xfpe, "vendor-rev")
vendor_rev.text = kwargs.pop('vendor_rev')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_unknown_unknown_vendor_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
unknown = ET.SubElement(interface_identifier, "unknown")
unknown = ET.SubElement(unknown, "unknown")
vendor_name = ET.SubElement(unknown, "vendor-name")
vendor_name.text = kwargs.pop('vendor_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_unknown_unknown_vendor_oui(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
unknown = ET.SubElement(interface_identifier, "unknown")
unknown = ET.SubElement(unknown, "unknown")
vendor_oui = ET.SubElement(unknown, "vendor-oui")
vendor_oui.text = kwargs.pop('vendor_oui')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_unknown_unknown_vendor_pn(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
unknown = ET.SubElement(interface_identifier, "unknown")
unknown = ET.SubElement(unknown, "unknown")
vendor_pn = ET.SubElement(unknown, "vendor-pn")
vendor_pn.text = kwargs.pop('vendor_pn')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_unknown_unknown_vendor_rev(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
unknown = ET.SubElement(interface_identifier, "unknown")
unknown = ET.SubElement(unknown, "unknown")
vendor_rev = ET.SubElement(unknown, "vendor-rev")
vendor_rev.text = kwargs.pop('vendor_rev')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_speed(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfp = ET.SubElement(interface_identifier, "qsfp")
qsfp = ET.SubElement(qsfp, "qsfp")
speed = ET.SubElement(qsfp, "speed")
speed.text = kwargs.pop('speed')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_connector(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfp = ET.SubElement(interface_identifier, "qsfp")
qsfp = ET.SubElement(qsfp, "qsfp")
connector = ET.SubElement(qsfp, "connector")
connector.text = kwargs.pop('connector')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_encoding(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfp = ET.SubElement(interface_identifier, "qsfp")
qsfp = ET.SubElement(qsfp, "qsfp")
encoding = ET.SubElement(qsfp, "encoding")
encoding.text = kwargs.pop('encoding')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_vendor_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfp = ET.SubElement(interface_identifier, "qsfp")
qsfp = ET.SubElement(qsfp, "qsfp")
vendor_name = ET.SubElement(qsfp, "vendor-name")
vendor_name.text = kwargs.pop('vendor_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_vendor_oui(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfp = ET.SubElement(interface_identifier, "qsfp")
qsfp = ET.SubElement(qsfp, "qsfp")
vendor_oui = ET.SubElement(qsfp, "vendor-oui")
vendor_oui.text = kwargs.pop('vendor_oui')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_vendor_pn(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfp = ET.SubElement(interface_identifier, "qsfp")
qsfp = ET.SubElement(qsfp, "qsfp")
vendor_pn = ET.SubElement(qsfp, "vendor-pn")
vendor_pn.text = kwargs.pop('vendor_pn')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_vendor_rev(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfp = ET.SubElement(interface_identifier, "qsfp")
qsfp = ET.SubElement(qsfp, "qsfp")
vendor_rev = ET.SubElement(qsfp, "vendor-rev")
vendor_rev.text = kwargs.pop('vendor_rev')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_distance(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfp = ET.SubElement(interface_identifier, "qsfp")
qsfp = ET.SubElement(qsfp, "qsfp")
distance = ET.SubElement(qsfp, "distance")
distance.text = kwargs.pop('distance')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_media_form_factor(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfp = ET.SubElement(interface_identifier, "qsfp")
qsfp = ET.SubElement(qsfp, "qsfp")
media_form_factor = ET.SubElement(qsfp, "media-form-factor")
media_form_factor.text = kwargs.pop('media_form_factor')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_wavelength(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfp = ET.SubElement(interface_identifier, "qsfp")
qsfp = ET.SubElement(qsfp, "qsfp")
wavelength = ET.SubElement(qsfp, "wavelength")
wavelength.text = kwargs.pop('wavelength')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_serial_no(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfp = ET.SubElement(interface_identifier, "qsfp")
qsfp = ET.SubElement(qsfp, "qsfp")
serial_no = ET.SubElement(qsfp, "serial-no")
serial_no.text = kwargs.pop('serial_no')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_date_code(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfp = ET.SubElement(interface_identifier, "qsfp")
qsfp = ET.SubElement(qsfp, "qsfp")
date_code = ET.SubElement(qsfp, "date-code")
date_code.text = kwargs.pop('date_code')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_temperature(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfp = ET.SubElement(interface_identifier, "qsfp")
qsfp = ET.SubElement(qsfp, "qsfp")
temperature = ET.SubElement(qsfp, "temperature")
temperature.text = kwargs.pop('temperature')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_voltage(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfp = ET.SubElement(interface_identifier, "qsfp")
qsfp = ET.SubElement(qsfp, "qsfp")
voltage = ET.SubElement(qsfp, "voltage")
voltage.text = kwargs.pop('voltage')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_current(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfp = ET.SubElement(interface_identifier, "qsfp")
qsfp = ET.SubElement(qsfp, "qsfp")
current = ET.SubElement(qsfp, "current")
current.text = kwargs.pop('current')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_tx_power(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfp = ET.SubElement(interface_identifier, "qsfp")
qsfp = ET.SubElement(qsfp, "qsfp")
tx_power = ET.SubElement(qsfp, "tx-power")
tx_power.text = kwargs.pop('tx_power')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_rx_power(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfp = ET.SubElement(interface_identifier, "qsfp")
qsfp = ET.SubElement(qsfp, "qsfp")
rx_power = ET.SubElement(qsfp, "rx-power")
rx_power.text = kwargs.pop('rx_power')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_speed(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
speed = ET.SubElement(qsfpp, "speed")
speed.text = kwargs.pop('speed')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_connector(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
connector = ET.SubElement(qsfpp, "connector")
connector.text = kwargs.pop('connector')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_encoding(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
encoding = ET.SubElement(qsfpp, "encoding")
encoding.text = kwargs.pop('encoding')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_vendor_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
vendor_name = ET.SubElement(qsfpp, "vendor-name")
vendor_name.text = kwargs.pop('vendor_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_vendor_oui(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
vendor_oui = ET.SubElement(qsfpp, "vendor-oui")
vendor_oui.text = kwargs.pop('vendor_oui')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_vendor_pn(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
vendor_pn = ET.SubElement(qsfpp, "vendor-pn")
vendor_pn.text = kwargs.pop('vendor_pn')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_vendor_rev(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
vendor_rev = ET.SubElement(qsfpp, "vendor-rev")
vendor_rev.text = kwargs.pop('vendor_rev')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_distance(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
distance = ET.SubElement(qsfpp, "distance")
distance.text = kwargs.pop('distance')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_media_form_factor(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
media_form_factor = ET.SubElement(qsfpp, "media-form-factor")
media_form_factor.text = kwargs.pop('media_form_factor')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_wavelength(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
wavelength = ET.SubElement(qsfpp, "wavelength")
wavelength.text = kwargs.pop('wavelength')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_serial_no(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
serial_no = ET.SubElement(qsfpp, "serial-no")
serial_no.text = kwargs.pop('serial_no')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_date_code(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
date_code = ET.SubElement(qsfpp, "date-code")
date_code.text = kwargs.pop('date_code')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_temperature(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
temperature = ET.SubElement(qsfpp, "temperature")
temperature.text = kwargs.pop('temperature')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_voltage(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
voltage = ET.SubElement(qsfpp, "voltage")
voltage.text = kwargs.pop('voltage')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_current(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
current = ET.SubElement(qsfpp, "current")
current.text = kwargs.pop('current')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_tx_power(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
tx_power = ET.SubElement(qsfpp, "tx-power")
tx_power.text = kwargs.pop('tx_power')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_rx_power(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
rx_power = ET.SubElement(qsfpp, "rx-power")
rx_power.text = kwargs.pop('rx_power')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_speed(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp = ET.SubElement(interface_identifier, "cfp")
cfp = ET.SubElement(cfp, "cfp")
speed = ET.SubElement(cfp, "speed")
speed.text = kwargs.pop('speed')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_connector(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp = ET.SubElement(interface_identifier, "cfp")
cfp = ET.SubElement(cfp, "cfp")
connector = ET.SubElement(cfp, "connector")
connector.text = kwargs.pop('connector')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_encoding(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp = ET.SubElement(interface_identifier, "cfp")
cfp = ET.SubElement(cfp, "cfp")
encoding = ET.SubElement(cfp, "encoding")
encoding.text = kwargs.pop('encoding')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_vendor_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp = ET.SubElement(interface_identifier, "cfp")
cfp = ET.SubElement(cfp, "cfp")
vendor_name = ET.SubElement(cfp, "vendor-name")
vendor_name.text = kwargs.pop('vendor_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_vendor_oui(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp = ET.SubElement(interface_identifier, "cfp")
cfp = ET.SubElement(cfp, "cfp")
vendor_oui = ET.SubElement(cfp, "vendor-oui")
vendor_oui.text = kwargs.pop('vendor_oui')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_vendor_pn(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp = ET.SubElement(interface_identifier, "cfp")
cfp = ET.SubElement(cfp, "cfp")
vendor_pn = ET.SubElement(cfp, "vendor-pn")
vendor_pn.text = kwargs.pop('vendor_pn')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_vendor_rev(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp = ET.SubElement(interface_identifier, "cfp")
cfp = ET.SubElement(cfp, "cfp")
vendor_rev = ET.SubElement(cfp, "vendor-rev")
vendor_rev.text = kwargs.pop('vendor_rev')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_distance(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp = ET.SubElement(interface_identifier, "cfp")
cfp = ET.SubElement(cfp, "cfp")
distance = ET.SubElement(cfp, "distance")
distance.text = kwargs.pop('distance')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_media_form_factor(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp = ET.SubElement(interface_identifier, "cfp")
cfp = ET.SubElement(cfp, "cfp")
media_form_factor = ET.SubElement(cfp, "media-form-factor")
media_form_factor.text = kwargs.pop('media_form_factor')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_wavelength(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp = ET.SubElement(interface_identifier, "cfp")
cfp = ET.SubElement(cfp, "cfp")
wavelength = ET.SubElement(cfp, "wavelength")
wavelength.text = kwargs.pop('wavelength')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_serial_no(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp = ET.SubElement(interface_identifier, "cfp")
cfp = ET.SubElement(cfp, "cfp")
serial_no = ET.SubElement(cfp, "serial-no")
serial_no.text = kwargs.pop('serial_no')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_date_code(self, **kwargs):
    """Auto Generated Code

    Build get_media_detail/output/interface/interface-identifier/cfp/cfp/
    date-code and pass the document root to the callback.

    Keyword Args:
        interface_type: interface list key.
        interface_name: interface list key.
        date_code: value for the date-code leaf.
        callback: overrides ``self._callback`` when supplied.
    """
    get_media_detail = ET.Element("get_media_detail")
    # RPC element doubles as the document root (dead "config" element removed).
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp = ET.SubElement(interface_identifier, "cfp")
    cfp = ET.SubElement(cfp, "cfp")
    date_code = ET.SubElement(cfp, "date-code")
    date_code.text = kwargs.pop('date_code')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_temperature(self, **kwargs):
    """Auto Generated Code

    Build get_media_detail/output/interface/interface-identifier/cfp/cfp/
    temperature and pass the document root to the callback.

    Keyword Args:
        interface_type: interface list key.
        interface_name: interface list key.
        temperature: value for the temperature leaf.
        callback: overrides ``self._callback`` when supplied.
    """
    get_media_detail = ET.Element("get_media_detail")
    # RPC element doubles as the document root (dead "config" element removed).
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp = ET.SubElement(interface_identifier, "cfp")
    cfp = ET.SubElement(cfp, "cfp")
    temperature = ET.SubElement(cfp, "temperature")
    temperature.text = kwargs.pop('temperature')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_voltage(self, **kwargs):
    """Auto Generated Code

    Build get_media_detail/output/interface/interface-identifier/cfp/cfp/
    voltage and pass the document root to the callback.

    Keyword Args:
        interface_type: interface list key.
        interface_name: interface list key.
        voltage: value for the voltage leaf.
        callback: overrides ``self._callback`` when supplied.
    """
    get_media_detail = ET.Element("get_media_detail")
    # RPC element doubles as the document root (dead "config" element removed).
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp = ET.SubElement(interface_identifier, "cfp")
    cfp = ET.SubElement(cfp, "cfp")
    voltage = ET.SubElement(cfp, "voltage")
    voltage.text = kwargs.pop('voltage')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_current(self, **kwargs):
    """Auto Generated Code

    Build get_media_detail/output/interface/interface-identifier/cfp/cfp/
    current and pass the document root to the callback.

    Keyword Args:
        interface_type: interface list key.
        interface_name: interface list key.
        current: value for the current leaf.
        callback: overrides ``self._callback`` when supplied.
    """
    get_media_detail = ET.Element("get_media_detail")
    # RPC element doubles as the document root (dead "config" element removed).
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp = ET.SubElement(interface_identifier, "cfp")
    cfp = ET.SubElement(cfp, "cfp")
    current = ET.SubElement(cfp, "current")
    current.text = kwargs.pop('current')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_tx_power(self, **kwargs):
    """Auto Generated Code

    Build get_media_detail/output/interface/interface-identifier/cfp/cfp/
    tx-power and pass the document root to the callback.

    Keyword Args:
        interface_type: interface list key.
        interface_name: interface list key.
        tx_power: value for the tx-power leaf.
        callback: overrides ``self._callback`` when supplied.
    """
    get_media_detail = ET.Element("get_media_detail")
    # RPC element doubles as the document root (dead "config" element removed).
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp = ET.SubElement(interface_identifier, "cfp")
    cfp = ET.SubElement(cfp, "cfp")
    tx_power = ET.SubElement(cfp, "tx-power")
    tx_power.text = kwargs.pop('tx_power')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_rx_power(self, **kwargs):
    """Auto Generated Code

    Build get_media_detail/output/interface/interface-identifier/cfp/cfp/
    rx-power and pass the document root to the callback.

    Keyword Args:
        interface_type: interface list key.
        interface_name: interface list key.
        rx_power: value for the rx-power leaf.
        callback: overrides ``self._callback`` when supplied.
    """
    get_media_detail = ET.Element("get_media_detail")
    # RPC element doubles as the document root (dead "config" element removed).
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp = ET.SubElement(interface_identifier, "cfp")
    cfp = ET.SubElement(cfp, "cfp")
    rx_power = ET.SubElement(cfp, "rx-power")
    rx_power.text = kwargs.pop('rx_power')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_speed(self, **kwargs):
    """Auto Generated Code

    Build get_media_detail/output/interface/interface-identifier/cfp2/cfp2/
    speed and pass the document root to the callback.

    Keyword Args:
        interface_type: interface list key.
        interface_name: interface list key.
        speed: value for the speed leaf.
        callback: overrides ``self._callback`` when supplied.
    """
    get_media_detail = ET.Element("get_media_detail")
    # RPC element doubles as the document root (dead "config" element removed).
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp2 = ET.SubElement(interface_identifier, "cfp2")
    cfp2 = ET.SubElement(cfp2, "cfp2")
    speed = ET.SubElement(cfp2, "speed")
    speed.text = kwargs.pop('speed')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_connector(self, **kwargs):
    """Auto Generated Code

    Build get_media_detail/output/interface/interface-identifier/cfp2/cfp2/
    connector and pass the document root to the callback.

    Keyword Args:
        interface_type: interface list key.
        interface_name: interface list key.
        connector: value for the connector leaf.
        callback: overrides ``self._callback`` when supplied.
    """
    get_media_detail = ET.Element("get_media_detail")
    # RPC element doubles as the document root (dead "config" element removed).
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp2 = ET.SubElement(interface_identifier, "cfp2")
    cfp2 = ET.SubElement(cfp2, "cfp2")
    connector = ET.SubElement(cfp2, "connector")
    connector.text = kwargs.pop('connector')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_encoding(self, **kwargs):
    """Auto Generated Code

    Build get_media_detail/output/interface/interface-identifier/cfp2/cfp2/
    encoding and pass the document root to the callback.

    Keyword Args:
        interface_type: interface list key.
        interface_name: interface list key.
        encoding: value for the encoding leaf.
        callback: overrides ``self._callback`` when supplied.
    """
    get_media_detail = ET.Element("get_media_detail")
    # RPC element doubles as the document root (dead "config" element removed).
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp2 = ET.SubElement(interface_identifier, "cfp2")
    cfp2 = ET.SubElement(cfp2, "cfp2")
    encoding = ET.SubElement(cfp2, "encoding")
    encoding.text = kwargs.pop('encoding')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_vendor_name(self, **kwargs):
    """Auto Generated Code

    Build get_media_detail/output/interface/interface-identifier/cfp2/cfp2/
    vendor-name and pass the document root to the callback.

    Keyword Args:
        interface_type: interface list key.
        interface_name: interface list key.
        vendor_name: value for the vendor-name leaf.
        callback: overrides ``self._callback`` when supplied.
    """
    get_media_detail = ET.Element("get_media_detail")
    # RPC element doubles as the document root (dead "config" element removed).
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp2 = ET.SubElement(interface_identifier, "cfp2")
    cfp2 = ET.SubElement(cfp2, "cfp2")
    vendor_name = ET.SubElement(cfp2, "vendor-name")
    vendor_name.text = kwargs.pop('vendor_name')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_vendor_oui(self, **kwargs):
    """Auto Generated Code

    Build get_media_detail/output/interface/interface-identifier/cfp2/cfp2/
    vendor-oui and pass the document root to the callback.

    Keyword Args:
        interface_type: interface list key.
        interface_name: interface list key.
        vendor_oui: value for the vendor-oui leaf.
        callback: overrides ``self._callback`` when supplied.
    """
    get_media_detail = ET.Element("get_media_detail")
    # RPC element doubles as the document root (dead "config" element removed).
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp2 = ET.SubElement(interface_identifier, "cfp2")
    cfp2 = ET.SubElement(cfp2, "cfp2")
    vendor_oui = ET.SubElement(cfp2, "vendor-oui")
    vendor_oui.text = kwargs.pop('vendor_oui')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_vendor_pn(self, **kwargs):
    """Auto Generated Code

    Build get_media_detail/output/interface/interface-identifier/cfp2/cfp2/
    vendor-pn and pass the document root to the callback.

    Keyword Args:
        interface_type: interface list key.
        interface_name: interface list key.
        vendor_pn: value for the vendor-pn leaf.
        callback: overrides ``self._callback`` when supplied.
    """
    get_media_detail = ET.Element("get_media_detail")
    # RPC element doubles as the document root (dead "config" element removed).
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp2 = ET.SubElement(interface_identifier, "cfp2")
    cfp2 = ET.SubElement(cfp2, "cfp2")
    vendor_pn = ET.SubElement(cfp2, "vendor-pn")
    vendor_pn.text = kwargs.pop('vendor_pn')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_vendor_rev(self, **kwargs):
    """Auto Generated Code

    Build get_media_detail/output/interface/interface-identifier/cfp2/cfp2/
    vendor-rev and pass the document root to the callback.

    Keyword Args:
        interface_type: interface list key.
        interface_name: interface list key.
        vendor_rev: value for the vendor-rev leaf.
        callback: overrides ``self._callback`` when supplied.
    """
    get_media_detail = ET.Element("get_media_detail")
    # RPC element doubles as the document root (dead "config" element removed).
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp2 = ET.SubElement(interface_identifier, "cfp2")
    cfp2 = ET.SubElement(cfp2, "cfp2")
    vendor_rev = ET.SubElement(cfp2, "vendor-rev")
    vendor_rev.text = kwargs.pop('vendor_rev')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_distance(self, **kwargs):
    """Auto Generated Code

    Build get_media_detail/output/interface/interface-identifier/cfp2/cfp2/
    distance and pass the document root to the callback.

    Keyword Args:
        interface_type: interface list key.
        interface_name: interface list key.
        distance: value for the distance leaf.
        callback: overrides ``self._callback`` when supplied.
    """
    get_media_detail = ET.Element("get_media_detail")
    # RPC element doubles as the document root (dead "config" element removed).
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp2 = ET.SubElement(interface_identifier, "cfp2")
    cfp2 = ET.SubElement(cfp2, "cfp2")
    distance = ET.SubElement(cfp2, "distance")
    distance.text = kwargs.pop('distance')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_media_form_factor(self, **kwargs):
    """Auto Generated Code

    Build get_media_detail/output/interface/interface-identifier/cfp2/cfp2/
    media-form-factor and pass the document root to the callback.

    Keyword Args:
        interface_type: interface list key.
        interface_name: interface list key.
        media_form_factor: value for the media-form-factor leaf.
        callback: overrides ``self._callback`` when supplied.
    """
    get_media_detail = ET.Element("get_media_detail")
    # RPC element doubles as the document root (dead "config" element removed).
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp2 = ET.SubElement(interface_identifier, "cfp2")
    cfp2 = ET.SubElement(cfp2, "cfp2")
    media_form_factor = ET.SubElement(cfp2, "media-form-factor")
    media_form_factor.text = kwargs.pop('media_form_factor')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_wavelength(self, **kwargs):
    """Auto Generated Code

    Build get_media_detail/output/interface/interface-identifier/cfp2/cfp2/
    wavelength and pass the document root to the callback.

    Keyword Args:
        interface_type: interface list key.
        interface_name: interface list key.
        wavelength: value for the wavelength leaf.
        callback: overrides ``self._callback`` when supplied.
    """
    get_media_detail = ET.Element("get_media_detail")
    # RPC element doubles as the document root (dead "config" element removed).
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp2 = ET.SubElement(interface_identifier, "cfp2")
    cfp2 = ET.SubElement(cfp2, "cfp2")
    wavelength = ET.SubElement(cfp2, "wavelength")
    wavelength.text = kwargs.pop('wavelength')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_serial_no(self, **kwargs):
    """Auto Generated Code

    Build get_media_detail/output/interface/interface-identifier/cfp2/cfp2/
    serial-no and pass the document root to the callback.

    Keyword Args:
        interface_type: interface list key.
        interface_name: interface list key.
        serial_no: value for the serial-no leaf.
        callback: overrides ``self._callback`` when supplied.
    """
    get_media_detail = ET.Element("get_media_detail")
    # RPC element doubles as the document root (dead "config" element removed).
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp2 = ET.SubElement(interface_identifier, "cfp2")
    cfp2 = ET.SubElement(cfp2, "cfp2")
    serial_no = ET.SubElement(cfp2, "serial-no")
    serial_no.text = kwargs.pop('serial_no')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_date_code(self, **kwargs):
    """Auto Generated Code

    Build get_media_detail/output/interface/interface-identifier/cfp2/cfp2/
    date-code and pass the document root to the callback.

    Keyword Args:
        interface_type: interface list key.
        interface_name: interface list key.
        date_code: value for the date-code leaf.
        callback: overrides ``self._callback`` when supplied.
    """
    get_media_detail = ET.Element("get_media_detail")
    # RPC element doubles as the document root (dead "config" element removed).
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp2 = ET.SubElement(interface_identifier, "cfp2")
    cfp2 = ET.SubElement(cfp2, "cfp2")
    date_code = ET.SubElement(cfp2, "date-code")
    date_code.text = kwargs.pop('date_code')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_temperature(self, **kwargs):
    """Auto Generated Code

    Build get_media_detail/output/interface/interface-identifier/cfp2/cfp2/
    temperature and pass the document root to the callback.

    Keyword Args:
        interface_type: interface list key.
        interface_name: interface list key.
        temperature: value for the temperature leaf.
        callback: overrides ``self._callback`` when supplied.
    """
    get_media_detail = ET.Element("get_media_detail")
    # RPC element doubles as the document root (dead "config" element removed).
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp2 = ET.SubElement(interface_identifier, "cfp2")
    cfp2 = ET.SubElement(cfp2, "cfp2")
    temperature = ET.SubElement(cfp2, "temperature")
    temperature.text = kwargs.pop('temperature')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_voltage(self, **kwargs):
    """Auto Generated Code

    Build get_media_detail/output/interface/interface-identifier/cfp2/cfp2/
    voltage and pass the document root to the callback.

    Keyword Args:
        interface_type: interface list key.
        interface_name: interface list key.
        voltage: value for the voltage leaf.
        callback: overrides ``self._callback`` when supplied.
    """
    get_media_detail = ET.Element("get_media_detail")
    # RPC element doubles as the document root (dead "config" element removed).
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp2 = ET.SubElement(interface_identifier, "cfp2")
    cfp2 = ET.SubElement(cfp2, "cfp2")
    voltage = ET.SubElement(cfp2, "voltage")
    voltage.text = kwargs.pop('voltage')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_current(self, **kwargs):
    """Auto Generated Code

    Build get_media_detail/output/interface/interface-identifier/cfp2/cfp2/
    current and pass the document root to the callback.

    Keyword Args:
        interface_type: interface list key.
        interface_name: interface list key.
        current: value for the current leaf.
        callback: overrides ``self._callback`` when supplied.
    """
    get_media_detail = ET.Element("get_media_detail")
    # RPC element doubles as the document root (dead "config" element removed).
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp2 = ET.SubElement(interface_identifier, "cfp2")
    cfp2 = ET.SubElement(cfp2, "cfp2")
    current = ET.SubElement(cfp2, "current")
    current.text = kwargs.pop('current')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_tx_power(self, **kwargs):
    """Auto Generated Code

    Build get_media_detail/output/interface/interface-identifier/cfp2/cfp2/
    tx-power and pass the document root to the callback.

    Keyword Args:
        interface_type: interface list key.
        interface_name: interface list key.
        tx_power: value for the tx-power leaf.
        callback: overrides ``self._callback`` when supplied.
    """
    get_media_detail = ET.Element("get_media_detail")
    # RPC element doubles as the document root (dead "config" element removed).
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp2 = ET.SubElement(interface_identifier, "cfp2")
    cfp2 = ET.SubElement(cfp2, "cfp2")
    tx_power = ET.SubElement(cfp2, "tx-power")
    tx_power.text = kwargs.pop('tx_power')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_rx_power(self, **kwargs):
    """Auto Generated Code

    Build get_media_detail/output/interface/interface-identifier/cfp2/cfp2/
    rx-power and pass the document root to the callback.

    Keyword Args:
        interface_type: interface list key.
        interface_name: interface list key.
        rx_power: value for the rx-power leaf.
        callback: overrides ``self._callback`` when supplied.
    """
    get_media_detail = ET.Element("get_media_detail")
    # RPC element doubles as the document root (dead "config" element removed).
    config = get_media_detail
    output = ET.SubElement(get_media_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    cfp2 = ET.SubElement(interface_identifier, "cfp2")
    cfp2 = ET.SubElement(cfp2, "cfp2")
    rx_power = ET.SubElement(cfp2, "rx-power")
    rx_power.text = kwargs.pop('rx_power')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_vlan_brief_input_request_type_get_request_vlan_id(self, **kwargs):
    """Auto Generated Code

    Build get_vlan_brief/input/request-type/get-request/vlan-id and pass
    the document root to the callback.

    Keyword Args:
        vlan_id: value for the vlan-id leaf.
        callback: overrides ``self._callback`` when supplied.
    """
    get_vlan_brief = ET.Element("get_vlan_brief")
    # RPC element doubles as the document root (dead "config" element removed).
    config = get_vlan_brief
    # Renamed from "input" to avoid shadowing the builtin.
    input_elem = ET.SubElement(get_vlan_brief, "input")
    request_type = ET.SubElement(input_elem, "request-type")
    get_request = ET.SubElement(request_type, "get-request")
    vlan_id = ET.SubElement(get_request, "vlan-id")
    vlan_id.text = kwargs.pop('vlan_id')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_vlan_brief_input_request_type_get_next_request_last_rcvd_vlan_id(self, **kwargs):
    """Auto Generated Code

    Build get_vlan_brief/input/request-type/get-next-request/
    last-rcvd-vlan-id and pass the document root to the callback.

    Keyword Args:
        last_rcvd_vlan_id: value for the last-rcvd-vlan-id leaf.
        callback: overrides ``self._callback`` when supplied.
    """
    get_vlan_brief = ET.Element("get_vlan_brief")
    # RPC element doubles as the document root (dead "config" element removed).
    config = get_vlan_brief
    # Renamed from "input" to avoid shadowing the builtin.
    input_elem = ET.SubElement(get_vlan_brief, "input")
    request_type = ET.SubElement(input_elem, "request-type")
    get_next_request = ET.SubElement(request_type, "get-next-request")
    last_rcvd_vlan_id = ET.SubElement(get_next_request, "last-rcvd-vlan-id")
    last_rcvd_vlan_id.text = kwargs.pop('last_rcvd_vlan_id')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_vlan_brief_output_configured_vlans_count(self, **kwargs):
    """Auto Generated Code

    Build get_vlan_brief/output/configured-vlans-count and pass the
    document root to the callback.

    Keyword Args:
        configured_vlans_count: value for the configured-vlans-count leaf.
        callback: overrides ``self._callback`` when supplied.
    """
    get_vlan_brief = ET.Element("get_vlan_brief")
    # RPC element doubles as the document root (dead "config" element removed).
    config = get_vlan_brief
    output = ET.SubElement(get_vlan_brief, "output")
    configured_vlans_count = ET.SubElement(output, "configured-vlans-count")
    configured_vlans_count.text = kwargs.pop('configured_vlans_count')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_vlan_brief_output_provisioned_vlans_count(self, **kwargs):
    """Auto Generated Code

    Build get_vlan_brief/output/provisioned-vlans-count and pass the
    document root to the callback.

    Keyword Args:
        provisioned_vlans_count: value for the provisioned-vlans-count leaf.
        callback: overrides ``self._callback`` when supplied.
    """
    get_vlan_brief = ET.Element("get_vlan_brief")
    # RPC element doubles as the document root (dead "config" element removed).
    config = get_vlan_brief
    output = ET.SubElement(get_vlan_brief, "output")
    provisioned_vlans_count = ET.SubElement(output, "provisioned-vlans-count")
    provisioned_vlans_count.text = kwargs.pop('provisioned_vlans_count')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_vlan_brief_output_unprovisioned_vlans_count(self, **kwargs):
    """Auto Generated Code

    Build get_vlan_brief/output/unprovisioned-vlans-count and pass the
    document root to the callback.

    Keyword Args:
        unprovisioned_vlans_count: value for the unprovisioned-vlans-count leaf.
        callback: overrides ``self._callback`` when supplied.
    """
    get_vlan_brief = ET.Element("get_vlan_brief")
    # RPC element doubles as the document root (dead "config" element removed).
    config = get_vlan_brief
    output = ET.SubElement(get_vlan_brief, "output")
    unprovisioned_vlans_count = ET.SubElement(output, "unprovisioned-vlans-count")
    unprovisioned_vlans_count.text = kwargs.pop('unprovisioned_vlans_count')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_vlan_brief_output_vlan_vlan_id(self, **kwargs):
    """Auto Generated Code

    Build get_vlan_brief/output/vlan/vlan-id and pass the document root
    to the callback.

    Keyword Args:
        vlan_id: value for the vlan-id leaf.
        callback: overrides ``self._callback`` when supplied.
    """
    get_vlan_brief = ET.Element("get_vlan_brief")
    # RPC element doubles as the document root (dead "config" element removed).
    config = get_vlan_brief
    output = ET.SubElement(get_vlan_brief, "output")
    vlan = ET.SubElement(output, "vlan")
    vlan_id = ET.SubElement(vlan, "vlan-id")
    vlan_id.text = kwargs.pop('vlan_id')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_vlan_brief_output_vlan_vlan_type(self, **kwargs):
    """Auto Generated Code

    Build get_vlan_brief/output/vlan/vlan-type (keyed by vlan-id) and
    pass the document root to the callback.

    Keyword Args:
        vlan_id: vlan list key.
        vlan_type: value for the vlan-type leaf.
        callback: overrides ``self._callback`` when supplied.
    """
    get_vlan_brief = ET.Element("get_vlan_brief")
    # RPC element doubles as the document root (dead "config" element removed).
    config = get_vlan_brief
    output = ET.SubElement(get_vlan_brief, "output")
    vlan = ET.SubElement(output, "vlan")
    vlan_id_key = ET.SubElement(vlan, "vlan-id")
    vlan_id_key.text = kwargs.pop('vlan_id')
    vlan_type = ET.SubElement(vlan, "vlan-type")
    vlan_type.text = kwargs.pop('vlan_type')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_vlan_brief_output_vlan_vlan_name(self, **kwargs):
    """Auto Generated Code

    Build get_vlan_brief/output/vlan/vlan-name (keyed by vlan-id) and
    pass the document root to the callback.

    Keyword Args:
        vlan_id: vlan list key.
        vlan_name: value for the vlan-name leaf.
        callback: overrides ``self._callback`` when supplied.
    """
    get_vlan_brief = ET.Element("get_vlan_brief")
    # RPC element doubles as the document root (dead "config" element removed).
    config = get_vlan_brief
    output = ET.SubElement(get_vlan_brief, "output")
    vlan = ET.SubElement(output, "vlan")
    vlan_id_key = ET.SubElement(vlan, "vlan-id")
    vlan_id_key.text = kwargs.pop('vlan_id')
    vlan_name = ET.SubElement(vlan, "vlan-name")
    vlan_name.text = kwargs.pop('vlan_name')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_vlan_brief_output_vlan_vlan_state(self, **kwargs):
    """Auto Generated Code

    Build get_vlan_brief/output/vlan/vlan-state (keyed by vlan-id) and
    pass the document root to the callback.

    Keyword Args:
        vlan_id: vlan list key.
        vlan_state: value for the vlan-state leaf.
        callback: overrides ``self._callback`` when supplied.
    """
    get_vlan_brief = ET.Element("get_vlan_brief")
    # RPC element doubles as the document root (dead "config" element removed).
    config = get_vlan_brief
    output = ET.SubElement(get_vlan_brief, "output")
    vlan = ET.SubElement(output, "vlan")
    vlan_id_key = ET.SubElement(vlan, "vlan-id")
    vlan_id_key.text = kwargs.pop('vlan_id')
    vlan_state = ET.SubElement(vlan, "vlan-state")
    vlan_state.text = kwargs.pop('vlan_state')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_vlan_brief_output_vlan_interface_interface_type(self, **kwargs):
    """Auto Generated Code

    Build get_vlan_brief/output/vlan/interface/interface-type (keyed by
    vlan-id and interface-name) and pass the document root to the callback.

    Keyword Args:
        vlan_id: vlan list key.
        interface_name: interface list key.
        interface_type: value for the interface-type leaf.
        callback: overrides ``self._callback`` when supplied.
    """
    get_vlan_brief = ET.Element("get_vlan_brief")
    # RPC element doubles as the document root (dead "config" element removed).
    config = get_vlan_brief
    output = ET.SubElement(get_vlan_brief, "output")
    vlan = ET.SubElement(output, "vlan")
    vlan_id_key = ET.SubElement(vlan, "vlan-id")
    vlan_id_key.text = kwargs.pop('vlan_id')
    interface = ET.SubElement(vlan, "interface")
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_type = ET.SubElement(interface, "interface-type")
    interface_type.text = kwargs.pop('interface_type')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_vlan_brief_output_vlan_interface_interface_name(self, **kwargs):
    """Auto Generated Code

    Build get_vlan_brief/output/vlan/interface/interface-name (keyed by
    vlan-id and interface-type) and pass the document root to the callback.

    Keyword Args:
        vlan_id: vlan list key.
        interface_type: interface list key.
        interface_name: value for the interface-name leaf.
        callback: overrides ``self._callback`` when supplied.
    """
    get_vlan_brief = ET.Element("get_vlan_brief")
    # RPC element doubles as the document root (dead "config" element removed).
    config = get_vlan_brief
    output = ET.SubElement(get_vlan_brief, "output")
    vlan = ET.SubElement(output, "vlan")
    vlan_id_key = ET.SubElement(vlan, "vlan-id")
    vlan_id_key.text = kwargs.pop('vlan_id')
    interface = ET.SubElement(vlan, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name = ET.SubElement(interface, "interface-name")
    interface_name.text = kwargs.pop('interface_name')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_vlan_brief_output_vlan_interface_tag(self, **kwargs):
    """Auto Generated Code

    Build get_vlan_brief/output/vlan/interface/tag (keyed by vlan-id,
    interface-type and interface-name) and pass the document root to the
    callback.

    Keyword Args:
        vlan_id: vlan list key.
        interface_type: interface list key.
        interface_name: interface list key.
        tag: value for the tag leaf.
        callback: overrides ``self._callback`` when supplied.
    """
    get_vlan_brief = ET.Element("get_vlan_brief")
    # RPC element doubles as the document root (dead "config" element removed).
    config = get_vlan_brief
    output = ET.SubElement(get_vlan_brief, "output")
    vlan = ET.SubElement(output, "vlan")
    vlan_id_key = ET.SubElement(vlan, "vlan-id")
    vlan_id_key.text = kwargs.pop('vlan_id')
    interface = ET.SubElement(vlan, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    tag = ET.SubElement(interface, "tag")
    tag.text = kwargs.pop('tag')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_vlan_brief_output_vlan_interface_classification_classification_type(self, **kwargs):
    """Auto Generated Code

    Build get_vlan_brief/output/vlan/interface/classification/
    classification-type (keyed by vlan-id, interface keys and
    classification-value) and pass the document root to the callback.

    Keyword Args:
        vlan_id: vlan list key.
        interface_type: interface list key.
        interface_name: interface list key.
        classification_value: classification list key.
        classification_type: value for the classification-type leaf.
        callback: overrides ``self._callback`` when supplied.
    """
    get_vlan_brief = ET.Element("get_vlan_brief")
    # RPC element doubles as the document root (dead "config" element removed).
    config = get_vlan_brief
    output = ET.SubElement(get_vlan_brief, "output")
    vlan = ET.SubElement(output, "vlan")
    vlan_id_key = ET.SubElement(vlan, "vlan-id")
    vlan_id_key.text = kwargs.pop('vlan_id')
    interface = ET.SubElement(vlan, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    classification = ET.SubElement(interface, "classification")
    classification_value_key = ET.SubElement(classification, "classification-value")
    classification_value_key.text = kwargs.pop('classification_value')
    classification_type = ET.SubElement(classification, "classification-type")
    classification_type.text = kwargs.pop('classification_type')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_vlan_brief_output_vlan_interface_classification_classification_value(self, **kwargs):
    """Auto Generated Code

    Build get_vlan_brief/output/vlan/interface/classification/
    classification-value (keyed by vlan-id, interface keys and
    classification-type) and pass the document root to the callback.

    Keyword Args:
        vlan_id: vlan list key.
        interface_type: interface list key.
        interface_name: interface list key.
        classification_type: classification list key.
        classification_value: value for the classification-value leaf.
        callback: overrides ``self._callback`` when supplied.
    """
    get_vlan_brief = ET.Element("get_vlan_brief")
    # RPC element doubles as the document root (dead "config" element removed).
    config = get_vlan_brief
    output = ET.SubElement(get_vlan_brief, "output")
    vlan = ET.SubElement(output, "vlan")
    vlan_id_key = ET.SubElement(vlan, "vlan-id")
    vlan_id_key.text = kwargs.pop('vlan_id')
    interface = ET.SubElement(vlan, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    classification = ET.SubElement(interface, "classification")
    classification_type_key = ET.SubElement(classification, "classification-type")
    classification_type_key.text = kwargs.pop('classification_type')
    classification_value = ET.SubElement(classification, "classification-value")
    classification_value.text = kwargs.pop('classification_value')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_vlan_brief_output_last_vlan_id(self, **kwargs):
    """Auto Generated Code

    Build get_vlan_brief/output/last-vlan-id and pass the document root
    to the callback.

    Keyword Args:
        last_vlan_id: value for the last-vlan-id leaf.
        callback: overrides ``self._callback`` when supplied.
    """
    get_vlan_brief = ET.Element("get_vlan_brief")
    # RPC element doubles as the document root (dead "config" element removed).
    config = get_vlan_brief
    output = ET.SubElement(get_vlan_brief, "output")
    last_vlan_id = ET.SubElement(output, "last-vlan-id")
    last_vlan_id.text = kwargs.pop('last_vlan_id')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_vlan_brief_output_has_more(self, **kwargs):
    """Auto Generated Code

    Build get_vlan_brief/output/has-more and pass the document root to
    the callback.

    Keyword Args:
        has_more: value for the has-more leaf.
        callback: overrides ``self._callback`` when supplied.
    """
    get_vlan_brief = ET.Element("get_vlan_brief")
    # RPC element doubles as the document root (dead "config" element removed).
    config = get_vlan_brief
    output = ET.SubElement(get_vlan_brief, "output")
    has_more = ET.SubElement(output, "has-more")
    has_more.text = kwargs.pop('has_more')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_switchport_output_switchport_interface_type(self, **kwargs):
    """Auto Generated Code

    Build get_interface_switchport/output/switchport/interface-type
    (keyed by interface-name) and pass the document root to the callback.

    Keyword Args:
        interface_name: switchport list key.
        interface_type: value for the interface-type leaf.
        callback: overrides ``self._callback`` when supplied.
    """
    get_interface_switchport = ET.Element("get_interface_switchport")
    # RPC element doubles as the document root (dead "config" element removed).
    config = get_interface_switchport
    output = ET.SubElement(get_interface_switchport, "output")
    switchport = ET.SubElement(output, "switchport")
    interface_name_key = ET.SubElement(switchport, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_type = ET.SubElement(switchport, "interface-type")
    interface_type.text = kwargs.pop('interface_type')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_switchport_output_switchport_interface_name(self, **kwargs):
    """Auto Generated Code

    Build get_interface_switchport/output/switchport/interface-name
    (keyed by interface-type) and pass the document root to the callback.

    Keyword Args:
        interface_type: switchport list key.
        interface_name: value for the interface-name leaf.
        callback: overrides ``self._callback`` when supplied.
    """
    get_interface_switchport = ET.Element("get_interface_switchport")
    # RPC element doubles as the document root (dead "config" element removed).
    config = get_interface_switchport
    output = ET.SubElement(get_interface_switchport, "output")
    switchport = ET.SubElement(output, "switchport")
    interface_type_key = ET.SubElement(switchport, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name = ET.SubElement(switchport, "interface-name")
    interface_name.text = kwargs.pop('interface_name')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_switchport_output_switchport_mode(self, **kwargs):
    """Auto Generated Code

    Build get_interface_switchport/output/switchport/mode (keyed by
    interface-type and interface-name) and pass the document root to the
    callback.

    Keyword Args:
        interface_type: switchport list key.
        interface_name: switchport list key.
        mode: value for the mode leaf.
        callback: overrides ``self._callback`` when supplied.
    """
    get_interface_switchport = ET.Element("get_interface_switchport")
    # RPC element doubles as the document root (dead "config" element removed).
    config = get_interface_switchport
    output = ET.SubElement(get_interface_switchport, "output")
    switchport = ET.SubElement(output, "switchport")
    interface_type_key = ET.SubElement(switchport, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(switchport, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    mode = ET.SubElement(switchport, "mode")
    mode.text = kwargs.pop('mode')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_switchport_output_switchport_fcoe_port_enabled(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_switchport = ET.Element("get_interface_switchport")
config = get_interface_switchport
output = ET.SubElement(get_interface_switchport, "output")
switchport = ET.SubElement(output, "switchport")
interface_type_key = ET.SubElement(switchport, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(switchport, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
fcoe_port_enabled = ET.SubElement(switchport, "fcoe-port-enabled")
fcoe_port_enabled.text = kwargs.pop('fcoe_port_enabled')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_switchport_output_switchport_ingress_filter_enabled(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_switchport = ET.Element("get_interface_switchport")
config = get_interface_switchport
output = ET.SubElement(get_interface_switchport, "output")
switchport = ET.SubElement(output, "switchport")
interface_type_key = ET.SubElement(switchport, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(switchport, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ingress_filter_enabled = ET.SubElement(switchport, "ingress-filter-enabled")
ingress_filter_enabled.text = kwargs.pop('ingress_filter_enabled')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_switchport_output_switchport_acceptable_frame_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_switchport = ET.Element("get_interface_switchport")
config = get_interface_switchport
output = ET.SubElement(get_interface_switchport, "output")
switchport = ET.SubElement(output, "switchport")
interface_type_key = ET.SubElement(switchport, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(switchport, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
acceptable_frame_type = ET.SubElement(switchport, "acceptable-frame-type")
acceptable_frame_type.text = kwargs.pop('acceptable_frame_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_switchport_output_switchport_default_vlan(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_switchport = ET.Element("get_interface_switchport")
config = get_interface_switchport
output = ET.SubElement(get_interface_switchport, "output")
switchport = ET.SubElement(output, "switchport")
interface_type_key = ET.SubElement(switchport, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(switchport, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
default_vlan = ET.SubElement(switchport, "default-vlan")
default_vlan.text = kwargs.pop('default_vlan')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_input_request_type_get_request_interface_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
input = ET.SubElement(get_ip_interface, "input")
request_type = ET.SubElement(input, "request-type")
get_request = ET.SubElement(request_type, "get-request")
interface_type = ET.SubElement(get_request, "interface-type")
interface_type.text = kwargs.pop('interface_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_input_request_type_get_request_interface_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
input = ET.SubElement(get_ip_interface, "input")
request_type = ET.SubElement(input, "request-type")
get_request = ET.SubElement(request_type, "get-request")
interface_name = ET.SubElement(get_request, "interface-name")
interface_name.text = kwargs.pop('interface_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_input_request_type_get_request_rbridge_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
input = ET.SubElement(get_ip_interface, "input")
request_type = ET.SubElement(input, "request-type")
get_request = ET.SubElement(request_type, "get-request")
rbridge_id = ET.SubElement(get_request, "rbridge-id")
rbridge_id.text = kwargs.pop('rbridge_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_output_interface_interface_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
output = ET.SubElement(get_ip_interface, "output")
interface = ET.SubElement(output, "interface")
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_type = ET.SubElement(interface, "interface-type")
interface_type.text = kwargs.pop('interface_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_output_interface_interface_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
output = ET.SubElement(get_ip_interface, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name = ET.SubElement(interface, "interface-name")
interface_name.text = kwargs.pop('interface_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_output_interface_if_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
output = ET.SubElement(get_ip_interface, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
if_name = ET.SubElement(interface, "if-name")
if_name.text = kwargs.pop('if_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_output_interface_ip_address_ipv4(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
output = ET.SubElement(get_ip_interface, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ip_address = ET.SubElement(interface, "ip-address")
ipv4 = ET.SubElement(ip_address, "ipv4")
ipv4.text = kwargs.pop('ipv4')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_output_interface_ip_address_ipv4_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
output = ET.SubElement(get_ip_interface, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ip_address = ET.SubElement(interface, "ip-address")
ipv4_key = ET.SubElement(ip_address, "ipv4")
ipv4_key.text = kwargs.pop('ipv4')
ipv4_type = ET.SubElement(ip_address, "ipv4-type")
ipv4_type.text = kwargs.pop('ipv4_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_output_interface_ip_address_broadcast(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
output = ET.SubElement(get_ip_interface, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ip_address = ET.SubElement(interface, "ip-address")
ipv4_key = ET.SubElement(ip_address, "ipv4")
ipv4_key.text = kwargs.pop('ipv4')
broadcast = ET.SubElement(ip_address, "broadcast")
broadcast.text = kwargs.pop('broadcast')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_output_interface_ip_address_ip_mtu(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
output = ET.SubElement(get_ip_interface, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ip_address = ET.SubElement(interface, "ip-address")
ipv4_key = ET.SubElement(ip_address, "ipv4")
ipv4_key.text = kwargs.pop('ipv4')
ip_mtu = ET.SubElement(ip_address, "ip-mtu")
ip_mtu.text = kwargs.pop('ip_mtu')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_output_interface_if_state(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
output = ET.SubElement(get_ip_interface, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
if_state = ET.SubElement(interface, "if-state")
if_state.text = kwargs.pop('if_state')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_output_interface_line_protocol_state(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
output = ET.SubElement(get_ip_interface, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
line_protocol_state = ET.SubElement(interface, "line-protocol-state")
line_protocol_state.text = kwargs.pop('line_protocol_state')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_output_interface_proxy_arp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
output = ET.SubElement(get_ip_interface, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
proxy_arp = ET.SubElement(interface, "proxy-arp")
proxy_arp.text = kwargs.pop('proxy_arp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_output_interface_vrf(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
output = ET.SubElement(get_ip_interface, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
vrf = ET.SubElement(interface, "vrf")
vrf.text = kwargs.pop('vrf')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_output_has_more(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
output = ET.SubElement(get_ip_interface, "output")
has_more = ET.SubElement(output, "has-more")
has_more.text = kwargs.pop('has_more')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_input_request_type_get_request_interface_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
input = ET.SubElement(get_interface_detail, "input")
request_type = ET.SubElement(input, "request-type")
get_request = ET.SubElement(request_type, "get-request")
interface_type = ET.SubElement(get_request, "interface-type")
interface_type.text = kwargs.pop('interface_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_input_request_type_get_request_interface_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
input = ET.SubElement(get_interface_detail, "input")
request_type = ET.SubElement(input, "request-type")
get_request = ET.SubElement(request_type, "get-request")
interface_name = ET.SubElement(get_request, "interface-name")
interface_name.text = kwargs.pop('interface_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_input_request_type_get_next_request_last_rcvd_interface_interface_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
input = ET.SubElement(get_interface_detail, "input")
request_type = ET.SubElement(input, "request-type")
get_next_request = ET.SubElement(request_type, "get-next-request")
last_rcvd_interface = ET.SubElement(get_next_request, "last-rcvd-interface")
interface_type = ET.SubElement(last_rcvd_interface, "interface-type")
interface_type.text = kwargs.pop('interface_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_input_request_type_get_next_request_last_rcvd_interface_interface_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
input = ET.SubElement(get_interface_detail, "input")
request_type = ET.SubElement(input, "request-type")
get_next_request = ET.SubElement(request_type, "get-next-request")
last_rcvd_interface = ET.SubElement(get_next_request, "last-rcvd-interface")
interface_name = ET.SubElement(last_rcvd_interface, "interface-name")
interface_name.text = kwargs.pop('interface_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_interface_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_type = ET.SubElement(interface, "interface-type")
interface_type.text = kwargs.pop('interface_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_interface_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name = ET.SubElement(interface, "interface-name")
interface_name.text = kwargs.pop('interface_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_ifindex(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ifindex = ET.SubElement(interface, "ifindex")
ifindex.text = kwargs.pop('ifindex')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_mtu(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
mtu = ET.SubElement(interface, "mtu")
mtu.text = kwargs.pop('mtu')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_ip_mtu(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ip_mtu = ET.SubElement(interface, "ip-mtu")
ip_mtu.text = kwargs.pop('ip_mtu')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_if_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
if_name = ET.SubElement(interface, "if-name")
if_name.text = kwargs.pop('if_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_if_state(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
if_state = ET.SubElement(interface, "if-state")
if_state.text = kwargs.pop('if_state')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_line_protocol_state(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
line_protocol_state = ET.SubElement(interface, "line-protocol-state")
line_protocol_state.text = kwargs.pop('line_protocol_state')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_line_protocol_state_info(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
line_protocol_state_info = ET.SubElement(interface, "line-protocol-state-info")
line_protocol_state_info.text = kwargs.pop('line_protocol_state_info')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_line_protocol_exception_info(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
line_protocol_exception_info = ET.SubElement(interface, "line-protocol-exception-info")
line_protocol_exception_info.text = kwargs.pop('line_protocol_exception_info')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_hardware_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
hardware_type = ET.SubElement(interface, "hardware-type")
hardware_type.text = kwargs.pop('hardware_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_logical_hardware_address(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
logical_hardware_address = ET.SubElement(interface, "logical-hardware-address")
logical_hardware_address.text = kwargs.pop('logical_hardware_address')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_current_hardware_address(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
current_hardware_address = ET.SubElement(interface, "current-hardware-address")
current_hardware_address.text = kwargs.pop('current_hardware_address')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_media_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
media_type = ET.SubElement(interface, "media-type")
media_type.text = kwargs.pop('media_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_wavelength(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
wavelength = ET.SubElement(interface, "wavelength")
wavelength.text = kwargs.pop('wavelength')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_if_description(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
if_description = ET.SubElement(interface, "if-description")
if_description.text = kwargs.pop('if_description')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_actual_line_speed(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
actual_line_speed = ET.SubElement(interface, "actual-line-speed")
actual_line_speed.text = kwargs.pop('actual_line_speed')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_configured_line_speed(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
configured_line_speed = ET.SubElement(interface, "configured-line-speed")
configured_line_speed.text = kwargs.pop('configured_line_speed')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_line_duplex_state(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
line_duplex_state = ET.SubElement(interface, "line-duplex-state")
line_duplex_state.text = kwargs.pop('line_duplex_state')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_flow_control(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
flow_control = ET.SubElement(interface, "flow-control")
flow_control.text = kwargs.pop('flow_control')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_queuing_strategy(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
queuing_strategy = ET.SubElement(interface, "queuing-strategy")
queuing_strategy.text = kwargs.pop('queuing_strategy')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_port_role(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
port_role = ET.SubElement(interface, "port-role")
port_role.text = kwargs.pop('port_role')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_port_mode(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
port_mode = ET.SubElement(interface, "port-mode")
port_mode.text = kwargs.pop('port_mode')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_ifHCInOctets(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ifHCInOctets = ET.SubElement(interface, "ifHCInOctets")
ifHCInOctets.text = kwargs.pop('ifHCInOctets')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_ifHCInUcastPkts(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ifHCInUcastPkts = ET.SubElement(interface, "ifHCInUcastPkts")
ifHCInUcastPkts.text = kwargs.pop('ifHCInUcastPkts')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_ifHCInMulticastPkts(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ifHCInMulticastPkts = ET.SubElement(interface, "ifHCInMulticastPkts")
ifHCInMulticastPkts.text = kwargs.pop('ifHCInMulticastPkts')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_ifHCInBroadcastPkts(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ifHCInBroadcastPkts = ET.SubElement(interface, "ifHCInBroadcastPkts")
ifHCInBroadcastPkts.text = kwargs.pop('ifHCInBroadcastPkts')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_ifHCInErrors(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ifHCInErrors = ET.SubElement(interface, "ifHCInErrors")
ifHCInErrors.text = kwargs.pop('ifHCInErrors')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_ifHCOutOctets(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ifHCOutOctets = ET.SubElement(interface, "ifHCOutOctets")
ifHCOutOctets.text = kwargs.pop('ifHCOutOctets')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_ifHCOutUcastPkts(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ifHCOutUcastPkts = ET.SubElement(interface, "ifHCOutUcastPkts")
ifHCOutUcastPkts.text = kwargs.pop('ifHCOutUcastPkts')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_ifHCOutMulticastPkts(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ifHCOutMulticastPkts = ET.SubElement(interface, "ifHCOutMulticastPkts")
ifHCOutMulticastPkts.text = kwargs.pop('ifHCOutMulticastPkts')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_ifHCOutBroadcastPkts(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ifHCOutBroadcastPkts = ET.SubElement(interface, "ifHCOutBroadcastPkts")
ifHCOutBroadcastPkts.text = kwargs.pop('ifHCOutBroadcastPkts')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_ifHCOutErrors(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ifHCOutErrors = ET.SubElement(interface, "ifHCOutErrors")
ifHCOutErrors.text = kwargs.pop('ifHCOutErrors')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_has_more(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
has_more = ET.SubElement(output, "has-more")
has_more.text = kwargs.pop('has_more')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_input_interface_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
input = ET.SubElement(get_media_detail, "input")
interface_type = ET.SubElement(input, "interface-type")
interface_type.text = kwargs.pop('interface_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_input_interface_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
input = ET.SubElement(get_media_detail, "input")
interface_name = ET.SubElement(input, "interface-name")
interface_name.text = kwargs.pop('interface_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_input_rbridge_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
input = ET.SubElement(get_media_detail, "input")
rbridge_id = ET.SubElement(input, "rbridge-id")
rbridge_id.text = kwargs.pop('rbridge_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_type = ET.SubElement(interface, "interface-type")
interface_type.text = kwargs.pop('interface_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name = ET.SubElement(interface, "interface-name")
interface_name.text = kwargs.pop('interface_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_speed(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
sfp = ET.SubElement(interface_identifier, "sfp")
sfp = ET.SubElement(sfp, "sfp")
speed = ET.SubElement(sfp, "speed")
speed.text = kwargs.pop('speed')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_connector(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
sfp = ET.SubElement(interface_identifier, "sfp")
sfp = ET.SubElement(sfp, "sfp")
connector = ET.SubElement(sfp, "connector")
connector.text = kwargs.pop('connector')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_encoding(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
sfp = ET.SubElement(interface_identifier, "sfp")
sfp = ET.SubElement(sfp, "sfp")
encoding = ET.SubElement(sfp, "encoding")
encoding.text = kwargs.pop('encoding')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_vendor_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
sfp = ET.SubElement(interface_identifier, "sfp")
sfp = ET.SubElement(sfp, "sfp")
vendor_name = ET.SubElement(sfp, "vendor-name")
vendor_name.text = kwargs.pop('vendor_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_vendor_oui(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
sfp = ET.SubElement(interface_identifier, "sfp")
sfp = ET.SubElement(sfp, "sfp")
vendor_oui = ET.SubElement(sfp, "vendor-oui")
vendor_oui.text = kwargs.pop('vendor_oui')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_vendor_pn(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
sfp = ET.SubElement(interface_identifier, "sfp")
sfp = ET.SubElement(sfp, "sfp")
vendor_pn = ET.SubElement(sfp, "vendor-pn")
vendor_pn.text = kwargs.pop('vendor_pn')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_vendor_rev(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
sfp = ET.SubElement(interface_identifier, "sfp")
sfp = ET.SubElement(sfp, "sfp")
vendor_rev = ET.SubElement(sfp, "vendor-rev")
vendor_rev.text = kwargs.pop('vendor_rev')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_distance(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
sfp = ET.SubElement(interface_identifier, "sfp")
sfp = ET.SubElement(sfp, "sfp")
distance = ET.SubElement(sfp, "distance")
distance.text = kwargs.pop('distance')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_media_form_factor(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
sfp = ET.SubElement(interface_identifier, "sfp")
sfp = ET.SubElement(sfp, "sfp")
media_form_factor = ET.SubElement(sfp, "media-form-factor")
media_form_factor.text = kwargs.pop('media_form_factor')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_wavelength(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
sfp = ET.SubElement(interface_identifier, "sfp")
sfp = ET.SubElement(sfp, "sfp")
wavelength = ET.SubElement(sfp, "wavelength")
wavelength.text = kwargs.pop('wavelength')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_serial_no(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
sfp = ET.SubElement(interface_identifier, "sfp")
sfp = ET.SubElement(sfp, "sfp")
serial_no = ET.SubElement(sfp, "serial-no")
serial_no.text = kwargs.pop('serial_no')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_date_code(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
sfp = ET.SubElement(interface_identifier, "sfp")
sfp = ET.SubElement(sfp, "sfp")
date_code = ET.SubElement(sfp, "date-code")
date_code.text = kwargs.pop('date_code')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_temperature(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
sfp = ET.SubElement(interface_identifier, "sfp")
sfp = ET.SubElement(sfp, "sfp")
temperature = ET.SubElement(sfp, "temperature")
temperature.text = kwargs.pop('temperature')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_voltage(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
sfp = ET.SubElement(interface_identifier, "sfp")
sfp = ET.SubElement(sfp, "sfp")
voltage = ET.SubElement(sfp, "voltage")
voltage.text = kwargs.pop('voltage')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_current(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
sfp = ET.SubElement(interface_identifier, "sfp")
sfp = ET.SubElement(sfp, "sfp")
current = ET.SubElement(sfp, "current")
current.text = kwargs.pop('current')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_tx_power(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
sfp = ET.SubElement(interface_identifier, "sfp")
sfp = ET.SubElement(sfp, "sfp")
tx_power = ET.SubElement(sfp, "tx-power")
tx_power.text = kwargs.pop('tx_power')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_rx_power(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
sfp = ET.SubElement(interface_identifier, "sfp")
sfp = ET.SubElement(sfp, "sfp")
rx_power = ET.SubElement(sfp, "rx-power")
rx_power.text = kwargs.pop('rx_power')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_on_board_on_board_speed(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
on_board = ET.SubElement(interface_identifier, "on-board")
on_board = ET.SubElement(on_board, "on-board")
speed = ET.SubElement(on_board, "speed")
speed.text = kwargs.pop('speed')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_on_board_on_board_connector(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
on_board = ET.SubElement(interface_identifier, "on-board")
on_board = ET.SubElement(on_board, "on-board")
connector = ET.SubElement(on_board, "connector")
connector.text = kwargs.pop('connector')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_on_board_on_board_encoding(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
on_board = ET.SubElement(interface_identifier, "on-board")
on_board = ET.SubElement(on_board, "on-board")
encoding = ET.SubElement(on_board, "encoding")
encoding.text = kwargs.pop('encoding')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_on_board_on_board_vendor_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
on_board = ET.SubElement(interface_identifier, "on-board")
on_board = ET.SubElement(on_board, "on-board")
vendor_name = ET.SubElement(on_board, "vendor-name")
vendor_name.text = kwargs.pop('vendor_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_on_board_on_board_vendor_oui(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
on_board = ET.SubElement(interface_identifier, "on-board")
on_board = ET.SubElement(on_board, "on-board")
vendor_oui = ET.SubElement(on_board, "vendor-oui")
vendor_oui.text = kwargs.pop('vendor_oui')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_on_board_on_board_vendor_pn(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
on_board = ET.SubElement(interface_identifier, "on-board")
on_board = ET.SubElement(on_board, "on-board")
vendor_pn = ET.SubElement(on_board, "vendor-pn")
vendor_pn.text = kwargs.pop('vendor_pn')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_on_board_on_board_vendor_rev(self, **kwargs):
    """Auto Generated Code.

    Build a ``get_media_detail`` request selecting
    output/interface/interface-identifier/on-board/on-board/vendor-rev
    and dispatch it through the callback.

    Required kwargs: interface_type, interface_name, vendor_rev.
    Optional kwarg: callback (defaults to ``self._callback``).
    """
    # Build the request root directly; the spare <config> element the
    # generator emitted was dead code and has been removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # List keys identifying the target interface.
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    on_board = ET.SubElement(interface_identifier, "on-board")
    on_board = ET.SubElement(on_board, "on-board")
    vendor_rev = ET.SubElement(on_board, "vendor-rev")
    vendor_rev.text = kwargs.pop('vendor_rev')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_gbic_gbc_vendor_name(self, **kwargs):
    """Auto Generated Code.

    Build a ``get_media_detail`` request selecting
    output/interface/interface-identifier/gbic/gbc/vendor-name
    and dispatch it through the callback.

    Required kwargs: interface_type, interface_name, vendor_name.
    Optional kwarg: callback (defaults to ``self._callback``).
    """
    # Build the request root directly; the spare <config> element the
    # generator emitted was dead code and has been removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # List keys identifying the target interface.
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    gbic = ET.SubElement(interface_identifier, "gbic")
    gbc = ET.SubElement(gbic, "gbc")
    vendor_name = ET.SubElement(gbc, "vendor-name")
    vendor_name.text = kwargs.pop('vendor_name')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_gbic_gbc_vendor_oui(self, **kwargs):
    """Auto Generated Code.

    Build a ``get_media_detail`` request selecting
    output/interface/interface-identifier/gbic/gbc/vendor-oui
    and dispatch it through the callback.

    Required kwargs: interface_type, interface_name, vendor_oui.
    Optional kwarg: callback (defaults to ``self._callback``).
    """
    # Build the request root directly; the spare <config> element the
    # generator emitted was dead code and has been removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # List keys identifying the target interface.
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    gbic = ET.SubElement(interface_identifier, "gbic")
    gbc = ET.SubElement(gbic, "gbc")
    vendor_oui = ET.SubElement(gbc, "vendor-oui")
    vendor_oui.text = kwargs.pop('vendor_oui')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_gbic_gbc_vendor_pn(self, **kwargs):
    """Auto Generated Code.

    Build a ``get_media_detail`` request selecting
    output/interface/interface-identifier/gbic/gbc/vendor-pn
    and dispatch it through the callback.

    Required kwargs: interface_type, interface_name, vendor_pn.
    Optional kwarg: callback (defaults to ``self._callback``).
    """
    # Build the request root directly; the spare <config> element the
    # generator emitted was dead code and has been removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # List keys identifying the target interface.
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    gbic = ET.SubElement(interface_identifier, "gbic")
    gbc = ET.SubElement(gbic, "gbc")
    vendor_pn = ET.SubElement(gbc, "vendor-pn")
    vendor_pn.text = kwargs.pop('vendor_pn')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_gbic_gbc_vendor_rev(self, **kwargs):
    """Auto Generated Code.

    Build a ``get_media_detail`` request selecting
    output/interface/interface-identifier/gbic/gbc/vendor-rev
    and dispatch it through the callback.

    Required kwargs: interface_type, interface_name, vendor_rev.
    Optional kwarg: callback (defaults to ``self._callback``).
    """
    # Build the request root directly; the spare <config> element the
    # generator emitted was dead code and has been removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # List keys identifying the target interface.
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    gbic = ET.SubElement(interface_identifier, "gbic")
    gbc = ET.SubElement(gbic, "gbc")
    vendor_rev = ET.SubElement(gbc, "vendor-rev")
    vendor_rev.text = kwargs.pop('vendor_rev')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_xfp_xfp_vendor_name(self, **kwargs):
    """Auto Generated Code.

    Build a ``get_media_detail`` request selecting
    output/interface/interface-identifier/xfp/xfp/vendor-name
    and dispatch it through the callback.

    Required kwargs: interface_type, interface_name, vendor_name.
    Optional kwarg: callback (defaults to ``self._callback``).
    """
    # Build the request root directly; the spare <config> element the
    # generator emitted was dead code and has been removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # List keys identifying the target interface.
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    xfp = ET.SubElement(interface_identifier, "xfp")
    xfp = ET.SubElement(xfp, "xfp")
    vendor_name = ET.SubElement(xfp, "vendor-name")
    vendor_name.text = kwargs.pop('vendor_name')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_xfp_xfp_vendor_oui(self, **kwargs):
    """Auto Generated Code.

    Build a ``get_media_detail`` request selecting
    output/interface/interface-identifier/xfp/xfp/vendor-oui
    and dispatch it through the callback.

    Required kwargs: interface_type, interface_name, vendor_oui.
    Optional kwarg: callback (defaults to ``self._callback``).
    """
    # Build the request root directly; the spare <config> element the
    # generator emitted was dead code and has been removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # List keys identifying the target interface.
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    xfp = ET.SubElement(interface_identifier, "xfp")
    xfp = ET.SubElement(xfp, "xfp")
    vendor_oui = ET.SubElement(xfp, "vendor-oui")
    vendor_oui.text = kwargs.pop('vendor_oui')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_xfp_xfp_vendor_pn(self, **kwargs):
    """Auto Generated Code.

    Build a ``get_media_detail`` request selecting
    output/interface/interface-identifier/xfp/xfp/vendor-pn
    and dispatch it through the callback.

    Required kwargs: interface_type, interface_name, vendor_pn.
    Optional kwarg: callback (defaults to ``self._callback``).
    """
    # Build the request root directly; the spare <config> element the
    # generator emitted was dead code and has been removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # List keys identifying the target interface.
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    xfp = ET.SubElement(interface_identifier, "xfp")
    xfp = ET.SubElement(xfp, "xfp")
    vendor_pn = ET.SubElement(xfp, "vendor-pn")
    vendor_pn.text = kwargs.pop('vendor_pn')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_xfp_xfp_vendor_rev(self, **kwargs):
    """Auto Generated Code.

    Build a ``get_media_detail`` request selecting
    output/interface/interface-identifier/xfp/xfp/vendor-rev
    and dispatch it through the callback.

    Required kwargs: interface_type, interface_name, vendor_rev.
    Optional kwarg: callback (defaults to ``self._callback``).
    """
    # Build the request root directly; the spare <config> element the
    # generator emitted was dead code and has been removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # List keys identifying the target interface.
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    xfp = ET.SubElement(interface_identifier, "xfp")
    xfp = ET.SubElement(xfp, "xfp")
    vendor_rev = ET.SubElement(xfp, "vendor-rev")
    vendor_rev.text = kwargs.pop('vendor_rev')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_xff_xff_vendor_name(self, **kwargs):
    """Auto Generated Code.

    Build a ``get_media_detail`` request selecting
    output/interface/interface-identifier/xff/xff/vendor-name
    and dispatch it through the callback.

    Required kwargs: interface_type, interface_name, vendor_name.
    Optional kwarg: callback (defaults to ``self._callback``).
    """
    # Build the request root directly; the spare <config> element the
    # generator emitted was dead code and has been removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # List keys identifying the target interface.
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    xff = ET.SubElement(interface_identifier, "xff")
    xff = ET.SubElement(xff, "xff")
    vendor_name = ET.SubElement(xff, "vendor-name")
    vendor_name.text = kwargs.pop('vendor_name')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_xff_xff_vendor_oui(self, **kwargs):
    """Auto Generated Code.

    Build a ``get_media_detail`` request selecting
    output/interface/interface-identifier/xff/xff/vendor-oui
    and dispatch it through the callback.

    Required kwargs: interface_type, interface_name, vendor_oui.
    Optional kwarg: callback (defaults to ``self._callback``).
    """
    # Build the request root directly; the spare <config> element the
    # generator emitted was dead code and has been removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # List keys identifying the target interface.
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    xff = ET.SubElement(interface_identifier, "xff")
    xff = ET.SubElement(xff, "xff")
    vendor_oui = ET.SubElement(xff, "vendor-oui")
    vendor_oui.text = kwargs.pop('vendor_oui')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_xff_xff_vendor_pn(self, **kwargs):
    """Auto Generated Code.

    Build a ``get_media_detail`` request selecting
    output/interface/interface-identifier/xff/xff/vendor-pn
    and dispatch it through the callback.

    Required kwargs: interface_type, interface_name, vendor_pn.
    Optional kwarg: callback (defaults to ``self._callback``).
    """
    # Build the request root directly; the spare <config> element the
    # generator emitted was dead code and has been removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # List keys identifying the target interface.
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    xff = ET.SubElement(interface_identifier, "xff")
    xff = ET.SubElement(xff, "xff")
    vendor_pn = ET.SubElement(xff, "vendor-pn")
    vendor_pn.text = kwargs.pop('vendor_pn')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_xff_xff_vendor_rev(self, **kwargs):
    """Auto Generated Code.

    Build a ``get_media_detail`` request selecting
    output/interface/interface-identifier/xff/xff/vendor-rev
    and dispatch it through the callback.

    Required kwargs: interface_type, interface_name, vendor_rev.
    Optional kwarg: callback (defaults to ``self._callback``).
    """
    # Build the request root directly; the spare <config> element the
    # generator emitted was dead code and has been removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # List keys identifying the target interface.
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    xff = ET.SubElement(interface_identifier, "xff")
    xff = ET.SubElement(xff, "xff")
    vendor_rev = ET.SubElement(xff, "vendor-rev")
    vendor_rev.text = kwargs.pop('vendor_rev')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_xfpe_xfpe_vendor_name(self, **kwargs):
    """Auto Generated Code.

    Build a ``get_media_detail`` request selecting
    output/interface/interface-identifier/xfpe/xfpe/vendor-name
    and dispatch it through the callback.

    Required kwargs: interface_type, interface_name, vendor_name.
    Optional kwarg: callback (defaults to ``self._callback``).
    """
    # Build the request root directly; the spare <config> element the
    # generator emitted was dead code and has been removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # List keys identifying the target interface.
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    xfpe = ET.SubElement(interface_identifier, "xfpe")
    xfpe = ET.SubElement(xfpe, "xfpe")
    vendor_name = ET.SubElement(xfpe, "vendor-name")
    vendor_name.text = kwargs.pop('vendor_name')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_xfpe_xfpe_vendor_oui(self, **kwargs):
    """Auto Generated Code.

    Build a ``get_media_detail`` request selecting
    output/interface/interface-identifier/xfpe/xfpe/vendor-oui
    and dispatch it through the callback.

    Required kwargs: interface_type, interface_name, vendor_oui.
    Optional kwarg: callback (defaults to ``self._callback``).
    """
    # Build the request root directly; the spare <config> element the
    # generator emitted was dead code and has been removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # List keys identifying the target interface.
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    xfpe = ET.SubElement(interface_identifier, "xfpe")
    xfpe = ET.SubElement(xfpe, "xfpe")
    vendor_oui = ET.SubElement(xfpe, "vendor-oui")
    vendor_oui.text = kwargs.pop('vendor_oui')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_xfpe_xfpe_vendor_pn(self, **kwargs):
    """Auto Generated Code.

    Build a ``get_media_detail`` request selecting
    output/interface/interface-identifier/xfpe/xfpe/vendor-pn
    and dispatch it through the callback.

    Required kwargs: interface_type, interface_name, vendor_pn.
    Optional kwarg: callback (defaults to ``self._callback``).
    """
    # Build the request root directly; the spare <config> element the
    # generator emitted was dead code and has been removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # List keys identifying the target interface.
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    xfpe = ET.SubElement(interface_identifier, "xfpe")
    xfpe = ET.SubElement(xfpe, "xfpe")
    vendor_pn = ET.SubElement(xfpe, "vendor-pn")
    vendor_pn.text = kwargs.pop('vendor_pn')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_xfpe_xfpe_vendor_rev(self, **kwargs):
    """Auto Generated Code.

    Build a ``get_media_detail`` request selecting
    output/interface/interface-identifier/xfpe/xfpe/vendor-rev
    and dispatch it through the callback.

    Required kwargs: interface_type, interface_name, vendor_rev.
    Optional kwarg: callback (defaults to ``self._callback``).
    """
    # Build the request root directly; the spare <config> element the
    # generator emitted was dead code and has been removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # List keys identifying the target interface.
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    xfpe = ET.SubElement(interface_identifier, "xfpe")
    xfpe = ET.SubElement(xfpe, "xfpe")
    vendor_rev = ET.SubElement(xfpe, "vendor-rev")
    vendor_rev.text = kwargs.pop('vendor_rev')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_unknown_unknown_vendor_name(self, **kwargs):
    """Auto Generated Code.

    Build a ``get_media_detail`` request selecting
    output/interface/interface-identifier/unknown/unknown/vendor-name
    and dispatch it through the callback.

    Required kwargs: interface_type, interface_name, vendor_name.
    Optional kwarg: callback (defaults to ``self._callback``).
    """
    # Build the request root directly; the spare <config> element the
    # generator emitted was dead code and has been removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # List keys identifying the target interface.
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    unknown = ET.SubElement(interface_identifier, "unknown")
    unknown = ET.SubElement(unknown, "unknown")
    vendor_name = ET.SubElement(unknown, "vendor-name")
    vendor_name.text = kwargs.pop('vendor_name')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_unknown_unknown_vendor_oui(self, **kwargs):
    """Auto Generated Code.

    Build a ``get_media_detail`` request selecting
    output/interface/interface-identifier/unknown/unknown/vendor-oui
    and dispatch it through the callback.

    Required kwargs: interface_type, interface_name, vendor_oui.
    Optional kwarg: callback (defaults to ``self._callback``).
    """
    # Build the request root directly; the spare <config> element the
    # generator emitted was dead code and has been removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # List keys identifying the target interface.
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    unknown = ET.SubElement(interface_identifier, "unknown")
    unknown = ET.SubElement(unknown, "unknown")
    vendor_oui = ET.SubElement(unknown, "vendor-oui")
    vendor_oui.text = kwargs.pop('vendor_oui')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_unknown_unknown_vendor_pn(self, **kwargs):
    """Auto Generated Code.

    Build a ``get_media_detail`` request selecting
    output/interface/interface-identifier/unknown/unknown/vendor-pn
    and dispatch it through the callback.

    Required kwargs: interface_type, interface_name, vendor_pn.
    Optional kwarg: callback (defaults to ``self._callback``).
    """
    # Build the request root directly; the spare <config> element the
    # generator emitted was dead code and has been removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # List keys identifying the target interface.
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    unknown = ET.SubElement(interface_identifier, "unknown")
    unknown = ET.SubElement(unknown, "unknown")
    vendor_pn = ET.SubElement(unknown, "vendor-pn")
    vendor_pn.text = kwargs.pop('vendor_pn')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_unknown_unknown_vendor_rev(self, **kwargs):
    """Auto Generated Code.

    Build a ``get_media_detail`` request selecting
    output/interface/interface-identifier/unknown/unknown/vendor-rev
    and dispatch it through the callback.

    Required kwargs: interface_type, interface_name, vendor_rev.
    Optional kwarg: callback (defaults to ``self._callback``).
    """
    # Build the request root directly; the spare <config> element the
    # generator emitted was dead code and has been removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # List keys identifying the target interface.
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    unknown = ET.SubElement(interface_identifier, "unknown")
    unknown = ET.SubElement(unknown, "unknown")
    vendor_rev = ET.SubElement(unknown, "vendor-rev")
    vendor_rev.text = kwargs.pop('vendor_rev')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_speed(self, **kwargs):
    """Auto Generated Code.

    Build a ``get_media_detail`` request selecting
    output/interface/interface-identifier/qsfp/qsfp/speed
    and dispatch it through the callback.

    Required kwargs: interface_type, interface_name, speed.
    Optional kwarg: callback (defaults to ``self._callback``).
    """
    # Build the request root directly; the spare <config> element the
    # generator emitted was dead code and has been removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # List keys identifying the target interface.
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfp = ET.SubElement(interface_identifier, "qsfp")
    qsfp = ET.SubElement(qsfp, "qsfp")
    speed = ET.SubElement(qsfp, "speed")
    speed.text = kwargs.pop('speed')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_connector(self, **kwargs):
    """Auto Generated Code.

    Build a ``get_media_detail`` request selecting
    output/interface/interface-identifier/qsfp/qsfp/connector
    and dispatch it through the callback.

    Required kwargs: interface_type, interface_name, connector.
    Optional kwarg: callback (defaults to ``self._callback``).
    """
    # Build the request root directly; the spare <config> element the
    # generator emitted was dead code and has been removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # List keys identifying the target interface.
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfp = ET.SubElement(interface_identifier, "qsfp")
    qsfp = ET.SubElement(qsfp, "qsfp")
    connector = ET.SubElement(qsfp, "connector")
    connector.text = kwargs.pop('connector')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_encoding(self, **kwargs):
    """Auto Generated Code.

    Build a ``get_media_detail`` request selecting
    output/interface/interface-identifier/qsfp/qsfp/encoding
    and dispatch it through the callback.

    Required kwargs: interface_type, interface_name, encoding.
    Optional kwarg: callback (defaults to ``self._callback``).
    """
    # Build the request root directly; the spare <config> element the
    # generator emitted was dead code and has been removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # List keys identifying the target interface.
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfp = ET.SubElement(interface_identifier, "qsfp")
    qsfp = ET.SubElement(qsfp, "qsfp")
    encoding = ET.SubElement(qsfp, "encoding")
    encoding.text = kwargs.pop('encoding')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_vendor_name(self, **kwargs):
    """Auto Generated Code.

    Build a ``get_media_detail`` request selecting
    output/interface/interface-identifier/qsfp/qsfp/vendor-name
    and dispatch it through the callback.

    Required kwargs: interface_type, interface_name, vendor_name.
    Optional kwarg: callback (defaults to ``self._callback``).
    """
    # Build the request root directly; the spare <config> element the
    # generator emitted was dead code and has been removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # List keys identifying the target interface.
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfp = ET.SubElement(interface_identifier, "qsfp")
    qsfp = ET.SubElement(qsfp, "qsfp")
    vendor_name = ET.SubElement(qsfp, "vendor-name")
    vendor_name.text = kwargs.pop('vendor_name')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_vendor_oui(self, **kwargs):
    """Auto Generated Code.

    Build a ``get_media_detail`` request selecting
    output/interface/interface-identifier/qsfp/qsfp/vendor-oui
    and dispatch it through the callback.

    Required kwargs: interface_type, interface_name, vendor_oui.
    Optional kwarg: callback (defaults to ``self._callback``).
    """
    # Build the request root directly; the spare <config> element the
    # generator emitted was dead code and has been removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # List keys identifying the target interface.
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfp = ET.SubElement(interface_identifier, "qsfp")
    qsfp = ET.SubElement(qsfp, "qsfp")
    vendor_oui = ET.SubElement(qsfp, "vendor-oui")
    vendor_oui.text = kwargs.pop('vendor_oui')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_vendor_pn(self, **kwargs):
    """Auto Generated Code.

    Build a ``get_media_detail`` request selecting
    output/interface/interface-identifier/qsfp/qsfp/vendor-pn
    and dispatch it through the callback.

    Required kwargs: interface_type, interface_name, vendor_pn.
    Optional kwarg: callback (defaults to ``self._callback``).
    """
    # Build the request root directly; the spare <config> element the
    # generator emitted was dead code and has been removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # List keys identifying the target interface.
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfp = ET.SubElement(interface_identifier, "qsfp")
    qsfp = ET.SubElement(qsfp, "qsfp")
    vendor_pn = ET.SubElement(qsfp, "vendor-pn")
    vendor_pn.text = kwargs.pop('vendor_pn')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_vendor_rev(self, **kwargs):
    """Auto Generated Code.

    Build a ``get_media_detail`` request selecting
    output/interface/interface-identifier/qsfp/qsfp/vendor-rev
    and dispatch it through the callback.

    Required kwargs: interface_type, interface_name, vendor_rev.
    Optional kwarg: callback (defaults to ``self._callback``).
    """
    # Build the request root directly; the spare <config> element the
    # generator emitted was dead code and has been removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # List keys identifying the target interface.
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfp = ET.SubElement(interface_identifier, "qsfp")
    qsfp = ET.SubElement(qsfp, "qsfp")
    vendor_rev = ET.SubElement(qsfp, "vendor-rev")
    vendor_rev.text = kwargs.pop('vendor_rev')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_distance(self, **kwargs):
    """Auto Generated Code.

    Build a ``get_media_detail`` request selecting
    output/interface/interface-identifier/qsfp/qsfp/distance
    and dispatch it through the callback.

    Required kwargs: interface_type, interface_name, distance.
    Optional kwarg: callback (defaults to ``self._callback``).
    """
    # Build the request root directly; the spare <config> element the
    # generator emitted was dead code and has been removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # List keys identifying the target interface.
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfp = ET.SubElement(interface_identifier, "qsfp")
    qsfp = ET.SubElement(qsfp, "qsfp")
    distance = ET.SubElement(qsfp, "distance")
    distance.text = kwargs.pop('distance')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_media_form_factor(self, **kwargs):
    """Auto Generated Code.

    Build a ``get_media_detail`` request selecting
    output/interface/interface-identifier/qsfp/qsfp/media-form-factor
    and dispatch it through the callback.

    Required kwargs: interface_type, interface_name, media_form_factor.
    Optional kwarg: callback (defaults to ``self._callback``).
    """
    # Build the request root directly; the spare <config> element the
    # generator emitted was dead code and has been removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # List keys identifying the target interface.
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfp = ET.SubElement(interface_identifier, "qsfp")
    qsfp = ET.SubElement(qsfp, "qsfp")
    media_form_factor = ET.SubElement(qsfp, "media-form-factor")
    media_form_factor.text = kwargs.pop('media_form_factor')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_wavelength(self, **kwargs):
    """Auto Generated Code.

    Build a ``get_media_detail`` request selecting
    output/interface/interface-identifier/qsfp/qsfp/wavelength
    and dispatch it through the callback.

    Required kwargs: interface_type, interface_name, wavelength.
    Optional kwarg: callback (defaults to ``self._callback``).
    """
    # Build the request root directly; the spare <config> element the
    # generator emitted was dead code and has been removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # List keys identifying the target interface.
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfp = ET.SubElement(interface_identifier, "qsfp")
    qsfp = ET.SubElement(qsfp, "qsfp")
    wavelength = ET.SubElement(qsfp, "wavelength")
    wavelength.text = kwargs.pop('wavelength')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_serial_no(self, **kwargs):
    """Auto Generated Code.

    Build a ``get_media_detail`` request selecting
    output/interface/interface-identifier/qsfp/qsfp/serial-no
    and dispatch it through the callback.

    Required kwargs: interface_type, interface_name, serial_no.
    Optional kwarg: callback (defaults to ``self._callback``).
    """
    # Build the request root directly; the spare <config> element the
    # generator emitted was dead code and has been removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # List keys identifying the target interface.
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfp = ET.SubElement(interface_identifier, "qsfp")
    qsfp = ET.SubElement(qsfp, "qsfp")
    serial_no = ET.SubElement(qsfp, "serial-no")
    serial_no.text = kwargs.pop('serial_no')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_date_code(self, **kwargs):
    """Auto Generated Code.

    Build a ``get_media_detail`` request selecting
    output/interface/interface-identifier/qsfp/qsfp/date-code
    and dispatch it through the callback.

    Required kwargs: interface_type, interface_name, date_code.
    Optional kwarg: callback (defaults to ``self._callback``).
    """
    # Build the request root directly; the spare <config> element the
    # generator emitted was dead code and has been removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # List keys identifying the target interface.
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfp = ET.SubElement(interface_identifier, "qsfp")
    qsfp = ET.SubElement(qsfp, "qsfp")
    date_code = ET.SubElement(qsfp, "date-code")
    date_code.text = kwargs.pop('date_code')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_temperature(self, **kwargs):
    """Auto Generated Code.

    Build a ``get_media_detail`` request selecting
    output/interface/interface-identifier/qsfp/qsfp/temperature
    and dispatch it through the callback.

    Required kwargs: interface_type, interface_name, temperature.
    Optional kwarg: callback (defaults to ``self._callback``).
    """
    # Build the request root directly; the spare <config> element the
    # generator emitted was dead code and has been removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # List keys identifying the target interface.
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfp = ET.SubElement(interface_identifier, "qsfp")
    qsfp = ET.SubElement(qsfp, "qsfp")
    temperature = ET.SubElement(qsfp, "temperature")
    temperature.text = kwargs.pop('temperature')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_voltage(self, **kwargs):
    """Auto Generated Code.

    Build a ``get_media_detail`` request selecting
    output/interface/interface-identifier/qsfp/qsfp/voltage
    and dispatch it through the callback.

    Required kwargs: interface_type, interface_name, voltage.
    Optional kwarg: callback (defaults to ``self._callback``).
    """
    # Build the request root directly; the spare <config> element the
    # generator emitted was dead code and has been removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # List keys identifying the target interface.
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfp = ET.SubElement(interface_identifier, "qsfp")
    qsfp = ET.SubElement(qsfp, "qsfp")
    voltage = ET.SubElement(qsfp, "voltage")
    voltage.text = kwargs.pop('voltage')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_current(self, **kwargs):
    """Auto Generated Code.

    Build a ``get_media_detail`` request selecting
    output/interface/interface-identifier/qsfp/qsfp/current
    and dispatch it through the callback.

    Required kwargs: interface_type, interface_name, current.
    Optional kwarg: callback (defaults to ``self._callback``).
    """
    # Build the request root directly; the spare <config> element the
    # generator emitted was dead code and has been removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # List keys identifying the target interface.
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfp = ET.SubElement(interface_identifier, "qsfp")
    qsfp = ET.SubElement(qsfp, "qsfp")
    current = ET.SubElement(qsfp, "current")
    current.text = kwargs.pop('current')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_tx_power(self, **kwargs):
    """Auto Generated Code.

    Build a ``get_media_detail`` request selecting
    output/interface/interface-identifier/qsfp/qsfp/tx-power
    and dispatch it through the callback.

    Required kwargs: interface_type, interface_name, tx_power.
    Optional kwarg: callback (defaults to ``self._callback``).
    """
    # Build the request root directly; the spare <config> element the
    # generator emitted was dead code and has been removed.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # List keys identifying the target interface.
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    qsfp = ET.SubElement(interface_identifier, "qsfp")
    qsfp = ET.SubElement(qsfp, "qsfp")
    tx_power = ET.SubElement(qsfp, "tx-power")
    tx_power.text = kwargs.pop('tx_power')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_rx_power(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfp = ET.SubElement(interface_identifier, "qsfp")
qsfp = ET.SubElement(qsfp, "qsfp")
rx_power = ET.SubElement(qsfp, "rx-power")
rx_power.text = kwargs.pop('rx_power')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_speed(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
speed = ET.SubElement(qsfpp, "speed")
speed.text = kwargs.pop('speed')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_connector(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
connector = ET.SubElement(qsfpp, "connector")
connector.text = kwargs.pop('connector')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_encoding(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
encoding = ET.SubElement(qsfpp, "encoding")
encoding.text = kwargs.pop('encoding')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_vendor_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
vendor_name = ET.SubElement(qsfpp, "vendor-name")
vendor_name.text = kwargs.pop('vendor_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_vendor_oui(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
vendor_oui = ET.SubElement(qsfpp, "vendor-oui")
vendor_oui.text = kwargs.pop('vendor_oui')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_vendor_pn(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
vendor_pn = ET.SubElement(qsfpp, "vendor-pn")
vendor_pn.text = kwargs.pop('vendor_pn')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_vendor_rev(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
vendor_rev = ET.SubElement(qsfpp, "vendor-rev")
vendor_rev.text = kwargs.pop('vendor_rev')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_distance(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
distance = ET.SubElement(qsfpp, "distance")
distance.text = kwargs.pop('distance')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_media_form_factor(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
media_form_factor = ET.SubElement(qsfpp, "media-form-factor")
media_form_factor.text = kwargs.pop('media_form_factor')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_wavelength(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
wavelength = ET.SubElement(qsfpp, "wavelength")
wavelength.text = kwargs.pop('wavelength')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_serial_no(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
serial_no = ET.SubElement(qsfpp, "serial-no")
serial_no.text = kwargs.pop('serial_no')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_date_code(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
date_code = ET.SubElement(qsfpp, "date-code")
date_code.text = kwargs.pop('date_code')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_temperature(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
temperature = ET.SubElement(qsfpp, "temperature")
temperature.text = kwargs.pop('temperature')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_voltage(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
voltage = ET.SubElement(qsfpp, "voltage")
voltage.text = kwargs.pop('voltage')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_current(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
current = ET.SubElement(qsfpp, "current")
current.text = kwargs.pop('current')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_tx_power(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
tx_power = ET.SubElement(qsfpp, "tx-power")
tx_power.text = kwargs.pop('tx_power')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_rx_power(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
rx_power = ET.SubElement(qsfpp, "rx-power")
rx_power.text = kwargs.pop('rx_power')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_speed(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp = ET.SubElement(interface_identifier, "cfp")
cfp = ET.SubElement(cfp, "cfp")
speed = ET.SubElement(cfp, "speed")
speed.text = kwargs.pop('speed')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_connector(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp = ET.SubElement(interface_identifier, "cfp")
cfp = ET.SubElement(cfp, "cfp")
connector = ET.SubElement(cfp, "connector")
connector.text = kwargs.pop('connector')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_encoding(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp = ET.SubElement(interface_identifier, "cfp")
cfp = ET.SubElement(cfp, "cfp")
encoding = ET.SubElement(cfp, "encoding")
encoding.text = kwargs.pop('encoding')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_vendor_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp = ET.SubElement(interface_identifier, "cfp")
cfp = ET.SubElement(cfp, "cfp")
vendor_name = ET.SubElement(cfp, "vendor-name")
vendor_name.text = kwargs.pop('vendor_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_vendor_oui(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp = ET.SubElement(interface_identifier, "cfp")
cfp = ET.SubElement(cfp, "cfp")
vendor_oui = ET.SubElement(cfp, "vendor-oui")
vendor_oui.text = kwargs.pop('vendor_oui')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_vendor_pn(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp = ET.SubElement(interface_identifier, "cfp")
cfp = ET.SubElement(cfp, "cfp")
vendor_pn = ET.SubElement(cfp, "vendor-pn")
vendor_pn.text = kwargs.pop('vendor_pn')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_vendor_rev(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp = ET.SubElement(interface_identifier, "cfp")
cfp = ET.SubElement(cfp, "cfp")
vendor_rev = ET.SubElement(cfp, "vendor-rev")
vendor_rev.text = kwargs.pop('vendor_rev')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_distance(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp = ET.SubElement(interface_identifier, "cfp")
cfp = ET.SubElement(cfp, "cfp")
distance = ET.SubElement(cfp, "distance")
distance.text = kwargs.pop('distance')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_media_form_factor(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp = ET.SubElement(interface_identifier, "cfp")
cfp = ET.SubElement(cfp, "cfp")
media_form_factor = ET.SubElement(cfp, "media-form-factor")
media_form_factor.text = kwargs.pop('media_form_factor')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_wavelength(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp = ET.SubElement(interface_identifier, "cfp")
cfp = ET.SubElement(cfp, "cfp")
wavelength = ET.SubElement(cfp, "wavelength")
wavelength.text = kwargs.pop('wavelength')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_serial_no(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp = ET.SubElement(interface_identifier, "cfp")
cfp = ET.SubElement(cfp, "cfp")
serial_no = ET.SubElement(cfp, "serial-no")
serial_no.text = kwargs.pop('serial_no')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_date_code(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp = ET.SubElement(interface_identifier, "cfp")
cfp = ET.SubElement(cfp, "cfp")
date_code = ET.SubElement(cfp, "date-code")
date_code.text = kwargs.pop('date_code')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_temperature(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp = ET.SubElement(interface_identifier, "cfp")
cfp = ET.SubElement(cfp, "cfp")
temperature = ET.SubElement(cfp, "temperature")
temperature.text = kwargs.pop('temperature')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_voltage(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp = ET.SubElement(interface_identifier, "cfp")
cfp = ET.SubElement(cfp, "cfp")
voltage = ET.SubElement(cfp, "voltage")
voltage.text = kwargs.pop('voltage')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_current(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp = ET.SubElement(interface_identifier, "cfp")
cfp = ET.SubElement(cfp, "cfp")
current = ET.SubElement(cfp, "current")
current.text = kwargs.pop('current')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_tx_power(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp = ET.SubElement(interface_identifier, "cfp")
cfp = ET.SubElement(cfp, "cfp")
tx_power = ET.SubElement(cfp, "tx-power")
tx_power.text = kwargs.pop('tx_power')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_rx_power(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp = ET.SubElement(interface_identifier, "cfp")
cfp = ET.SubElement(cfp, "cfp")
rx_power = ET.SubElement(cfp, "rx-power")
rx_power.text = kwargs.pop('rx_power')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_speed(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp2 = ET.SubElement(interface_identifier, "cfp2")
cfp2 = ET.SubElement(cfp2, "cfp2")
speed = ET.SubElement(cfp2, "speed")
speed.text = kwargs.pop('speed')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_connector(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp2 = ET.SubElement(interface_identifier, "cfp2")
cfp2 = ET.SubElement(cfp2, "cfp2")
connector = ET.SubElement(cfp2, "connector")
connector.text = kwargs.pop('connector')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_encoding(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp2 = ET.SubElement(interface_identifier, "cfp2")
cfp2 = ET.SubElement(cfp2, "cfp2")
encoding = ET.SubElement(cfp2, "encoding")
encoding.text = kwargs.pop('encoding')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_vendor_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp2 = ET.SubElement(interface_identifier, "cfp2")
cfp2 = ET.SubElement(cfp2, "cfp2")
vendor_name = ET.SubElement(cfp2, "vendor-name")
vendor_name.text = kwargs.pop('vendor_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_vendor_oui(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp2 = ET.SubElement(interface_identifier, "cfp2")
cfp2 = ET.SubElement(cfp2, "cfp2")
vendor_oui = ET.SubElement(cfp2, "vendor-oui")
vendor_oui.text = kwargs.pop('vendor_oui')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_vendor_pn(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp2 = ET.SubElement(interface_identifier, "cfp2")
cfp2 = ET.SubElement(cfp2, "cfp2")
vendor_pn = ET.SubElement(cfp2, "vendor-pn")
vendor_pn.text = kwargs.pop('vendor_pn')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_vendor_rev(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp2 = ET.SubElement(interface_identifier, "cfp2")
cfp2 = ET.SubElement(cfp2, "cfp2")
vendor_rev = ET.SubElement(cfp2, "vendor-rev")
vendor_rev.text = kwargs.pop('vendor_rev')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_distance(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp2 = ET.SubElement(interface_identifier, "cfp2")
cfp2 = ET.SubElement(cfp2, "cfp2")
distance = ET.SubElement(cfp2, "distance")
distance.text = kwargs.pop('distance')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_media_form_factor(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp2 = ET.SubElement(interface_identifier, "cfp2")
cfp2 = ET.SubElement(cfp2, "cfp2")
media_form_factor = ET.SubElement(cfp2, "media-form-factor")
media_form_factor.text = kwargs.pop('media_form_factor')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_wavelength(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp2 = ET.SubElement(interface_identifier, "cfp2")
cfp2 = ET.SubElement(cfp2, "cfp2")
wavelength = ET.SubElement(cfp2, "wavelength")
wavelength.text = kwargs.pop('wavelength')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_serial_no(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp2 = ET.SubElement(interface_identifier, "cfp2")
cfp2 = ET.SubElement(cfp2, "cfp2")
serial_no = ET.SubElement(cfp2, "serial-no")
serial_no.text = kwargs.pop('serial_no')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_date_code(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp2 = ET.SubElement(interface_identifier, "cfp2")
cfp2 = ET.SubElement(cfp2, "cfp2")
date_code = ET.SubElement(cfp2, "date-code")
date_code.text = kwargs.pop('date_code')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_temperature(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp2 = ET.SubElement(interface_identifier, "cfp2")
cfp2 = ET.SubElement(cfp2, "cfp2")
temperature = ET.SubElement(cfp2, "temperature")
temperature.text = kwargs.pop('temperature')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_voltage(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp2 = ET.SubElement(interface_identifier, "cfp2")
cfp2 = ET.SubElement(cfp2, "cfp2")
voltage = ET.SubElement(cfp2, "voltage")
voltage.text = kwargs.pop('voltage')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_current(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp2 = ET.SubElement(interface_identifier, "cfp2")
cfp2 = ET.SubElement(cfp2, "cfp2")
current = ET.SubElement(cfp2, "current")
current.text = kwargs.pop('current')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_tx_power(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp2 = ET.SubElement(interface_identifier, "cfp2")
cfp2 = ET.SubElement(cfp2, "cfp2")
tx_power = ET.SubElement(cfp2, "tx-power")
tx_power.text = kwargs.pop('tx_power')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_rx_power(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp2 = ET.SubElement(interface_identifier, "cfp2")
cfp2 = ET.SubElement(cfp2, "cfp2")
rx_power = ET.SubElement(cfp2, "rx-power")
rx_power.text = kwargs.pop('rx_power')
callback = kwargs.pop('callback', self._callback)
return callback(config)
| 48.640919
| 116
| 0.675903
|
import xml.etree.ElementTree as ET
class brocade_interface_ext(object):
    def __init__(self, **kwargs):
        # 'callback' is mandatory: it receives each assembled ElementTree and
        # is expected to send/serialize it (e.g. over NETCONF). KeyError if absent.
        self._callback = kwargs.pop('callback')
def get_vlan_brief_input_request_type_get_request_vlan_id(self, **kwargs):
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
input = ET.SubElement(get_vlan_brief, "input")
request_type = ET.SubElement(input, "request-type")
get_request = ET.SubElement(request_type, "get-request")
vlan_id = ET.SubElement(get_request, "vlan-id")
vlan_id.text = kwargs.pop('vlan_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vlan_brief_input_request_type_get_next_request_last_rcvd_vlan_id(self, **kwargs):
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
input = ET.SubElement(get_vlan_brief, "input")
request_type = ET.SubElement(input, "request-type")
get_next_request = ET.SubElement(request_type, "get-next-request")
last_rcvd_vlan_id = ET.SubElement(get_next_request, "last-rcvd-vlan-id")
last_rcvd_vlan_id.text = kwargs.pop('last_rcvd_vlan_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vlan_brief_output_configured_vlans_count(self, **kwargs):
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
output = ET.SubElement(get_vlan_brief, "output")
configured_vlans_count = ET.SubElement(output, "configured-vlans-count")
configured_vlans_count.text = kwargs.pop('configured_vlans_count')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vlan_brief_output_provisioned_vlans_count(self, **kwargs):
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
output = ET.SubElement(get_vlan_brief, "output")
provisioned_vlans_count = ET.SubElement(output, "provisioned-vlans-count")
provisioned_vlans_count.text = kwargs.pop('provisioned_vlans_count')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vlan_brief_output_unprovisioned_vlans_count(self, **kwargs):
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
output = ET.SubElement(get_vlan_brief, "output")
unprovisioned_vlans_count = ET.SubElement(output, "unprovisioned-vlans-count")
unprovisioned_vlans_count.text = kwargs.pop('unprovisioned_vlans_count')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vlan_brief_output_vlan_vlan_id(self, **kwargs):
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
output = ET.SubElement(get_vlan_brief, "output")
vlan = ET.SubElement(output, "vlan")
vlan_id = ET.SubElement(vlan, "vlan-id")
vlan_id.text = kwargs.pop('vlan_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vlan_brief_output_vlan_vlan_type(self, **kwargs):
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
output = ET.SubElement(get_vlan_brief, "output")
vlan = ET.SubElement(output, "vlan")
vlan_id_key = ET.SubElement(vlan, "vlan-id")
vlan_id_key.text = kwargs.pop('vlan_id')
vlan_type = ET.SubElement(vlan, "vlan-type")
vlan_type.text = kwargs.pop('vlan_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vlan_brief_output_vlan_vlan_name(self, **kwargs):
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
output = ET.SubElement(get_vlan_brief, "output")
vlan = ET.SubElement(output, "vlan")
vlan_id_key = ET.SubElement(vlan, "vlan-id")
vlan_id_key.text = kwargs.pop('vlan_id')
vlan_name = ET.SubElement(vlan, "vlan-name")
vlan_name.text = kwargs.pop('vlan_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vlan_brief_output_vlan_vlan_state(self, **kwargs):
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
output = ET.SubElement(get_vlan_brief, "output")
vlan = ET.SubElement(output, "vlan")
vlan_id_key = ET.SubElement(vlan, "vlan-id")
vlan_id_key.text = kwargs.pop('vlan_id')
vlan_state = ET.SubElement(vlan, "vlan-state")
vlan_state.text = kwargs.pop('vlan_state')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vlan_brief_output_vlan_interface_interface_type(self, **kwargs):
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
output = ET.SubElement(get_vlan_brief, "output")
vlan = ET.SubElement(output, "vlan")
vlan_id_key = ET.SubElement(vlan, "vlan-id")
vlan_id_key.text = kwargs.pop('vlan_id')
interface = ET.SubElement(vlan, "interface")
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_type = ET.SubElement(interface, "interface-type")
interface_type.text = kwargs.pop('interface_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vlan_brief_output_vlan_interface_interface_name(self, **kwargs):
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
output = ET.SubElement(get_vlan_brief, "output")
vlan = ET.SubElement(output, "vlan")
vlan_id_key = ET.SubElement(vlan, "vlan-id")
vlan_id_key.text = kwargs.pop('vlan_id')
interface = ET.SubElement(vlan, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name = ET.SubElement(interface, "interface-name")
interface_name.text = kwargs.pop('interface_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vlan_brief_output_vlan_interface_tag(self, **kwargs):
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
output = ET.SubElement(get_vlan_brief, "output")
vlan = ET.SubElement(output, "vlan")
vlan_id_key = ET.SubElement(vlan, "vlan-id")
vlan_id_key.text = kwargs.pop('vlan_id')
interface = ET.SubElement(vlan, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
tag = ET.SubElement(interface, "tag")
tag.text = kwargs.pop('tag')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vlan_brief_output_vlan_interface_classification_classification_type(self, **kwargs):
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
output = ET.SubElement(get_vlan_brief, "output")
vlan = ET.SubElement(output, "vlan")
vlan_id_key = ET.SubElement(vlan, "vlan-id")
vlan_id_key.text = kwargs.pop('vlan_id')
interface = ET.SubElement(vlan, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
classification = ET.SubElement(interface, "classification")
classification_value_key = ET.SubElement(classification, "classification-value")
classification_value_key.text = kwargs.pop('classification_value')
classification_type = ET.SubElement(classification, "classification-type")
classification_type.text = kwargs.pop('classification_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vlan_brief_output_vlan_interface_classification_classification_value(self, **kwargs):
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
output = ET.SubElement(get_vlan_brief, "output")
vlan = ET.SubElement(output, "vlan")
vlan_id_key = ET.SubElement(vlan, "vlan-id")
vlan_id_key.text = kwargs.pop('vlan_id')
interface = ET.SubElement(vlan, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
classification = ET.SubElement(interface, "classification")
classification_type_key = ET.SubElement(classification, "classification-type")
classification_type_key.text = kwargs.pop('classification_type')
classification_value = ET.SubElement(classification, "classification-value")
classification_value.text = kwargs.pop('classification_value')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vlan_brief_output_last_vlan_id(self, **kwargs):
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
output = ET.SubElement(get_vlan_brief, "output")
last_vlan_id = ET.SubElement(output, "last-vlan-id")
last_vlan_id.text = kwargs.pop('last_vlan_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vlan_brief_output_has_more(self, **kwargs):
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
output = ET.SubElement(get_vlan_brief, "output")
has_more = ET.SubElement(output, "has-more")
has_more.text = kwargs.pop('has_more')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_switchport_output_switchport_interface_type(self, **kwargs):
config = ET.Element("config")
get_interface_switchport = ET.Element("get_interface_switchport")
config = get_interface_switchport
output = ET.SubElement(get_interface_switchport, "output")
switchport = ET.SubElement(output, "switchport")
interface_name_key = ET.SubElement(switchport, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_type = ET.SubElement(switchport, "interface-type")
interface_type.text = kwargs.pop('interface_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_switchport_output_switchport_interface_name(self, **kwargs):
config = ET.Element("config")
get_interface_switchport = ET.Element("get_interface_switchport")
config = get_interface_switchport
output = ET.SubElement(get_interface_switchport, "output")
switchport = ET.SubElement(output, "switchport")
interface_type_key = ET.SubElement(switchport, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name = ET.SubElement(switchport, "interface-name")
interface_name.text = kwargs.pop('interface_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_switchport_output_switchport_mode(self, **kwargs):
config = ET.Element("config")
get_interface_switchport = ET.Element("get_interface_switchport")
config = get_interface_switchport
output = ET.SubElement(get_interface_switchport, "output")
switchport = ET.SubElement(output, "switchport")
interface_type_key = ET.SubElement(switchport, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(switchport, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
mode = ET.SubElement(switchport, "mode")
mode.text = kwargs.pop('mode')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_switchport_output_switchport_fcoe_port_enabled(self, **kwargs):
config = ET.Element("config")
get_interface_switchport = ET.Element("get_interface_switchport")
config = get_interface_switchport
output = ET.SubElement(get_interface_switchport, "output")
switchport = ET.SubElement(output, "switchport")
interface_type_key = ET.SubElement(switchport, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(switchport, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
fcoe_port_enabled = ET.SubElement(switchport, "fcoe-port-enabled")
fcoe_port_enabled.text = kwargs.pop('fcoe_port_enabled')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_switchport_output_switchport_ingress_filter_enabled(self, **kwargs):
config = ET.Element("config")
get_interface_switchport = ET.Element("get_interface_switchport")
config = get_interface_switchport
output = ET.SubElement(get_interface_switchport, "output")
switchport = ET.SubElement(output, "switchport")
interface_type_key = ET.SubElement(switchport, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(switchport, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ingress_filter_enabled = ET.SubElement(switchport, "ingress-filter-enabled")
ingress_filter_enabled.text = kwargs.pop('ingress_filter_enabled')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_switchport_output_switchport_acceptable_frame_type(self, **kwargs):
config = ET.Element("config")
get_interface_switchport = ET.Element("get_interface_switchport")
config = get_interface_switchport
output = ET.SubElement(get_interface_switchport, "output")
switchport = ET.SubElement(output, "switchport")
interface_type_key = ET.SubElement(switchport, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(switchport, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
acceptable_frame_type = ET.SubElement(switchport, "acceptable-frame-type")
acceptable_frame_type.text = kwargs.pop('acceptable_frame_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_switchport_output_switchport_default_vlan(self, **kwargs):
config = ET.Element("config")
get_interface_switchport = ET.Element("get_interface_switchport")
config = get_interface_switchport
output = ET.SubElement(get_interface_switchport, "output")
switchport = ET.SubElement(output, "switchport")
interface_type_key = ET.SubElement(switchport, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(switchport, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
default_vlan = ET.SubElement(switchport, "default-vlan")
default_vlan.text = kwargs.pop('default_vlan')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_input_request_type_get_request_interface_type(self, **kwargs):
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
input = ET.SubElement(get_ip_interface, "input")
request_type = ET.SubElement(input, "request-type")
get_request = ET.SubElement(request_type, "get-request")
interface_type = ET.SubElement(get_request, "interface-type")
interface_type.text = kwargs.pop('interface_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_input_request_type_get_request_interface_name(self, **kwargs):
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
input = ET.SubElement(get_ip_interface, "input")
request_type = ET.SubElement(input, "request-type")
get_request = ET.SubElement(request_type, "get-request")
interface_name = ET.SubElement(get_request, "interface-name")
interface_name.text = kwargs.pop('interface_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_input_request_type_get_request_rbridge_id(self, **kwargs):
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
input = ET.SubElement(get_ip_interface, "input")
request_type = ET.SubElement(input, "request-type")
get_request = ET.SubElement(request_type, "get-request")
rbridge_id = ET.SubElement(get_request, "rbridge-id")
rbridge_id.text = kwargs.pop('rbridge_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_output_interface_interface_type(self, **kwargs):
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
output = ET.SubElement(get_ip_interface, "output")
interface = ET.SubElement(output, "interface")
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_type = ET.SubElement(interface, "interface-type")
interface_type.text = kwargs.pop('interface_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_output_interface_interface_name(self, **kwargs):
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
output = ET.SubElement(get_ip_interface, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name = ET.SubElement(interface, "interface-name")
interface_name.text = kwargs.pop('interface_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_output_interface_if_name(self, **kwargs):
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
output = ET.SubElement(get_ip_interface, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
if_name = ET.SubElement(interface, "if-name")
if_name.text = kwargs.pop('if_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_output_interface_ip_address_ipv4(self, **kwargs):
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
output = ET.SubElement(get_ip_interface, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ip_address = ET.SubElement(interface, "ip-address")
ipv4 = ET.SubElement(ip_address, "ipv4")
ipv4.text = kwargs.pop('ipv4')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_output_interface_ip_address_ipv4_type(self, **kwargs):
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
output = ET.SubElement(get_ip_interface, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ip_address = ET.SubElement(interface, "ip-address")
ipv4_key = ET.SubElement(ip_address, "ipv4")
ipv4_key.text = kwargs.pop('ipv4')
ipv4_type = ET.SubElement(ip_address, "ipv4-type")
ipv4_type.text = kwargs.pop('ipv4_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_output_interface_ip_address_broadcast(self, **kwargs):
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
output = ET.SubElement(get_ip_interface, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ip_address = ET.SubElement(interface, "ip-address")
ipv4_key = ET.SubElement(ip_address, "ipv4")
ipv4_key.text = kwargs.pop('ipv4')
broadcast = ET.SubElement(ip_address, "broadcast")
broadcast.text = kwargs.pop('broadcast')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_output_interface_ip_address_ip_mtu(self, **kwargs):
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
output = ET.SubElement(get_ip_interface, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ip_address = ET.SubElement(interface, "ip-address")
ipv4_key = ET.SubElement(ip_address, "ipv4")
ipv4_key.text = kwargs.pop('ipv4')
ip_mtu = ET.SubElement(ip_address, "ip-mtu")
ip_mtu.text = kwargs.pop('ip_mtu')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_output_interface_if_state(self, **kwargs):
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
output = ET.SubElement(get_ip_interface, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
if_state = ET.SubElement(interface, "if-state")
if_state.text = kwargs.pop('if_state')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_output_interface_line_protocol_state(self, **kwargs):
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
output = ET.SubElement(get_ip_interface, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
line_protocol_state = ET.SubElement(interface, "line-protocol-state")
line_protocol_state.text = kwargs.pop('line_protocol_state')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_output_interface_proxy_arp(self, **kwargs):
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
output = ET.SubElement(get_ip_interface, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
proxy_arp = ET.SubElement(interface, "proxy-arp")
proxy_arp.text = kwargs.pop('proxy_arp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_output_interface_vrf(self, **kwargs):
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
output = ET.SubElement(get_ip_interface, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
vrf = ET.SubElement(interface, "vrf")
vrf.text = kwargs.pop('vrf')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_output_has_more(self, **kwargs):
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
output = ET.SubElement(get_ip_interface, "output")
has_more = ET.SubElement(output, "has-more")
has_more.text = kwargs.pop('has_more')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_input_request_type_get_request_interface_type(self, **kwargs):
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
input = ET.SubElement(get_interface_detail, "input")
request_type = ET.SubElement(input, "request-type")
get_request = ET.SubElement(request_type, "get-request")
interface_type = ET.SubElement(get_request, "interface-type")
interface_type.text = kwargs.pop('interface_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_input_request_type_get_request_interface_name(self, **kwargs):
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
input = ET.SubElement(get_interface_detail, "input")
request_type = ET.SubElement(input, "request-type")
get_request = ET.SubElement(request_type, "get-request")
interface_name = ET.SubElement(get_request, "interface-name")
interface_name.text = kwargs.pop('interface_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_input_request_type_get_next_request_last_rcvd_interface_interface_type(self, **kwargs):
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
input = ET.SubElement(get_interface_detail, "input")
request_type = ET.SubElement(input, "request-type")
get_next_request = ET.SubElement(request_type, "get-next-request")
last_rcvd_interface = ET.SubElement(get_next_request, "last-rcvd-interface")
interface_type = ET.SubElement(last_rcvd_interface, "interface-type")
interface_type.text = kwargs.pop('interface_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_input_request_type_get_next_request_last_rcvd_interface_interface_name(self, **kwargs):
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
input = ET.SubElement(get_interface_detail, "input")
request_type = ET.SubElement(input, "request-type")
get_next_request = ET.SubElement(request_type, "get-next-request")
last_rcvd_interface = ET.SubElement(get_next_request, "last-rcvd-interface")
interface_name = ET.SubElement(last_rcvd_interface, "interface-name")
interface_name.text = kwargs.pop('interface_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_interface_type(self, **kwargs):
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_type = ET.SubElement(interface, "interface-type")
interface_type.text = kwargs.pop('interface_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_interface_name(self, **kwargs):
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name = ET.SubElement(interface, "interface-name")
interface_name.text = kwargs.pop('interface_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_ifindex(self, **kwargs):
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ifindex = ET.SubElement(interface, "ifindex")
ifindex.text = kwargs.pop('ifindex')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_mtu(self, **kwargs):
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
mtu = ET.SubElement(interface, "mtu")
mtu.text = kwargs.pop('mtu')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_ip_mtu(self, **kwargs):
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ip_mtu = ET.SubElement(interface, "ip-mtu")
ip_mtu.text = kwargs.pop('ip_mtu')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_if_name(self, **kwargs):
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
if_name = ET.SubElement(interface, "if-name")
if_name.text = kwargs.pop('if_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_if_state(self, **kwargs):
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
if_state = ET.SubElement(interface, "if-state")
if_state.text = kwargs.pop('if_state')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_line_protocol_state(self, **kwargs):
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
line_protocol_state = ET.SubElement(interface, "line-protocol-state")
line_protocol_state.text = kwargs.pop('line_protocol_state')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_line_protocol_state_info(self, **kwargs):
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
line_protocol_state_info = ET.SubElement(interface, "line-protocol-state-info")
line_protocol_state_info.text = kwargs.pop('line_protocol_state_info')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_line_protocol_exception_info(self, **kwargs):
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
line_protocol_exception_info = ET.SubElement(interface, "line-protocol-exception-info")
line_protocol_exception_info.text = kwargs.pop('line_protocol_exception_info')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_hardware_type(self, **kwargs):
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
hardware_type = ET.SubElement(interface, "hardware-type")
hardware_type.text = kwargs.pop('hardware_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_logical_hardware_address(self, **kwargs):
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
logical_hardware_address = ET.SubElement(interface, "logical-hardware-address")
logical_hardware_address.text = kwargs.pop('logical_hardware_address')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_current_hardware_address(self, **kwargs):
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
current_hardware_address = ET.SubElement(interface, "current-hardware-address")
current_hardware_address.text = kwargs.pop('current_hardware_address')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_media_type(self, **kwargs):
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
media_type = ET.SubElement(interface, "media-type")
media_type.text = kwargs.pop('media_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_wavelength(self, **kwargs):
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
wavelength = ET.SubElement(interface, "wavelength")
wavelength.text = kwargs.pop('wavelength')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_if_description(self, **kwargs):
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
if_description = ET.SubElement(interface, "if-description")
if_description.text = kwargs.pop('if_description')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_actual_line_speed(self, **kwargs):
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
actual_line_speed = ET.SubElement(interface, "actual-line-speed")
actual_line_speed.text = kwargs.pop('actual_line_speed')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_configured_line_speed(self, **kwargs):
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
configured_line_speed = ET.SubElement(interface, "configured-line-speed")
configured_line_speed.text = kwargs.pop('configured_line_speed')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_line_duplex_state(self, **kwargs):
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
line_duplex_state = ET.SubElement(interface, "line-duplex-state")
line_duplex_state.text = kwargs.pop('line_duplex_state')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_flow_control(self, **kwargs):
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
flow_control = ET.SubElement(interface, "flow-control")
flow_control.text = kwargs.pop('flow_control')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_queuing_strategy(self, **kwargs):
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
queuing_strategy = ET.SubElement(interface, "queuing-strategy")
queuing_strategy.text = kwargs.pop('queuing_strategy')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_port_role(self, **kwargs):
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
port_role = ET.SubElement(interface, "port-role")
port_role.text = kwargs.pop('port_role')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_port_mode(self, **kwargs):
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
port_mode = ET.SubElement(interface, "port-mode")
port_mode.text = kwargs.pop('port_mode')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_ifHCInOctets(self, **kwargs):
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ifHCInOctets = ET.SubElement(interface, "ifHCInOctets")
ifHCInOctets.text = kwargs.pop('ifHCInOctets')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_ifHCInUcastPkts(self, **kwargs):
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ifHCInUcastPkts = ET.SubElement(interface, "ifHCInUcastPkts")
ifHCInUcastPkts.text = kwargs.pop('ifHCInUcastPkts')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_ifHCInMulticastPkts(self, **kwargs):
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ifHCInMulticastPkts = ET.SubElement(interface, "ifHCInMulticastPkts")
ifHCInMulticastPkts.text = kwargs.pop('ifHCInMulticastPkts')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_ifHCInBroadcastPkts(self, **kwargs):
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ifHCInBroadcastPkts = ET.SubElement(interface, "ifHCInBroadcastPkts")
ifHCInBroadcastPkts.text = kwargs.pop('ifHCInBroadcastPkts')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_ifHCInErrors(self, **kwargs):
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ifHCInErrors = ET.SubElement(interface, "ifHCInErrors")
ifHCInErrors.text = kwargs.pop('ifHCInErrors')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_ifHCOutOctets(self, **kwargs):
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ifHCOutOctets = ET.SubElement(interface, "ifHCOutOctets")
ifHCOutOctets.text = kwargs.pop('ifHCOutOctets')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_ifHCOutUcastPkts(self, **kwargs):
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ifHCOutUcastPkts = ET.SubElement(interface, "ifHCOutUcastPkts")
ifHCOutUcastPkts.text = kwargs.pop('ifHCOutUcastPkts')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_ifHCOutMulticastPkts(self, **kwargs):
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ifHCOutMulticastPkts = ET.SubElement(interface, "ifHCOutMulticastPkts")
ifHCOutMulticastPkts.text = kwargs.pop('ifHCOutMulticastPkts')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_ifHCOutBroadcastPkts(self, **kwargs):
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ifHCOutBroadcastPkts = ET.SubElement(interface, "ifHCOutBroadcastPkts")
ifHCOutBroadcastPkts.text = kwargs.pop('ifHCOutBroadcastPkts')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_ifHCOutErrors(self, **kwargs):
    """Build get_interface_detail/output/interface/ifHCOutErrors and pass it to the callback."""
    rpc = ET.Element("get_interface_detail")
    iface = ET.SubElement(ET.SubElement(rpc, "output"), "interface")
    ET.SubElement(iface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(iface, "interface-name").text = kwargs.pop('interface_name')
    ET.SubElement(iface, "ifHCOutErrors").text = kwargs.pop('ifHCOutErrors')
    send = kwargs.pop('callback', self._callback)
    return send(rpc)
def get_interface_detail_output_has_more(self, **kwargs):
    """Build get_interface_detail/output/has-more and pass it to the callback."""
    rpc = ET.Element("get_interface_detail")
    ET.SubElement(ET.SubElement(rpc, "output"), "has-more").text = kwargs.pop('has_more')
    send = kwargs.pop('callback', self._callback)
    return send(rpc)
def get_media_detail_input_interface_type(self, **kwargs):
    """Build get_media_detail/input/interface-type and pass it to the callback."""
    rpc = ET.Element("get_media_detail")
    ET.SubElement(ET.SubElement(rpc, "input"), "interface-type").text = kwargs.pop('interface_type')
    send = kwargs.pop('callback', self._callback)
    return send(rpc)
def get_media_detail_input_interface_name(self, **kwargs):
    """Build get_media_detail/input/interface-name and pass it to the callback."""
    rpc = ET.Element("get_media_detail")
    ET.SubElement(ET.SubElement(rpc, "input"), "interface-name").text = kwargs.pop('interface_name')
    send = kwargs.pop('callback', self._callback)
    return send(rpc)
def get_media_detail_input_rbridge_id(self, **kwargs):
    """Build get_media_detail/input/rbridge-id and pass it to the callback."""
    rpc = ET.Element("get_media_detail")
    ET.SubElement(ET.SubElement(rpc, "input"), "rbridge-id").text = kwargs.pop('rbridge_id')
    send = kwargs.pop('callback', self._callback)
    return send(rpc)
def get_media_detail_output_interface_interface_type(self, **kwargs):
    """Build get_media_detail/output/interface/interface-type (keyed by interface-name) and pass it to the callback."""
    rpc = ET.Element("get_media_detail")
    iface = ET.SubElement(ET.SubElement(rpc, "output"), "interface")
    ET.SubElement(iface, "interface-name").text = kwargs.pop('interface_name')
    ET.SubElement(iface, "interface-type").text = kwargs.pop('interface_type')
    send = kwargs.pop('callback', self._callback)
    return send(rpc)
def get_media_detail_output_interface_interface_name(self, **kwargs):
    """Build get_media_detail/output/interface/interface-name (keyed by interface-type) and pass it to the callback."""
    rpc = ET.Element("get_media_detail")
    iface = ET.SubElement(ET.SubElement(rpc, "output"), "interface")
    ET.SubElement(iface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(iface, "interface-name").text = kwargs.pop('interface_name')
    send = kwargs.pop('callback', self._callback)
    return send(rpc)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_speed(self, **kwargs):
    """Build get_media_detail/output/interface/interface-identifier/sfp/sfp/speed and pass it to the callback."""
    rpc = ET.Element("get_media_detail")
    iface = ET.SubElement(ET.SubElement(rpc, "output"), "interface")
    ET.SubElement(iface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(iface, "interface-name").text = kwargs.pop('interface_name')
    ident = ET.SubElement(iface, "interface-identifier")
    sfp = ET.SubElement(ET.SubElement(ident, "sfp"), "sfp")
    ET.SubElement(sfp, "speed").text = kwargs.pop('speed')
    send = kwargs.pop('callback', self._callback)
    return send(rpc)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_connector(self, **kwargs):
    """Build get_media_detail/output/interface/interface-identifier/sfp/sfp/connector and pass it to the callback."""
    rpc = ET.Element("get_media_detail")
    iface = ET.SubElement(ET.SubElement(rpc, "output"), "interface")
    ET.SubElement(iface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(iface, "interface-name").text = kwargs.pop('interface_name')
    ident = ET.SubElement(iface, "interface-identifier")
    sfp = ET.SubElement(ET.SubElement(ident, "sfp"), "sfp")
    ET.SubElement(sfp, "connector").text = kwargs.pop('connector')
    send = kwargs.pop('callback', self._callback)
    return send(rpc)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_encoding(self, **kwargs):
    """Build get_media_detail/output/interface/interface-identifier/sfp/sfp/encoding and pass it to the callback."""
    rpc = ET.Element("get_media_detail")
    iface = ET.SubElement(ET.SubElement(rpc, "output"), "interface")
    ET.SubElement(iface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(iface, "interface-name").text = kwargs.pop('interface_name')
    ident = ET.SubElement(iface, "interface-identifier")
    sfp = ET.SubElement(ET.SubElement(ident, "sfp"), "sfp")
    ET.SubElement(sfp, "encoding").text = kwargs.pop('encoding')
    send = kwargs.pop('callback', self._callback)
    return send(rpc)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_vendor_name(self, **kwargs):
    """Build get_media_detail/output/interface/interface-identifier/sfp/sfp/vendor-name and pass it to the callback."""
    rpc = ET.Element("get_media_detail")
    iface = ET.SubElement(ET.SubElement(rpc, "output"), "interface")
    ET.SubElement(iface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(iface, "interface-name").text = kwargs.pop('interface_name')
    ident = ET.SubElement(iface, "interface-identifier")
    sfp = ET.SubElement(ET.SubElement(ident, "sfp"), "sfp")
    ET.SubElement(sfp, "vendor-name").text = kwargs.pop('vendor_name')
    send = kwargs.pop('callback', self._callback)
    return send(rpc)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_vendor_oui(self, **kwargs):
    """Build get_media_detail/output/interface/interface-identifier/sfp/sfp/vendor-oui and pass it to the callback."""
    rpc = ET.Element("get_media_detail")
    iface = ET.SubElement(ET.SubElement(rpc, "output"), "interface")
    ET.SubElement(iface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(iface, "interface-name").text = kwargs.pop('interface_name')
    ident = ET.SubElement(iface, "interface-identifier")
    sfp = ET.SubElement(ET.SubElement(ident, "sfp"), "sfp")
    ET.SubElement(sfp, "vendor-oui").text = kwargs.pop('vendor_oui')
    send = kwargs.pop('callback', self._callback)
    return send(rpc)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_vendor_pn(self, **kwargs):
    """Build get_media_detail/output/interface/interface-identifier/sfp/sfp/vendor-pn and pass it to the callback."""
    rpc = ET.Element("get_media_detail")
    iface = ET.SubElement(ET.SubElement(rpc, "output"), "interface")
    ET.SubElement(iface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(iface, "interface-name").text = kwargs.pop('interface_name')
    ident = ET.SubElement(iface, "interface-identifier")
    sfp = ET.SubElement(ET.SubElement(ident, "sfp"), "sfp")
    ET.SubElement(sfp, "vendor-pn").text = kwargs.pop('vendor_pn')
    send = kwargs.pop('callback', self._callback)
    return send(rpc)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_vendor_rev(self, **kwargs):
    """Build get_media_detail/output/interface/interface-identifier/sfp/sfp/vendor-rev and pass it to the callback."""
    rpc = ET.Element("get_media_detail")
    iface = ET.SubElement(ET.SubElement(rpc, "output"), "interface")
    ET.SubElement(iface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(iface, "interface-name").text = kwargs.pop('interface_name')
    ident = ET.SubElement(iface, "interface-identifier")
    sfp = ET.SubElement(ET.SubElement(ident, "sfp"), "sfp")
    ET.SubElement(sfp, "vendor-rev").text = kwargs.pop('vendor_rev')
    send = kwargs.pop('callback', self._callback)
    return send(rpc)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_distance(self, **kwargs):
    """Build get_media_detail/output/interface/interface-identifier/sfp/sfp/distance and pass it to the callback."""
    rpc = ET.Element("get_media_detail")
    iface = ET.SubElement(ET.SubElement(rpc, "output"), "interface")
    ET.SubElement(iface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(iface, "interface-name").text = kwargs.pop('interface_name')
    ident = ET.SubElement(iface, "interface-identifier")
    sfp = ET.SubElement(ET.SubElement(ident, "sfp"), "sfp")
    ET.SubElement(sfp, "distance").text = kwargs.pop('distance')
    send = kwargs.pop('callback', self._callback)
    return send(rpc)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_media_form_factor(self, **kwargs):
    """Build get_media_detail/output/interface/interface-identifier/sfp/sfp/media-form-factor and pass it to the callback."""
    rpc = ET.Element("get_media_detail")
    iface = ET.SubElement(ET.SubElement(rpc, "output"), "interface")
    ET.SubElement(iface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(iface, "interface-name").text = kwargs.pop('interface_name')
    ident = ET.SubElement(iface, "interface-identifier")
    sfp = ET.SubElement(ET.SubElement(ident, "sfp"), "sfp")
    ET.SubElement(sfp, "media-form-factor").text = kwargs.pop('media_form_factor')
    send = kwargs.pop('callback', self._callback)
    return send(rpc)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_wavelength(self, **kwargs):
    """Build get_media_detail/output/interface/interface-identifier/sfp/sfp/wavelength and pass it to the callback."""
    rpc = ET.Element("get_media_detail")
    iface = ET.SubElement(ET.SubElement(rpc, "output"), "interface")
    ET.SubElement(iface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(iface, "interface-name").text = kwargs.pop('interface_name')
    ident = ET.SubElement(iface, "interface-identifier")
    sfp = ET.SubElement(ET.SubElement(ident, "sfp"), "sfp")
    ET.SubElement(sfp, "wavelength").text = kwargs.pop('wavelength')
    send = kwargs.pop('callback', self._callback)
    return send(rpc)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_serial_no(self, **kwargs):
    """Build get_media_detail/output/interface/interface-identifier/sfp/sfp/serial-no and pass it to the callback."""
    rpc = ET.Element("get_media_detail")
    iface = ET.SubElement(ET.SubElement(rpc, "output"), "interface")
    ET.SubElement(iface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(iface, "interface-name").text = kwargs.pop('interface_name')
    ident = ET.SubElement(iface, "interface-identifier")
    sfp = ET.SubElement(ET.SubElement(ident, "sfp"), "sfp")
    ET.SubElement(sfp, "serial-no").text = kwargs.pop('serial_no')
    send = kwargs.pop('callback', self._callback)
    return send(rpc)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_date_code(self, **kwargs):
    """Build get_media_detail/output/interface/interface-identifier/sfp/sfp/date-code and pass it to the callback."""
    rpc = ET.Element("get_media_detail")
    iface = ET.SubElement(ET.SubElement(rpc, "output"), "interface")
    ET.SubElement(iface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(iface, "interface-name").text = kwargs.pop('interface_name')
    ident = ET.SubElement(iface, "interface-identifier")
    sfp = ET.SubElement(ET.SubElement(ident, "sfp"), "sfp")
    ET.SubElement(sfp, "date-code").text = kwargs.pop('date_code')
    send = kwargs.pop('callback', self._callback)
    return send(rpc)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_temperature(self, **kwargs):
    """Build get_media_detail/output/interface/interface-identifier/sfp/sfp/temperature and pass it to the callback."""
    rpc = ET.Element("get_media_detail")
    iface = ET.SubElement(ET.SubElement(rpc, "output"), "interface")
    ET.SubElement(iface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(iface, "interface-name").text = kwargs.pop('interface_name')
    ident = ET.SubElement(iface, "interface-identifier")
    sfp = ET.SubElement(ET.SubElement(ident, "sfp"), "sfp")
    ET.SubElement(sfp, "temperature").text = kwargs.pop('temperature')
    send = kwargs.pop('callback', self._callback)
    return send(rpc)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_voltage(self, **kwargs):
    """Build get_media_detail/output/interface/interface-identifier/sfp/sfp/voltage and pass it to the callback."""
    rpc = ET.Element("get_media_detail")
    iface = ET.SubElement(ET.SubElement(rpc, "output"), "interface")
    ET.SubElement(iface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(iface, "interface-name").text = kwargs.pop('interface_name')
    ident = ET.SubElement(iface, "interface-identifier")
    sfp = ET.SubElement(ET.SubElement(ident, "sfp"), "sfp")
    ET.SubElement(sfp, "voltage").text = kwargs.pop('voltage')
    send = kwargs.pop('callback', self._callback)
    return send(rpc)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_current(self, **kwargs):
    """Build get_media_detail/output/interface/interface-identifier/sfp/sfp/current and pass it to the callback."""
    rpc = ET.Element("get_media_detail")
    iface = ET.SubElement(ET.SubElement(rpc, "output"), "interface")
    ET.SubElement(iface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(iface, "interface-name").text = kwargs.pop('interface_name')
    ident = ET.SubElement(iface, "interface-identifier")
    sfp = ET.SubElement(ET.SubElement(ident, "sfp"), "sfp")
    ET.SubElement(sfp, "current").text = kwargs.pop('current')
    send = kwargs.pop('callback', self._callback)
    return send(rpc)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_tx_power(self, **kwargs):
    """Build get_media_detail/output/interface/interface-identifier/sfp/sfp/tx-power and pass it to the callback."""
    rpc = ET.Element("get_media_detail")
    iface = ET.SubElement(ET.SubElement(rpc, "output"), "interface")
    ET.SubElement(iface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(iface, "interface-name").text = kwargs.pop('interface_name')
    ident = ET.SubElement(iface, "interface-identifier")
    sfp = ET.SubElement(ET.SubElement(ident, "sfp"), "sfp")
    ET.SubElement(sfp, "tx-power").text = kwargs.pop('tx_power')
    send = kwargs.pop('callback', self._callback)
    return send(rpc)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_rx_power(self, **kwargs):
    """Build get_media_detail/output/interface/interface-identifier/sfp/sfp/rx-power and pass it to the callback."""
    rpc = ET.Element("get_media_detail")
    iface = ET.SubElement(ET.SubElement(rpc, "output"), "interface")
    ET.SubElement(iface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(iface, "interface-name").text = kwargs.pop('interface_name')
    ident = ET.SubElement(iface, "interface-identifier")
    sfp = ET.SubElement(ET.SubElement(ident, "sfp"), "sfp")
    ET.SubElement(sfp, "rx-power").text = kwargs.pop('rx_power')
    send = kwargs.pop('callback', self._callback)
    return send(rpc)
def get_media_detail_output_interface_interface_identifier_on_board_on_board_speed(self, **kwargs):
    """Build get_media_detail/output/interface/interface-identifier/on-board/on-board/speed and pass it to the callback."""
    rpc = ET.Element("get_media_detail")
    iface = ET.SubElement(ET.SubElement(rpc, "output"), "interface")
    ET.SubElement(iface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(iface, "interface-name").text = kwargs.pop('interface_name')
    ident = ET.SubElement(iface, "interface-identifier")
    board = ET.SubElement(ET.SubElement(ident, "on-board"), "on-board")
    ET.SubElement(board, "speed").text = kwargs.pop('speed')
    send = kwargs.pop('callback', self._callback)
    return send(rpc)
def get_media_detail_output_interface_interface_identifier_on_board_on_board_connector(self, **kwargs):
    """Build get_media_detail/output/interface/interface-identifier/on-board/on-board/connector and pass it to the callback."""
    rpc = ET.Element("get_media_detail")
    iface = ET.SubElement(ET.SubElement(rpc, "output"), "interface")
    ET.SubElement(iface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(iface, "interface-name").text = kwargs.pop('interface_name')
    ident = ET.SubElement(iface, "interface-identifier")
    board = ET.SubElement(ET.SubElement(ident, "on-board"), "on-board")
    ET.SubElement(board, "connector").text = kwargs.pop('connector')
    send = kwargs.pop('callback', self._callback)
    return send(rpc)
def get_media_detail_output_interface_interface_identifier_on_board_on_board_encoding(self, **kwargs):
    """Build get_media_detail/output/interface/interface-identifier/on-board/on-board/encoding and pass it to the callback."""
    rpc = ET.Element("get_media_detail")
    iface = ET.SubElement(ET.SubElement(rpc, "output"), "interface")
    ET.SubElement(iface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(iface, "interface-name").text = kwargs.pop('interface_name')
    ident = ET.SubElement(iface, "interface-identifier")
    board = ET.SubElement(ET.SubElement(ident, "on-board"), "on-board")
    ET.SubElement(board, "encoding").text = kwargs.pop('encoding')
    send = kwargs.pop('callback', self._callback)
    return send(rpc)
def get_media_detail_output_interface_interface_identifier_on_board_on_board_vendor_name(self, **kwargs):
    """Build get_media_detail/output/interface/interface-identifier/on-board/on-board/vendor-name and pass it to the callback."""
    rpc = ET.Element("get_media_detail")
    iface = ET.SubElement(ET.SubElement(rpc, "output"), "interface")
    ET.SubElement(iface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(iface, "interface-name").text = kwargs.pop('interface_name')
    ident = ET.SubElement(iface, "interface-identifier")
    board = ET.SubElement(ET.SubElement(ident, "on-board"), "on-board")
    ET.SubElement(board, "vendor-name").text = kwargs.pop('vendor_name')
    send = kwargs.pop('callback', self._callback)
    return send(rpc)
def get_media_detail_output_interface_interface_identifier_on_board_on_board_vendor_oui(self, **kwargs):
    """Build get_media_detail/output/interface/interface-identifier/on-board/on-board/vendor-oui and pass it to the callback."""
    rpc = ET.Element("get_media_detail")
    iface = ET.SubElement(ET.SubElement(rpc, "output"), "interface")
    ET.SubElement(iface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(iface, "interface-name").text = kwargs.pop('interface_name')
    ident = ET.SubElement(iface, "interface-identifier")
    board = ET.SubElement(ET.SubElement(ident, "on-board"), "on-board")
    ET.SubElement(board, "vendor-oui").text = kwargs.pop('vendor_oui')
    send = kwargs.pop('callback', self._callback)
    return send(rpc)
def get_media_detail_output_interface_interface_identifier_on_board_on_board_vendor_pn(self, **kwargs):
    """Build get_media_detail/output/interface/interface-identifier/on-board/on-board/vendor-pn and pass it to the callback."""
    rpc = ET.Element("get_media_detail")
    iface = ET.SubElement(ET.SubElement(rpc, "output"), "interface")
    ET.SubElement(iface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(iface, "interface-name").text = kwargs.pop('interface_name')
    ident = ET.SubElement(iface, "interface-identifier")
    board = ET.SubElement(ET.SubElement(ident, "on-board"), "on-board")
    ET.SubElement(board, "vendor-pn").text = kwargs.pop('vendor_pn')
    send = kwargs.pop('callback', self._callback)
    return send(rpc)
def get_media_detail_output_interface_interface_identifier_on_board_on_board_vendor_rev(self, **kwargs):
    """Build get_media_detail/output/interface/interface-identifier/on-board/on-board/vendor-rev and pass it to the callback."""
    rpc = ET.Element("get_media_detail")
    iface = ET.SubElement(ET.SubElement(rpc, "output"), "interface")
    ET.SubElement(iface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(iface, "interface-name").text = kwargs.pop('interface_name')
    ident = ET.SubElement(iface, "interface-identifier")
    board = ET.SubElement(ET.SubElement(ident, "on-board"), "on-board")
    ET.SubElement(board, "vendor-rev").text = kwargs.pop('vendor_rev')
    send = kwargs.pop('callback', self._callback)
    return send(rpc)
def get_media_detail_output_interface_interface_identifier_gbic_gbc_vendor_name(self, **kwargs):
    """Build get_media_detail/output/interface/interface-identifier/gbic/gbc/vendor-name and pass it to the callback."""
    rpc = ET.Element("get_media_detail")
    iface = ET.SubElement(ET.SubElement(rpc, "output"), "interface")
    ET.SubElement(iface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(iface, "interface-name").text = kwargs.pop('interface_name')
    ident = ET.SubElement(iface, "interface-identifier")
    gbc = ET.SubElement(ET.SubElement(ident, "gbic"), "gbc")
    ET.SubElement(gbc, "vendor-name").text = kwargs.pop('vendor_name')
    send = kwargs.pop('callback', self._callback)
    return send(rpc)
def get_media_detail_output_interface_interface_identifier_gbic_gbc_vendor_oui(self, **kwargs):
    """Build get_media_detail/output/interface/interface-identifier/gbic/gbc/vendor-oui and pass it to the callback."""
    rpc = ET.Element("get_media_detail")
    iface = ET.SubElement(ET.SubElement(rpc, "output"), "interface")
    ET.SubElement(iface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(iface, "interface-name").text = kwargs.pop('interface_name')
    ident = ET.SubElement(iface, "interface-identifier")
    gbc = ET.SubElement(ET.SubElement(ident, "gbic"), "gbc")
    ET.SubElement(gbc, "vendor-oui").text = kwargs.pop('vendor_oui')
    send = kwargs.pop('callback', self._callback)
    return send(rpc)
def get_media_detail_output_interface_interface_identifier_gbic_gbc_vendor_pn(self, **kwargs):
    """Build get_media_detail/output/interface/interface-identifier/gbic/gbc/vendor-pn and pass it to the callback."""
    rpc = ET.Element("get_media_detail")
    iface = ET.SubElement(ET.SubElement(rpc, "output"), "interface")
    ET.SubElement(iface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(iface, "interface-name").text = kwargs.pop('interface_name')
    ident = ET.SubElement(iface, "interface-identifier")
    gbc = ET.SubElement(ET.SubElement(ident, "gbic"), "gbc")
    ET.SubElement(gbc, "vendor-pn").text = kwargs.pop('vendor_pn')
    send = kwargs.pop('callback', self._callback)
    return send(rpc)
def get_media_detail_output_interface_interface_identifier_gbic_gbc_vendor_rev(self, **kwargs):
    """Build get_media_detail/output/interface/interface-identifier/gbic/gbc/vendor-rev and pass it to the callback."""
    rpc = ET.Element("get_media_detail")
    iface = ET.SubElement(ET.SubElement(rpc, "output"), "interface")
    ET.SubElement(iface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(iface, "interface-name").text = kwargs.pop('interface_name')
    ident = ET.SubElement(iface, "interface-identifier")
    gbc = ET.SubElement(ET.SubElement(ident, "gbic"), "gbc")
    ET.SubElement(gbc, "vendor-rev").text = kwargs.pop('vendor_rev')
    send = kwargs.pop('callback', self._callback)
    return send(rpc)
def get_media_detail_output_interface_interface_identifier_xfp_xfp_vendor_name(self, **kwargs):
    """Build get_media_detail/output/interface/interface-identifier/xfp/xfp/vendor-name and pass it to the callback."""
    rpc = ET.Element("get_media_detail")
    iface = ET.SubElement(ET.SubElement(rpc, "output"), "interface")
    ET.SubElement(iface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(iface, "interface-name").text = kwargs.pop('interface_name')
    ident = ET.SubElement(iface, "interface-identifier")
    xfp = ET.SubElement(ET.SubElement(ident, "xfp"), "xfp")
    ET.SubElement(xfp, "vendor-name").text = kwargs.pop('vendor_name')
    send = kwargs.pop('callback', self._callback)
    return send(rpc)
def get_media_detail_output_interface_interface_identifier_xfp_xfp_vendor_oui(self, **kwargs):
    """Build get_media_detail/output/interface/interface-identifier/xfp/xfp/vendor-oui and pass it to the callback."""
    rpc = ET.Element("get_media_detail")
    iface = ET.SubElement(ET.SubElement(rpc, "output"), "interface")
    ET.SubElement(iface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(iface, "interface-name").text = kwargs.pop('interface_name')
    ident = ET.SubElement(iface, "interface-identifier")
    xfp = ET.SubElement(ET.SubElement(ident, "xfp"), "xfp")
    ET.SubElement(xfp, "vendor-oui").text = kwargs.pop('vendor_oui')
    send = kwargs.pop('callback', self._callback)
    return send(rpc)
def get_media_detail_output_interface_interface_identifier_xfp_xfp_vendor_pn(self, **kwargs):
    """Build get_media_detail/output/interface/interface-identifier/xfp/xfp/vendor-pn and pass it to the callback."""
    rpc = ET.Element("get_media_detail")
    iface = ET.SubElement(ET.SubElement(rpc, "output"), "interface")
    ET.SubElement(iface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(iface, "interface-name").text = kwargs.pop('interface_name')
    ident = ET.SubElement(iface, "interface-identifier")
    xfp = ET.SubElement(ET.SubElement(ident, "xfp"), "xfp")
    ET.SubElement(xfp, "vendor-pn").text = kwargs.pop('vendor_pn')
    send = kwargs.pop('callback', self._callback)
    return send(rpc)
def get_media_detail_output_interface_interface_identifier_xfp_xfp_vendor_rev(self, **kwargs):
    """Build get_media_detail/output/interface/interface-identifier/xfp/xfp/vendor-rev and pass it to the callback."""
    rpc = ET.Element("get_media_detail")
    iface = ET.SubElement(ET.SubElement(rpc, "output"), "interface")
    ET.SubElement(iface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(iface, "interface-name").text = kwargs.pop('interface_name')
    ident = ET.SubElement(iface, "interface-identifier")
    xfp = ET.SubElement(ET.SubElement(ident, "xfp"), "xfp")
    ET.SubElement(xfp, "vendor-rev").text = kwargs.pop('vendor_rev')
    send = kwargs.pop('callback', self._callback)
    return send(rpc)
def get_media_detail_output_interface_interface_identifier_xff_xff_vendor_name(self, **kwargs):
    """Build get_media_detail/output/interface/interface-identifier/xff/xff/vendor-name and pass it to the callback."""
    rpc = ET.Element("get_media_detail")
    iface = ET.SubElement(ET.SubElement(rpc, "output"), "interface")
    ET.SubElement(iface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(iface, "interface-name").text = kwargs.pop('interface_name')
    ident = ET.SubElement(iface, "interface-identifier")
    xff = ET.SubElement(ET.SubElement(ident, "xff"), "xff")
    ET.SubElement(xff, "vendor-name").text = kwargs.pop('vendor_name')
    send = kwargs.pop('callback', self._callback)
    return send(rpc)
def get_media_detail_output_interface_interface_identifier_xff_xff_vendor_oui(self, **kwargs):
    """Build get_media_detail/output/interface/interface-identifier/xff/xff/vendor-oui and pass it to the callback."""
    rpc = ET.Element("get_media_detail")
    iface = ET.SubElement(ET.SubElement(rpc, "output"), "interface")
    ET.SubElement(iface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(iface, "interface-name").text = kwargs.pop('interface_name')
    ident = ET.SubElement(iface, "interface-identifier")
    xff = ET.SubElement(ET.SubElement(ident, "xff"), "xff")
    ET.SubElement(xff, "vendor-oui").text = kwargs.pop('vendor_oui')
    send = kwargs.pop('callback', self._callback)
    return send(rpc)
def get_media_detail_output_interface_interface_identifier_xff_xff_vendor_pn(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
xff = ET.SubElement(interface_identifier, "xff")
xff = ET.SubElement(xff, "xff")
vendor_pn = ET.SubElement(xff, "vendor-pn")
vendor_pn.text = kwargs.pop('vendor_pn')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_xff_xff_vendor_rev(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
xff = ET.SubElement(interface_identifier, "xff")
xff = ET.SubElement(xff, "xff")
vendor_rev = ET.SubElement(xff, "vendor-rev")
vendor_rev.text = kwargs.pop('vendor_rev')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_xfpe_xfpe_vendor_name(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
xfpe = ET.SubElement(interface_identifier, "xfpe")
xfpe = ET.SubElement(xfpe, "xfpe")
vendor_name = ET.SubElement(xfpe, "vendor-name")
vendor_name.text = kwargs.pop('vendor_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_xfpe_xfpe_vendor_oui(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
xfpe = ET.SubElement(interface_identifier, "xfpe")
xfpe = ET.SubElement(xfpe, "xfpe")
vendor_oui = ET.SubElement(xfpe, "vendor-oui")
vendor_oui.text = kwargs.pop('vendor_oui')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_xfpe_xfpe_vendor_pn(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
xfpe = ET.SubElement(interface_identifier, "xfpe")
xfpe = ET.SubElement(xfpe, "xfpe")
vendor_pn = ET.SubElement(xfpe, "vendor-pn")
vendor_pn.text = kwargs.pop('vendor_pn')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_xfpe_xfpe_vendor_rev(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
xfpe = ET.SubElement(interface_identifier, "xfpe")
xfpe = ET.SubElement(xfpe, "xfpe")
vendor_rev = ET.SubElement(xfpe, "vendor-rev")
vendor_rev.text = kwargs.pop('vendor_rev')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_unknown_unknown_vendor_name(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
unknown = ET.SubElement(interface_identifier, "unknown")
unknown = ET.SubElement(unknown, "unknown")
vendor_name = ET.SubElement(unknown, "vendor-name")
vendor_name.text = kwargs.pop('vendor_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_unknown_unknown_vendor_oui(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
unknown = ET.SubElement(interface_identifier, "unknown")
unknown = ET.SubElement(unknown, "unknown")
vendor_oui = ET.SubElement(unknown, "vendor-oui")
vendor_oui.text = kwargs.pop('vendor_oui')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_unknown_unknown_vendor_pn(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
unknown = ET.SubElement(interface_identifier, "unknown")
unknown = ET.SubElement(unknown, "unknown")
vendor_pn = ET.SubElement(unknown, "vendor-pn")
vendor_pn.text = kwargs.pop('vendor_pn')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_unknown_unknown_vendor_rev(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
unknown = ET.SubElement(interface_identifier, "unknown")
unknown = ET.SubElement(unknown, "unknown")
vendor_rev = ET.SubElement(unknown, "vendor-rev")
vendor_rev.text = kwargs.pop('vendor_rev')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_speed(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfp = ET.SubElement(interface_identifier, "qsfp")
qsfp = ET.SubElement(qsfp, "qsfp")
speed = ET.SubElement(qsfp, "speed")
speed.text = kwargs.pop('speed')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_connector(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfp = ET.SubElement(interface_identifier, "qsfp")
qsfp = ET.SubElement(qsfp, "qsfp")
connector = ET.SubElement(qsfp, "connector")
connector.text = kwargs.pop('connector')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_encoding(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfp = ET.SubElement(interface_identifier, "qsfp")
qsfp = ET.SubElement(qsfp, "qsfp")
encoding = ET.SubElement(qsfp, "encoding")
encoding.text = kwargs.pop('encoding')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_vendor_name(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfp = ET.SubElement(interface_identifier, "qsfp")
qsfp = ET.SubElement(qsfp, "qsfp")
vendor_name = ET.SubElement(qsfp, "vendor-name")
vendor_name.text = kwargs.pop('vendor_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_vendor_oui(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfp = ET.SubElement(interface_identifier, "qsfp")
qsfp = ET.SubElement(qsfp, "qsfp")
vendor_oui = ET.SubElement(qsfp, "vendor-oui")
vendor_oui.text = kwargs.pop('vendor_oui')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_vendor_pn(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfp = ET.SubElement(interface_identifier, "qsfp")
qsfp = ET.SubElement(qsfp, "qsfp")
vendor_pn = ET.SubElement(qsfp, "vendor-pn")
vendor_pn.text = kwargs.pop('vendor_pn')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_vendor_rev(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfp = ET.SubElement(interface_identifier, "qsfp")
qsfp = ET.SubElement(qsfp, "qsfp")
vendor_rev = ET.SubElement(qsfp, "vendor-rev")
vendor_rev.text = kwargs.pop('vendor_rev')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_distance(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfp = ET.SubElement(interface_identifier, "qsfp")
qsfp = ET.SubElement(qsfp, "qsfp")
distance = ET.SubElement(qsfp, "distance")
distance.text = kwargs.pop('distance')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_media_form_factor(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfp = ET.SubElement(interface_identifier, "qsfp")
qsfp = ET.SubElement(qsfp, "qsfp")
media_form_factor = ET.SubElement(qsfp, "media-form-factor")
media_form_factor.text = kwargs.pop('media_form_factor')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_wavelength(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfp = ET.SubElement(interface_identifier, "qsfp")
qsfp = ET.SubElement(qsfp, "qsfp")
wavelength = ET.SubElement(qsfp, "wavelength")
wavelength.text = kwargs.pop('wavelength')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_serial_no(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfp = ET.SubElement(interface_identifier, "qsfp")
qsfp = ET.SubElement(qsfp, "qsfp")
serial_no = ET.SubElement(qsfp, "serial-no")
serial_no.text = kwargs.pop('serial_no')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_date_code(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfp = ET.SubElement(interface_identifier, "qsfp")
qsfp = ET.SubElement(qsfp, "qsfp")
date_code = ET.SubElement(qsfp, "date-code")
date_code.text = kwargs.pop('date_code')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_temperature(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfp = ET.SubElement(interface_identifier, "qsfp")
qsfp = ET.SubElement(qsfp, "qsfp")
temperature = ET.SubElement(qsfp, "temperature")
temperature.text = kwargs.pop('temperature')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_voltage(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfp = ET.SubElement(interface_identifier, "qsfp")
qsfp = ET.SubElement(qsfp, "qsfp")
voltage = ET.SubElement(qsfp, "voltage")
voltage.text = kwargs.pop('voltage')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_current(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfp = ET.SubElement(interface_identifier, "qsfp")
qsfp = ET.SubElement(qsfp, "qsfp")
current = ET.SubElement(qsfp, "current")
current.text = kwargs.pop('current')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_tx_power(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfp = ET.SubElement(interface_identifier, "qsfp")
qsfp = ET.SubElement(qsfp, "qsfp")
tx_power = ET.SubElement(qsfp, "tx-power")
tx_power.text = kwargs.pop('tx_power')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_rx_power(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfp = ET.SubElement(interface_identifier, "qsfp")
qsfp = ET.SubElement(qsfp, "qsfp")
rx_power = ET.SubElement(qsfp, "rx-power")
rx_power.text = kwargs.pop('rx_power')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_speed(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
speed = ET.SubElement(qsfpp, "speed")
speed.text = kwargs.pop('speed')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_connector(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
connector = ET.SubElement(qsfpp, "connector")
connector.text = kwargs.pop('connector')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_encoding(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
encoding = ET.SubElement(qsfpp, "encoding")
encoding.text = kwargs.pop('encoding')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_vendor_name(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
vendor_name = ET.SubElement(qsfpp, "vendor-name")
vendor_name.text = kwargs.pop('vendor_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_vendor_oui(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
vendor_oui = ET.SubElement(qsfpp, "vendor-oui")
vendor_oui.text = kwargs.pop('vendor_oui')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_vendor_pn(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
vendor_pn = ET.SubElement(qsfpp, "vendor-pn")
vendor_pn.text = kwargs.pop('vendor_pn')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_vendor_rev(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
vendor_rev = ET.SubElement(qsfpp, "vendor-rev")
vendor_rev.text = kwargs.pop('vendor_rev')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_distance(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
distance = ET.SubElement(qsfpp, "distance")
distance.text = kwargs.pop('distance')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_media_form_factor(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
media_form_factor = ET.SubElement(qsfpp, "media-form-factor")
media_form_factor.text = kwargs.pop('media_form_factor')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_wavelength(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
wavelength = ET.SubElement(qsfpp, "wavelength")
wavelength.text = kwargs.pop('wavelength')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_serial_no(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
serial_no = ET.SubElement(qsfpp, "serial-no")
serial_no.text = kwargs.pop('serial_no')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_date_code(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
date_code = ET.SubElement(qsfpp, "date-code")
date_code.text = kwargs.pop('date_code')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_temperature(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
temperature = ET.SubElement(qsfpp, "temperature")
temperature.text = kwargs.pop('temperature')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_voltage(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
voltage = ET.SubElement(qsfpp, "voltage")
voltage.text = kwargs.pop('voltage')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_current(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
current = ET.SubElement(qsfpp, "current")
current.text = kwargs.pop('current')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_tx_power(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
tx_power = ET.SubElement(qsfpp, "tx-power")
tx_power.text = kwargs.pop('tx_power')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_rx_power(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
rx_power = ET.SubElement(qsfpp, "rx-power")
rx_power.text = kwargs.pop('rx_power')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_speed(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp = ET.SubElement(interface_identifier, "cfp")
cfp = ET.SubElement(cfp, "cfp")
speed = ET.SubElement(cfp, "speed")
speed.text = kwargs.pop('speed')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_connector(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp = ET.SubElement(interface_identifier, "cfp")
cfp = ET.SubElement(cfp, "cfp")
connector = ET.SubElement(cfp, "connector")
connector.text = kwargs.pop('connector')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_encoding(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp = ET.SubElement(interface_identifier, "cfp")
cfp = ET.SubElement(cfp, "cfp")
encoding = ET.SubElement(cfp, "encoding")
encoding.text = kwargs.pop('encoding')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_vendor_name(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp = ET.SubElement(interface_identifier, "cfp")
cfp = ET.SubElement(cfp, "cfp")
vendor_name = ET.SubElement(cfp, "vendor-name")
vendor_name.text = kwargs.pop('vendor_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_vendor_oui(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp = ET.SubElement(interface_identifier, "cfp")
cfp = ET.SubElement(cfp, "cfp")
vendor_oui = ET.SubElement(cfp, "vendor-oui")
vendor_oui.text = kwargs.pop('vendor_oui')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_vendor_pn(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp = ET.SubElement(interface_identifier, "cfp")
cfp = ET.SubElement(cfp, "cfp")
vendor_pn = ET.SubElement(cfp, "vendor-pn")
vendor_pn.text = kwargs.pop('vendor_pn')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_vendor_rev(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp = ET.SubElement(interface_identifier, "cfp")
cfp = ET.SubElement(cfp, "cfp")
vendor_rev = ET.SubElement(cfp, "vendor-rev")
vendor_rev.text = kwargs.pop('vendor_rev')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_distance(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp = ET.SubElement(interface_identifier, "cfp")
cfp = ET.SubElement(cfp, "cfp")
distance = ET.SubElement(cfp, "distance")
distance.text = kwargs.pop('distance')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_media_form_factor(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp = ET.SubElement(interface_identifier, "cfp")
cfp = ET.SubElement(cfp, "cfp")
media_form_factor = ET.SubElement(cfp, "media-form-factor")
media_form_factor.text = kwargs.pop('media_form_factor')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_wavelength(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp = ET.SubElement(interface_identifier, "cfp")
cfp = ET.SubElement(cfp, "cfp")
wavelength = ET.SubElement(cfp, "wavelength")
wavelength.text = kwargs.pop('wavelength')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_serial_no(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp = ET.SubElement(interface_identifier, "cfp")
cfp = ET.SubElement(cfp, "cfp")
serial_no = ET.SubElement(cfp, "serial-no")
serial_no.text = kwargs.pop('serial_no')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_date_code(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp = ET.SubElement(interface_identifier, "cfp")
cfp = ET.SubElement(cfp, "cfp")
date_code = ET.SubElement(cfp, "date-code")
date_code.text = kwargs.pop('date_code')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_temperature(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp = ET.SubElement(interface_identifier, "cfp")
cfp = ET.SubElement(cfp, "cfp")
temperature = ET.SubElement(cfp, "temperature")
temperature.text = kwargs.pop('temperature')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_voltage(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp = ET.SubElement(interface_identifier, "cfp")
cfp = ET.SubElement(cfp, "cfp")
voltage = ET.SubElement(cfp, "voltage")
voltage.text = kwargs.pop('voltage')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_current(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp = ET.SubElement(interface_identifier, "cfp")
cfp = ET.SubElement(cfp, "cfp")
current = ET.SubElement(cfp, "current")
current.text = kwargs.pop('current')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_tx_power(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp = ET.SubElement(interface_identifier, "cfp")
cfp = ET.SubElement(cfp, "cfp")
tx_power = ET.SubElement(cfp, "tx-power")
tx_power.text = kwargs.pop('tx_power')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_rx_power(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp = ET.SubElement(interface_identifier, "cfp")
cfp = ET.SubElement(cfp, "cfp")
rx_power = ET.SubElement(cfp, "rx-power")
rx_power.text = kwargs.pop('rx_power')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_speed(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp2 = ET.SubElement(interface_identifier, "cfp2")
cfp2 = ET.SubElement(cfp2, "cfp2")
speed = ET.SubElement(cfp2, "speed")
speed.text = kwargs.pop('speed')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_connector(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp2 = ET.SubElement(interface_identifier, "cfp2")
cfp2 = ET.SubElement(cfp2, "cfp2")
connector = ET.SubElement(cfp2, "connector")
connector.text = kwargs.pop('connector')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_encoding(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp2 = ET.SubElement(interface_identifier, "cfp2")
cfp2 = ET.SubElement(cfp2, "cfp2")
encoding = ET.SubElement(cfp2, "encoding")
encoding.text = kwargs.pop('encoding')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_vendor_name(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp2 = ET.SubElement(interface_identifier, "cfp2")
cfp2 = ET.SubElement(cfp2, "cfp2")
vendor_name = ET.SubElement(cfp2, "vendor-name")
vendor_name.text = kwargs.pop('vendor_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_vendor_oui(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp2 = ET.SubElement(interface_identifier, "cfp2")
cfp2 = ET.SubElement(cfp2, "cfp2")
vendor_oui = ET.SubElement(cfp2, "vendor-oui")
vendor_oui.text = kwargs.pop('vendor_oui')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_vendor_pn(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp2 = ET.SubElement(interface_identifier, "cfp2")
cfp2 = ET.SubElement(cfp2, "cfp2")
vendor_pn = ET.SubElement(cfp2, "vendor-pn")
vendor_pn.text = kwargs.pop('vendor_pn')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_vendor_rev(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp2 = ET.SubElement(interface_identifier, "cfp2")
cfp2 = ET.SubElement(cfp2, "cfp2")
vendor_rev = ET.SubElement(cfp2, "vendor-rev")
vendor_rev.text = kwargs.pop('vendor_rev')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_distance(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp2 = ET.SubElement(interface_identifier, "cfp2")
cfp2 = ET.SubElement(cfp2, "cfp2")
distance = ET.SubElement(cfp2, "distance")
distance.text = kwargs.pop('distance')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_media_form_factor(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp2 = ET.SubElement(interface_identifier, "cfp2")
cfp2 = ET.SubElement(cfp2, "cfp2")
media_form_factor = ET.SubElement(cfp2, "media-form-factor")
media_form_factor.text = kwargs.pop('media_form_factor')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_wavelength(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp2 = ET.SubElement(interface_identifier, "cfp2")
cfp2 = ET.SubElement(cfp2, "cfp2")
wavelength = ET.SubElement(cfp2, "wavelength")
wavelength.text = kwargs.pop('wavelength')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_serial_no(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp2 = ET.SubElement(interface_identifier, "cfp2")
cfp2 = ET.SubElement(cfp2, "cfp2")
serial_no = ET.SubElement(cfp2, "serial-no")
serial_no.text = kwargs.pop('serial_no')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_date_code(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp2 = ET.SubElement(interface_identifier, "cfp2")
cfp2 = ET.SubElement(cfp2, "cfp2")
date_code = ET.SubElement(cfp2, "date-code")
date_code.text = kwargs.pop('date_code')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_temperature(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp2 = ET.SubElement(interface_identifier, "cfp2")
cfp2 = ET.SubElement(cfp2, "cfp2")
temperature = ET.SubElement(cfp2, "temperature")
temperature.text = kwargs.pop('temperature')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_voltage(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp2 = ET.SubElement(interface_identifier, "cfp2")
cfp2 = ET.SubElement(cfp2, "cfp2")
voltage = ET.SubElement(cfp2, "voltage")
voltage.text = kwargs.pop('voltage')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_current(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp2 = ET.SubElement(interface_identifier, "cfp2")
cfp2 = ET.SubElement(cfp2, "cfp2")
current = ET.SubElement(cfp2, "current")
current.text = kwargs.pop('current')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_tx_power(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp2 = ET.SubElement(interface_identifier, "cfp2")
cfp2 = ET.SubElement(cfp2, "cfp2")
tx_power = ET.SubElement(cfp2, "tx-power")
tx_power.text = kwargs.pop('tx_power')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_rx_power(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp2 = ET.SubElement(interface_identifier, "cfp2")
cfp2 = ET.SubElement(cfp2, "cfp2")
rx_power = ET.SubElement(cfp2, "rx-power")
rx_power.text = kwargs.pop('rx_power')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vlan_brief_input_request_type_get_request_vlan_id(self, **kwargs):
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
input = ET.SubElement(get_vlan_brief, "input")
request_type = ET.SubElement(input, "request-type")
get_request = ET.SubElement(request_type, "get-request")
vlan_id = ET.SubElement(get_request, "vlan-id")
vlan_id.text = kwargs.pop('vlan_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vlan_brief_input_request_type_get_next_request_last_rcvd_vlan_id(self, **kwargs):
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
input = ET.SubElement(get_vlan_brief, "input")
request_type = ET.SubElement(input, "request-type")
get_next_request = ET.SubElement(request_type, "get-next-request")
last_rcvd_vlan_id = ET.SubElement(get_next_request, "last-rcvd-vlan-id")
last_rcvd_vlan_id.text = kwargs.pop('last_rcvd_vlan_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vlan_brief_output_configured_vlans_count(self, **kwargs):
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
output = ET.SubElement(get_vlan_brief, "output")
configured_vlans_count = ET.SubElement(output, "configured-vlans-count")
configured_vlans_count.text = kwargs.pop('configured_vlans_count')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vlan_brief_output_provisioned_vlans_count(self, **kwargs):
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
output = ET.SubElement(get_vlan_brief, "output")
provisioned_vlans_count = ET.SubElement(output, "provisioned-vlans-count")
provisioned_vlans_count.text = kwargs.pop('provisioned_vlans_count')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vlan_brief_output_unprovisioned_vlans_count(self, **kwargs):
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
output = ET.SubElement(get_vlan_brief, "output")
unprovisioned_vlans_count = ET.SubElement(output, "unprovisioned-vlans-count")
unprovisioned_vlans_count.text = kwargs.pop('unprovisioned_vlans_count')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vlan_brief_output_vlan_vlan_id(self, **kwargs):
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
output = ET.SubElement(get_vlan_brief, "output")
vlan = ET.SubElement(output, "vlan")
vlan_id = ET.SubElement(vlan, "vlan-id")
vlan_id.text = kwargs.pop('vlan_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vlan_brief_output_vlan_vlan_type(self, **kwargs):
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
output = ET.SubElement(get_vlan_brief, "output")
vlan = ET.SubElement(output, "vlan")
vlan_id_key = ET.SubElement(vlan, "vlan-id")
vlan_id_key.text = kwargs.pop('vlan_id')
vlan_type = ET.SubElement(vlan, "vlan-type")
vlan_type.text = kwargs.pop('vlan_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vlan_brief_output_vlan_vlan_name(self, **kwargs):
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
output = ET.SubElement(get_vlan_brief, "output")
vlan = ET.SubElement(output, "vlan")
vlan_id_key = ET.SubElement(vlan, "vlan-id")
vlan_id_key.text = kwargs.pop('vlan_id')
vlan_name = ET.SubElement(vlan, "vlan-name")
vlan_name.text = kwargs.pop('vlan_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vlan_brief_output_vlan_vlan_state(self, **kwargs):
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
output = ET.SubElement(get_vlan_brief, "output")
vlan = ET.SubElement(output, "vlan")
vlan_id_key = ET.SubElement(vlan, "vlan-id")
vlan_id_key.text = kwargs.pop('vlan_id')
vlan_state = ET.SubElement(vlan, "vlan-state")
vlan_state.text = kwargs.pop('vlan_state')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vlan_brief_output_vlan_interface_interface_type(self, **kwargs):
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
output = ET.SubElement(get_vlan_brief, "output")
vlan = ET.SubElement(output, "vlan")
vlan_id_key = ET.SubElement(vlan, "vlan-id")
vlan_id_key.text = kwargs.pop('vlan_id')
interface = ET.SubElement(vlan, "interface")
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_type = ET.SubElement(interface, "interface-type")
interface_type.text = kwargs.pop('interface_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vlan_brief_output_vlan_interface_interface_name(self, **kwargs):
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
output = ET.SubElement(get_vlan_brief, "output")
vlan = ET.SubElement(output, "vlan")
vlan_id_key = ET.SubElement(vlan, "vlan-id")
vlan_id_key.text = kwargs.pop('vlan_id')
interface = ET.SubElement(vlan, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name = ET.SubElement(interface, "interface-name")
interface_name.text = kwargs.pop('interface_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vlan_brief_output_vlan_interface_tag(self, **kwargs):
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
output = ET.SubElement(get_vlan_brief, "output")
vlan = ET.SubElement(output, "vlan")
vlan_id_key = ET.SubElement(vlan, "vlan-id")
vlan_id_key.text = kwargs.pop('vlan_id')
interface = ET.SubElement(vlan, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
tag = ET.SubElement(interface, "tag")
tag.text = kwargs.pop('tag')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vlan_brief_output_vlan_interface_classification_classification_type(self, **kwargs):
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
output = ET.SubElement(get_vlan_brief, "output")
vlan = ET.SubElement(output, "vlan")
vlan_id_key = ET.SubElement(vlan, "vlan-id")
vlan_id_key.text = kwargs.pop('vlan_id')
interface = ET.SubElement(vlan, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
classification = ET.SubElement(interface, "classification")
classification_value_key = ET.SubElement(classification, "classification-value")
classification_value_key.text = kwargs.pop('classification_value')
classification_type = ET.SubElement(classification, "classification-type")
classification_type.text = kwargs.pop('classification_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vlan_brief_output_vlan_interface_classification_classification_value(self, **kwargs):
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
output = ET.SubElement(get_vlan_brief, "output")
vlan = ET.SubElement(output, "vlan")
vlan_id_key = ET.SubElement(vlan, "vlan-id")
vlan_id_key.text = kwargs.pop('vlan_id')
interface = ET.SubElement(vlan, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
classification = ET.SubElement(interface, "classification")
classification_type_key = ET.SubElement(classification, "classification-type")
classification_type_key.text = kwargs.pop('classification_type')
classification_value = ET.SubElement(classification, "classification-value")
classification_value.text = kwargs.pop('classification_value')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vlan_brief_output_last_vlan_id(self, **kwargs):
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
output = ET.SubElement(get_vlan_brief, "output")
last_vlan_id = ET.SubElement(output, "last-vlan-id")
last_vlan_id.text = kwargs.pop('last_vlan_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_vlan_brief_output_has_more(self, **kwargs):
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
output = ET.SubElement(get_vlan_brief, "output")
has_more = ET.SubElement(output, "has-more")
has_more.text = kwargs.pop('has_more')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_switchport_output_switchport_interface_type(self, **kwargs):
config = ET.Element("config")
get_interface_switchport = ET.Element("get_interface_switchport")
config = get_interface_switchport
output = ET.SubElement(get_interface_switchport, "output")
switchport = ET.SubElement(output, "switchport")
interface_name_key = ET.SubElement(switchport, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_type = ET.SubElement(switchport, "interface-type")
interface_type.text = kwargs.pop('interface_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_switchport_output_switchport_interface_name(self, **kwargs):
config = ET.Element("config")
get_interface_switchport = ET.Element("get_interface_switchport")
config = get_interface_switchport
output = ET.SubElement(get_interface_switchport, "output")
switchport = ET.SubElement(output, "switchport")
interface_type_key = ET.SubElement(switchport, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name = ET.SubElement(switchport, "interface-name")
interface_name.text = kwargs.pop('interface_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_switchport_output_switchport_mode(self, **kwargs):
config = ET.Element("config")
get_interface_switchport = ET.Element("get_interface_switchport")
config = get_interface_switchport
output = ET.SubElement(get_interface_switchport, "output")
switchport = ET.SubElement(output, "switchport")
interface_type_key = ET.SubElement(switchport, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(switchport, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
mode = ET.SubElement(switchport, "mode")
mode.text = kwargs.pop('mode')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_switchport_output_switchport_fcoe_port_enabled(self, **kwargs):
config = ET.Element("config")
get_interface_switchport = ET.Element("get_interface_switchport")
config = get_interface_switchport
output = ET.SubElement(get_interface_switchport, "output")
switchport = ET.SubElement(output, "switchport")
interface_type_key = ET.SubElement(switchport, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(switchport, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
fcoe_port_enabled = ET.SubElement(switchport, "fcoe-port-enabled")
fcoe_port_enabled.text = kwargs.pop('fcoe_port_enabled')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_switchport_output_switchport_ingress_filter_enabled(self, **kwargs):
config = ET.Element("config")
get_interface_switchport = ET.Element("get_interface_switchport")
config = get_interface_switchport
output = ET.SubElement(get_interface_switchport, "output")
switchport = ET.SubElement(output, "switchport")
interface_type_key = ET.SubElement(switchport, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(switchport, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ingress_filter_enabled = ET.SubElement(switchport, "ingress-filter-enabled")
ingress_filter_enabled.text = kwargs.pop('ingress_filter_enabled')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_switchport_output_switchport_acceptable_frame_type(self, **kwargs):
config = ET.Element("config")
get_interface_switchport = ET.Element("get_interface_switchport")
config = get_interface_switchport
output = ET.SubElement(get_interface_switchport, "output")
switchport = ET.SubElement(output, "switchport")
interface_type_key = ET.SubElement(switchport, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(switchport, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
acceptable_frame_type = ET.SubElement(switchport, "acceptable-frame-type")
acceptable_frame_type.text = kwargs.pop('acceptable_frame_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_switchport_output_switchport_default_vlan(self, **kwargs):
config = ET.Element("config")
get_interface_switchport = ET.Element("get_interface_switchport")
config = get_interface_switchport
output = ET.SubElement(get_interface_switchport, "output")
switchport = ET.SubElement(output, "switchport")
interface_type_key = ET.SubElement(switchport, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(switchport, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
default_vlan = ET.SubElement(switchport, "default-vlan")
default_vlan.text = kwargs.pop('default_vlan')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_input_request_type_get_request_interface_type(self, **kwargs):
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
input = ET.SubElement(get_ip_interface, "input")
request_type = ET.SubElement(input, "request-type")
get_request = ET.SubElement(request_type, "get-request")
interface_type = ET.SubElement(get_request, "interface-type")
interface_type.text = kwargs.pop('interface_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_input_request_type_get_request_interface_name(self, **kwargs):
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
input = ET.SubElement(get_ip_interface, "input")
request_type = ET.SubElement(input, "request-type")
get_request = ET.SubElement(request_type, "get-request")
interface_name = ET.SubElement(get_request, "interface-name")
interface_name.text = kwargs.pop('interface_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_input_request_type_get_request_rbridge_id(self, **kwargs):
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
input = ET.SubElement(get_ip_interface, "input")
request_type = ET.SubElement(input, "request-type")
get_request = ET.SubElement(request_type, "get-request")
rbridge_id = ET.SubElement(get_request, "rbridge-id")
rbridge_id.text = kwargs.pop('rbridge_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_output_interface_interface_type(self, **kwargs):
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
output = ET.SubElement(get_ip_interface, "output")
interface = ET.SubElement(output, "interface")
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_type = ET.SubElement(interface, "interface-type")
interface_type.text = kwargs.pop('interface_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_output_interface_interface_name(self, **kwargs):
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
output = ET.SubElement(get_ip_interface, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name = ET.SubElement(interface, "interface-name")
interface_name.text = kwargs.pop('interface_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_output_interface_if_name(self, **kwargs):
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
output = ET.SubElement(get_ip_interface, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
if_name = ET.SubElement(interface, "if-name")
if_name.text = kwargs.pop('if_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_output_interface_ip_address_ipv4(self, **kwargs):
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
output = ET.SubElement(get_ip_interface, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ip_address = ET.SubElement(interface, "ip-address")
ipv4 = ET.SubElement(ip_address, "ipv4")
ipv4.text = kwargs.pop('ipv4')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_output_interface_ip_address_ipv4_type(self, **kwargs):
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
output = ET.SubElement(get_ip_interface, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ip_address = ET.SubElement(interface, "ip-address")
ipv4_key = ET.SubElement(ip_address, "ipv4")
ipv4_key.text = kwargs.pop('ipv4')
ipv4_type = ET.SubElement(ip_address, "ipv4-type")
ipv4_type.text = kwargs.pop('ipv4_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_output_interface_ip_address_broadcast(self, **kwargs):
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
output = ET.SubElement(get_ip_interface, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ip_address = ET.SubElement(interface, "ip-address")
ipv4_key = ET.SubElement(ip_address, "ipv4")
ipv4_key.text = kwargs.pop('ipv4')
broadcast = ET.SubElement(ip_address, "broadcast")
broadcast.text = kwargs.pop('broadcast')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_output_interface_ip_address_ip_mtu(self, **kwargs):
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
output = ET.SubElement(get_ip_interface, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ip_address = ET.SubElement(interface, "ip-address")
ipv4_key = ET.SubElement(ip_address, "ipv4")
ipv4_key.text = kwargs.pop('ipv4')
ip_mtu = ET.SubElement(ip_address, "ip-mtu")
ip_mtu.text = kwargs.pop('ip_mtu')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_output_interface_if_state(self, **kwargs):
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
output = ET.SubElement(get_ip_interface, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
if_state = ET.SubElement(interface, "if-state")
if_state.text = kwargs.pop('if_state')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_output_interface_line_protocol_state(self, **kwargs):
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
output = ET.SubElement(get_ip_interface, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
line_protocol_state = ET.SubElement(interface, "line-protocol-state")
line_protocol_state.text = kwargs.pop('line_protocol_state')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_output_interface_proxy_arp(self, **kwargs):
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
output = ET.SubElement(get_ip_interface, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
proxy_arp = ET.SubElement(interface, "proxy-arp")
proxy_arp.text = kwargs.pop('proxy_arp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_output_interface_vrf(self, **kwargs):
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
output = ET.SubElement(get_ip_interface, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
vrf = ET.SubElement(interface, "vrf")
vrf.text = kwargs.pop('vrf')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_ip_interface_output_has_more(self, **kwargs):
config = ET.Element("config")
get_ip_interface = ET.Element("get_ip_interface")
config = get_ip_interface
output = ET.SubElement(get_ip_interface, "output")
has_more = ET.SubElement(output, "has-more")
has_more.text = kwargs.pop('has_more')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_input_request_type_get_request_interface_type(self, **kwargs):
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
input = ET.SubElement(get_interface_detail, "input")
request_type = ET.SubElement(input, "request-type")
get_request = ET.SubElement(request_type, "get-request")
interface_type = ET.SubElement(get_request, "interface-type")
interface_type.text = kwargs.pop('interface_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_input_request_type_get_request_interface_name(self, **kwargs):
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
input = ET.SubElement(get_interface_detail, "input")
request_type = ET.SubElement(input, "request-type")
get_request = ET.SubElement(request_type, "get-request")
interface_name = ET.SubElement(get_request, "interface-name")
interface_name.text = kwargs.pop('interface_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_input_request_type_get_next_request_last_rcvd_interface_interface_type(self, **kwargs):
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
input = ET.SubElement(get_interface_detail, "input")
request_type = ET.SubElement(input, "request-type")
get_next_request = ET.SubElement(request_type, "get-next-request")
last_rcvd_interface = ET.SubElement(get_next_request, "last-rcvd-interface")
interface_type = ET.SubElement(last_rcvd_interface, "interface-type")
interface_type.text = kwargs.pop('interface_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_input_request_type_get_next_request_last_rcvd_interface_interface_name(self, **kwargs):
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
input = ET.SubElement(get_interface_detail, "input")
request_type = ET.SubElement(input, "request-type")
get_next_request = ET.SubElement(request_type, "get-next-request")
last_rcvd_interface = ET.SubElement(get_next_request, "last-rcvd-interface")
interface_name = ET.SubElement(last_rcvd_interface, "interface-name")
interface_name.text = kwargs.pop('interface_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_interface_type(self, **kwargs):
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_type = ET.SubElement(interface, "interface-type")
interface_type.text = kwargs.pop('interface_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_interface_name(self, **kwargs):
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name = ET.SubElement(interface, "interface-name")
interface_name.text = kwargs.pop('interface_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_ifindex(self, **kwargs):
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
ifindex = ET.SubElement(interface, "ifindex")
ifindex.text = kwargs.pop('ifindex')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_interface_detail_output_interface_mtu(self, **kwargs):
    """Build get_interface_detail/output/interface/mtu XML and invoke the
    callback (kwargs: interface_type, interface_name, mtu, optional callback)."""
    config = ET.Element("get_interface_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    mtu = ET.SubElement(interface, "mtu")
    mtu.text = kwargs.pop('mtu')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_detail_output_interface_ip_mtu(self, **kwargs):
    """Build get_interface_detail/output/interface/ip-mtu XML and invoke the
    callback (kwargs: interface_type, interface_name, ip_mtu, optional callback)."""
    config = ET.Element("get_interface_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    ip_mtu = ET.SubElement(interface, "ip-mtu")
    ip_mtu.text = kwargs.pop('ip_mtu')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_detail_output_interface_if_name(self, **kwargs):
    """Build get_interface_detail/output/interface/if-name XML and invoke the
    callback (kwargs: interface_type, interface_name, if_name, optional callback)."""
    config = ET.Element("get_interface_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    if_name = ET.SubElement(interface, "if-name")
    if_name.text = kwargs.pop('if_name')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_detail_output_interface_if_state(self, **kwargs):
    """Build get_interface_detail/output/interface/if-state XML and invoke the
    callback (kwargs: interface_type, interface_name, if_state, optional callback)."""
    config = ET.Element("get_interface_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    if_state = ET.SubElement(interface, "if-state")
    if_state.text = kwargs.pop('if_state')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_detail_output_interface_line_protocol_state(self, **kwargs):
    """Build get_interface_detail/output/interface/line-protocol-state XML and invoke
    the callback (kwargs: interface_type, interface_name, line_protocol_state, optional callback)."""
    config = ET.Element("get_interface_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    line_protocol_state = ET.SubElement(interface, "line-protocol-state")
    line_protocol_state.text = kwargs.pop('line_protocol_state')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_detail_output_interface_line_protocol_state_info(self, **kwargs):
    """Build get_interface_detail/output/interface/line-protocol-state-info XML and invoke
    the callback (kwargs: interface_type, interface_name, line_protocol_state_info, optional callback)."""
    config = ET.Element("get_interface_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    line_protocol_state_info = ET.SubElement(interface, "line-protocol-state-info")
    line_protocol_state_info.text = kwargs.pop('line_protocol_state_info')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_detail_output_interface_line_protocol_exception_info(self, **kwargs):
    """Build get_interface_detail/output/interface/line-protocol-exception-info XML and invoke
    the callback (kwargs: interface_type, interface_name, line_protocol_exception_info, optional callback)."""
    config = ET.Element("get_interface_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    line_protocol_exception_info = ET.SubElement(interface, "line-protocol-exception-info")
    line_protocol_exception_info.text = kwargs.pop('line_protocol_exception_info')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_detail_output_interface_hardware_type(self, **kwargs):
    """Build get_interface_detail/output/interface/hardware-type XML and invoke
    the callback (kwargs: interface_type, interface_name, hardware_type, optional callback)."""
    config = ET.Element("get_interface_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    hardware_type = ET.SubElement(interface, "hardware-type")
    hardware_type.text = kwargs.pop('hardware_type')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_detail_output_interface_logical_hardware_address(self, **kwargs):
    """Build get_interface_detail/output/interface/logical-hardware-address XML and invoke
    the callback (kwargs: interface_type, interface_name, logical_hardware_address, optional callback)."""
    config = ET.Element("get_interface_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    logical_hardware_address = ET.SubElement(interface, "logical-hardware-address")
    logical_hardware_address.text = kwargs.pop('logical_hardware_address')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_detail_output_interface_current_hardware_address(self, **kwargs):
    """Build get_interface_detail/output/interface/current-hardware-address XML and invoke
    the callback (kwargs: interface_type, interface_name, current_hardware_address, optional callback)."""
    config = ET.Element("get_interface_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    current_hardware_address = ET.SubElement(interface, "current-hardware-address")
    current_hardware_address.text = kwargs.pop('current_hardware_address')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_detail_output_interface_media_type(self, **kwargs):
    """Build get_interface_detail/output/interface/media-type XML and invoke
    the callback (kwargs: interface_type, interface_name, media_type, optional callback)."""
    config = ET.Element("get_interface_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    media_type = ET.SubElement(interface, "media-type")
    media_type.text = kwargs.pop('media_type')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_detail_output_interface_wavelength(self, **kwargs):
    """Build get_interface_detail/output/interface/wavelength XML and invoke
    the callback (kwargs: interface_type, interface_name, wavelength, optional callback)."""
    config = ET.Element("get_interface_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    wavelength = ET.SubElement(interface, "wavelength")
    wavelength.text = kwargs.pop('wavelength')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_detail_output_interface_if_description(self, **kwargs):
    """Build get_interface_detail/output/interface/if-description XML and invoke
    the callback (kwargs: interface_type, interface_name, if_description, optional callback)."""
    config = ET.Element("get_interface_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    if_description = ET.SubElement(interface, "if-description")
    if_description.text = kwargs.pop('if_description')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_detail_output_interface_actual_line_speed(self, **kwargs):
    """Build get_interface_detail/output/interface/actual-line-speed XML and invoke
    the callback (kwargs: interface_type, interface_name, actual_line_speed, optional callback)."""
    config = ET.Element("get_interface_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    actual_line_speed = ET.SubElement(interface, "actual-line-speed")
    actual_line_speed.text = kwargs.pop('actual_line_speed')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_detail_output_interface_configured_line_speed(self, **kwargs):
    """Build get_interface_detail/output/interface/configured-line-speed XML and invoke
    the callback (kwargs: interface_type, interface_name, configured_line_speed, optional callback)."""
    config = ET.Element("get_interface_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    configured_line_speed = ET.SubElement(interface, "configured-line-speed")
    configured_line_speed.text = kwargs.pop('configured_line_speed')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_detail_output_interface_line_duplex_state(self, **kwargs):
    """Build get_interface_detail/output/interface/line-duplex-state XML and invoke
    the callback (kwargs: interface_type, interface_name, line_duplex_state, optional callback)."""
    config = ET.Element("get_interface_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    line_duplex_state = ET.SubElement(interface, "line-duplex-state")
    line_duplex_state.text = kwargs.pop('line_duplex_state')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_detail_output_interface_flow_control(self, **kwargs):
    """Build get_interface_detail/output/interface/flow-control XML and invoke
    the callback (kwargs: interface_type, interface_name, flow_control, optional callback)."""
    config = ET.Element("get_interface_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    flow_control = ET.SubElement(interface, "flow-control")
    flow_control.text = kwargs.pop('flow_control')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_detail_output_interface_queuing_strategy(self, **kwargs):
    """Build get_interface_detail/output/interface/queuing-strategy XML and invoke
    the callback (kwargs: interface_type, interface_name, queuing_strategy, optional callback)."""
    config = ET.Element("get_interface_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    queuing_strategy = ET.SubElement(interface, "queuing-strategy")
    queuing_strategy.text = kwargs.pop('queuing_strategy')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_detail_output_interface_port_role(self, **kwargs):
    """Build get_interface_detail/output/interface/port-role XML and invoke
    the callback (kwargs: interface_type, interface_name, port_role, optional callback)."""
    config = ET.Element("get_interface_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    port_role = ET.SubElement(interface, "port-role")
    port_role.text = kwargs.pop('port_role')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_detail_output_interface_port_mode(self, **kwargs):
    """Build get_interface_detail/output/interface/port-mode XML and invoke
    the callback (kwargs: interface_type, interface_name, port_mode, optional callback)."""
    config = ET.Element("get_interface_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    port_mode = ET.SubElement(interface, "port-mode")
    port_mode.text = kwargs.pop('port_mode')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_detail_output_interface_ifHCInOctets(self, **kwargs):
    """Build get_interface_detail/output/interface/ifHCInOctets XML and invoke
    the callback (kwargs: interface_type, interface_name, ifHCInOctets, optional callback)."""
    config = ET.Element("get_interface_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    ifHCInOctets = ET.SubElement(interface, "ifHCInOctets")
    ifHCInOctets.text = kwargs.pop('ifHCInOctets')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_detail_output_interface_ifHCInUcastPkts(self, **kwargs):
    """Build get_interface_detail/output/interface/ifHCInUcastPkts XML and invoke
    the callback (kwargs: interface_type, interface_name, ifHCInUcastPkts, optional callback)."""
    config = ET.Element("get_interface_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    ifHCInUcastPkts = ET.SubElement(interface, "ifHCInUcastPkts")
    ifHCInUcastPkts.text = kwargs.pop('ifHCInUcastPkts')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_detail_output_interface_ifHCInMulticastPkts(self, **kwargs):
    """Build get_interface_detail/output/interface/ifHCInMulticastPkts XML and invoke
    the callback (kwargs: interface_type, interface_name, ifHCInMulticastPkts, optional callback)."""
    config = ET.Element("get_interface_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    ifHCInMulticastPkts = ET.SubElement(interface, "ifHCInMulticastPkts")
    ifHCInMulticastPkts.text = kwargs.pop('ifHCInMulticastPkts')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_detail_output_interface_ifHCInBroadcastPkts(self, **kwargs):
    """Build get_interface_detail/output/interface/ifHCInBroadcastPkts XML and invoke
    the callback (kwargs: interface_type, interface_name, ifHCInBroadcastPkts, optional callback)."""
    config = ET.Element("get_interface_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    ifHCInBroadcastPkts = ET.SubElement(interface, "ifHCInBroadcastPkts")
    ifHCInBroadcastPkts.text = kwargs.pop('ifHCInBroadcastPkts')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_detail_output_interface_ifHCInErrors(self, **kwargs):
    """Build get_interface_detail/output/interface/ifHCInErrors XML and invoke
    the callback (kwargs: interface_type, interface_name, ifHCInErrors, optional callback)."""
    config = ET.Element("get_interface_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    ifHCInErrors = ET.SubElement(interface, "ifHCInErrors")
    ifHCInErrors.text = kwargs.pop('ifHCInErrors')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_detail_output_interface_ifHCOutOctets(self, **kwargs):
    """Build get_interface_detail/output/interface/ifHCOutOctets XML and invoke
    the callback (kwargs: interface_type, interface_name, ifHCOutOctets, optional callback)."""
    config = ET.Element("get_interface_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    ifHCOutOctets = ET.SubElement(interface, "ifHCOutOctets")
    ifHCOutOctets.text = kwargs.pop('ifHCOutOctets')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_detail_output_interface_ifHCOutUcastPkts(self, **kwargs):
    """Build get_interface_detail/output/interface/ifHCOutUcastPkts XML and invoke
    the callback (kwargs: interface_type, interface_name, ifHCOutUcastPkts, optional callback)."""
    config = ET.Element("get_interface_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    ifHCOutUcastPkts = ET.SubElement(interface, "ifHCOutUcastPkts")
    ifHCOutUcastPkts.text = kwargs.pop('ifHCOutUcastPkts')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_detail_output_interface_ifHCOutMulticastPkts(self, **kwargs):
    """Build get_interface_detail/output/interface/ifHCOutMulticastPkts XML and invoke
    the callback (kwargs: interface_type, interface_name, ifHCOutMulticastPkts, optional callback)."""
    config = ET.Element("get_interface_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    ifHCOutMulticastPkts = ET.SubElement(interface, "ifHCOutMulticastPkts")
    ifHCOutMulticastPkts.text = kwargs.pop('ifHCOutMulticastPkts')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_detail_output_interface_ifHCOutBroadcastPkts(self, **kwargs):
    """Build get_interface_detail/output/interface/ifHCOutBroadcastPkts XML and invoke
    the callback (kwargs: interface_type, interface_name, ifHCOutBroadcastPkts, optional callback)."""
    config = ET.Element("get_interface_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    ifHCOutBroadcastPkts = ET.SubElement(interface, "ifHCOutBroadcastPkts")
    ifHCOutBroadcastPkts.text = kwargs.pop('ifHCOutBroadcastPkts')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_detail_output_interface_ifHCOutErrors(self, **kwargs):
    """Build get_interface_detail/output/interface/ifHCOutErrors XML and invoke
    the callback (kwargs: interface_type, interface_name, ifHCOutErrors, optional callback)."""
    config = ET.Element("get_interface_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    ifHCOutErrors = ET.SubElement(interface, "ifHCOutErrors")
    ifHCOutErrors.text = kwargs.pop('ifHCOutErrors')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_interface_detail_output_has_more(self, **kwargs):
    """Build get_interface_detail/output/has-more XML and invoke the callback
    (kwargs: has_more, optional callback)."""
    config = ET.Element("get_interface_detail")
    output = ET.SubElement(config, "output")
    has_more = ET.SubElement(output, "has-more")
    has_more.text = kwargs.pop('has_more')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_input_interface_type(self, **kwargs):
    """Build get_media_detail/input/interface-type XML and invoke the callback
    (kwargs: interface_type, optional callback)."""
    config = ET.Element("get_media_detail")
    # local renamed from 'input' to avoid shadowing the builtin
    input_el = ET.SubElement(config, "input")
    interface_type = ET.SubElement(input_el, "interface-type")
    interface_type.text = kwargs.pop('interface_type')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_input_interface_name(self, **kwargs):
    """Build get_media_detail/input/interface-name XML and invoke the callback
    (kwargs: interface_name, optional callback)."""
    config = ET.Element("get_media_detail")
    # local renamed from 'input' to avoid shadowing the builtin
    input_el = ET.SubElement(config, "input")
    interface_name = ET.SubElement(input_el, "interface-name")
    interface_name.text = kwargs.pop('interface_name')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_input_rbridge_id(self, **kwargs):
    """Build get_media_detail/input/rbridge-id XML and invoke the callback
    (kwargs: rbridge_id, optional callback)."""
    config = ET.Element("get_media_detail")
    # local renamed from 'input' to avoid shadowing the builtin
    input_el = ET.SubElement(config, "input")
    rbridge_id = ET.SubElement(input_el, "rbridge-id")
    rbridge_id.text = kwargs.pop('rbridge_id')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_type(self, **kwargs):
    """Build get_media_detail/output/interface/interface-type XML and invoke
    the callback (kwargs: interface_name, interface_type, optional callback)."""
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_type = ET.SubElement(interface, "interface-type")
    interface_type.text = kwargs.pop('interface_type')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_name(self, **kwargs):
    """Build get_media_detail/output/interface/interface-name XML and invoke
    the callback (kwargs: interface_type, interface_name, optional callback)."""
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name = ET.SubElement(interface, "interface-name")
    interface_name.text = kwargs.pop('interface_name')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_speed(self, **kwargs):
    """Build get_media_detail/output/interface/interface-identifier/sfp/sfp/speed XML
    and invoke the callback (kwargs: interface_type, interface_name, speed, optional callback)."""
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    sfp_outer = ET.SubElement(interface_identifier, "sfp")
    sfp = ET.SubElement(sfp_outer, "sfp")  # schema nests sfp inside sfp
    speed = ET.SubElement(sfp, "speed")
    speed.text = kwargs.pop('speed')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_connector(self, **kwargs):
    """Build get_media_detail/output/interface/interface-identifier/sfp/sfp/connector XML
    and invoke the callback (kwargs: interface_type, interface_name, connector, optional callback)."""
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    sfp_outer = ET.SubElement(interface_identifier, "sfp")
    sfp = ET.SubElement(sfp_outer, "sfp")  # schema nests sfp inside sfp
    connector = ET.SubElement(sfp, "connector")
    connector.text = kwargs.pop('connector')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_encoding(self, **kwargs):
    """Build get_media_detail/output/interface/interface-identifier/sfp/sfp/encoding XML
    and invoke the callback (kwargs: interface_type, interface_name, encoding, optional callback)."""
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    sfp_outer = ET.SubElement(interface_identifier, "sfp")
    sfp = ET.SubElement(sfp_outer, "sfp")  # schema nests sfp inside sfp
    encoding = ET.SubElement(sfp, "encoding")
    encoding.text = kwargs.pop('encoding')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_vendor_name(self, **kwargs):
    """Build get_media_detail/output/interface/interface-identifier/sfp/sfp/vendor-name XML
    and invoke the callback (kwargs: interface_type, interface_name, vendor_name, optional callback)."""
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    sfp_outer = ET.SubElement(interface_identifier, "sfp")
    sfp = ET.SubElement(sfp_outer, "sfp")  # schema nests sfp inside sfp
    vendor_name = ET.SubElement(sfp, "vendor-name")
    vendor_name.text = kwargs.pop('vendor_name')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_vendor_oui(self, **kwargs):
    """Build get_media_detail/output/interface/interface-identifier/sfp/sfp/vendor-oui XML
    and invoke the callback (kwargs: interface_type, interface_name, vendor_oui, optional callback)."""
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    sfp_outer = ET.SubElement(interface_identifier, "sfp")
    sfp = ET.SubElement(sfp_outer, "sfp")  # schema nests sfp inside sfp
    vendor_oui = ET.SubElement(sfp, "vendor-oui")
    vendor_oui.text = kwargs.pop('vendor_oui')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_vendor_pn(self, **kwargs):
    """Build get_media_detail/output/interface/interface-identifier/sfp/sfp/vendor-pn XML
    and invoke the callback (kwargs: interface_type, interface_name, vendor_pn, optional callback)."""
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    sfp_outer = ET.SubElement(interface_identifier, "sfp")
    sfp = ET.SubElement(sfp_outer, "sfp")  # schema nests sfp inside sfp
    vendor_pn = ET.SubElement(sfp, "vendor-pn")
    vendor_pn.text = kwargs.pop('vendor_pn')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_vendor_rev(self, **kwargs):
    """Build get_media_detail/output/interface/interface-identifier/sfp/sfp/vendor-rev XML
    and invoke the callback (kwargs: interface_type, interface_name, vendor_rev, optional callback)."""
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    sfp_outer = ET.SubElement(interface_identifier, "sfp")
    sfp = ET.SubElement(sfp_outer, "sfp")  # schema nests sfp inside sfp
    vendor_rev = ET.SubElement(sfp, "vendor-rev")
    vendor_rev.text = kwargs.pop('vendor_rev')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_distance(self, **kwargs):
    """Build get_media_detail/output/interface/interface-identifier/sfp/sfp/distance XML
    and invoke the callback (kwargs: interface_type, interface_name, distance, optional callback)."""
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    sfp_outer = ET.SubElement(interface_identifier, "sfp")
    sfp = ET.SubElement(sfp_outer, "sfp")  # schema nests sfp inside sfp
    distance = ET.SubElement(sfp, "distance")
    distance.text = kwargs.pop('distance')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_media_form_factor(self, **kwargs):
    """Build get_media_detail/output/interface/interface-identifier/sfp/sfp/media-form-factor XML
    and invoke the callback (kwargs: interface_type, interface_name, media_form_factor, optional callback)."""
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    sfp_outer = ET.SubElement(interface_identifier, "sfp")
    sfp = ET.SubElement(sfp_outer, "sfp")  # schema nests sfp inside sfp
    media_form_factor = ET.SubElement(sfp, "media-form-factor")
    media_form_factor.text = kwargs.pop('media_form_factor')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_wavelength(self, **kwargs):
    """Build get_media_detail/output/interface/interface-identifier/sfp/sfp/wavelength XML
    and invoke the callback (kwargs: interface_type, interface_name, wavelength, optional callback)."""
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    interface_identifier = ET.SubElement(interface, "interface-identifier")
    sfp_outer = ET.SubElement(interface_identifier, "sfp")
    sfp = ET.SubElement(sfp_outer, "sfp")  # schema nests sfp inside sfp
    wavelength = ET.SubElement(sfp, "wavelength")
    wavelength.text = kwargs.pop('wavelength')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_serial_no(self, **kwargs):
    """Build a <get_media_detail> RPC tree down to output/interface/
    interface-identifier/sfp/sfp/serial-no and hand it to the callback.

    Required kwargs: interface_type, interface_name, serial_no (all str).
    Optional kwarg: callback (defaults to self._callback); its result is returned.
    """
    # Root is returned directly; the generated code's throwaway <config>
    # wrapper element has been dropped.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # Key leaves identifying the target interface.
    ET.SubElement(interface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(interface, "interface-name").text = kwargs.pop('interface_name')
    identifier = ET.SubElement(interface, "interface-identifier")
    # YANG choice wrapper <sfp> containing the <sfp> media container.
    media = ET.SubElement(ET.SubElement(identifier, "sfp"), "sfp")
    ET.SubElement(media, "serial-no").text = kwargs.pop('serial_no')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_date_code(self, **kwargs):
    """Build a <get_media_detail> RPC tree down to output/interface/
    interface-identifier/sfp/sfp/date-code and hand it to the callback.

    Required kwargs: interface_type, interface_name, date_code (all str).
    Optional kwarg: callback (defaults to self._callback); its result is returned.
    """
    # Root is returned directly; the generated code's throwaway <config>
    # wrapper element has been dropped.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # Key leaves identifying the target interface.
    ET.SubElement(interface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(interface, "interface-name").text = kwargs.pop('interface_name')
    identifier = ET.SubElement(interface, "interface-identifier")
    # YANG choice wrapper <sfp> containing the <sfp> media container.
    media = ET.SubElement(ET.SubElement(identifier, "sfp"), "sfp")
    ET.SubElement(media, "date-code").text = kwargs.pop('date_code')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_temperature(self, **kwargs):
    """Build a <get_media_detail> RPC tree down to output/interface/
    interface-identifier/sfp/sfp/temperature and hand it to the callback.

    Required kwargs: interface_type, interface_name, temperature (all str).
    Optional kwarg: callback (defaults to self._callback); its result is returned.
    """
    # Root is returned directly; the generated code's throwaway <config>
    # wrapper element has been dropped.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # Key leaves identifying the target interface.
    ET.SubElement(interface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(interface, "interface-name").text = kwargs.pop('interface_name')
    identifier = ET.SubElement(interface, "interface-identifier")
    # YANG choice wrapper <sfp> containing the <sfp> media container.
    media = ET.SubElement(ET.SubElement(identifier, "sfp"), "sfp")
    ET.SubElement(media, "temperature").text = kwargs.pop('temperature')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_voltage(self, **kwargs):
    """Build a <get_media_detail> RPC tree down to output/interface/
    interface-identifier/sfp/sfp/voltage and hand it to the callback.

    Required kwargs: interface_type, interface_name, voltage (all str).
    Optional kwarg: callback (defaults to self._callback); its result is returned.
    """
    # Root is returned directly; the generated code's throwaway <config>
    # wrapper element has been dropped.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # Key leaves identifying the target interface.
    ET.SubElement(interface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(interface, "interface-name").text = kwargs.pop('interface_name')
    identifier = ET.SubElement(interface, "interface-identifier")
    # YANG choice wrapper <sfp> containing the <sfp> media container.
    media = ET.SubElement(ET.SubElement(identifier, "sfp"), "sfp")
    ET.SubElement(media, "voltage").text = kwargs.pop('voltage')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_current(self, **kwargs):
    """Build a <get_media_detail> RPC tree down to output/interface/
    interface-identifier/sfp/sfp/current and hand it to the callback.

    Required kwargs: interface_type, interface_name, current (all str).
    Optional kwarg: callback (defaults to self._callback); its result is returned.
    """
    # Root is returned directly; the generated code's throwaway <config>
    # wrapper element has been dropped.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # Key leaves identifying the target interface.
    ET.SubElement(interface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(interface, "interface-name").text = kwargs.pop('interface_name')
    identifier = ET.SubElement(interface, "interface-identifier")
    # YANG choice wrapper <sfp> containing the <sfp> media container.
    media = ET.SubElement(ET.SubElement(identifier, "sfp"), "sfp")
    ET.SubElement(media, "current").text = kwargs.pop('current')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_tx_power(self, **kwargs):
    """Build a <get_media_detail> RPC tree down to output/interface/
    interface-identifier/sfp/sfp/tx-power and hand it to the callback.

    Required kwargs: interface_type, interface_name, tx_power (all str).
    Optional kwarg: callback (defaults to self._callback); its result is returned.
    """
    # Root is returned directly; the generated code's throwaway <config>
    # wrapper element has been dropped.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # Key leaves identifying the target interface.
    ET.SubElement(interface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(interface, "interface-name").text = kwargs.pop('interface_name')
    identifier = ET.SubElement(interface, "interface-identifier")
    # YANG choice wrapper <sfp> containing the <sfp> media container.
    media = ET.SubElement(ET.SubElement(identifier, "sfp"), "sfp")
    ET.SubElement(media, "tx-power").text = kwargs.pop('tx_power')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_sfp_sfp_rx_power(self, **kwargs):
    """Build a <get_media_detail> RPC tree down to output/interface/
    interface-identifier/sfp/sfp/rx-power and hand it to the callback.

    Required kwargs: interface_type, interface_name, rx_power (all str).
    Optional kwarg: callback (defaults to self._callback); its result is returned.
    """
    # Root is returned directly; the generated code's throwaway <config>
    # wrapper element has been dropped.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # Key leaves identifying the target interface.
    ET.SubElement(interface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(interface, "interface-name").text = kwargs.pop('interface_name')
    identifier = ET.SubElement(interface, "interface-identifier")
    # YANG choice wrapper <sfp> containing the <sfp> media container.
    media = ET.SubElement(ET.SubElement(identifier, "sfp"), "sfp")
    ET.SubElement(media, "rx-power").text = kwargs.pop('rx_power')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_on_board_on_board_speed(self, **kwargs):
    """Build a <get_media_detail> RPC tree down to output/interface/
    interface-identifier/on-board/on-board/speed and hand it to the callback.

    Required kwargs: interface_type, interface_name, speed (all str).
    Optional kwarg: callback (defaults to self._callback); its result is returned.
    """
    # Root is returned directly; the generated code's throwaway <config>
    # wrapper element has been dropped.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # Key leaves identifying the target interface.
    ET.SubElement(interface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(interface, "interface-name").text = kwargs.pop('interface_name')
    identifier = ET.SubElement(interface, "interface-identifier")
    # YANG choice wrapper <on-board> containing the <on-board> media container.
    media = ET.SubElement(ET.SubElement(identifier, "on-board"), "on-board")
    ET.SubElement(media, "speed").text = kwargs.pop('speed')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_on_board_on_board_connector(self, **kwargs):
    """Build a <get_media_detail> RPC tree down to output/interface/
    interface-identifier/on-board/on-board/connector and hand it to the callback.

    Required kwargs: interface_type, interface_name, connector (all str).
    Optional kwarg: callback (defaults to self._callback); its result is returned.
    """
    # Root is returned directly; the generated code's throwaway <config>
    # wrapper element has been dropped.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # Key leaves identifying the target interface.
    ET.SubElement(interface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(interface, "interface-name").text = kwargs.pop('interface_name')
    identifier = ET.SubElement(interface, "interface-identifier")
    # YANG choice wrapper <on-board> containing the <on-board> media container.
    media = ET.SubElement(ET.SubElement(identifier, "on-board"), "on-board")
    ET.SubElement(media, "connector").text = kwargs.pop('connector')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_on_board_on_board_encoding(self, **kwargs):
    """Build a <get_media_detail> RPC tree down to output/interface/
    interface-identifier/on-board/on-board/encoding and hand it to the callback.

    Required kwargs: interface_type, interface_name, encoding (all str).
    Optional kwarg: callback (defaults to self._callback); its result is returned.
    """
    # Root is returned directly; the generated code's throwaway <config>
    # wrapper element has been dropped.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # Key leaves identifying the target interface.
    ET.SubElement(interface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(interface, "interface-name").text = kwargs.pop('interface_name')
    identifier = ET.SubElement(interface, "interface-identifier")
    # YANG choice wrapper <on-board> containing the <on-board> media container.
    media = ET.SubElement(ET.SubElement(identifier, "on-board"), "on-board")
    ET.SubElement(media, "encoding").text = kwargs.pop('encoding')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_on_board_on_board_vendor_name(self, **kwargs):
    """Build a <get_media_detail> RPC tree down to output/interface/
    interface-identifier/on-board/on-board/vendor-name and hand it to the callback.

    Required kwargs: interface_type, interface_name, vendor_name (all str).
    Optional kwarg: callback (defaults to self._callback); its result is returned.
    """
    # Root is returned directly; the generated code's throwaway <config>
    # wrapper element has been dropped.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # Key leaves identifying the target interface.
    ET.SubElement(interface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(interface, "interface-name").text = kwargs.pop('interface_name')
    identifier = ET.SubElement(interface, "interface-identifier")
    # YANG choice wrapper <on-board> containing the <on-board> media container.
    media = ET.SubElement(ET.SubElement(identifier, "on-board"), "on-board")
    ET.SubElement(media, "vendor-name").text = kwargs.pop('vendor_name')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_on_board_on_board_vendor_oui(self, **kwargs):
    """Build a <get_media_detail> RPC tree down to output/interface/
    interface-identifier/on-board/on-board/vendor-oui and hand it to the callback.

    Required kwargs: interface_type, interface_name, vendor_oui (all str).
    Optional kwarg: callback (defaults to self._callback); its result is returned.
    """
    # Root is returned directly; the generated code's throwaway <config>
    # wrapper element has been dropped.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # Key leaves identifying the target interface.
    ET.SubElement(interface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(interface, "interface-name").text = kwargs.pop('interface_name')
    identifier = ET.SubElement(interface, "interface-identifier")
    # YANG choice wrapper <on-board> containing the <on-board> media container.
    media = ET.SubElement(ET.SubElement(identifier, "on-board"), "on-board")
    ET.SubElement(media, "vendor-oui").text = kwargs.pop('vendor_oui')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_on_board_on_board_vendor_pn(self, **kwargs):
    """Build a <get_media_detail> RPC tree down to output/interface/
    interface-identifier/on-board/on-board/vendor-pn and hand it to the callback.

    Required kwargs: interface_type, interface_name, vendor_pn (all str).
    Optional kwarg: callback (defaults to self._callback); its result is returned.
    """
    # Root is returned directly; the generated code's throwaway <config>
    # wrapper element has been dropped.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # Key leaves identifying the target interface.
    ET.SubElement(interface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(interface, "interface-name").text = kwargs.pop('interface_name')
    identifier = ET.SubElement(interface, "interface-identifier")
    # YANG choice wrapper <on-board> containing the <on-board> media container.
    media = ET.SubElement(ET.SubElement(identifier, "on-board"), "on-board")
    ET.SubElement(media, "vendor-pn").text = kwargs.pop('vendor_pn')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_on_board_on_board_vendor_rev(self, **kwargs):
    """Build a <get_media_detail> RPC tree down to output/interface/
    interface-identifier/on-board/on-board/vendor-rev and hand it to the callback.

    Required kwargs: interface_type, interface_name, vendor_rev (all str).
    Optional kwarg: callback (defaults to self._callback); its result is returned.
    """
    # Root is returned directly; the generated code's throwaway <config>
    # wrapper element has been dropped.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # Key leaves identifying the target interface.
    ET.SubElement(interface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(interface, "interface-name").text = kwargs.pop('interface_name')
    identifier = ET.SubElement(interface, "interface-identifier")
    # YANG choice wrapper <on-board> containing the <on-board> media container.
    media = ET.SubElement(ET.SubElement(identifier, "on-board"), "on-board")
    ET.SubElement(media, "vendor-rev").text = kwargs.pop('vendor_rev')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_gbic_gbc_vendor_name(self, **kwargs):
    """Build a <get_media_detail> RPC tree down to output/interface/
    interface-identifier/gbic/gbc/vendor-name and hand it to the callback.

    Required kwargs: interface_type, interface_name, vendor_name (all str).
    Optional kwarg: callback (defaults to self._callback); its result is returned.
    """
    # Root is returned directly; the generated code's throwaway <config>
    # wrapper element has been dropped.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # Key leaves identifying the target interface.
    ET.SubElement(interface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(interface, "interface-name").text = kwargs.pop('interface_name')
    identifier = ET.SubElement(interface, "interface-identifier")
    # YANG choice wrapper <gbic> containing the <gbc> media container.
    media = ET.SubElement(ET.SubElement(identifier, "gbic"), "gbc")
    ET.SubElement(media, "vendor-name").text = kwargs.pop('vendor_name')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_gbic_gbc_vendor_oui(self, **kwargs):
    """Build a <get_media_detail> RPC tree down to output/interface/
    interface-identifier/gbic/gbc/vendor-oui and hand it to the callback.

    Required kwargs: interface_type, interface_name, vendor_oui (all str).
    Optional kwarg: callback (defaults to self._callback); its result is returned.
    """
    # Root is returned directly; the generated code's throwaway <config>
    # wrapper element has been dropped.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # Key leaves identifying the target interface.
    ET.SubElement(interface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(interface, "interface-name").text = kwargs.pop('interface_name')
    identifier = ET.SubElement(interface, "interface-identifier")
    # YANG choice wrapper <gbic> containing the <gbc> media container.
    media = ET.SubElement(ET.SubElement(identifier, "gbic"), "gbc")
    ET.SubElement(media, "vendor-oui").text = kwargs.pop('vendor_oui')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_gbic_gbc_vendor_pn(self, **kwargs):
    """Build a <get_media_detail> RPC tree down to output/interface/
    interface-identifier/gbic/gbc/vendor-pn and hand it to the callback.

    Required kwargs: interface_type, interface_name, vendor_pn (all str).
    Optional kwarg: callback (defaults to self._callback); its result is returned.
    """
    # Root is returned directly; the generated code's throwaway <config>
    # wrapper element has been dropped.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # Key leaves identifying the target interface.
    ET.SubElement(interface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(interface, "interface-name").text = kwargs.pop('interface_name')
    identifier = ET.SubElement(interface, "interface-identifier")
    # YANG choice wrapper <gbic> containing the <gbc> media container.
    media = ET.SubElement(ET.SubElement(identifier, "gbic"), "gbc")
    ET.SubElement(media, "vendor-pn").text = kwargs.pop('vendor_pn')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_gbic_gbc_vendor_rev(self, **kwargs):
    """Build a <get_media_detail> RPC tree down to output/interface/
    interface-identifier/gbic/gbc/vendor-rev and hand it to the callback.

    Required kwargs: interface_type, interface_name, vendor_rev (all str).
    Optional kwarg: callback (defaults to self._callback); its result is returned.
    """
    # Root is returned directly; the generated code's throwaway <config>
    # wrapper element has been dropped.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # Key leaves identifying the target interface.
    ET.SubElement(interface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(interface, "interface-name").text = kwargs.pop('interface_name')
    identifier = ET.SubElement(interface, "interface-identifier")
    # YANG choice wrapper <gbic> containing the <gbc> media container.
    media = ET.SubElement(ET.SubElement(identifier, "gbic"), "gbc")
    ET.SubElement(media, "vendor-rev").text = kwargs.pop('vendor_rev')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_xfp_xfp_vendor_name(self, **kwargs):
    """Build a <get_media_detail> RPC tree down to output/interface/
    interface-identifier/xfp/xfp/vendor-name and hand it to the callback.

    Required kwargs: interface_type, interface_name, vendor_name (all str).
    Optional kwarg: callback (defaults to self._callback); its result is returned.
    """
    # Root is returned directly; the generated code's throwaway <config>
    # wrapper element has been dropped.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # Key leaves identifying the target interface.
    ET.SubElement(interface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(interface, "interface-name").text = kwargs.pop('interface_name')
    identifier = ET.SubElement(interface, "interface-identifier")
    # YANG choice wrapper <xfp> containing the <xfp> media container.
    media = ET.SubElement(ET.SubElement(identifier, "xfp"), "xfp")
    ET.SubElement(media, "vendor-name").text = kwargs.pop('vendor_name')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_xfp_xfp_vendor_oui(self, **kwargs):
    """Build a <get_media_detail> RPC tree down to output/interface/
    interface-identifier/xfp/xfp/vendor-oui and hand it to the callback.

    Required kwargs: interface_type, interface_name, vendor_oui (all str).
    Optional kwarg: callback (defaults to self._callback); its result is returned.
    """
    # Root is returned directly; the generated code's throwaway <config>
    # wrapper element has been dropped.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # Key leaves identifying the target interface.
    ET.SubElement(interface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(interface, "interface-name").text = kwargs.pop('interface_name')
    identifier = ET.SubElement(interface, "interface-identifier")
    # YANG choice wrapper <xfp> containing the <xfp> media container.
    media = ET.SubElement(ET.SubElement(identifier, "xfp"), "xfp")
    ET.SubElement(media, "vendor-oui").text = kwargs.pop('vendor_oui')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_xfp_xfp_vendor_pn(self, **kwargs):
    """Build a <get_media_detail> RPC tree down to output/interface/
    interface-identifier/xfp/xfp/vendor-pn and hand it to the callback.

    Required kwargs: interface_type, interface_name, vendor_pn (all str).
    Optional kwarg: callback (defaults to self._callback); its result is returned.
    """
    # Root is returned directly; the generated code's throwaway <config>
    # wrapper element has been dropped.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # Key leaves identifying the target interface.
    ET.SubElement(interface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(interface, "interface-name").text = kwargs.pop('interface_name')
    identifier = ET.SubElement(interface, "interface-identifier")
    # YANG choice wrapper <xfp> containing the <xfp> media container.
    media = ET.SubElement(ET.SubElement(identifier, "xfp"), "xfp")
    ET.SubElement(media, "vendor-pn").text = kwargs.pop('vendor_pn')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_xfp_xfp_vendor_rev(self, **kwargs):
    """Build a <get_media_detail> RPC tree down to output/interface/
    interface-identifier/xfp/xfp/vendor-rev and hand it to the callback.

    Required kwargs: interface_type, interface_name, vendor_rev (all str).
    Optional kwarg: callback (defaults to self._callback); its result is returned.
    """
    # Root is returned directly; the generated code's throwaway <config>
    # wrapper element has been dropped.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # Key leaves identifying the target interface.
    ET.SubElement(interface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(interface, "interface-name").text = kwargs.pop('interface_name')
    identifier = ET.SubElement(interface, "interface-identifier")
    # YANG choice wrapper <xfp> containing the <xfp> media container.
    media = ET.SubElement(ET.SubElement(identifier, "xfp"), "xfp")
    ET.SubElement(media, "vendor-rev").text = kwargs.pop('vendor_rev')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_xff_xff_vendor_name(self, **kwargs):
    """Build a <get_media_detail> RPC tree down to output/interface/
    interface-identifier/xff/xff/vendor-name and hand it to the callback.

    Required kwargs: interface_type, interface_name, vendor_name (all str).
    Optional kwarg: callback (defaults to self._callback); its result is returned.
    """
    # Root is returned directly; the generated code's throwaway <config>
    # wrapper element has been dropped.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # Key leaves identifying the target interface.
    ET.SubElement(interface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(interface, "interface-name").text = kwargs.pop('interface_name')
    identifier = ET.SubElement(interface, "interface-identifier")
    # YANG choice wrapper <xff> containing the <xff> media container.
    media = ET.SubElement(ET.SubElement(identifier, "xff"), "xff")
    ET.SubElement(media, "vendor-name").text = kwargs.pop('vendor_name')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_xff_xff_vendor_oui(self, **kwargs):
    """Build a <get_media_detail> RPC tree down to output/interface/
    interface-identifier/xff/xff/vendor-oui and hand it to the callback.

    Required kwargs: interface_type, interface_name, vendor_oui (all str).
    Optional kwarg: callback (defaults to self._callback); its result is returned.
    """
    # Root is returned directly; the generated code's throwaway <config>
    # wrapper element has been dropped.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # Key leaves identifying the target interface.
    ET.SubElement(interface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(interface, "interface-name").text = kwargs.pop('interface_name')
    identifier = ET.SubElement(interface, "interface-identifier")
    # YANG choice wrapper <xff> containing the <xff> media container.
    media = ET.SubElement(ET.SubElement(identifier, "xff"), "xff")
    ET.SubElement(media, "vendor-oui").text = kwargs.pop('vendor_oui')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_xff_xff_vendor_pn(self, **kwargs):
    """Build a <get_media_detail> RPC tree down to output/interface/
    interface-identifier/xff/xff/vendor-pn and hand it to the callback.

    Required kwargs: interface_type, interface_name, vendor_pn (all str).
    Optional kwarg: callback (defaults to self._callback); its result is returned.
    """
    # Root is returned directly; the generated code's throwaway <config>
    # wrapper element has been dropped.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # Key leaves identifying the target interface.
    ET.SubElement(interface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(interface, "interface-name").text = kwargs.pop('interface_name')
    identifier = ET.SubElement(interface, "interface-identifier")
    # YANG choice wrapper <xff> containing the <xff> media container.
    media = ET.SubElement(ET.SubElement(identifier, "xff"), "xff")
    ET.SubElement(media, "vendor-pn").text = kwargs.pop('vendor_pn')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_xff_xff_vendor_rev(self, **kwargs):
    """Build a <get_media_detail> RPC tree down to output/interface/
    interface-identifier/xff/xff/vendor-rev and hand it to the callback.

    Required kwargs: interface_type, interface_name, vendor_rev (all str).
    Optional kwarg: callback (defaults to self._callback); its result is returned.
    """
    # Root is returned directly; the generated code's throwaway <config>
    # wrapper element has been dropped.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # Key leaves identifying the target interface.
    ET.SubElement(interface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(interface, "interface-name").text = kwargs.pop('interface_name')
    identifier = ET.SubElement(interface, "interface-identifier")
    # YANG choice wrapper <xff> containing the <xff> media container.
    media = ET.SubElement(ET.SubElement(identifier, "xff"), "xff")
    ET.SubElement(media, "vendor-rev").text = kwargs.pop('vendor_rev')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_xfpe_xfpe_vendor_name(self, **kwargs):
    """Build a <get_media_detail> RPC tree down to output/interface/
    interface-identifier/xfpe/xfpe/vendor-name and hand it to the callback.

    Required kwargs: interface_type, interface_name, vendor_name (all str).
    Optional kwarg: callback (defaults to self._callback); its result is returned.
    """
    # Root is returned directly; the generated code's throwaway <config>
    # wrapper element has been dropped.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # Key leaves identifying the target interface.
    ET.SubElement(interface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(interface, "interface-name").text = kwargs.pop('interface_name')
    identifier = ET.SubElement(interface, "interface-identifier")
    # YANG choice wrapper <xfpe> containing the <xfpe> media container.
    media = ET.SubElement(ET.SubElement(identifier, "xfpe"), "xfpe")
    ET.SubElement(media, "vendor-name").text = kwargs.pop('vendor_name')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_xfpe_xfpe_vendor_oui(self, **kwargs):
    """Build a <get_media_detail> RPC tree down to output/interface/
    interface-identifier/xfpe/xfpe/vendor-oui and hand it to the callback.

    Required kwargs: interface_type, interface_name, vendor_oui (all str).
    Optional kwarg: callback (defaults to self._callback); its result is returned.
    """
    # Root is returned directly; the generated code's throwaway <config>
    # wrapper element has been dropped.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # Key leaves identifying the target interface.
    ET.SubElement(interface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(interface, "interface-name").text = kwargs.pop('interface_name')
    identifier = ET.SubElement(interface, "interface-identifier")
    # YANG choice wrapper <xfpe> containing the <xfpe> media container.
    media = ET.SubElement(ET.SubElement(identifier, "xfpe"), "xfpe")
    ET.SubElement(media, "vendor-oui").text = kwargs.pop('vendor_oui')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_xfpe_xfpe_vendor_pn(self, **kwargs):
    """Build a <get_media_detail> RPC tree down to output/interface/
    interface-identifier/xfpe/xfpe/vendor-pn and hand it to the callback.

    Required kwargs: interface_type, interface_name, vendor_pn (all str).
    Optional kwarg: callback (defaults to self._callback); its result is returned.
    """
    # Root is returned directly; the generated code's throwaway <config>
    # wrapper element has been dropped.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # Key leaves identifying the target interface.
    ET.SubElement(interface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(interface, "interface-name").text = kwargs.pop('interface_name')
    identifier = ET.SubElement(interface, "interface-identifier")
    # YANG choice wrapper <xfpe> containing the <xfpe> media container.
    media = ET.SubElement(ET.SubElement(identifier, "xfpe"), "xfpe")
    ET.SubElement(media, "vendor-pn").text = kwargs.pop('vendor_pn')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_xfpe_xfpe_vendor_rev(self, **kwargs):
    """Build a <get_media_detail> RPC tree down to output/interface/
    interface-identifier/xfpe/xfpe/vendor-rev and hand it to the callback.

    Required kwargs: interface_type, interface_name, vendor_rev (all str).
    Optional kwarg: callback (defaults to self._callback); its result is returned.
    """
    # Root is returned directly; the generated code's throwaway <config>
    # wrapper element has been dropped.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # Key leaves identifying the target interface.
    ET.SubElement(interface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(interface, "interface-name").text = kwargs.pop('interface_name')
    identifier = ET.SubElement(interface, "interface-identifier")
    # YANG choice wrapper <xfpe> containing the <xfpe> media container.
    media = ET.SubElement(ET.SubElement(identifier, "xfpe"), "xfpe")
    ET.SubElement(media, "vendor-rev").text = kwargs.pop('vendor_rev')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_unknown_unknown_vendor_name(self, **kwargs):
    """Build a <get_media_detail> RPC tree down to output/interface/
    interface-identifier/unknown/unknown/vendor-name and hand it to the callback.

    Required kwargs: interface_type, interface_name, vendor_name (all str).
    Optional kwarg: callback (defaults to self._callback); its result is returned.
    """
    # Root is returned directly; the generated code's throwaway <config>
    # wrapper element has been dropped.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # Key leaves identifying the target interface.
    ET.SubElement(interface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(interface, "interface-name").text = kwargs.pop('interface_name')
    identifier = ET.SubElement(interface, "interface-identifier")
    # YANG choice wrapper <unknown> containing the <unknown> media container.
    media = ET.SubElement(ET.SubElement(identifier, "unknown"), "unknown")
    ET.SubElement(media, "vendor-name").text = kwargs.pop('vendor_name')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_unknown_unknown_vendor_oui(self, **kwargs):
    """Build a <get_media_detail> RPC tree down to output/interface/
    interface-identifier/unknown/unknown/vendor-oui and hand it to the callback.

    Required kwargs: interface_type, interface_name, vendor_oui (all str).
    Optional kwarg: callback (defaults to self._callback); its result is returned.
    """
    # Root is returned directly; the generated code's throwaway <config>
    # wrapper element has been dropped.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # Key leaves identifying the target interface.
    ET.SubElement(interface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(interface, "interface-name").text = kwargs.pop('interface_name')
    identifier = ET.SubElement(interface, "interface-identifier")
    # YANG choice wrapper <unknown> containing the <unknown> media container.
    media = ET.SubElement(ET.SubElement(identifier, "unknown"), "unknown")
    ET.SubElement(media, "vendor-oui").text = kwargs.pop('vendor_oui')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_unknown_unknown_vendor_pn(self, **kwargs):
    """Build a <get_media_detail> RPC tree down to output/interface/
    interface-identifier/unknown/unknown/vendor-pn and hand it to the callback.

    Required kwargs: interface_type, interface_name, vendor_pn (all str).
    Optional kwarg: callback (defaults to self._callback); its result is returned.
    """
    # Root is returned directly; the generated code's throwaway <config>
    # wrapper element has been dropped.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # Key leaves identifying the target interface.
    ET.SubElement(interface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(interface, "interface-name").text = kwargs.pop('interface_name')
    identifier = ET.SubElement(interface, "interface-identifier")
    # YANG choice wrapper <unknown> containing the <unknown> media container.
    media = ET.SubElement(ET.SubElement(identifier, "unknown"), "unknown")
    ET.SubElement(media, "vendor-pn").text = kwargs.pop('vendor_pn')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_unknown_unknown_vendor_rev(self, **kwargs):
    """Build a <get_media_detail> RPC tree down to output/interface/
    interface-identifier/unknown/unknown/vendor-rev and hand it to the callback.

    Required kwargs: interface_type, interface_name, vendor_rev (all str).
    Optional kwarg: callback (defaults to self._callback); its result is returned.
    """
    # Root is returned directly; the generated code's throwaway <config>
    # wrapper element has been dropped.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # Key leaves identifying the target interface.
    ET.SubElement(interface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(interface, "interface-name").text = kwargs.pop('interface_name')
    identifier = ET.SubElement(interface, "interface-identifier")
    # YANG choice wrapper <unknown> containing the <unknown> media container.
    media = ET.SubElement(ET.SubElement(identifier, "unknown"), "unknown")
    ET.SubElement(media, "vendor-rev").text = kwargs.pop('vendor_rev')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_speed(self, **kwargs):
    """Build a <get_media_detail> RPC tree down to output/interface/
    interface-identifier/qsfp/qsfp/speed and hand it to the callback.

    Required kwargs: interface_type, interface_name, speed (all str).
    Optional kwarg: callback (defaults to self._callback); its result is returned.
    """
    # Root is returned directly; the generated code's throwaway <config>
    # wrapper element has been dropped.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # Key leaves identifying the target interface.
    ET.SubElement(interface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(interface, "interface-name").text = kwargs.pop('interface_name')
    identifier = ET.SubElement(interface, "interface-identifier")
    # YANG choice wrapper <qsfp> containing the <qsfp> media container.
    media = ET.SubElement(ET.SubElement(identifier, "qsfp"), "qsfp")
    ET.SubElement(media, "speed").text = kwargs.pop('speed')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_connector(self, **kwargs):
    """Build a <get_media_detail> RPC tree down to output/interface/
    interface-identifier/qsfp/qsfp/connector and hand it to the callback.

    Required kwargs: interface_type, interface_name, connector (all str).
    Optional kwarg: callback (defaults to self._callback); its result is returned.
    """
    # Root is returned directly; the generated code's throwaway <config>
    # wrapper element has been dropped.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # Key leaves identifying the target interface.
    ET.SubElement(interface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(interface, "interface-name").text = kwargs.pop('interface_name')
    identifier = ET.SubElement(interface, "interface-identifier")
    # YANG choice wrapper <qsfp> containing the <qsfp> media container.
    media = ET.SubElement(ET.SubElement(identifier, "qsfp"), "qsfp")
    ET.SubElement(media, "connector").text = kwargs.pop('connector')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_encoding(self, **kwargs):
    """Build a <get_media_detail> RPC tree down to output/interface/
    interface-identifier/qsfp/qsfp/encoding and hand it to the callback.

    Required kwargs: interface_type, interface_name, encoding (all str).
    Optional kwarg: callback (defaults to self._callback); its result is returned.
    """
    # Root is returned directly; the generated code's throwaway <config>
    # wrapper element has been dropped.
    config = ET.Element("get_media_detail")
    output = ET.SubElement(config, "output")
    interface = ET.SubElement(output, "interface")
    # Key leaves identifying the target interface.
    ET.SubElement(interface, "interface-type").text = kwargs.pop('interface_type')
    ET.SubElement(interface, "interface-name").text = kwargs.pop('interface_name')
    identifier = ET.SubElement(interface, "interface-identifier")
    # YANG choice wrapper <qsfp> containing the <qsfp> media container.
    media = ET.SubElement(ET.SubElement(identifier, "qsfp"), "qsfp")
    ET.SubElement(media, "encoding").text = kwargs.pop('encoding')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_vendor_name(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfp = ET.SubElement(interface_identifier, "qsfp")
qsfp = ET.SubElement(qsfp, "qsfp")
vendor_name = ET.SubElement(qsfp, "vendor-name")
vendor_name.text = kwargs.pop('vendor_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_vendor_oui(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfp = ET.SubElement(interface_identifier, "qsfp")
qsfp = ET.SubElement(qsfp, "qsfp")
vendor_oui = ET.SubElement(qsfp, "vendor-oui")
vendor_oui.text = kwargs.pop('vendor_oui')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_vendor_pn(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfp = ET.SubElement(interface_identifier, "qsfp")
qsfp = ET.SubElement(qsfp, "qsfp")
vendor_pn = ET.SubElement(qsfp, "vendor-pn")
vendor_pn.text = kwargs.pop('vendor_pn')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_vendor_rev(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfp = ET.SubElement(interface_identifier, "qsfp")
qsfp = ET.SubElement(qsfp, "qsfp")
vendor_rev = ET.SubElement(qsfp, "vendor-rev")
vendor_rev.text = kwargs.pop('vendor_rev')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_distance(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfp = ET.SubElement(interface_identifier, "qsfp")
qsfp = ET.SubElement(qsfp, "qsfp")
distance = ET.SubElement(qsfp, "distance")
distance.text = kwargs.pop('distance')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_media_form_factor(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfp = ET.SubElement(interface_identifier, "qsfp")
qsfp = ET.SubElement(qsfp, "qsfp")
media_form_factor = ET.SubElement(qsfp, "media-form-factor")
media_form_factor.text = kwargs.pop('media_form_factor')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_wavelength(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfp = ET.SubElement(interface_identifier, "qsfp")
qsfp = ET.SubElement(qsfp, "qsfp")
wavelength = ET.SubElement(qsfp, "wavelength")
wavelength.text = kwargs.pop('wavelength')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_serial_no(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfp = ET.SubElement(interface_identifier, "qsfp")
qsfp = ET.SubElement(qsfp, "qsfp")
serial_no = ET.SubElement(qsfp, "serial-no")
serial_no.text = kwargs.pop('serial_no')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_date_code(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfp = ET.SubElement(interface_identifier, "qsfp")
qsfp = ET.SubElement(qsfp, "qsfp")
date_code = ET.SubElement(qsfp, "date-code")
date_code.text = kwargs.pop('date_code')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_temperature(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfp = ET.SubElement(interface_identifier, "qsfp")
qsfp = ET.SubElement(qsfp, "qsfp")
temperature = ET.SubElement(qsfp, "temperature")
temperature.text = kwargs.pop('temperature')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_voltage(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfp = ET.SubElement(interface_identifier, "qsfp")
qsfp = ET.SubElement(qsfp, "qsfp")
voltage = ET.SubElement(qsfp, "voltage")
voltage.text = kwargs.pop('voltage')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_current(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfp = ET.SubElement(interface_identifier, "qsfp")
qsfp = ET.SubElement(qsfp, "qsfp")
current = ET.SubElement(qsfp, "current")
current.text = kwargs.pop('current')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_tx_power(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfp = ET.SubElement(interface_identifier, "qsfp")
qsfp = ET.SubElement(qsfp, "qsfp")
tx_power = ET.SubElement(qsfp, "tx-power")
tx_power.text = kwargs.pop('tx_power')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfp_qsfp_rx_power(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfp = ET.SubElement(interface_identifier, "qsfp")
qsfp = ET.SubElement(qsfp, "qsfp")
rx_power = ET.SubElement(qsfp, "rx-power")
rx_power.text = kwargs.pop('rx_power')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_speed(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
speed = ET.SubElement(qsfpp, "speed")
speed.text = kwargs.pop('speed')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_connector(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
connector = ET.SubElement(qsfpp, "connector")
connector.text = kwargs.pop('connector')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_encoding(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
encoding = ET.SubElement(qsfpp, "encoding")
encoding.text = kwargs.pop('encoding')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_vendor_name(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
vendor_name = ET.SubElement(qsfpp, "vendor-name")
vendor_name.text = kwargs.pop('vendor_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_vendor_oui(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
vendor_oui = ET.SubElement(qsfpp, "vendor-oui")
vendor_oui.text = kwargs.pop('vendor_oui')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_vendor_pn(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
vendor_pn = ET.SubElement(qsfpp, "vendor-pn")
vendor_pn.text = kwargs.pop('vendor_pn')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_vendor_rev(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
vendor_rev = ET.SubElement(qsfpp, "vendor-rev")
vendor_rev.text = kwargs.pop('vendor_rev')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_distance(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
distance = ET.SubElement(qsfpp, "distance")
distance.text = kwargs.pop('distance')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_media_form_factor(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
media_form_factor = ET.SubElement(qsfpp, "media-form-factor")
media_form_factor.text = kwargs.pop('media_form_factor')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_wavelength(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
wavelength = ET.SubElement(qsfpp, "wavelength")
wavelength.text = kwargs.pop('wavelength')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_serial_no(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
serial_no = ET.SubElement(qsfpp, "serial-no")
serial_no.text = kwargs.pop('serial_no')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_date_code(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
date_code = ET.SubElement(qsfpp, "date-code")
date_code.text = kwargs.pop('date_code')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_temperature(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
temperature = ET.SubElement(qsfpp, "temperature")
temperature.text = kwargs.pop('temperature')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_voltage(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
voltage = ET.SubElement(qsfpp, "voltage")
voltage.text = kwargs.pop('voltage')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_current(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
current = ET.SubElement(qsfpp, "current")
current.text = kwargs.pop('current')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_tx_power(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
tx_power = ET.SubElement(qsfpp, "tx-power")
tx_power.text = kwargs.pop('tx_power')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_qsfpp_qsfpp_rx_power(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
qsfpp = ET.SubElement(interface_identifier, "qsfpp")
qsfpp = ET.SubElement(qsfpp, "qsfpp")
rx_power = ET.SubElement(qsfpp, "rx-power")
rx_power.text = kwargs.pop('rx_power')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_speed(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp = ET.SubElement(interface_identifier, "cfp")
cfp = ET.SubElement(cfp, "cfp")
speed = ET.SubElement(cfp, "speed")
speed.text = kwargs.pop('speed')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_connector(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp = ET.SubElement(interface_identifier, "cfp")
cfp = ET.SubElement(cfp, "cfp")
connector = ET.SubElement(cfp, "connector")
connector.text = kwargs.pop('connector')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_encoding(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp = ET.SubElement(interface_identifier, "cfp")
cfp = ET.SubElement(cfp, "cfp")
encoding = ET.SubElement(cfp, "encoding")
encoding.text = kwargs.pop('encoding')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_vendor_name(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp = ET.SubElement(interface_identifier, "cfp")
cfp = ET.SubElement(cfp, "cfp")
vendor_name = ET.SubElement(cfp, "vendor-name")
vendor_name.text = kwargs.pop('vendor_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_vendor_oui(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp = ET.SubElement(interface_identifier, "cfp")
cfp = ET.SubElement(cfp, "cfp")
vendor_oui = ET.SubElement(cfp, "vendor-oui")
vendor_oui.text = kwargs.pop('vendor_oui')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_vendor_pn(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp = ET.SubElement(interface_identifier, "cfp")
cfp = ET.SubElement(cfp, "cfp")
vendor_pn = ET.SubElement(cfp, "vendor-pn")
vendor_pn.text = kwargs.pop('vendor_pn')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_vendor_rev(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp = ET.SubElement(interface_identifier, "cfp")
cfp = ET.SubElement(cfp, "cfp")
vendor_rev = ET.SubElement(cfp, "vendor-rev")
vendor_rev.text = kwargs.pop('vendor_rev')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_distance(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp = ET.SubElement(interface_identifier, "cfp")
cfp = ET.SubElement(cfp, "cfp")
distance = ET.SubElement(cfp, "distance")
distance.text = kwargs.pop('distance')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_media_form_factor(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp = ET.SubElement(interface_identifier, "cfp")
cfp = ET.SubElement(cfp, "cfp")
media_form_factor = ET.SubElement(cfp, "media-form-factor")
media_form_factor.text = kwargs.pop('media_form_factor')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_wavelength(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp = ET.SubElement(interface_identifier, "cfp")
cfp = ET.SubElement(cfp, "cfp")
wavelength = ET.SubElement(cfp, "wavelength")
wavelength.text = kwargs.pop('wavelength')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_serial_no(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp = ET.SubElement(interface_identifier, "cfp")
cfp = ET.SubElement(cfp, "cfp")
serial_no = ET.SubElement(cfp, "serial-no")
serial_no.text = kwargs.pop('serial_no')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_date_code(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp = ET.SubElement(interface_identifier, "cfp")
cfp = ET.SubElement(cfp, "cfp")
date_code = ET.SubElement(cfp, "date-code")
date_code.text = kwargs.pop('date_code')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_temperature(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp = ET.SubElement(interface_identifier, "cfp")
cfp = ET.SubElement(cfp, "cfp")
temperature = ET.SubElement(cfp, "temperature")
temperature.text = kwargs.pop('temperature')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_voltage(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp = ET.SubElement(interface_identifier, "cfp")
cfp = ET.SubElement(cfp, "cfp")
voltage = ET.SubElement(cfp, "voltage")
voltage.text = kwargs.pop('voltage')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_current(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp = ET.SubElement(interface_identifier, "cfp")
cfp = ET.SubElement(cfp, "cfp")
current = ET.SubElement(cfp, "current")
current.text = kwargs.pop('current')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_tx_power(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp = ET.SubElement(interface_identifier, "cfp")
cfp = ET.SubElement(cfp, "cfp")
tx_power = ET.SubElement(cfp, "tx-power")
tx_power.text = kwargs.pop('tx_power')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp_cfp_rx_power(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp = ET.SubElement(interface_identifier, "cfp")
cfp = ET.SubElement(cfp, "cfp")
rx_power = ET.SubElement(cfp, "rx-power")
rx_power.text = kwargs.pop('rx_power')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_speed(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp2 = ET.SubElement(interface_identifier, "cfp2")
cfp2 = ET.SubElement(cfp2, "cfp2")
speed = ET.SubElement(cfp2, "speed")
speed.text = kwargs.pop('speed')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_connector(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp2 = ET.SubElement(interface_identifier, "cfp2")
cfp2 = ET.SubElement(cfp2, "cfp2")
connector = ET.SubElement(cfp2, "connector")
connector.text = kwargs.pop('connector')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_encoding(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp2 = ET.SubElement(interface_identifier, "cfp2")
cfp2 = ET.SubElement(cfp2, "cfp2")
encoding = ET.SubElement(cfp2, "encoding")
encoding.text = kwargs.pop('encoding')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_vendor_name(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp2 = ET.SubElement(interface_identifier, "cfp2")
cfp2 = ET.SubElement(cfp2, "cfp2")
vendor_name = ET.SubElement(cfp2, "vendor-name")
vendor_name.text = kwargs.pop('vendor_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_vendor_oui(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp2 = ET.SubElement(interface_identifier, "cfp2")
cfp2 = ET.SubElement(cfp2, "cfp2")
vendor_oui = ET.SubElement(cfp2, "vendor-oui")
vendor_oui.text = kwargs.pop('vendor_oui')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_vendor_pn(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp2 = ET.SubElement(interface_identifier, "cfp2")
cfp2 = ET.SubElement(cfp2, "cfp2")
vendor_pn = ET.SubElement(cfp2, "vendor-pn")
vendor_pn.text = kwargs.pop('vendor_pn')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_vendor_rev(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp2 = ET.SubElement(interface_identifier, "cfp2")
cfp2 = ET.SubElement(cfp2, "cfp2")
vendor_rev = ET.SubElement(cfp2, "vendor-rev")
vendor_rev.text = kwargs.pop('vendor_rev')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_distance(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp2 = ET.SubElement(interface_identifier, "cfp2")
cfp2 = ET.SubElement(cfp2, "cfp2")
distance = ET.SubElement(cfp2, "distance")
distance.text = kwargs.pop('distance')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_media_form_factor(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp2 = ET.SubElement(interface_identifier, "cfp2")
cfp2 = ET.SubElement(cfp2, "cfp2")
media_form_factor = ET.SubElement(cfp2, "media-form-factor")
media_form_factor.text = kwargs.pop('media_form_factor')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_wavelength(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp2 = ET.SubElement(interface_identifier, "cfp2")
cfp2 = ET.SubElement(cfp2, "cfp2")
wavelength = ET.SubElement(cfp2, "wavelength")
wavelength.text = kwargs.pop('wavelength')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_serial_no(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp2 = ET.SubElement(interface_identifier, "cfp2")
cfp2 = ET.SubElement(cfp2, "cfp2")
serial_no = ET.SubElement(cfp2, "serial-no")
serial_no.text = kwargs.pop('serial_no')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_date_code(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp2 = ET.SubElement(interface_identifier, "cfp2")
cfp2 = ET.SubElement(cfp2, "cfp2")
date_code = ET.SubElement(cfp2, "date-code")
date_code.text = kwargs.pop('date_code')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_temperature(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp2 = ET.SubElement(interface_identifier, "cfp2")
cfp2 = ET.SubElement(cfp2, "cfp2")
temperature = ET.SubElement(cfp2, "temperature")
temperature.text = kwargs.pop('temperature')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_voltage(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp2 = ET.SubElement(interface_identifier, "cfp2")
cfp2 = ET.SubElement(cfp2, "cfp2")
voltage = ET.SubElement(cfp2, "voltage")
voltage.text = kwargs.pop('voltage')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_current(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp2 = ET.SubElement(interface_identifier, "cfp2")
cfp2 = ET.SubElement(cfp2, "cfp2")
current = ET.SubElement(cfp2, "current")
current.text = kwargs.pop('current')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_tx_power(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp2 = ET.SubElement(interface_identifier, "cfp2")
cfp2 = ET.SubElement(cfp2, "cfp2")
tx_power = ET.SubElement(cfp2, "tx-power")
tx_power.text = kwargs.pop('tx_power')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_media_detail_output_interface_interface_identifier_cfp2_cfp2_rx_power(self, **kwargs):
config = ET.Element("config")
get_media_detail = ET.Element("get_media_detail")
config = get_media_detail
output = ET.SubElement(get_media_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_identifier = ET.SubElement(interface, "interface-identifier")
cfp2 = ET.SubElement(interface_identifier, "cfp2")
cfp2 = ET.SubElement(cfp2, "cfp2")
rx_power = ET.SubElement(cfp2, "rx-power")
rx_power.text = kwargs.pop('rx_power')
callback = kwargs.pop('callback', self._callback)
return callback(config)
| true
| true
|
f71884bbf144038a727debbf7cccc7fa2cfb1499
| 720
|
py
|
Python
|
qa/rpc-tests/create_cache.py
|
jtoomim/BitcoinUnlimited
|
b7b9b59a8440f720c5e0c3d5aeb1bcc4e48f1b9c
|
[
"MIT"
] | 535
|
2015-09-04T15:10:08.000Z
|
2022-03-17T20:51:05.000Z
|
qa/rpc-tests/create_cache.py
|
jtoomim/BitcoinUnlimited
|
b7b9b59a8440f720c5e0c3d5aeb1bcc4e48f1b9c
|
[
"MIT"
] | 1,269
|
2016-01-31T20:21:24.000Z
|
2022-03-16T01:20:08.000Z
|
qa/rpc-tests/create_cache.py
|
jtoomim/BitcoinUnlimited
|
b7b9b59a8440f720c5e0c3d5aeb1bcc4e48f1b9c
|
[
"MIT"
] | 295
|
2015-10-19T16:12:29.000Z
|
2021-08-02T20:05:17.000Z
|
#!/usr/bin/env python3
# Copyright (c) 2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
import test_framework.loginit
#
# Helper script to create the cache
# (see BitcoinTestFramework.setup_chain)
#
from test_framework.test_framework import BitcoinTestFramework
class CreateCache(BitcoinTestFramework):
    """Dummy test whose only purpose is to trigger cache creation.

    Running main() invokes the framework's setup_chain(), which builds the
    cached regtest chain shared by the other rpc-tests; no actual test
    network or assertions are needed.
    """

    def __init__(self):
        super().__init__()
        # No nodes are required just to build the cache.
        self.num_nodes = 0
        self.nodes = []

    def setup_network(self):
        # Nothing to wire up: there are no test nodes.
        pass

    def run_test(self):
        # All the work happens in setup_chain(), driven by main().
        pass
if __name__ == '__main__':
    # main() runs setup_chain(), which creates the cache directories.
    CreateCache().main()
| 24
| 69
| 0.708333
|
import test_framework.loginit
from test_framework.test_framework import BitcoinTestFramework
class CreateCache(BitcoinTestFramework):
def __init__(self):
super().__init__()
self.num_nodes = 0
self.nodes = []
def setup_network(self):
pass
def run_test(self):
pass
if __name__ == '__main__':
CreateCache().main()
| true
| true
|
f71885021d239ae2002c7b0633f54ec994ea9bdc
| 255
|
py
|
Python
|
app/validators/answers/answer_validator.py
|
ajmaddaford/eq-questionnaire-validator
|
f1f2540533e01e476ffc0a558f36f8f822a7362c
|
[
"MIT"
] | 1
|
2021-09-10T12:03:02.000Z
|
2021-09-10T12:03:02.000Z
|
app/validators/answers/answer_validator.py
|
ajmaddaford/eq-questionnaire-validator
|
f1f2540533e01e476ffc0a558f36f8f822a7362c
|
[
"MIT"
] | 67
|
2020-02-05T11:54:27.000Z
|
2022-03-03T12:55:25.000Z
|
app/validators/answers/answer_validator.py
|
ajmaddaford/eq-questionnaire-validator
|
f1f2540533e01e476ffc0a558f36f8f822a7362c
|
[
"MIT"
] | 2
|
2021-04-11T07:45:45.000Z
|
2021-04-19T14:52:07.000Z
|
from app.validators.validator import Validator
class AnswerValidator(Validator):
    """Base validator for a single answer schema element.

    Keeps a reference to the answer and records its id in the error
    context, so every error produced for this answer carries "answer_id".
    """

    def __init__(self, schema_element):
        super().__init__(schema_element)
        self.answer = schema_element
        # Tag all errors from this validator with the answer's id.
        self.context["answer_id"] = schema_element["id"]
| 28.333333
| 53
| 0.713725
|
from app.validators.validator import Validator
class AnswerValidator(Validator):
def __init__(self, schema_element):
super().__init__(schema_element)
self.answer = schema_element
self.context["answer_id"] = self.answer["id"]
| true
| true
|
f718868cd1db2d24c9fc9a8d1fbfa74f3af35d37
| 22,294
|
py
|
Python
|
airtest/core/api.py
|
Aracoix/Airtest
|
d41737944738e651dd29564c29b88cc4c2e71e2e
|
[
"Apache-2.0"
] | 6,140
|
2018-01-24T03:27:48.000Z
|
2022-03-31T14:37:54.000Z
|
airtest/core/api.py
|
Aracoix/Airtest
|
d41737944738e651dd29564c29b88cc4c2e71e2e
|
[
"Apache-2.0"
] | 993
|
2018-02-02T11:21:40.000Z
|
2022-03-31T20:41:41.000Z
|
airtest/core/api.py
|
Aracoix/Airtest
|
d41737944738e651dd29564c29b88cc4c2e71e2e
|
[
"Apache-2.0"
] | 1,022
|
2018-03-05T07:45:22.000Z
|
2022-03-31T04:29:57.000Z
|
# -*- coding: utf-8 -*-
"""
This module contains the Airtest Core APIs.
"""
import os
import time
from six.moves.urllib.parse import parse_qsl, urlparse
from airtest.core.cv import Template, loop_find, try_log_screen
from airtest.core.error import TargetNotFoundError
from airtest.core.settings import Settings as ST
from airtest.utils.compat import script_log_dir
from airtest.core.helper import (G, delay_after_operation, import_device_cls,
logwrap, set_logdir, using, log)
"""
Device Setup APIs
"""
def init_device(platform="Android", uuid=None, **kwargs):
    """Create a device instance (if needed) and make it the current device.

    :param platform: Android, IOS or Windows
    :param uuid: uuid for target device, e.g. serialno for Android, handle for Windows, uuid for iOS
    :param kwargs: Optional platform specific keyword args, e.g. `cap_method=JAVACAP` for Android
    :return: device instance

    :Example:
        >>> init_device(platform="Android", uuid="SJE5T17B17", cap_method="JAVACAP")
        >>> init_device(platform="Windows", uuid="123456")
    """
    device_cls = import_device_cls(platform)
    new_dev = device_cls(uuid, **kwargs)
    # Registering through G also makes the instance the current device.
    G.add_device(new_dev)
    return new_dev
def connect_device(uri):
    """Parse *uri*, initialize the device it describes and set it as current.

    :param uri: an URI where to connect to device, e.g. `android://adbhost:adbport/serialno?param=value&param2=value2`
    :return: device instance

    :Example:
        >>> connect_device("Android:///")  # local adb device using default params
        >>> # local device with serial number SJE5T17B17 and custom params
        >>> connect_device("Android:///SJE5T17B17?cap_method=javacap&touch_method=adb")
        >>> # remote device using custom params Android://adbhost:adbport/serialno
        >>> connect_device("Android://127.0.0.1:5037/10.254.60.1:5555")
        >>> connect_device("Windows:///")  # connect to the desktop
        >>> connect_device("Windows:///123456")  # Connect to the window with handle 123456
        >>> connect_device("iOS:///127.0.0.1:8100")  # iOS device
    """
    parsed = urlparse(uri)
    params = dict(parse_qsl(parsed.query))
    if parsed.netloc:
        # "host:port" is forwarded to the device class as a [host, port] pair.
        params["host"] = parsed.netloc.split(":")
    return init_device(parsed.scheme, parsed.path.lstrip("/"), **params)
def device():
    """
    Return the current active device.

    :return: current device instance

    :Example:
        >>> dev = device()
        >>> dev.touch((100, 100))
    """
    # G.DEVICE is maintained by init_device()/set_current().
    return G.DEVICE
def set_current(idx):
    """Switch the current active device.

    :param idx: uuid or index of initialized device instance
    :raise IndexError: raised when device idx is not found
    :return: None
    :platforms: Android, iOS, Windows

    :Example:
        >>> set_current(0)            # switch to the first phone currently connected
        >>> set_current("serialno1")  # switch to the phone with serial number serialno1
    """
    by_uuid = {dev.uuid: dev for dev in G.DEVICE_LIST}
    # A uuid match takes precedence over interpreting idx as a list index.
    if idx in by_uuid:
        G.DEVICE = by_uuid[idx]
        return
    if isinstance(idx, int) and idx < len(G.DEVICE_LIST):
        G.DEVICE = G.DEVICE_LIST[idx]
        return
    raise IndexError("device idx not found in: %s or %s" % (
        list(by_uuid.keys()), list(range(len(G.DEVICE_LIST)))))
def auto_setup(basedir=None, devices=None, logdir=None, project_root=None, compress=None):
    """Auto setup running env and try connect android device if no device is connected.

    :param basedir: basedir of script, __file__ is also acceptable.
    :param devices: connect_device uri in list.
    :param logdir: log dir for script report, default is None for no log, set to ``True`` for ``<basedir>/log``.
    :param project_root: project root dir for `using` api.
    :param compress: The compression rate of the screenshot image, integer in range [1, 99], default is 10

    :Example:
        >>> auto_setup(__file__)
        >>> auto_setup(__file__, devices=["Android://127.0.0.1:5037/SJE5T17B17"],
        ...            logdir=True, project_root=r"D:\\test\\logs", compress=90)
    """
    if basedir:
        # Accept a file path (e.g. __file__) and reduce it to its directory.
        basedir = os.path.dirname(basedir) if os.path.isfile(basedir) else basedir
        if basedir not in G.BASEDIR:
            G.BASEDIR.append(basedir)
    for uri in devices or []:
        connect_device(uri)
    if logdir:
        set_logdir(script_log_dir(basedir, logdir))
    if project_root:
        ST.PROJECT_ROOT = project_root
    if compress:
        ST.SNAPSHOT_QUALITY = compress
"""
Device Operations
"""
@logwrap
def shell(cmd):
    """
    Run a shell command on the current device and return its output.

    :param cmd: command line to execute remotely, e.g. ``"ls /data/local/tmp"``
    :return: output of the shell command
    :platforms: Android
    :Example:

        >>> print(shell("ls"))

    """
    return G.DEVICE.shell(cmd)
@logwrap
def start_app(package, activity=None):
    """
    Launch an application on the current device.

    :param package: package name, e.g. ``"com.netease.my"``
    :param activity: optional activity to start; ``None`` means the main activity
    :return: None
    :platforms: Android, iOS
    :Example:

        >>> start_app("com.netease.cloudmusic")

    """
    G.DEVICE.start_app(package, activity)
@logwrap
def stop_app(package):
    """
    Stop a running application on the current device.

    :param package: package name to stop, see also `start_app`
    :return: None
    :platforms: Android, iOS
    """
    G.DEVICE.stop_app(package)
@logwrap
def clear_app(package):
    """
    Clear the stored data of an application on the current device.

    :param package: package name, see also `start_app`
    :return: None
    :platforms: Android
    """
    G.DEVICE.clear_app(package)
@logwrap
def install(filepath, **kwargs):
    """
    Install an application file on the current device.

    :param filepath: path of the package file to install
    :param kwargs: platform specific keyword arguments,
        e.g. ``install_options=["-r", "-t"]`` for ``adb install -r -t``
    :return: result of the underlying install call
    :platforms: Android
    :Example:

        >>> install(r"D:\\demo\\test.apk")

    """
    return G.DEVICE.install_app(filepath, **kwargs)
@logwrap
def uninstall(package):
    """
    Remove an application from the current device.

    :param package: package name to uninstall, see also `start_app`
    :return: result of the underlying uninstall call
    :platforms: Android
    """
    return G.DEVICE.uninstall_app(package)
@logwrap
def snapshot(filename=None, msg="", quality=None, max_size=None):
    """
    Capture the device screen, optionally saving it to a file.

    :param filename: target file; a relative path is resolved against
        ``ST.LOG_DIR`` (or the current directory when unset)
    :param msg: short description recorded in the report
    :param quality: image quality in [1, 99]; defaults to ``ST.SNAPSHOT_QUALITY``
    :param max_size: maximum picture dimension, e.g. 1200; defaults to
        ``ST.IMAGE_MAXSIZE`` when that is set
    :return: ``{"screen": filename, "resolution": ...}`` or None
    :platforms: Android, iOS, Windows
    :Example:

        >>> snapshot(msg="index")
        >>> snapshot(filename="test.png", msg="test", quality=90, max_size=1200)

    """
    quality = quality or ST.SNAPSHOT_QUALITY
    if ST.IMAGE_MAXSIZE and not max_size:
        max_size = ST.IMAGE_MAXSIZE
    if not filename:
        # No file requested: only log the screen for the report.
        return try_log_screen(quality=quality, max_size=max_size)
    if not os.path.isabs(filename):
        filename = os.path.join(ST.LOG_DIR or ".", filename)
    captured = G.DEVICE.snapshot(filename, quality=quality, max_size=max_size)
    return try_log_screen(captured, quality=quality, max_size=max_size)
@logwrap
def wake():
    """
    Wake up and unlock the current device.

    :return: None
    :platforms: Android

    .. note:: Might not work on some models.
    """
    G.DEVICE.wake()
@logwrap
def home():
    """
    Navigate to the home screen of the current device.

    :return: None
    :platforms: Android, iOS
    """
    G.DEVICE.home()
@logwrap
def touch(v, times=1, **kwargs):
    """
    Tap the device screen at a template match or an absolute position.

    :param v: a ``Template`` instance to locate, or absolute coordinates (x, y)
    :param times: number of taps to perform
    :param kwargs: platform specific keyword arguments
        (e.g. ``duration=2`` on Android/Windows, ``right_click=True`` on Windows)
    :return: final position that was tapped, e.g. (100, 100)
    :platforms: Android, Windows, iOS
    :Example:

        >>> touch((100, 100))
        >>> touch(Template(r"tpl1606730579419.png", target_pos=5))
        >>> touch((100, 100), times=2)

    """
    if isinstance(v, Template):
        target = loop_find(v, timeout=ST.FIND_TIMEOUT)
    else:
        # Plain coordinates: still log a screenshot for the report.
        try_log_screen()
        target = v

    remaining = times
    while remaining > 0:
        G.DEVICE.touch(target, **kwargs)
        time.sleep(0.05)
        remaining -= 1

    delay_after_operation()
    return target
click = touch # click is alias of touch
@logwrap
def double_click(v):
    """
    Double-tap the device screen at a template match or an absolute position.

    :param v: a ``Template`` instance to locate, or absolute coordinates (x, y)
    :return: final position that was tapped
    :Example:

        >>> double_click((100, 100))
        >>> double_click(Template(r"tpl1606730579419.png"))

    """
    if isinstance(v, Template):
        target = loop_find(v, timeout=ST.FIND_TIMEOUT)
    else:
        try_log_screen()
        target = v
    G.DEVICE.double_click(target)
    delay_after_operation()
    return target
@logwrap
def swipe(v1, v2=None, vector=None, **kwargs):
    """
    Swipe across the device screen.

    Either pass an explicit end point (``swipe(v1, v2=...)``) or a
    displacement (``swipe(v1, vector=(x, y))``); vector components in
    [0, 1] are interpreted as fractions of the screen size.

    :param v1: start point, a ``Template`` instance or coordinates (x, y)
    :param v2: end point, a ``Template`` instance or coordinates (x, y)
    :param vector: displacement, absolute (x, y) or screen percentage
        such as (0.5, 0.5)
    :param **kwargs: platform specific keyword arguments
        (e.g. ``duration=1, steps=6`` on Android and iOS)
    :raise Exception: when neither ``v2`` nor ``vector`` is given
    :return: (origin position, target position)
    :platforms: Android, Windows, iOS
    :Example:

        >>> swipe((100, 100), (200, 200))
        >>> swipe(Template(r"tpl1606814865574.png"), vector=[-0.0316, -0.3311])

    """
    if isinstance(v1, Template):
        start = loop_find(v1, timeout=ST.FIND_TIMEOUT)
    else:
        try_log_screen()
        start = v1

    if v2:
        end = loop_find(v2, timeout=ST.FIND_TIMEOUT_TMP) if isinstance(v2, Template) else v2
    elif vector:
        dx, dy = vector[0], vector[1]
        if dx <= 1 and dy <= 1:
            # Percentage vector: scale by the current screen resolution.
            width, height = G.DEVICE.get_current_resolution()
            dx, dy = int(dx * width), int(dy * height)
        end = (start[0] + dx, start[1] + dy)
    else:
        raise Exception("no enough params for swipe")

    G.DEVICE.swipe(start, end, **kwargs)
    delay_after_operation()
    return start, end
@logwrap
def pinch(in_or_out='in', center=None, percent=0.5):
    """
    Perform a two-finger pinch gesture on the device screen.

    :param in_or_out: gesture direction, one of ``"in"`` or ``"out"``
    :param center: center point of the gesture; defaults to the screen center
    :param percent: fraction of the screen covered by the gesture, default 0.5
    :return: None
    :platforms: Android
    :Example:

        >>> pinch()
        >>> pinch('out', center=(100, 100))

    """
    try_log_screen()
    G.DEVICE.pinch(in_or_out=in_or_out, center=center, percent=percent)
    delay_after_operation()
@logwrap
def keyevent(keyname, **kwargs):
    """
    Send a key event to the current device.

    On Android this is equivalent to ``adb shell input keyevent KEYNAME``
    (e.g. ``"HOME"``, ``"BACK"``, ``"KEYCODE_DEL"``, or a numeric code such
    as ``"3"``). On Windows the ``pywinauto.keyboard`` syntax is used
    (e.g. ``"{DEL}"``, ``"%{F4}"``). On iOS only ``home``/``volumeUp``/
    ``volumeDown`` are supported.

    :param keyname: platform specific key name
    :param **kwargs: platform specific keyword arguments
    :return: None
    :platforms: Android, Windows, iOS
    :Example:

        >>> keyevent("HOME")
        >>> keyevent("{DEL}")  # Windows

    """
    G.DEVICE.keyevent(keyname, **kwargs)
    delay_after_operation()
@logwrap
def text(text, enter=True, **kwargs):
    """
    Type text into the currently focused input widget on the device.

    :param text: text to type; unicode is supported
    :param enter: press ``Enter`` after typing when True (default)
    :param kwargs: platform specific keyword arguments
        (e.g. ``search=True`` on Android to tap the search button afterwards)
    :return: None
    :platforms: Android, Windows, iOS
    :Example:

        >>> text("test")
        >>> text("test", enter=False)

    """
    G.DEVICE.text(text, enter=enter, **kwargs)
    delay_after_operation()
@logwrap
def sleep(secs=1.0):
    """
    Pause the script for ``secs`` seconds; the pause is recorded in the report.

    :param secs: seconds to sleep, default 1.0
    :return: None
    :platforms: Android, Windows, iOS
    :Example:

        >>> sleep(1)

    """
    time.sleep(secs)
@logwrap
def wait(v, timeout=None, interval=0.5, intervalfunc=None):
    """
    Block until the given ``Template`` matches on the device screen.

    :param v: ``Template`` instance to wait for
    :param timeout: overall time limit in seconds; ``None`` means ``ST.FIND_TIMEOUT``
    :param interval: seconds between match attempts
    :param intervalfunc: callback invoked after each failed attempt
    :raise TargetNotFoundError: when the target is not found before timeout
    :return: coordinates of the matched target
    :platforms: Android, Windows, iOS
    :Example:

        >>> wait(Template(r"tpl1606821804906.png"))
        >>> wait(Template(r"tpl1606821804906.png"), timeout=120, interval=3)

    """
    effective_timeout = timeout or ST.FIND_TIMEOUT
    return loop_find(v, timeout=effective_timeout, interval=interval,
                     intervalfunc=intervalfunc)
@logwrap
def exists(v):
    """
    Test whether the given target is visible on the device screen.

    :param v: target to look for
    :return: ``False`` when not found, otherwise the target's coordinates
        (which can be passed directly to ``touch`` to save a second search)
    :platforms: Android, Windows, iOS
    :Example:

        >>> pos = exists(Template(r"tpl1606822430589.png"))
        >>> if pos:
        >>>     touch(pos)

    """
    try:
        return loop_find(v, timeout=ST.FIND_TIMEOUT_TMP)
    except TargetNotFoundError:
        return False
@logwrap
def find_all(v):
    """
    Locate every occurrence of the target on the device screen.

    :param v: target to find
    :return: list of matches, each a dict with ``result`` (x, y),
        ``rectangle`` (four corner points) and ``confidence``
    :platforms: Android, Windows, iOS
    :Example:

        >>> find_all(Template(r"tpl1607511235111.png"))
        [{'result': (218, 468), 'rectangle': ((149, 440), (149, 496), (288, 496), (288, 440)),
        'confidence': 0.9999996423721313}]

    """
    current_screen = G.DEVICE.snapshot(quality=ST.SNAPSHOT_QUALITY)
    return v.match_all_in(current_screen)
"""
Assertions
"""
@logwrap
def assert_exists(v, msg=""):
    """
    Assert that the target is visible on the device screen.

    :param v: target to check
    :param msg: short description recorded in the report
    :raise AssertionError: when the target cannot be found
    :return: coordinates of the target
    :platforms: Android, Windows, iOS
    :Example:

        >>> assert_exists(Template(r"tpl1607324047907.png"), "assert exists")

    """
    try:
        # Use the strict threshold when configured, otherwise the template's own.
        return loop_find(v, timeout=ST.FIND_TIMEOUT,
                         threshold=ST.THRESHOLD_STRICT or v.threshold)
    except TargetNotFoundError:
        raise AssertionError("%s does not exist in screen, message: %s" % (v, msg))
@logwrap
def assert_not_exists(v, msg=""):
    """
    Assert that the target is NOT visible on the device screen.

    :param v: target to check
    :param msg: short description recorded in the report
    :raise AssertionError: when the target is unexpectedly found
    :return: None
    :platforms: Android, Windows, iOS
    :Example:

        >>> assert_not_exists(Template(r"tpl1607324047907.png"), "assert not exists")

    """
    try:
        found_at = loop_find(v, timeout=ST.FIND_TIMEOUT_TMP)
    except TargetNotFoundError:
        return
    raise AssertionError("%s exists unexpectedly at pos: %s, message: %s" % (v, found_at, msg))
@logwrap
def assert_equal(first, second, msg=""):
    """
    Assert that two values compare equal.

    :param first: first value
    :param second: second value
    :param msg: short description recorded in the report
    :raise AssertionError: when the values differ
    :return: None
    :platforms: Android, Windows, iOS
    :Example:

        >>> assert_equal(1, 1, msg="assert 1==1")

    """
    if first != second:
        detail = "%s and %s are not equal, message: %s" % (first, second, msg)
        raise AssertionError(detail)
@logwrap
def assert_not_equal(first, second, msg=""):
    """
    Assert that two values compare unequal.

    :param first: first value
    :param second: second value
    :param msg: short description recorded in the report
    :raise AssertionError: when the values are equal
    :return: None
    :platforms: Android, Windows, iOS
    :Example:

        >>> assert_not_equal(1, 2, msg="assert 1!=2")

    """
    if first == second:
        detail = "%s and %s are equal, message: %s" % (first, second, msg)
        raise AssertionError(detail)
| 30.581619
| 127
| 0.632906
|
import os
import time
from six.moves.urllib.parse import parse_qsl, urlparse
from airtest.core.cv import Template, loop_find, try_log_screen
from airtest.core.error import TargetNotFoundError
from airtest.core.settings import Settings as ST
from airtest.utils.compat import script_log_dir
from airtest.core.helper import (G, delay_after_operation, import_device_cls,
logwrap, set_logdir, using, log)
def init_device(platform="Android", uuid=None, **kwargs):
cls = import_device_cls(platform)
dev = cls(uuid, **kwargs)
G.add_device(dev)
return dev
def connect_device(uri):
d = urlparse(uri)
platform = d.scheme
host = d.netloc
uuid = d.path.lstrip("/")
params = dict(parse_qsl(d.query))
if host:
params["host"] = host.split(":")
dev = init_device(platform, uuid, **params)
return dev
def device():
return G.DEVICE
def set_current(idx):
dev_dict = {dev.uuid: dev for dev in G.DEVICE_LIST}
if idx in dev_dict:
current_dev = dev_dict[idx]
elif isinstance(idx, int) and idx < len(G.DEVICE_LIST):
current_dev = G.DEVICE_LIST[idx]
else:
raise IndexError("device idx not found in: %s or %s" % (
list(dev_dict.keys()), list(range(len(G.DEVICE_LIST)))))
G.DEVICE = current_dev
def auto_setup(basedir=None, devices=None, logdir=None, project_root=None, compress=None):
if basedir:
if os.path.isfile(basedir):
basedir = os.path.dirname(basedir)
if basedir not in G.BASEDIR:
G.BASEDIR.append(basedir)
if devices:
for dev in devices:
connect_device(dev)
if logdir:
logdir = script_log_dir(basedir, logdir)
set_logdir(logdir)
if project_root:
ST.PROJECT_ROOT = project_root
if compress:
ST.SNAPSHOT_QUALITY = compress
@logwrap
def shell(cmd):
return G.DEVICE.shell(cmd)
@logwrap
def start_app(package, activity=None):
G.DEVICE.start_app(package, activity)
@logwrap
def stop_app(package):
G.DEVICE.stop_app(package)
@logwrap
def clear_app(package):
G.DEVICE.clear_app(package)
@logwrap
def install(filepath, **kwargs):
return G.DEVICE.install_app(filepath, **kwargs)
@logwrap
def uninstall(package):
return G.DEVICE.uninstall_app(package)
@logwrap
def snapshot(filename=None, msg="", quality=None, max_size=None):
if not quality:
quality = ST.SNAPSHOT_QUALITY
if not max_size and ST.IMAGE_MAXSIZE:
max_size = ST.IMAGE_MAXSIZE
if filename:
if not os.path.isabs(filename):
logdir = ST.LOG_DIR or "."
filename = os.path.join(logdir, filename)
screen = G.DEVICE.snapshot(filename, quality=quality, max_size=max_size)
return try_log_screen(screen, quality=quality, max_size=max_size)
else:
return try_log_screen(quality=quality, max_size=max_size)
@logwrap
def wake():
G.DEVICE.wake()
@logwrap
def home():
G.DEVICE.home()
@logwrap
def touch(v, times=1, **kwargs):
if isinstance(v, Template):
pos = loop_find(v, timeout=ST.FIND_TIMEOUT)
else:
try_log_screen()
pos = v
for _ in range(times):
G.DEVICE.touch(pos, **kwargs)
time.sleep(0.05)
delay_after_operation()
return pos
click = touch
@logwrap
def double_click(v):
if isinstance(v, Template):
pos = loop_find(v, timeout=ST.FIND_TIMEOUT)
else:
try_log_screen()
pos = v
G.DEVICE.double_click(pos)
delay_after_operation()
return pos
@logwrap
def swipe(v1, v2=None, vector=None, **kwargs):
if isinstance(v1, Template):
pos1 = loop_find(v1, timeout=ST.FIND_TIMEOUT)
else:
try_log_screen()
pos1 = v1
if v2:
if isinstance(v2, Template):
pos2 = loop_find(v2, timeout=ST.FIND_TIMEOUT_TMP)
else:
pos2 = v2
elif vector:
if vector[0] <= 1 and vector[1] <= 1:
w, h = G.DEVICE.get_current_resolution()
vector = (int(vector[0] * w), int(vector[1] * h))
pos2 = (pos1[0] + vector[0], pos1[1] + vector[1])
else:
raise Exception("no enough params for swipe")
G.DEVICE.swipe(pos1, pos2, **kwargs)
delay_after_operation()
return pos1, pos2
@logwrap
def pinch(in_or_out='in', center=None, percent=0.5):
try_log_screen()
G.DEVICE.pinch(in_or_out=in_or_out, center=center, percent=percent)
delay_after_operation()
@logwrap
def keyevent(keyname, **kwargs):
G.DEVICE.keyevent(keyname, **kwargs)
delay_after_operation()
@logwrap
def text(text, enter=True, **kwargs):
G.DEVICE.text(text, enter=enter, **kwargs)
delay_after_operation()
@logwrap
def sleep(secs=1.0):
time.sleep(secs)
@logwrap
def wait(v, timeout=None, interval=0.5, intervalfunc=None):
timeout = timeout or ST.FIND_TIMEOUT
pos = loop_find(v, timeout=timeout, interval=interval, intervalfunc=intervalfunc)
return pos
@logwrap
def exists(v):
try:
pos = loop_find(v, timeout=ST.FIND_TIMEOUT_TMP)
except TargetNotFoundError:
return False
else:
return pos
@logwrap
def find_all(v):
screen = G.DEVICE.snapshot(quality=ST.SNAPSHOT_QUALITY)
return v.match_all_in(screen)
@logwrap
def assert_exists(v, msg=""):
try:
pos = loop_find(v, timeout=ST.FIND_TIMEOUT, threshold=ST.THRESHOLD_STRICT or v.threshold)
return pos
except TargetNotFoundError:
raise AssertionError("%s does not exist in screen, message: %s" % (v, msg))
@logwrap
def assert_not_exists(v, msg=""):
try:
pos = loop_find(v, timeout=ST.FIND_TIMEOUT_TMP)
raise AssertionError("%s exists unexpectedly at pos: %s, message: %s" % (v, pos, msg))
except TargetNotFoundError:
pass
@logwrap
def assert_equal(first, second, msg=""):
if first != second:
raise AssertionError("%s and %s are not equal, message: %s" % (first, second, msg))
@logwrap
def assert_not_equal(first, second, msg=""):
if first == second:
raise AssertionError("%s and %s are equal, message: %s" % (first, second, msg))
| true
| true
|
f71886a55daf56e9ad908610ff29c7fc570805ef
| 3,988
|
py
|
Python
|
model_zoo/official/recommend/wide_and_deep_multitable/train_and_eval.py
|
huxian123/mindspore
|
ec5ba10c82bbd6eccafe32d3a1149add90105bc8
|
[
"Apache-2.0"
] | 1
|
2021-04-23T06:35:18.000Z
|
2021-04-23T06:35:18.000Z
|
model_zoo/official/recommend/wide_and_deep_multitable/train_and_eval.py
|
nudt-eddie/mindspore
|
55372b41fdfae6d2b88d7078971e06d537f6c558
|
[
"Apache-2.0"
] | null | null | null |
model_zoo/official/recommend/wide_and_deep_multitable/train_and_eval.py
|
nudt-eddie/mindspore
|
55372b41fdfae6d2b88d7078971e06d537f6c558
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
""" training_and_evaluating """
import os
import sys
from mindspore import Model, context
from mindspore.train.callback import ModelCheckpoint, CheckpointConfig
from mindspore.train.callback import TimeMonitor
from src.wide_and_deep import PredictWithSigmoid, TrainStepWrap, NetWithLossClass, WideDeepModel
from src.callbacks import LossCallBack, EvalCallBack
from src.datasets import create_dataset, compute_emb_dim
from src.metrics import AUCMetric
from src.config import WideDeepConfig
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
def get_WideDeep_net(config):
    """
    Build the training and evaluation networks of the wide&deep model.

    :param config: WideDeepConfig carrying the model hyper-parameters
    :return: (train_net, eval_net) tuple sharing one WideDeepModel backbone
    """
    backbone = WideDeepModel(config)
    train_net = TrainStepWrap(NetWithLossClass(backbone, config), config)
    eval_net = PredictWithSigmoid(backbone)
    return train_net, eval_net
class ModelBuilder():
    """
    Helper that assembles training hooks and the wide&deep networks.
    """

    def __init__(self):
        pass

    def get_hook(self):
        """Placeholder for framework hooks; intentionally a no-op."""
        pass

    def get_train_hook(self):
        """
        Return the list of training callbacks (currently only LossCallBack).
        """
        hooks = []
        callback = LossCallBack()
        hooks.append(callback)
        # Default to "0" so int() cannot raise TypeError when the
        # DEVICE_ID environment variable is unset (single-device runs).
        if int(os.getenv('DEVICE_ID', '0')) == 0:
            pass
        return hooks

    def get_net(self, config):
        """Return the (train_net, eval_net) pair built from `config`."""
        return get_WideDeep_net(config)
def train_and_eval(config):
    """
    Train the wide&deep model and evaluate it while training.

    :param config: WideDeepConfig carrying data_path, epochs, batch_size,
        is_tf_dataset and ckpt_path.
    :return: None
    """
    data_path = config.data_path
    epochs = config.epochs
    print("epochs is {}".format(epochs))
    # Datasets are created with epochs=1; the epoch loop is driven by
    # model.train(epochs, ...) below.
    ds_train = create_dataset(data_path, train_mode=True, epochs=1,
                              batch_size=config.batch_size, is_tf_dataset=config.is_tf_dataset)
    ds_eval = create_dataset(data_path, train_mode=False, epochs=1,
                             batch_size=config.batch_size, is_tf_dataset=config.is_tf_dataset)
    print("ds_train.size: {}".format(ds_train.get_dataset_size()))
    print("ds_eval.size: {}".format(ds_eval.get_dataset_size()))
    net_builder = ModelBuilder()
    train_net, eval_net = net_builder.get_net(config)
    train_net.set_train()
    auc_metric = AUCMetric()
    model = Model(train_net, eval_network=eval_net, metrics={"auc": auc_metric})
    eval_callback = EvalCallBack(model, ds_eval, auc_metric, config)
    callback = LossCallBack(config)
    # Only save the last checkpoint at the last epoch. For saving epochs at each epoch, please
    # set save_checkpoint_steps=ds_train.get_dataset_size()
    ckptconfig = CheckpointConfig(save_checkpoint_steps=ds_train.get_dataset_size()*config.epochs,
                                  keep_checkpoint_max=10)
    ckpoint_cb = ModelCheckpoint(prefix='widedeep_train',
                                 directory=config.ckpt_path, config=ckptconfig)
    # sink_size equals one full pass over ds_train, so the callbacks fire
    # on epoch boundaries.
    model.train(epochs, ds_train, callbacks=[TimeMonitor(ds_train.get_dataset_size()), eval_callback,
                                             callback, ckpoint_cb], sink_size=ds_train.get_dataset_size())
if __name__ == "__main__":
    wide_and_deep_config = WideDeepConfig()
    # Populate the config from command-line arguments, then derive the
    # embedding dimension from the parsed field sizes.
    wide_and_deep_config.argparse_init()
    compute_emb_dim(wide_and_deep_config)
    # Graph mode on the Ascend ("Davinci") backend; save_graphs dumps the
    # compiled IR graphs for debugging.
    context.set_context(mode=context.GRAPH_MODE, device_target="Davinci",
                        save_graphs=True)
    train_and_eval(wide_and_deep_config)
| 36.254545
| 107
| 0.675527
|
import os
import sys
from mindspore import Model, context
from mindspore.train.callback import ModelCheckpoint, CheckpointConfig
from mindspore.train.callback import TimeMonitor
from src.wide_and_deep import PredictWithSigmoid, TrainStepWrap, NetWithLossClass, WideDeepModel
from src.callbacks import LossCallBack, EvalCallBack
from src.datasets import create_dataset, compute_emb_dim
from src.metrics import AUCMetric
from src.config import WideDeepConfig
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
def get_WideDeep_net(config):
WideDeep_net = WideDeepModel(config)
loss_net = NetWithLossClass(WideDeep_net, config)
train_net = TrainStepWrap(loss_net, config)
eval_net = PredictWithSigmoid(WideDeep_net)
return train_net, eval_net
class ModelBuilder():
def __init__(self):
pass
def get_hook(self):
pass
def get_train_hook(self):
hooks = []
callback = LossCallBack()
hooks.append(callback)
if int(os.getenv('DEVICE_ID')) == 0:
pass
return hooks
def get_net(self, config):
return get_WideDeep_net(config)
def train_and_eval(config):
data_path = config.data_path
epochs = config.epochs
print("epochs is {}".format(epochs))
ds_train = create_dataset(data_path, train_mode=True, epochs=1,
batch_size=config.batch_size, is_tf_dataset=config.is_tf_dataset)
ds_eval = create_dataset(data_path, train_mode=False, epochs=1,
batch_size=config.batch_size, is_tf_dataset=config.is_tf_dataset)
print("ds_train.size: {}".format(ds_train.get_dataset_size()))
print("ds_eval.size: {}".format(ds_eval.get_dataset_size()))
net_builder = ModelBuilder()
train_net, eval_net = net_builder.get_net(config)
train_net.set_train()
auc_metric = AUCMetric()
model = Model(train_net, eval_network=eval_net, metrics={"auc": auc_metric})
eval_callback = EvalCallBack(model, ds_eval, auc_metric, config)
callback = LossCallBack(config)
ckptconfig = CheckpointConfig(save_checkpoint_steps=ds_train.get_dataset_size()*config.epochs,
keep_checkpoint_max=10)
ckpoint_cb = ModelCheckpoint(prefix='widedeep_train',
directory=config.ckpt_path, config=ckptconfig)
model.train(epochs, ds_train, callbacks=[TimeMonitor(ds_train.get_dataset_size()), eval_callback,
callback, ckpoint_cb], sink_size=ds_train.get_dataset_size())
if __name__ == "__main__":
wide_and_deep_config = WideDeepConfig()
wide_and_deep_config.argparse_init()
compute_emb_dim(wide_and_deep_config)
context.set_context(mode=context.GRAPH_MODE, device_target="Davinci",
save_graphs=True)
train_and_eval(wide_and_deep_config)
| true
| true
|
f71887a83f72a2a2fe70f9569cd374a4db495fcc
| 242
|
py
|
Python
|
easilyb/file_ops/_file_ops.py
|
xaled/easilyb
|
cdb5f738205f700b37e03c50d04061a2d1e730cc
|
[
"MIT"
] | null | null | null |
easilyb/file_ops/_file_ops.py
|
xaled/easilyb
|
cdb5f738205f700b37e03c50d04061a2d1e730cc
|
[
"MIT"
] | null | null | null |
easilyb/file_ops/_file_ops.py
|
xaled/easilyb
|
cdb5f738205f700b37e03c50d04061a2d1e730cc
|
[
"MIT"
] | null | null | null |
import logging
logger = logging.getLogger(__name__)
def read_all(sock):
    """
    Read from `sock` until the peer stops sending and return the payload.

    :param sock: a connected socket-like object exposing settimeout()/recv()
    :return: the received payload as bytes
    :raises socket.timeout: if no chunk arrives within 5 seconds

    .. note:: A short recv() is taken as end-of-stream. That is only a
       heuristic -- TCP may legally deliver partial chunks mid-stream.
    """
    sock.settimeout(5.0)
    # recv() returns bytes on Python 3; accumulating into "" would raise
    # TypeError on the first chunk, so start from an empty bytes object.
    data = b""
    while True:
        part = sock.recv(4096)
        data += part
        # A short (or empty) chunk signals the end of the stream.
        if len(part) < 4096:
            break
    return data
| 20.166667
| 36
| 0.57438
|
import logging
logger = logging.getLogger(__name__)
def read_all(sock):
sock.settimeout(5.0)
data = ""
while True:
part = sock.recv(4096)
data += part
if len(part) < 4096:
break
return data
| true
| true
|
f71887d368f45f3ec2e13e47d2be1ae91cb6c737
| 4,803
|
py
|
Python
|
python_anon/PlotLetterROC.py
|
MaviccPRP/Anonymizer
|
3d75ed3e97e260b6ded7e188eb3d58d749844e36
|
[
"MIT"
] | null | null | null |
python_anon/PlotLetterROC.py
|
MaviccPRP/Anonymizer
|
3d75ed3e97e260b6ded7e188eb3d58d749844e36
|
[
"MIT"
] | 2
|
2019-06-14T19:55:39.000Z
|
2019-06-14T20:16:11.000Z
|
python_anon/PlotLetterROC.py
|
MaviccPRP/Anonymizer
|
3d75ed3e97e260b6ded7e188eb3d58d749844e36
|
[
"MIT"
] | 1
|
2020-03-13T14:32:31.000Z
|
2020-03-13T14:32:31.000Z
|
#!/usr/env/python
"""
Author: Ralf Hauenschild
E-Mail: ralf_hauenschild@gmx.de
"""
import sys
import os
import numpy
import matplotlib
import matplotlib as mpl
import matplotlib.pyplot as plt
import pylab as py
import matplotlib.cm as cm
import math
# Accumulators: one sub-list per input results file from sys.argv[1:].
c = []  # PPV threshold of each measurement row (drives the colour map)
sens = []  # sens (recall)
senserror = []  # error bars of the sensitivity estimates
specloss = []  # 1-spec (false-positive rate)
specerror = []  # error bars of the specificity estimates
p = []  # ppv (precision)
perror = []  # error bars of the PPV estimates
for i in range(1, len(sys.argv)):
    # NOTE(review): this compares the whole argument against the literal
    # "distinct", while the branch below tests `"distinct" in sys.argv[i]`
    # (substring). Confirm which arguments are expected here.
    if sys.argv[i] != "distinct":
        c.append([])
        sens.append([])  # sens (recall)
        senserror.append([])
        specloss.append([])  # 1-spec
        specerror.append([])
        p.append([])  # ppv (precision)
        perror.append([])
    infile = open(sys.argv[i], "r")
    line = infile.readline()
    # Rows shorter than 5 characters terminate the table.
    while len(line) > 4:
        splitlist = line[:-1].split("\t")
        if "distinct" in sys.argv[i]:
            # "distinct" files carry the word-distinct metrics:
            # columns 21/23 = sens/err, 33/35 = spec/err, 27/29 = PPV/err.
            c[i-1].append(float(splitlist[1]))
            sens[i-1].append(float(splitlist[21]))
            senserror[i-1].append(float(splitlist[23]))
            specloss[i-1].append(1-float(splitlist[33]))
            specerror[i-1].append(float(splitlist[35]))
            p[i-1].append(float(splitlist[27]))
            perror[i-1].append(float(splitlist[29]))
        else:
            # Plain files carry the event-level metrics:
            # columns 3/5 = sens/err, 15/17 = spec/err, 9/11 = PPV/err.
            c[i-1].append(float(splitlist[1]))
            sens[i-1].append(float(splitlist[3]))
            senserror[i-1].append(float(splitlist[5]))
            specloss[i-1].append(1-float(splitlist[15]))
            specerror[i-1].append(float(splitlist[17]))
            p[i-1].append(float(splitlist[9]))
            perror[i-1].append(float(splitlist[11]))
        line = infile.readline()
    infile.close()
# Figure with two panels: ROC curve (left) and precision/recall (right).
fig = py.figure(1, figsize=(12, 6))
py.subplots_adjust(top=0.8)
linestyles = ["-", "--"]  # one style per input file
#colors=[cb.to_rgba(value) for value in c]
# Colour scale spans the PPV-threshold range of the first input file.
norm=mpl.colors.Normalize(vmin=min(c[0]), vmax=max(c[0]))
fig.suptitle("Anonymization performance assessment under sliding PPV threshold\nfor words leading X-ed out training content")
#labels = ["-1 leader only", "-1 & -2 leader indep."]
labels = ["X-out events", "word-distinct X-out events"]
handles = []
m = cm.ScalarMappable(norm=norm, cmap=cm.jet)
# --- Left panel: ROC (TPR over FPR), coloured by PPV threshold ---
ax = fig.add_subplot(121)
for column in range(0, len(specloss)):
    # Draw each segment with the mean threshold colour of its endpoints.
    for i in range(1, len(specloss[column])):
        aplot, = ax.plot(specloss[column][i-1:i+1], sens[column][i-1:i+1], linestyles[column], color=m.to_rgba(numpy.mean(c[column][i-1:i+1])), linewidth=3)
    for i in range(0, len(specloss[column])):
        ax.errorbar(specloss[column][i:i+1], sens[column][i:i+1], xerr=specerror[column][i:i+1], yerr=senserror[column][i:i+1], marker='o', fmt='-o', linewidth=3, color=m.to_rgba(c[column][i]))
    # Invisible scatter only provides the mappable for the colorbar below.
    group1 = ax.scatter(specloss[column], sens[column], c=c[column], cmap=cm.jet, norm=norm, alpha=0)
import matplotlib.ticker as ticker
#, format=ticker.FormatStrFormatter('%.0f')
cb = py.colorbar(group1)
cb.set_alpha(1)
cb.draw_all()
cb.ax.set_ylabel("PPV threshold for feature to be considered in model")
py.xlim(xmin=0)
py.ylim(ymin=0)
py.xlabel("FPR (1-specificity)")
py.ylabel("TPR (sensitivity)")
py.title("ROC")
# Off-screen dummy lines give the legend one handle per linestyle.
a, = ax.plot([-1, -1], [-1, -1], linestyles[0], color=m.to_rgba(c[column][0]), linewidth=3)
b, = ax.plot([-1, -1], [-1, -1], linestyles[1], color=m.to_rgba(c[column][0]), linewidth=3)
#plt.legend([a, b], labels, loc='lower right')
#plt.legend(handles=handles, labels=labels)
# --- Right panel: precision over recall, same colour coding ---
ax = fig.add_subplot(122)
for column in range(0, len(specloss)):
    print sens[column]
    for i in range(1, len(specloss[column])):
        ax.plot(sens[column][i-1:i+1], p[column][i-1:i+1], linestyles[column], color=m.to_rgba(numpy.mean(c[column][i-1:i+1])), linewidth=3)
    for i in range(0, len(specloss[column])):
        ax.errorbar(sens[column][i:i+1], p[column][i:i+1], xerr=senserror[column][i:i+1], yerr=perror[column][i:i+1], marker='o', fmt='-o', linewidth=3, color=m.to_rgba(c[column][i]))
cb = py.colorbar(group1)
cb.set_alpha(1)
cb.draw_all()
cb.ax.set_ylabel("PPV threshold for feature to be considered in model")
py.xlim(xmin=0, xmax=1)
py.ylim(ymin=0, ymax=1)
py.xlabel("sensitivity (recall)")
py.ylabel("PPV (precision)")
py.title("P/R (precision by recall)")
a, = ax.plot([-1, -1], [-1, -1], linestyles[0], color=m.to_rgba(c[column][0]), linewidth=3)
b, = ax.plot([-1, -1], [-1, -1], linestyles[1], color=m.to_rgba(c[column][0]), linewidth=3)
#plt.legend([a, b], labels, loc='lower left')
# AUC of the first file: trapezoid area under the ROC, normalised by the
# area of the rectangle spanned by the same FPR range (points reversed so
# x is increasing).
auc = round(numpy.trapz(y=[item for item in sens[0][::-1]], x=[item for item in specloss[0][::-1]]) / numpy.trapz(y=[1.0 for item in sens[0][::-1]], x=[item for item in specloss[0][::-1]]), 4)
print "AUC:", auc
plt.savefig(sys.argv[1] + ".roc" + "_auc_" + str(auc) + ".svg")
plt.show()
plt.close()
| 32.452703
| 193
| 0.609827
|
"""
Author: Ralf Hauenschild
E-Mail: ralf_hauenschild@gmx.de
"""
import sys
import os
import numpy
import matplotlib
import matplotlib as mpl
import matplotlib.pyplot as plt
import pylab as py
import matplotlib.cm as cm
import math
c = []
sens = []
senserror = []
specloss = []
specerror = []
p = []
perror = []
for i in range(1, len(sys.argv)):
if sys.argv[i] != "distinct":
c.append([])
sens.append([])
senserror.append([])
specloss.append([])
specerror.append([])
p.append([])
perror.append([])
infile = open(sys.argv[i], "r")
line = infile.readline()
while len(line) > 4:
splitlist = line[:-1].split("\t")
if "distinct" in sys.argv[i]:
c[i-1].append(float(splitlist[1]))
sens[i-1].append(float(splitlist[21]))
senserror[i-1].append(float(splitlist[23]))
specloss[i-1].append(1-float(splitlist[33]))
specerror[i-1].append(float(splitlist[35]))
p[i-1].append(float(splitlist[27]))
perror[i-1].append(float(splitlist[29]))
else:
c[i-1].append(float(splitlist[1]))
sens[i-1].append(float(splitlist[3]))
senserror[i-1].append(float(splitlist[5]))
specloss[i-1].append(1-float(splitlist[15]))
specerror[i-1].append(float(splitlist[17]))
p[i-1].append(float(splitlist[9]))
perror[i-1].append(float(splitlist[11]))
line = infile.readline()
infile.close()
fig = py.figure(1, figsize=(12, 6))
py.subplots_adjust(top=0.8)
linestyles = ["-", "--"]
norm=mpl.colors.Normalize(vmin=min(c[0]), vmax=max(c[0]))
fig.suptitle("Anonymization performance assessment under sliding PPV threshold\nfor words leading X-ed out training content")
labels = ["X-out events", "word-distinct X-out events"]
handles = []
m = cm.ScalarMappable(norm=norm, cmap=cm.jet)
ax = fig.add_subplot(121)
for column in range(0, len(specloss)):
for i in range(1, len(specloss[column])):
aplot, = ax.plot(specloss[column][i-1:i+1], sens[column][i-1:i+1], linestyles[column], color=m.to_rgba(numpy.mean(c[column][i-1:i+1])), linewidth=3)
for i in range(0, len(specloss[column])):
ax.errorbar(specloss[column][i:i+1], sens[column][i:i+1], xerr=specerror[column][i:i+1], yerr=senserror[column][i:i+1], marker='o', fmt='-o', linewidth=3, color=m.to_rgba(c[column][i]))
group1 = ax.scatter(specloss[column], sens[column], c=c[column], cmap=cm.jet, norm=norm, alpha=0)
import matplotlib.ticker as ticker
cb = py.colorbar(group1)
cb.set_alpha(1)
cb.draw_all()
cb.ax.set_ylabel("PPV threshold for feature to be considered in model")
py.xlim(xmin=0)
py.ylim(ymin=0)
py.xlabel("FPR (1-specificity)")
py.ylabel("TPR (sensitivity)")
py.title("ROC")
a, = ax.plot([-1, -1], [-1, -1], linestyles[0], color=m.to_rgba(c[column][0]), linewidth=3)
b, = ax.plot([-1, -1], [-1, -1], linestyles[1], color=m.to_rgba(c[column][0]), linewidth=3)
ax = fig.add_subplot(122)
for column in range(0, len(specloss)):
print sens[column]
for i in range(1, len(specloss[column])):
ax.plot(sens[column][i-1:i+1], p[column][i-1:i+1], linestyles[column], color=m.to_rgba(numpy.mean(c[column][i-1:i+1])), linewidth=3)
for i in range(0, len(specloss[column])):
ax.errorbar(sens[column][i:i+1], p[column][i:i+1], xerr=senserror[column][i:i+1], yerr=perror[column][i:i+1], marker='o', fmt='-o', linewidth=3, color=m.to_rgba(c[column][i]))
cb = py.colorbar(group1)
cb.set_alpha(1)
cb.draw_all()
cb.ax.set_ylabel("PPV threshold for feature to be considered in model")
py.xlim(xmin=0, xmax=1)
py.ylim(ymin=0, ymax=1)
py.xlabel("sensitivity (recall)")
py.ylabel("PPV (precision)")
py.title("P/R (precision by recall)")
a, = ax.plot([-1, -1], [-1, -1], linestyles[0], color=m.to_rgba(c[column][0]), linewidth=3)
b, = ax.plot([-1, -1], [-1, -1], linestyles[1], color=m.to_rgba(c[column][0]), linewidth=3)
auc = round(numpy.trapz(y=[item for item in sens[0][::-1]], x=[item for item in specloss[0][::-1]]) / numpy.trapz(y=[1.0 for item in sens[0][::-1]], x=[item for item in specloss[0][::-1]]), 4)
print "AUC:", auc
plt.savefig(sys.argv[1] + ".roc" + "_auc_" + str(auc) + ".svg")
plt.show()
plt.close()
| false
| true
|
f718889a61bd413f4f9ce078cd48cee4a7b368ac
| 36,429
|
py
|
Python
|
tests/keras/test_callbacks.py
|
mdand2000/keras-team-keras
|
5eecd55a6f1d6d149b42f9b76aa53d4c5ab8d3eb
|
[
"MIT"
] | 2
|
2019-09-17T22:01:41.000Z
|
2020-05-30T05:48:14.000Z
|
tests/keras/test_callbacks.py
|
mdand2000/keras-team-keras
|
5eecd55a6f1d6d149b42f9b76aa53d4c5ab8d3eb
|
[
"MIT"
] | null | null | null |
tests/keras/test_callbacks.py
|
mdand2000/keras-team-keras
|
5eecd55a6f1d6d149b42f9b76aa53d4c5ab8d3eb
|
[
"MIT"
] | 3
|
2019-08-12T18:15:17.000Z
|
2021-06-20T19:40:13.000Z
|
import os
import multiprocessing
import numpy as np
import pytest
from csv import reader
from csv import Sniffer
import shutil
from keras import optimizers
from keras import initializers
from keras import callbacks
from keras.models import Sequential, Model
from keras.layers import Input, Dense, Dropout, add, dot, Lambda
from keras.layers.convolutional import Conv2D
from keras.layers.pooling import MaxPooling2D, GlobalAveragePooling1D, GlobalAveragePooling2D
from keras.utils.test_utils import get_test_data
from keras.utils.test_utils import keras_test
from keras import backend as K
from keras.utils import np_utils
# Python 3 ships `unittest.mock`; fall back to the external `mock` package on
# Python 2. Catch only ImportError -- the original bare `except:` would also
# have hidden unrelated failures (e.g. SystemExit, KeyboardInterrupt).
try:
    from unittest.mock import patch
except ImportError:
    from mock import patch

# Shared fixture dimensions for all tests in this module.
input_dim = 2
num_hidden = 4
num_classes = 2
batch_size = 5
train_samples = 20
test_samples = 20
@keras_test
def test_TerminateOnNaN():
    """TerminateOnNaN halts fit()/fit_generator() on the first non-finite loss."""
    np.random.seed(1337)
    (X_train, y_train), (X_test, y_test) = get_test_data(num_train=train_samples,
                                                         num_test=test_samples,
                                                         input_shape=(input_dim,),
                                                         classification=True,
                                                         num_classes=num_classes)
    y_test = np_utils.to_categorical(y_test)
    y_train = np_utils.to_categorical(y_train)
    cbks = [callbacks.TerminateOnNaN()]
    model = Sequential()
    # Huge constant weights through 5 stacked relu layers with MSE loss make
    # the loss overflow immediately, so only the first epoch should run.
    initializer = initializers.Constant(value=1e5)
    for _ in range(5):
        model.add(Dense(num_hidden, input_dim=input_dim, activation='relu',
                        kernel_initializer=initializer))
    model.add(Dense(num_classes, activation='linear'))
    model.compile(loss='mean_squared_error',
                  optimizer='rmsprop')
    # case 1 fit
    history = model.fit(X_train, y_train, batch_size=batch_size,
                        validation_data=(X_test, y_test), callbacks=cbks, epochs=20)
    loss = history.history['loss']
    assert len(loss) == 1  # terminated after the first of 20 requested epochs
    assert loss[0] == np.inf
    # case 2 fit_generator
    def data_generator():
        # Endless generator cycling through the training data batch by batch.
        max_batch_index = len(X_train) // batch_size
        i = 0
        while 1:
            yield (X_train[i * batch_size: (i + 1) * batch_size],
                   y_train[i * batch_size: (i + 1) * batch_size])
            i += 1
            i = i % max_batch_index
    history = model.fit_generator(data_generator(),
                                  len(X_train),
                                  validation_data=(X_test, y_test),
                                  callbacks=cbks,
                                  epochs=20)
    loss = history.history['loss']
    assert len(loss) == 1
    assert loss[0] == np.inf or np.isnan(loss[0])
@keras_test
def test_stop_training_csv(tmpdir):
    """CSVLogger must still record the final (NaN-loss) epoch when
    TerminateOnNaN aborts training mid-run."""
    np.random.seed(1337)
    fp = str(tmpdir / 'test.csv')
    (X_train, y_train), (X_test, y_test) = get_test_data(num_train=train_samples,
                                                         num_test=test_samples,
                                                         input_shape=(input_dim,),
                                                         classification=True,
                                                         num_classes=num_classes)
    y_test = np_utils.to_categorical(y_test)
    y_train = np_utils.to_categorical(y_train)
    cbks = [callbacks.TerminateOnNaN(), callbacks.CSVLogger(fp)]
    model = Sequential()
    for _ in range(5):
        model.add(Dense(num_hidden, input_dim=input_dim, activation='relu'))
    model.add(Dense(num_classes, activation='linear'))
    model.compile(loss='mean_squared_error',
                  optimizer='rmsprop')
    def data_generator():
        # Yields real batches for three passes over the data, then switches
        # to all-NaN batches to provoke TerminateOnNaN.
        i = 0
        max_batch_index = len(X_train) // batch_size
        tot = 0
        while 1:
            if tot > 3 * len(X_train):
                yield np.ones([batch_size, input_dim]) * np.nan, np.ones([batch_size, num_classes]) * np.nan
            else:
                yield (X_train[i * batch_size: (i + 1) * batch_size],
                       y_train[i * batch_size: (i + 1) * batch_size])
            i += 1
            tot += 1
            i = i % max_batch_index
    history = model.fit_generator(data_generator(),
                                  len(X_train) // batch_size,
                                  validation_data=(X_test, y_test),
                                  callbacks=cbks,
                                  epochs=20)
    loss = history.history['loss']
    assert len(loss) > 1
    assert loss[-1] == np.inf or np.isnan(loss[-1])
    values = []
    with open(fp) as f:
        for x in reader(f):
            values.append(x)
    assert 'nan' in values[-1], 'The last epoch was not logged.'
    os.remove(fp)
@keras_test
def test_ModelCheckpoint(tmpdir):
    """Exercise ModelCheckpoint across monitor modes, save_best_only and a
    multi-epoch `period` with a templated filename."""
    np.random.seed(1337)
    filepath = str(tmpdir / 'checkpoint.h5')
    (X_train, y_train), (X_test, y_test) = get_test_data(num_train=train_samples,
                                                         num_test=test_samples,
                                                         input_shape=(input_dim,),
                                                         classification=True,
                                                         num_classes=num_classes)
    y_test = np_utils.to_categorical(y_test)
    y_train = np_utils.to_categorical(y_train)
    # case 1
    monitor = 'val_loss'
    save_best_only = False
    mode = 'auto'
    model = Sequential()
    model.add(Dense(num_hidden, input_dim=input_dim, activation='relu'))
    model.add(Dense(num_classes, activation='softmax'))
    model.compile(loss='categorical_crossentropy',
                  optimizer='rmsprop',
                  metrics=['accuracy'])
    cbks = [callbacks.ModelCheckpoint(filepath, monitor=monitor,
                                      save_best_only=save_best_only, mode=mode)]
    model.fit(X_train, y_train, batch_size=batch_size,
              validation_data=(X_test, y_test), callbacks=cbks, epochs=1)
    assert os.path.isfile(filepath)
    os.remove(filepath)
    # case 2
    mode = 'min'
    cbks = [callbacks.ModelCheckpoint(filepath, monitor=monitor,
                                      save_best_only=save_best_only, mode=mode)]
    model.fit(X_train, y_train, batch_size=batch_size,
              validation_data=(X_test, y_test), callbacks=cbks, epochs=1)
    assert os.path.isfile(filepath)
    os.remove(filepath)
    # case 3
    mode = 'max'
    monitor = 'val_acc'
    cbks = [callbacks.ModelCheckpoint(filepath, monitor=monitor,
                                      save_best_only=save_best_only, mode=mode)]
    model.fit(X_train, y_train, batch_size=batch_size,
              validation_data=(X_test, y_test), callbacks=cbks, epochs=1)
    assert os.path.isfile(filepath)
    os.remove(filepath)
    # case 4
    save_best_only = True
    cbks = [callbacks.ModelCheckpoint(filepath, monitor=monitor,
                                      save_best_only=save_best_only, mode=mode)]
    model.fit(X_train, y_train, batch_size=batch_size,
              validation_data=(X_test, y_test), callbacks=cbks, epochs=1)
    assert os.path.isfile(filepath)
    os.remove(filepath)
    # case 5: period=2 saves only every second epoch.
    # NOTE(review): this filepath is relative, so these checkpoints are
    # written to the CWD rather than tmpdir -- the trailing
    # `assert not tmpdir.listdir()` passes trivially for them. Confirm intent.
    save_best_only = False
    period = 2
    mode = 'auto'
    filepath = 'checkpoint.{epoch:02d}.h5'
    cbks = [callbacks.ModelCheckpoint(filepath, monitor=monitor,
                                      save_best_only=save_best_only, mode=mode,
                                      period=period)]
    model.fit(X_train, y_train, batch_size=batch_size,
              validation_data=(X_test, y_test), callbacks=cbks, epochs=4)
    assert os.path.isfile(filepath.format(epoch=2))
    assert os.path.isfile(filepath.format(epoch=4))
    assert not os.path.exists(filepath.format(epoch=1))
    assert not os.path.exists(filepath.format(epoch=3))
    os.remove(filepath.format(epoch=2))
    os.remove(filepath.format(epoch=4))
    assert not tmpdir.listdir()
@keras_test
def test_EarlyStopping():
    """Smoke-test EarlyStopping in 'max' and 'auto' modes on a real fit."""
    np.random.seed(1337)
    (X_train, y_train), (X_test, y_test) = get_test_data(num_train=train_samples,
                                                         num_test=test_samples,
                                                         input_shape=(input_dim,),
                                                         classification=True,
                                                         num_classes=num_classes)
    y_test = np_utils.to_categorical(y_test)
    y_train = np_utils.to_categorical(y_train)
    model = Sequential()
    model.add(Dense(num_hidden, input_dim=input_dim, activation='relu'))
    model.add(Dense(num_classes, activation='softmax'))
    model.compile(loss='categorical_crossentropy',
                  optimizer='rmsprop',
                  metrics=['accuracy'])
    # patience=0 in explicit 'max' mode: stop as soon as val_acc fails to rise.
    mode = 'max'
    monitor = 'val_acc'
    patience = 0
    cbks = [callbacks.EarlyStopping(patience=patience, monitor=monitor, mode=mode)]
    history = model.fit(X_train, y_train, batch_size=batch_size,
                        validation_data=(X_test, y_test), callbacks=cbks, epochs=20)
    # 'auto' mode must infer maximisation from the 'val_acc' monitor name.
    mode = 'auto'
    monitor = 'val_acc'
    patience = 2
    cbks = [callbacks.EarlyStopping(patience=patience, monitor=monitor, mode=mode)]
    history = model.fit(X_train, y_train, batch_size=batch_size,
                        validation_data=(X_test, y_test), callbacks=cbks, epochs=20)
@keras_test
def test_EarlyStopping_reuse():
    """A single EarlyStopping instance must reset its state between fit() calls."""
    np.random.seed(1337)
    patience = 3
    data = np.random.random((100, 1))
    labels = np.where(data > 0.5, 1, 0)
    model = Sequential((
        Dense(1, input_dim=1, activation='relu'),
        Dense(1, activation='sigmoid'),
    ))
    model.compile(optimizer='sgd', loss='binary_crossentropy', metrics=['accuracy'])
    stopper = callbacks.EarlyStopping(monitor='acc', patience=patience)
    weights = model.get_weights()
    hist = model.fit(data, labels, callbacks=[stopper], epochs=20)
    assert len(hist.epoch) >= patience
    # This should allow training to go for at least `patience` epochs
    model.set_weights(weights)
    hist = model.fit(data, labels, callbacks=[stopper], epochs=20)
    assert len(hist.epoch) >= patience
@keras_test
def test_EarlyStopping_patience():
    """EarlyStopping halts training `patience` epochs after the best value."""

    class DummyModel(object):
        def __init__(self):
            self.stop_training = False

    early_stop = callbacks.EarlyStopping(monitor='val_loss', patience=2)
    early_stop.model = DummyModel()
    # Best loss is at epoch 0; epochs 1-2 bring no improvement, so with
    # patience=2 the callback must stop training after the third epoch.
    losses = [0.0860, 0.1096, 0.1040, 0.1019]
    early_stop.on_train_begin()
    epochs_trained = 0
    for epoch, loss in enumerate(losses):
        epochs_trained += 1
        early_stop.on_epoch_end(epoch, logs={'val_loss': loss})
        if early_stop.model.stop_training:
            break
    assert epochs_trained == 3
@keras_test
def test_EarlyStopping_baseline():
    """With `baseline` set, EarlyStopping aborts unless the baseline is met
    within the patience window."""

    class DummyModel(object):
        def __init__(self):
            self.stop_training = False

    def baseline_tester(acc_levels):
        # Feed the accuracy sequence epoch by epoch and count how many
        # epochs run before the callback flags the dummy model to stop.
        early_stop = callbacks.EarlyStopping(monitor='val_acc', baseline=0.75, patience=2)
        early_stop.model = DummyModel()
        early_stop.on_train_begin()
        epochs_trained = 0
        for epoch, acc in enumerate(acc_levels):
            epochs_trained += 1
            early_stop.on_epoch_end(epoch, logs={'val_acc': acc})
            if early_stop.model.stop_training:
                break
        return epochs_trained

    acc_levels = [0.55, 0.76, 0.81, 0.81]
    baseline_met = baseline_tester(acc_levels)
    acc_levels = [0.55, 0.74, 0.81, 0.81]
    baseline_not_met = baseline_tester(acc_levels)
    # Baseline (0.75) reached in the second epoch: all 4 epochs run.
    assert baseline_met == 4
    # Baseline missed within patience=2 epochs: training stops after epoch 2.
    assert baseline_not_met == 2
@keras_test
def test_LearningRateScheduler():
    """LearningRateScheduler applies the schedule function to every epoch's LR."""
    np.random.seed(1337)
    (X_train, y_train), (X_test, y_test) = get_test_data(num_train=train_samples,
                                                         num_test=test_samples,
                                                         input_shape=(input_dim,),
                                                         classification=True,
                                                         num_classes=num_classes)
    y_test = np_utils.to_categorical(y_test)
    y_train = np_utils.to_categorical(y_train)
    model = Sequential()
    model.add(Dense(num_hidden, input_dim=input_dim, activation='relu'))
    model.add(Dense(num_classes, activation='softmax'))
    model.compile(loss='categorical_crossentropy',
                  optimizer='sgd',
                  metrics=['accuracy'])
    cbks = [callbacks.LearningRateScheduler(lambda x: 1. / (1. + x))]
    model.fit(X_train, y_train, batch_size=batch_size,
              validation_data=(X_test, y_test), callbacks=cbks, epochs=5)
    # The last scheduled epoch index is 4, so the final LR is 1 / (1 + 4) = 0.2.
    # Bug fix: the original `(lr - 0.2) < K.epsilon()` was vacuously true for
    # any lr < 0.2; take the absolute difference so the check is two-sided.
    assert abs(float(K.get_value(model.optimizer.lr)) - 0.2) < K.epsilon()
@keras_test
def test_ReduceLROnPlateau():
    """ReduceLROnPlateau lowers the LR by `factor` once `min_delta` improvement
    is not seen within `patience` epochs."""
    np.random.seed(1337)
    (X_train, y_train), (X_test, y_test) = get_test_data(num_train=train_samples,
                                                         num_test=test_samples,
                                                         input_shape=(input_dim,),
                                                         classification=True,
                                                         num_classes=num_classes)
    y_test = np_utils.to_categorical(y_test)
    y_train = np_utils.to_categorical(y_train)
    def make_model():
        # Fresh identically-seeded model so both scenarios start equal.
        np.random.seed(1337)
        model = Sequential()
        model.add(Dense(num_hidden, input_dim=input_dim, activation='relu'))
        model.add(Dense(num_classes, activation='softmax'))
        model.compile(loss='categorical_crossentropy',
                      optimizer=optimizers.SGD(lr=0.1),
                      metrics=['accuracy'])
        return model
    model = make_model()
    # This should reduce the LR after the first epoch (due to high epsilon).
    cbks = [callbacks.ReduceLROnPlateau(monitor='val_loss', factor=0.1, min_delta=10, patience=1, cooldown=5)]
    model.fit(X_train, y_train, batch_size=batch_size,
              validation_data=(X_test, y_test), callbacks=cbks, epochs=5, verbose=2)
    assert np.allclose(float(K.get_value(model.optimizer.lr)), 0.01, atol=K.epsilon())
    # With min_delta=0 the loss keeps "improving", so the LR stays at 0.1.
    model = make_model()
    cbks = [callbacks.ReduceLROnPlateau(monitor='val_loss', factor=0.1, min_delta=0, patience=1, cooldown=5)]
    model.fit(X_train, y_train, batch_size=batch_size,
              validation_data=(X_test, y_test), callbacks=cbks, epochs=5, verbose=2)
    assert np.allclose(float(K.get_value(model.optimizer.lr)), 0.1, atol=K.epsilon())
@keras_test
def test_ReduceLROnPlateau_patience():
    """The LR is only lowered once the `patience` window has elapsed."""

    class DummyOptimizer(object):
        def __init__(self):
            self.lr = K.variable(1.0)

    class DummyModel(object):
        def __init__(self):
            self.optimizer = DummyOptimizer()

    reduce_on_plateau = callbacks.ReduceLROnPlateau(monitor='val_loss',
                                                    patience=2)
    reduce_on_plateau.model = DummyModel()
    # Record the LR after each simulated epoch of a non-improving loss.
    lrs = []
    for epoch, loss in enumerate([0.0860, 0.1096, 0.1040]):
        reduce_on_plateau.on_epoch_end(epoch, logs={'val_loss': loss})
        lrs.append(K.get_value(reduce_on_plateau.model.optimizer.lr))
    # The learning rates should be 1.0 except the last one
    assert all(lr == 1.0 for lr in lrs[:-1])
    assert lrs[-1] < 1.0
@keras_test
def test_ReduceLROnPlateau_backwards_compatibility():
    """The deprecated `epsilon` kwarg is remapped to `min_delta` with a warning."""
    import warnings
    with warnings.catch_warnings(record=True) as caught:
        cb = callbacks.ReduceLROnPlateau(epsilon=1e-13)
        # Check if warnings are disabled
        if os.environ.get("PYTHONWARNINGS") != "ignore":
            assert "`epsilon` argument is deprecated" in str(caught[0].message)
    assert not hasattr(cb, 'epsilon')
    assert hasattr(cb, 'min_delta')
    assert cb.min_delta == 1e-13
@keras_test
def test_CSVLogger(tmpdir):
    """CSVLogger honours a custom separator, supports append mode, and writes
    exactly one header when its instance is reused."""
    np.random.seed(1337)
    filepath = str(tmpdir / 'log.tsv')
    sep = '\t'
    (X_train, y_train), (X_test, y_test) = get_test_data(num_train=train_samples,
                                                         num_test=test_samples,
                                                         input_shape=(input_dim,),
                                                         classification=True,
                                                         num_classes=num_classes)
    y_test = np_utils.to_categorical(y_test)
    y_train = np_utils.to_categorical(y_train)
    def make_model():
        # Fresh identically-seeded model for each scenario.
        np.random.seed(1337)
        model = Sequential()
        model.add(Dense(num_hidden, input_dim=input_dim, activation='relu'))
        model.add(Dense(num_classes, activation='softmax'))
        model.compile(loss='categorical_crossentropy',
                      optimizer=optimizers.SGD(lr=0.1),
                      metrics=['accuracy'])
        return model
    # case 1, create new file with defined separator
    model = make_model()
    cbks = [callbacks.CSVLogger(filepath, separator=sep)]
    model.fit(X_train, y_train, batch_size=batch_size,
              validation_data=(X_test, y_test), callbacks=cbks, epochs=1)
    assert os.path.isfile(filepath)
    with open(filepath) as csvfile:
        dialect = Sniffer().sniff(csvfile.read())
    assert dialect.delimiter == sep
    del model
    del cbks
    # case 2, append data to existing file, skip header
    model = make_model()
    cbks = [callbacks.CSVLogger(filepath, separator=sep, append=True)]
    model.fit(X_train, y_train, batch_size=batch_size,
              validation_data=(X_test, y_test), callbacks=cbks, epochs=1)
    # case 3, reuse of CSVLogger object
    model.fit(X_train, y_train, batch_size=batch_size,
              validation_data=(X_test, y_test), callbacks=cbks, epochs=1)
    import re
    with open(filepath) as csvfile:
        output = " ".join(csvfile.readlines())
        # The header row (containing "epoch") must appear exactly once.
        assert len(re.findall('epoch', output)) == 1
    os.remove(filepath)
    assert not tmpdir.listdir()
@keras_test
def test_TensorBoard(tmpdir):
    """Smoke-test TensorBoard logging across fit/fit_generator, with and
    without validation data and histograms."""
    np.random.seed(np.random.randint(1, 1e7))
    filepath = str(tmpdir / 'logs')
    (X_train, y_train), (X_test, y_test) = get_test_data(
        num_train=train_samples,
        num_test=test_samples,
        input_shape=(input_dim,),
        classification=True,
        num_classes=num_classes)
    y_test = np_utils.to_categorical(y_test)
    y_train = np_utils.to_categorical(y_train)
    def data_generator(train):
        # Endless batch generator over either the train or the test split.
        if train:
            max_batch_index = len(X_train) // batch_size
        else:
            max_batch_index = len(X_test) // batch_size
        i = 0
        while 1:
            if train:
                # simulate multi-input/output models
                yield (X_train[i * batch_size: (i + 1) * batch_size],
                       y_train[i * batch_size: (i + 1) * batch_size])
            else:
                yield (X_test[i * batch_size: (i + 1) * batch_size],
                       y_test[i * batch_size: (i + 1) * batch_size])
            i += 1
            i = i % max_batch_index
    inp = Input((input_dim,))
    hidden = Dense(num_hidden, activation='relu')(inp)
    hidden = Dropout(0.1)(hidden)
    output = Dense(num_classes, activation='softmax')(hidden)
    model = Model(inputs=inp, outputs=output)
    model.compile(loss='categorical_crossentropy',
                  optimizer='sgd',
                  metrics=['accuracy'])
    # we must generate new callbacks for each test, as they aren't stateless
    def callbacks_factory(histogram_freq):
        return [callbacks.TensorBoard(log_dir=filepath,
                                      histogram_freq=histogram_freq,
                                      write_images=True, write_grads=True,
                                      embeddings_freq=1,
                                      embeddings_layer_names=['dense_1'],
                                      batch_size=5)]
    # fit without validation data
    model.fit(X_train, y_train, batch_size=batch_size,
              callbacks=callbacks_factory(histogram_freq=0), epochs=3)
    # fit with validation data and accuracy
    model.fit(X_train, y_train, batch_size=batch_size,
              validation_data=(X_test, y_test),
              callbacks=callbacks_factory(histogram_freq=0), epochs=2)
    # fit generator without validation data
    model.fit_generator(data_generator(True), len(X_train), epochs=2,
                        callbacks=callbacks_factory(histogram_freq=0))
    # fit generator with validation data and accuracy
    model.fit_generator(data_generator(True), len(X_train), epochs=2,
                        validation_data=(X_test, y_test),
                        callbacks=callbacks_factory(histogram_freq=1))
    assert os.path.isdir(filepath)
    shutil.rmtree(filepath)
    assert not tmpdir.listdir()
@keras_test
@pytest.mark.skipif((K.backend() != 'tensorflow'),
                    reason='Requires TensorFlow backend')
def test_TensorBoard_histogram_freq_must_have_validation_data(tmpdir):
    """histogram_freq > 0 requires concrete validation data: fit without it,
    and fit_generator with none or a generator, must raise ValueError."""
    np.random.seed(np.random.randint(1, 1e7))
    filepath = str(tmpdir / 'logs')
    (X_train, y_train), (X_test, y_test) = get_test_data(
        num_train=train_samples,
        num_test=test_samples,
        input_shape=(input_dim,),
        classification=True,
        num_classes=num_classes)
    y_test = np_utils.to_categorical(y_test)
    y_train = np_utils.to_categorical(y_train)
    def data_generator(train):
        # Endless batch generator over either the train or the test split.
        if train:
            max_batch_index = len(X_train) // batch_size
        else:
            max_batch_index = len(X_test) // batch_size
        i = 0
        while 1:
            if train:
                # simulate multi-input/output models
                yield (X_train[i * batch_size: (i + 1) * batch_size],
                       y_train[i * batch_size: (i + 1) * batch_size])
            else:
                yield (X_test[i * batch_size: (i + 1) * batch_size],
                       y_test[i * batch_size: (i + 1) * batch_size])
            i += 1
            i = i % max_batch_index
    inp = Input((input_dim,))
    hidden = Dense(num_hidden, activation='relu')(inp)
    hidden = Dropout(0.1)(hidden)
    output = Dense(num_classes, activation='softmax')(hidden)
    model = Model(inputs=inp, outputs=output)
    model.compile(loss='categorical_crossentropy',
                  optimizer='sgd',
                  metrics=['accuracy'])
    # we must generate new callbacks for each test, as they aren't stateless
    def callbacks_factory(histogram_freq):
        return [callbacks.TensorBoard(log_dir=filepath,
                                      histogram_freq=histogram_freq,
                                      write_images=True, write_grads=True,
                                      embeddings_freq=1,
                                      embeddings_layer_names=['dense_1'],
                                      batch_size=5)]
    # fit without validation data should raise ValueError if histogram_freq > 0
    with pytest.raises(ValueError) as raised_exception:
        model.fit(X_train, y_train, batch_size=batch_size,
                  callbacks=callbacks_factory(histogram_freq=1), epochs=3)
    assert 'validation_data must be provided' in str(raised_exception.value)
    # fit generator without validation data should raise ValueError if
    # histogram_freq > 0
    with pytest.raises(ValueError) as raised_exception:
        model.fit_generator(data_generator(True), len(X_train), epochs=2,
                            callbacks=callbacks_factory(histogram_freq=1))
    assert 'validation_data must be provided' in str(raised_exception.value)
    # fit generator with validation data generator should raise ValueError if
    # histogram_freq > 0
    with pytest.raises(ValueError) as raised_exception:
        model.fit_generator(data_generator(True), len(X_train), epochs=2,
                            validation_data=data_generator(False),
                            validation_steps=1,
                            callbacks=callbacks_factory(histogram_freq=1))
    assert 'validation_data must be provided' in str(raised_exception.value)
@keras_test
def test_TensorBoard_multi_input_output(tmpdir):
    """TensorBoard must cope with a model that has two inputs and two outputs
    (including a layer returning a list of tensors)."""
    np.random.seed(np.random.randint(1, 1e7))
    filepath = str(tmpdir / 'logs')
    (X_train, y_train), (X_test, y_test) = get_test_data(
        num_train=train_samples,
        num_test=test_samples,
        input_shape=(input_dim, input_dim),
        classification=True,
        num_classes=num_classes)
    y_test = np_utils.to_categorical(y_test)
    y_train = np_utils.to_categorical(y_train)
    def data_generator(train):
        # Endless generator duplicating each batch for the two inputs/outputs.
        if train:
            max_batch_index = len(X_train) // batch_size
        else:
            max_batch_index = len(X_test) // batch_size
        i = 0
        while 1:
            if train:
                # simulate multi-input/output models
                yield ([X_train[i * batch_size: (i + 1) * batch_size]] * 2,
                       [y_train[i * batch_size: (i + 1) * batch_size]] * 2)
            else:
                yield ([X_test[i * batch_size: (i + 1) * batch_size]] * 2,
                       [y_test[i * batch_size: (i + 1) * batch_size]] * 2)
            i += 1
            i = i % max_batch_index
    inp1 = Input((input_dim, input_dim))
    inp2 = Input((input_dim, input_dim))
    inp_3d = add([inp1, inp2])
    inp_2d = GlobalAveragePooling1D()(inp_3d)
    inp_pair = Lambda(lambda x: x)([inp_3d, inp_2d])  # test a layer with a list of output tensors
    hidden = dot(inp_pair, axes=-1)
    hidden = Dense(num_hidden, activation='relu')(hidden)
    hidden = Dropout(0.1)(hidden)
    output1 = Dense(num_classes, activation='softmax')(hidden)
    output2 = Dense(num_classes, activation='softmax')(hidden)
    model = Model(inputs=[inp1, inp2], outputs=[output1, output2])
    model.compile(loss='categorical_crossentropy',
                  optimizer='sgd',
                  metrics=['accuracy'])
    # we must generate new callbacks for each test, as they aren't stateless
    def callbacks_factory(histogram_freq):
        return [callbacks.TensorBoard(log_dir=filepath,
                                      histogram_freq=histogram_freq,
                                      write_images=True, write_grads=True,
                                      embeddings_freq=1,
                                      embeddings_layer_names=['dense_1'],
                                      batch_size=5)]
    # fit without validation data
    model.fit([X_train] * 2, [y_train] * 2, batch_size=batch_size,
              callbacks=callbacks_factory(histogram_freq=0), epochs=3)
    # fit with validation data and accuracy
    model.fit([X_train] * 2, [y_train] * 2, batch_size=batch_size,
              validation_data=([X_test] * 2, [y_test] * 2),
              callbacks=callbacks_factory(histogram_freq=1), epochs=2)
    # fit generator without validation data
    model.fit_generator(data_generator(True), len(X_train), epochs=2,
                        callbacks=callbacks_factory(histogram_freq=0))
    # fit generator with validation data and accuracy
    model.fit_generator(data_generator(True), len(X_train), epochs=2,
                        validation_data=([X_test] * 2, [y_test] * 2),
                        callbacks=callbacks_factory(histogram_freq=1))
    assert os.path.isdir(filepath)
    shutil.rmtree(filepath)
    assert not tmpdir.listdir()
@keras_test
def test_TensorBoard_convnet(tmpdir):
    """TensorBoard with histograms/images/grads on a small conv net."""
    np.random.seed(np.random.randint(1, 1e7))
    filepath = str(tmpdir / 'logs')
    input_shape = (16, 16, 3)
    (x_train, y_train), (x_test, y_test) = get_test_data(num_train=500,
                                                         num_test=200,
                                                         input_shape=input_shape,
                                                         classification=True,
                                                         num_classes=num_classes)
    y_train = np_utils.to_categorical(y_train)
    y_test = np_utils.to_categorical(y_test)
    model = Sequential([
        Conv2D(filters=8, kernel_size=3,
               activation='relu',
               input_shape=input_shape),
        MaxPooling2D(pool_size=2),
        Conv2D(filters=4, kernel_size=(3, 3),
               activation='relu', padding='same'),
        GlobalAveragePooling2D(),
        Dense(num_classes, activation='softmax')
    ])
    model.compile(loss='categorical_crossentropy',
                  optimizer='rmsprop',
                  metrics=['accuracy'])
    tsb = callbacks.TensorBoard(log_dir=filepath, histogram_freq=1,
                                write_images=True, write_grads=True,
                                batch_size=16)
    cbks = [tsb]
    model.summary()
    history = model.fit(x_train, y_train, epochs=2, batch_size=16,
                        validation_data=(x_test, y_test),
                        callbacks=cbks,
                        verbose=0)
    assert os.path.isdir(filepath)
    shutil.rmtree(filepath)
    assert not tmpdir.listdir()
@keras_test
def test_CallbackValData():
    """Callbacks receive the same validation_data from fit and fit_generator."""
    np.random.seed(1337)
    (X_train, y_train), (X_test, y_test) = get_test_data(num_train=train_samples,
                                                         num_test=test_samples,
                                                         input_shape=(input_dim,),
                                                         classification=True,
                                                         num_classes=num_classes)
    y_test = np_utils.to_categorical(y_test)
    y_train = np_utils.to_categorical(y_train)
    model = Sequential()
    model.add(Dense(num_hidden, input_dim=input_dim, activation='relu'))
    model.add(Dense(num_classes, activation='softmax'))
    model.compile(loss='categorical_crossentropy',
                  optimizer='sgd',
                  metrics=['accuracy'])
    cbk = callbacks.LambdaCallback(on_train_end=lambda x: 1)
    model.fit(X_train, y_train, batch_size=batch_size,
              validation_data=(X_test, y_test), callbacks=[cbk], epochs=1)
    def data_generator(train):
        # Endless batch generator over either the train or the test split.
        if train:
            max_batch_index = len(X_train) // batch_size
        else:
            max_batch_index = len(X_test) // batch_size
        i = 0
        while 1:
            if train:
                yield (X_train[i * batch_size: (i + 1) * batch_size],
                       y_train[i * batch_size: (i + 1) * batch_size])
            else:
                yield (X_test[i * batch_size: (i + 1) * batch_size],
                       y_test[i * batch_size: (i + 1) * batch_size])
            i += 1
            i = i % max_batch_index
    cbk2 = callbacks.LambdaCallback(on_train_end=lambda x: 1)
    model.fit_generator(data_generator(True), len(X_train), epochs=1,
                        validation_data=(X_test, y_test),
                        callbacks=[cbk2])
    # callback validation data should always have x, y, and sample weights
    assert len(cbk.validation_data) == len(cbk2.validation_data) == 3
    assert cbk.validation_data[0] is cbk2.validation_data[0]
    assert cbk.validation_data[1] is cbk2.validation_data[1]
    assert cbk.validation_data[2].shape == cbk2.validation_data[2].shape
@keras_test
def test_LambdaCallback():
    """A LambdaCallback's on_train_end hook runs after training finishes."""
    np.random.seed(1337)
    (X_train, y_train), (X_test, y_test) = get_test_data(num_train=train_samples,
                                                         num_test=test_samples,
                                                         input_shape=(input_dim,),
                                                         classification=True,
                                                         num_classes=num_classes)
    y_test = np_utils.to_categorical(y_test)
    y_train = np_utils.to_categorical(y_train)
    model = Sequential()
    model.add(Dense(num_hidden, input_dim=input_dim, activation='relu'))
    model.add(Dense(num_classes, activation='softmax'))
    model.compile(loss='categorical_crossentropy',
                  optimizer='sgd',
                  metrics=['accuracy'])
    # Start an arbitrary process that should run during model training and be terminated after training has completed.
    def f():
        while True:
            pass
    p = multiprocessing.Process(target=f)
    p.start()
    cleanup_callback = callbacks.LambdaCallback(on_train_end=lambda logs: p.terminate())
    cbks = [cleanup_callback]
    model.fit(X_train, y_train, batch_size=batch_size,
              validation_data=(X_test, y_test), callbacks=cbks, epochs=5)
    p.join()
    assert not p.is_alive()
@keras_test
def test_TensorBoard_with_ReduceLROnPlateau(tmpdir):
    """TensorBoard and ReduceLROnPlateau must coexist in one callback list
    (ReduceLROnPlateau injects an `lr` entry into the logs TensorBoard reads)."""
    import shutil
    np.random.seed(np.random.randint(1, 1e7))
    filepath = str(tmpdir / 'logs')
    (X_train, y_train), (X_test, y_test) = get_test_data(num_train=train_samples,
                                                         num_test=test_samples,
                                                         input_shape=(input_dim,),
                                                         classification=True,
                                                         num_classes=num_classes)
    y_test = np_utils.to_categorical(y_test)
    y_train = np_utils.to_categorical(y_train)
    model = Sequential()
    model.add(Dense(num_hidden, input_dim=input_dim, activation='relu'))
    model.add(Dense(num_classes, activation='softmax'))
    model.compile(loss='binary_crossentropy',
                  optimizer='sgd',
                  metrics=['accuracy'])
    cbks = [
        callbacks.ReduceLROnPlateau(
            monitor='val_loss',
            factor=0.5,
            patience=4,
            verbose=1),
        callbacks.TensorBoard(
            log_dir=filepath)]
    model.fit(X_train, y_train, batch_size=batch_size,
              validation_data=(X_test, y_test), callbacks=cbks, epochs=2)
    assert os.path.isdir(filepath)
    shutil.rmtree(filepath)
    assert not tmpdir.listdir()
@keras_test
def tests_RemoteMonitor():
    """RemoteMonitor posts metrics via requests.post (mocked here).

    NOTE(review): the name starts with `tests_`, not `test_`, so default
    pytest discovery may never collect this function -- confirm the project's
    collection config before relying on it. Renaming would change the public
    identifier, so it is only flagged here.
    """
    (X_train, y_train), (X_test, y_test) = get_test_data(num_train=train_samples,
                                                         num_test=test_samples,
                                                         input_shape=(input_dim,),
                                                         classification=True,
                                                         num_classes=num_classes)
    y_test = np_utils.to_categorical(y_test)
    y_train = np_utils.to_categorical(y_train)
    model = Sequential()
    model.add(Dense(num_hidden, input_dim=input_dim, activation='relu'))
    model.add(Dense(num_classes, activation='softmax'))
    model.compile(loss='categorical_crossentropy',
                  optimizer='rmsprop',
                  metrics=['accuracy'])
    cbks = [callbacks.RemoteMonitor()]
    # Mock out the network call so no real HTTP request is made.
    with patch('requests.post'):
        model.fit(X_train, y_train, batch_size=batch_size,
                  validation_data=(X_test, y_test), callbacks=cbks, epochs=1)
@keras_test
def tests_RemoteMonitorWithJsonPayload():
    """RemoteMonitor(send_as_json=True) posts a JSON body (requests mocked).

    NOTE(review): like `tests_RemoteMonitor`, the `tests_` prefix may be
    skipped by default pytest discovery -- confirm collection config.
    """
    (X_train, y_train), (X_test, y_test) = get_test_data(num_train=train_samples,
                                                         num_test=test_samples,
                                                         input_shape=(input_dim,),
                                                         classification=True,
                                                         num_classes=num_classes)
    y_test = np_utils.to_categorical(y_test)
    y_train = np_utils.to_categorical(y_train)
    model = Sequential()
    model.add(Dense(num_hidden, input_dim=input_dim, activation='relu'))
    model.add(Dense(num_classes, activation='softmax'))
    model.compile(loss='categorical_crossentropy',
                  optimizer='rmsprop',
                  metrics=['accuracy'])
    cbks = [callbacks.RemoteMonitor(send_as_json=True)]
    # Mock out the network call so no real HTTP request is made.
    with patch('requests.post'):
        model.fit(X_train, y_train, batch_size=batch_size,
                  validation_data=(X_test, y_test), callbacks=cbks, epochs=1)
# Allow running this test module directly with `python <file>`.
if __name__ == '__main__':
    pytest.main([__file__])
| 40.342193
| 118
| 0.588322
|
import os
import multiprocessing
import numpy as np
import pytest
from csv import reader
from csv import Sniffer
import shutil
from keras import optimizers
from keras import initializers
from keras import callbacks
from keras.models import Sequential, Model
from keras.layers import Input, Dense, Dropout, add, dot, Lambda
from keras.layers.convolutional import Conv2D
from keras.layers.pooling import MaxPooling2D, GlobalAveragePooling1D, GlobalAveragePooling2D
from keras.utils.test_utils import get_test_data
from keras.utils.test_utils import keras_test
from keras import backend as K
from keras.utils import np_utils
try:
from unittest.mock import patch
except:
from mock import patch
# Deliberately tiny problem sizes so every callback test trains in seconds.
input_dim = 2
num_hidden = 4
num_classes = 2
batch_size = 5
train_samples = 20
test_samples = 20
@keras_test
def test_TerminateOnNaN():
    """TerminateOnNaN halts fit/fit_generator on the first non-finite loss."""
    np.random.seed(1337)
    (X_train, y_train), (X_test, y_test) = get_test_data(num_train=train_samples,
                                                         num_test=test_samples,
                                                         input_shape=(input_dim,),
                                                         classification=True,
                                                         num_classes=num_classes)
    y_test = np_utils.to_categorical(y_test)
    y_train = np_utils.to_categorical(y_train)
    cbks = [callbacks.TerminateOnNaN()]
    model = Sequential()
    # Huge constant weights make the loss overflow to inf immediately.
    initializer = initializers.Constant(value=1e5)
    for _ in range(5):
        model.add(Dense(num_hidden, input_dim=input_dim, activation='relu',
                        kernel_initializer=initializer))
    model.add(Dense(num_classes, activation='linear'))
    model.compile(loss='mean_squared_error',
                  optimizer='rmsprop')
    # Case 1: model.fit -- only one (diverged) epoch may complete.
    history = model.fit(X_train, y_train, batch_size=batch_size,
                        validation_data=(X_test, y_test), callbacks=cbks, epochs=20)
    loss = history.history['loss']
    assert len(loss) == 1
    assert loss[0] == np.inf
    # Case 2: same check through fit_generator with an endless generator.
    def data_generator():
        max_batch_index = len(X_train) // batch_size
        i = 0
        while 1:
            yield (X_train[i * batch_size: (i + 1) * batch_size],
                   y_train[i * batch_size: (i + 1) * batch_size])
            i += 1
            i = i % max_batch_index
    history = model.fit_generator(data_generator(),
                                  len(X_train),
                                  validation_data=(X_test, y_test),
                                  callbacks=cbks,
                                  epochs=20)
    loss = history.history['loss']
    assert len(loss) == 1
    assert loss[0] == np.inf or np.isnan(loss[0])
@keras_test
def test_stop_training_csv(tmpdir):
    """CSVLogger must still record the final NaN epoch when TerminateOnNaN fires."""
    np.random.seed(1337)
    fp = str(tmpdir / 'test.csv')
    (X_train, y_train), (X_test, y_test) = get_test_data(num_train=train_samples,
                                                         num_test=test_samples,
                                                         input_shape=(input_dim,),
                                                         classification=True,
                                                         num_classes=num_classes)
    y_test = np_utils.to_categorical(y_test)
    y_train = np_utils.to_categorical(y_train)
    cbks = [callbacks.TerminateOnNaN(), callbacks.CSVLogger(fp)]
    model = Sequential()
    for _ in range(5):
        model.add(Dense(num_hidden, input_dim=input_dim, activation='relu'))
    model.add(Dense(num_classes, activation='linear'))
    model.compile(loss='mean_squared_error',
                  optimizer='rmsprop')
    def data_generator():
        # Yields real data at first, then poisons the stream with NaN batches
        # once `tot` exceeds three passes over the training set.
        i = 0
        max_batch_index = len(X_train) // batch_size
        tot = 0
        while 1:
            if tot > 3 * len(X_train):
                yield np.ones([batch_size, input_dim]) * np.nan, np.ones([batch_size, num_classes]) * np.nan
            else:
                yield (X_train[i * batch_size: (i + 1) * batch_size],
                       y_train[i * batch_size: (i + 1) * batch_size])
                i += 1
                tot += 1
                i = i % max_batch_index
    history = model.fit_generator(data_generator(),
                                  len(X_train) // batch_size,
                                  validation_data=(X_test, y_test),
                                  callbacks=cbks,
                                  epochs=20)
    loss = history.history['loss']
    assert len(loss) > 1
    assert loss[-1] == np.inf or np.isnan(loss[-1])
    values = []
    with open(fp) as f:
        for x in reader(f):
            values.append(x)
    # The terminating (NaN) epoch must appear in the CSV log.
    assert 'nan' in values[-1], 'The last epoch was not logged.'
    os.remove(fp)
@keras_test
def test_ModelCheckpoint(tmpdir):
    """ModelCheckpoint writes files for every monitor/mode/period combination."""
    np.random.seed(1337)
    filepath = str(tmpdir / 'checkpoint.h5')
    (X_train, y_train), (X_test, y_test) = get_test_data(num_train=train_samples,
                                                         num_test=test_samples,
                                                         input_shape=(input_dim,),
                                                         classification=True,
                                                         num_classes=num_classes)
    y_test = np_utils.to_categorical(y_test)
    y_train = np_utils.to_categorical(y_train)
    # Case 1: monitor val_loss with automatically inferred mode.
    monitor = 'val_loss'
    save_best_only = False
    mode = 'auto'
    model = Sequential()
    model.add(Dense(num_hidden, input_dim=input_dim, activation='relu'))
    model.add(Dense(num_classes, activation='softmax'))
    model.compile(loss='categorical_crossentropy',
                  optimizer='rmsprop',
                  metrics=['accuracy'])
    cbks = [callbacks.ModelCheckpoint(filepath, monitor=monitor,
                                      save_best_only=save_best_only, mode=mode)]
    model.fit(X_train, y_train, batch_size=batch_size,
              validation_data=(X_test, y_test), callbacks=cbks, epochs=1)
    assert os.path.isfile(filepath)
    os.remove(filepath)
    # Case 2: explicit 'min' mode.
    mode = 'min'
    cbks = [callbacks.ModelCheckpoint(filepath, monitor=monitor,
                                      save_best_only=save_best_only, mode=mode)]
    model.fit(X_train, y_train, batch_size=batch_size,
              validation_data=(X_test, y_test), callbacks=cbks, epochs=1)
    assert os.path.isfile(filepath)
    os.remove(filepath)
    # Case 3: maximize val_acc.
    mode = 'max'
    monitor = 'val_acc'
    cbks = [callbacks.ModelCheckpoint(filepath, monitor=monitor,
                                      save_best_only=save_best_only, mode=mode)]
    model.fit(X_train, y_train, batch_size=batch_size,
              validation_data=(X_test, y_test), callbacks=cbks, epochs=1)
    assert os.path.isfile(filepath)
    os.remove(filepath)
    # Case 4: save only when the monitored metric improves.
    save_best_only = True
    cbks = [callbacks.ModelCheckpoint(filepath, monitor=monitor,
                                      save_best_only=save_best_only, mode=mode)]
    model.fit(X_train, y_train, batch_size=batch_size,
              validation_data=(X_test, y_test), callbacks=cbks, epochs=1)
    assert os.path.isfile(filepath)
    os.remove(filepath)
    # Case 5: checkpoint every `period` epochs using a templated filename;
    # only epochs 2 and 4 may produce files.
    save_best_only = False
    period = 2
    mode = 'auto'
    filepath = 'checkpoint.{epoch:02d}.h5'
    cbks = [callbacks.ModelCheckpoint(filepath, monitor=monitor,
                                      save_best_only=save_best_only, mode=mode,
                                      period=period)]
    model.fit(X_train, y_train, batch_size=batch_size,
              validation_data=(X_test, y_test), callbacks=cbks, epochs=4)
    assert os.path.isfile(filepath.format(epoch=2))
    assert os.path.isfile(filepath.format(epoch=4))
    assert not os.path.exists(filepath.format(epoch=1))
    assert not os.path.exists(filepath.format(epoch=3))
    os.remove(filepath.format(epoch=2))
    os.remove(filepath.format(epoch=4))
    assert not tmpdir.listdir()
@keras_test
def test_EarlyStopping():
    """EarlyStopping runs end-to-end in both explicit and auto mode."""
    np.random.seed(1337)
    (X_train, y_train), (X_test, y_test) = get_test_data(num_train=train_samples,
                                                         num_test=test_samples,
                                                         input_shape=(input_dim,),
                                                         classification=True,
                                                         num_classes=num_classes)
    y_test = np_utils.to_categorical(y_test)
    y_train = np_utils.to_categorical(y_train)
    model = Sequential()
    model.add(Dense(num_hidden, input_dim=input_dim, activation='relu'))
    model.add(Dense(num_classes, activation='softmax'))
    model.compile(loss='categorical_crossentropy',
                  optimizer='rmsprop',
                  metrics=['accuracy'])
    # Smoke test only: no assertion, just that fit() completes in each mode.
    mode = 'max'
    monitor = 'val_acc'
    patience = 0
    cbks = [callbacks.EarlyStopping(patience=patience, monitor=monitor, mode=mode)]
    history = model.fit(X_train, y_train, batch_size=batch_size,
                        validation_data=(X_test, y_test), callbacks=cbks, epochs=20)
    mode = 'auto'
    monitor = 'val_acc'
    patience = 2
    cbks = [callbacks.EarlyStopping(patience=patience, monitor=monitor, mode=mode)]
    history = model.fit(X_train, y_train, batch_size=batch_size,
                        validation_data=(X_test, y_test), callbacks=cbks, epochs=20)
@keras_test
def test_EarlyStopping_reuse():
    """The same EarlyStopping instance must reset its state across fit() calls."""
    np.random.seed(1337)
    patience = 3
    data = np.random.random((100, 1))
    labels = np.where(data > 0.5, 1, 0)
    model = Sequential((
        Dense(1, input_dim=1, activation='relu'),
        Dense(1, activation='sigmoid'),
    ))
    model.compile(optimizer='sgd', loss='binary_crossentropy', metrics=['accuracy'])
    stopper = callbacks.EarlyStopping(monitor='acc', patience=patience)
    weights = model.get_weights()
    hist = model.fit(data, labels, callbacks=[stopper], epochs=20)
    assert len(hist.epoch) >= patience
    # Restore the initial weights and reuse the SAME callback instance:
    # training must again run at least `patience` epochs.
    model.set_weights(weights)
    hist = model.fit(data, labels, callbacks=[stopper], epochs=20)
    assert len(hist.epoch) >= patience
@keras_test
def test_EarlyStopping_patience():
    """EarlyStopping(patience=2) stops on the third epoch of a stalled loss."""
    class _StubModel(object):
        def __init__(self):
            self.stop_training = False

    stopper = callbacks.EarlyStopping(monitor='val_loss', patience=2)
    stopper.model = _StubModel()
    # Loss improves on epoch 0, then never again; stop fires at epoch 2.
    val_losses = [0.0860, 0.1096, 0.1040, 0.1019]
    epochs_run = 0
    stopper.on_train_begin()
    for epoch_idx, loss_value in enumerate(val_losses):
        epochs_run += 1
        stopper.on_epoch_end(epoch_idx, logs={'val_loss': loss_value})
        if stopper.model.stop_training:
            break
    assert epochs_run == 3
@keras_test
def test_EarlyStopping_baseline():
    """EarlyStopping's `baseline` stops training when the metric never reaches it."""
    class _StubModel(object):
        def __init__(self):
            self.stop_training = False

    def baseline_tester(acc_levels):
        # Drive the callback by hand and count the epochs it allows.
        early_stop = callbacks.EarlyStopping(monitor='val_acc', baseline=0.75, patience=2)
        early_stop.model = _StubModel()
        epochs_trained = 0
        early_stop.on_train_begin()
        for epoch_idx, acc in enumerate(acc_levels):
            epochs_trained += 1
            early_stop.on_epoch_end(epoch_idx, logs={'val_acc': acc})
            if early_stop.model.stop_training:
                break
        return epochs_trained

    # Baseline reached on epoch 1 -> all four epochs run.
    baseline_met = baseline_tester([0.55, 0.76, 0.81, 0.81])
    # Baseline never reached within patience -> stop after two epochs.
    baseline_not_met = baseline_tester([0.55, 0.74, 0.81, 0.81])
    assert baseline_met == 4
    assert baseline_not_met == 2
@keras_test
def test_LearningRateScheduler():
    """LearningRateScheduler applies lr = 1/(1+epoch); after 5 epochs lr == 0.2.

    Bug fix: the original assertion lacked abs(), so it held for ANY learning
    rate smaller than 0.2 + epsilon (e.g. a scheduler that drove lr to 0 would
    still pass). The check must be closeness to the scheduled value.
    """
    np.random.seed(1337)
    (X_train, y_train), (X_test, y_test) = get_test_data(num_train=train_samples,
                                                         num_test=test_samples,
                                                         input_shape=(input_dim,),
                                                         classification=True,
                                                         num_classes=num_classes)
    y_test = np_utils.to_categorical(y_test)
    y_train = np_utils.to_categorical(y_train)
    model = Sequential()
    model.add(Dense(num_hidden, input_dim=input_dim, activation='relu'))
    model.add(Dense(num_classes, activation='softmax'))
    model.compile(loss='categorical_crossentropy',
                  optimizer='sgd',
                  metrics=['accuracy'])
    cbks = [callbacks.LearningRateScheduler(lambda x: 1. / (1. + x))]
    model.fit(X_train, y_train, batch_size=batch_size,
              validation_data=(X_test, y_test), callbacks=cbks, epochs=5)
    # Last scheduled epoch is x=4, so the final lr must be 1/5 == 0.2.
    assert abs(float(K.get_value(model.optimizer.lr)) - 0.2) < K.epsilon()
@keras_test
def test_ReduceLROnPlateau():
    """ReduceLROnPlateau lowers lr when improvement is below min_delta."""
    np.random.seed(1337)
    (X_train, y_train), (X_test, y_test) = get_test_data(num_train=train_samples,
                                                         num_test=test_samples,
                                                         input_shape=(input_dim,),
                                                         classification=True,
                                                         num_classes=num_classes)
    y_test = np_utils.to_categorical(y_test)
    y_train = np_utils.to_categorical(y_train)
    def make_model():
        # Fixed seed so both runs start from identical weights.
        np.random.seed(1337)
        model = Sequential()
        model.add(Dense(num_hidden, input_dim=input_dim, activation='relu'))
        model.add(Dense(num_classes, activation='softmax'))
        model.compile(loss='categorical_crossentropy',
                      optimizer=optimizers.SGD(lr=0.1),
                      metrics=['accuracy'])
        return model
    # min_delta=10 is unattainable, so the lr must be reduced (0.1 -> 0.01).
    model = make_model()
    cbks = [callbacks.ReduceLROnPlateau(monitor='val_loss', factor=0.1, min_delta=10, patience=1, cooldown=5)]
    model.fit(X_train, y_train, batch_size=batch_size,
              validation_data=(X_test, y_test), callbacks=cbks, epochs=5, verbose=2)
    assert np.allclose(float(K.get_value(model.optimizer.lr)), 0.01, atol=K.epsilon())
    # min_delta=0: any improvement counts, so the lr must stay at 0.1.
    model = make_model()
    cbks = [callbacks.ReduceLROnPlateau(monitor='val_loss', factor=0.1, min_delta=0, patience=1, cooldown=5)]
    model.fit(X_train, y_train, batch_size=batch_size,
              validation_data=(X_test, y_test), callbacks=cbks, epochs=5, verbose=2)
    assert np.allclose(float(K.get_value(model.optimizer.lr)), 0.1, atol=K.epsilon())
@keras_test
def test_ReduceLROnPlateau_patience():
    """ReduceLROnPlateau(patience=2) reduces lr only after two stalled epochs."""
    class _StubOptimizer(object):
        def __init__(self):
            self.lr = K.variable(1.0)

    class _StubModel(object):
        def __init__(self):
            self.optimizer = _StubOptimizer()

    plateau_cb = callbacks.ReduceLROnPlateau(monitor='val_loss',
                                             patience=2)
    plateau_cb.model = _StubModel()
    observed_lrs = []
    for epoch_idx, loss_value in enumerate([0.0860, 0.1096, 0.1040]):
        plateau_cb.on_epoch_end(epoch_idx, logs={'val_loss': loss_value})
        observed_lrs.append(K.get_value(plateau_cb.model.optimizer.lr))
    # The lr holds at 1.0 until the final epoch, where the reduction fires.
    assert all([lr == 1.0 for lr in observed_lrs[:-1]]) and observed_lrs[-1] < 1.0
@keras_test
def test_ReduceLROnPlateau_backwards_compatibility():
    """Deprecated `epsilon` kwarg maps onto `min_delta` and warns."""
    import warnings
    with warnings.catch_warnings(record=True) as caught:
        plateau_cb = callbacks.ReduceLROnPlateau(epsilon=1e-13)
        # Skip the warning check when warnings are globally suppressed.
        if os.environ.get("PYTHONWARNINGS") != "ignore":
            assert "`epsilon` argument is deprecated" in str(caught[0].message)
    assert not hasattr(plateau_cb, 'epsilon')
    assert hasattr(plateau_cb, 'min_delta')
    assert plateau_cb.min_delta == 1e-13
@keras_test
def test_CSVLogger(tmpdir):
    """CSVLogger honours a custom separator and append mode (single header)."""
    np.random.seed(1337)
    filepath = str(tmpdir / 'log.tsv')
    sep = '\t'
    (X_train, y_train), (X_test, y_test) = get_test_data(num_train=train_samples,
                                                         num_test=test_samples,
                                                         input_shape=(input_dim,),
                                                         classification=True,
                                                         num_classes=num_classes)
    y_test = np_utils.to_categorical(y_test)
    y_train = np_utils.to_categorical(y_train)
    def make_model():
        np.random.seed(1337)
        model = Sequential()
        model.add(Dense(num_hidden, input_dim=input_dim, activation='relu'))
        model.add(Dense(num_classes, activation='softmax'))
        model.compile(loss='categorical_crossentropy',
                      optimizer=optimizers.SGD(lr=0.1),
                      metrics=['accuracy'])
        return model
    # Case 1: custom separator detected by csv.Sniffer.
    model = make_model()
    cbks = [callbacks.CSVLogger(filepath, separator=sep)]
    model.fit(X_train, y_train, batch_size=batch_size,
              validation_data=(X_test, y_test), callbacks=cbks, epochs=1)
    assert os.path.isfile(filepath)
    with open(filepath) as csvfile:
        dialect = Sniffer().sniff(csvfile.read())
    assert dialect.delimiter == sep
    del model
    del cbks
    # Case 2: append mode across two fit() calls must write ONE header row.
    model = make_model()
    cbks = [callbacks.CSVLogger(filepath, separator=sep, append=True)]
    model.fit(X_train, y_train, batch_size=batch_size,
              validation_data=(X_test, y_test), callbacks=cbks, epochs=1)
    model.fit(X_train, y_train, batch_size=batch_size,
              validation_data=(X_test, y_test), callbacks=cbks, epochs=1)
    import re
    with open(filepath) as csvfile:
        output = " ".join(csvfile.readlines())
        assert len(re.findall('epoch', output)) == 1
    os.remove(filepath)
    assert not tmpdir.listdir()
@keras_test
def test_TensorBoard(tmpdir):
    """TensorBoard produces a log directory for fit and fit_generator."""
    np.random.seed(np.random.randint(1, 1e7))
    filepath = str(tmpdir / 'logs')
    (X_train, y_train), (X_test, y_test) = get_test_data(
        num_train=train_samples,
        num_test=test_samples,
        input_shape=(input_dim,),
        classification=True,
        num_classes=num_classes)
    y_test = np_utils.to_categorical(y_test)
    y_train = np_utils.to_categorical(y_train)
    def data_generator(train):
        # Endless generator over either the train or the test split.
        if train:
            max_batch_index = len(X_train) // batch_size
        else:
            max_batch_index = len(X_test) // batch_size
        i = 0
        while 1:
            if train:
                yield (X_train[i * batch_size: (i + 1) * batch_size],
                       y_train[i * batch_size: (i + 1) * batch_size])
            else:
                yield (X_test[i * batch_size: (i + 1) * batch_size],
                       y_test[i * batch_size: (i + 1) * batch_size])
            i += 1
            i = i % max_batch_index
    inp = Input((input_dim,))
    hidden = Dense(num_hidden, activation='relu')(inp)
    hidden = Dropout(0.1)(hidden)
    output = Dense(num_classes, activation='softmax')(hidden)
    model = Model(inputs=inp, outputs=output)
    model.compile(loss='categorical_crossentropy',
                  optimizer='sgd',
                  metrics=['accuracy'])
    # A fresh callback per fit: TensorBoard callbacks are stateful.
    def callbacks_factory(histogram_freq):
        return [callbacks.TensorBoard(log_dir=filepath,
                                      histogram_freq=histogram_freq,
                                      write_images=True, write_grads=True,
                                      embeddings_freq=1,
                                      embeddings_layer_names=['dense_1'],
                                      batch_size=5)]
    # fit without validation data
    model.fit(X_train, y_train, batch_size=batch_size,
              callbacks=callbacks_factory(histogram_freq=0), epochs=3)
    # fit with validation data and accuracy
    model.fit(X_train, y_train, batch_size=batch_size,
              validation_data=(X_test, y_test),
              callbacks=callbacks_factory(histogram_freq=0), epochs=2)
    # fit generator without validation data
    model.fit_generator(data_generator(True), len(X_train), epochs=2,
                        callbacks=callbacks_factory(histogram_freq=0))
    # fit generator with validation data and accuracy
    model.fit_generator(data_generator(True), len(X_train), epochs=2,
                        validation_data=(X_test, y_test),
                        callbacks=callbacks_factory(histogram_freq=1))
    assert os.path.isdir(filepath)
    shutil.rmtree(filepath)
    assert not tmpdir.listdir()
@keras_test
@pytest.mark.skipif((K.backend() != 'tensorflow'),
                    reason='Requires TensorFlow backend')
def test_TensorBoard_histogram_freq_must_have_validation_data(tmpdir):
    """histogram_freq > 0 must raise unless concrete validation data is given."""
    np.random.seed(np.random.randint(1, 1e7))
    filepath = str(tmpdir / 'logs')
    (X_train, y_train), (X_test, y_test) = get_test_data(
        num_train=train_samples,
        num_test=test_samples,
        input_shape=(input_dim,),
        classification=True,
        num_classes=num_classes)
    y_test = np_utils.to_categorical(y_test)
    y_train = np_utils.to_categorical(y_train)
    def data_generator(train):
        if train:
            max_batch_index = len(X_train) // batch_size
        else:
            max_batch_index = len(X_test) // batch_size
        i = 0
        while 1:
            if train:
                # simulate multi-input/output models
                yield (X_train[i * batch_size: (i + 1) * batch_size],
                       y_train[i * batch_size: (i + 1) * batch_size])
            else:
                yield (X_test[i * batch_size: (i + 1) * batch_size],
                       y_test[i * batch_size: (i + 1) * batch_size])
            i += 1
            i = i % max_batch_index
    inp = Input((input_dim,))
    hidden = Dense(num_hidden, activation='relu')(inp)
    hidden = Dropout(0.1)(hidden)
    output = Dense(num_classes, activation='softmax')(hidden)
    model = Model(inputs=inp, outputs=output)
    model.compile(loss='categorical_crossentropy',
                  optimizer='sgd',
                  metrics=['accuracy'])
    # we must generate new callbacks for each test, as they aren't stateless
    def callbacks_factory(histogram_freq):
        return [callbacks.TensorBoard(log_dir=filepath,
                                      histogram_freq=histogram_freq,
                                      write_images=True, write_grads=True,
                                      embeddings_freq=1,
                                      embeddings_layer_names=['dense_1'],
                                      batch_size=5)]
    # fit() without validation data: must raise.
    with pytest.raises(ValueError) as raised_exception:
        model.fit(X_train, y_train, batch_size=batch_size,
                  callbacks=callbacks_factory(histogram_freq=1), epochs=3)
    assert 'validation_data must be provided' in str(raised_exception.value)
    # fit_generator() without validation data: must raise.
    with pytest.raises(ValueError) as raised_exception:
        model.fit_generator(data_generator(True), len(X_train), epochs=2,
                            callbacks=callbacks_factory(histogram_freq=1))
    assert 'validation_data must be provided' in str(raised_exception.value)
    # A validation *generator* is not enough either: must raise.
    with pytest.raises(ValueError) as raised_exception:
        model.fit_generator(data_generator(True), len(X_train), epochs=2,
                            validation_data=data_generator(False),
                            validation_steps=1,
                            callbacks=callbacks_factory(histogram_freq=1))
    assert 'validation_data must be provided' in str(raised_exception.value)
@keras_test
def test_TensorBoard_multi_input_output(tmpdir):
    """TensorBoard handles models with two inputs and two outputs."""
    np.random.seed(np.random.randint(1, 1e7))
    filepath = str(tmpdir / 'logs')
    (X_train, y_train), (X_test, y_test) = get_test_data(
        num_train=train_samples,
        num_test=test_samples,
        input_shape=(input_dim, input_dim),
        classification=True,
        num_classes=num_classes)
    y_test = np_utils.to_categorical(y_test)
    y_train = np_utils.to_categorical(y_train)
    def data_generator(train):
        # Duplicate each batch to feed the two inputs / two outputs.
        if train:
            max_batch_index = len(X_train) // batch_size
        else:
            max_batch_index = len(X_test) // batch_size
        i = 0
        while 1:
            if train:
                yield ([X_train[i * batch_size: (i + 1) * batch_size]] * 2,
                       [y_train[i * batch_size: (i + 1) * batch_size]] * 2)
            else:
                yield ([X_test[i * batch_size: (i + 1) * batch_size]] * 2,
                       [y_test[i * batch_size: (i + 1) * batch_size]] * 2)
            i += 1
            i = i % max_batch_index
    inp1 = Input((input_dim, input_dim))
    inp2 = Input((input_dim, input_dim))
    inp_3d = add([inp1, inp2])
    inp_2d = GlobalAveragePooling1D()(inp_3d)
    # Pass both tensors through a Lambda so the graph mixes 3D and 2D paths.
    inp_pair = Lambda(lambda x: x)([inp_3d, inp_2d])
    hidden = dot(inp_pair, axes=-1)
    hidden = Dense(num_hidden, activation='relu')(hidden)
    hidden = Dropout(0.1)(hidden)
    output1 = Dense(num_classes, activation='softmax')(hidden)
    output2 = Dense(num_classes, activation='softmax')(hidden)
    model = Model(inputs=[inp1, inp2], outputs=[output1, output2])
    model.compile(loss='categorical_crossentropy',
                  optimizer='sgd',
                  metrics=['accuracy'])
    # A fresh callback per fit: TensorBoard callbacks are stateful.
    def callbacks_factory(histogram_freq):
        return [callbacks.TensorBoard(log_dir=filepath,
                                      histogram_freq=histogram_freq,
                                      write_images=True, write_grads=True,
                                      embeddings_freq=1,
                                      embeddings_layer_names=['dense_1'],
                                      batch_size=5)]
    # fit without validation data
    model.fit([X_train] * 2, [y_train] * 2, batch_size=batch_size,
              callbacks=callbacks_factory(histogram_freq=0), epochs=3)
    # fit with validation data and accuracy
    model.fit([X_train] * 2, [y_train] * 2, batch_size=batch_size,
              validation_data=([X_test] * 2, [y_test] * 2),
              callbacks=callbacks_factory(histogram_freq=1), epochs=2)
    # fit generator without validation data
    model.fit_generator(data_generator(True), len(X_train), epochs=2,
                        callbacks=callbacks_factory(histogram_freq=0))
    # fit generator with validation data and accuracy
    model.fit_generator(data_generator(True), len(X_train), epochs=2,
                        validation_data=([X_test] * 2, [y_test] * 2),
                        callbacks=callbacks_factory(histogram_freq=1))
    assert os.path.isdir(filepath)
    shutil.rmtree(filepath)
    assert not tmpdir.listdir()
@keras_test
def test_TensorBoard_convnet(tmpdir):
    """TensorBoard histogram/image logging works for a small convnet."""
    np.random.seed(np.random.randint(1, 1e7))
    filepath = str(tmpdir / 'logs')
    input_shape = (16, 16, 3)
    (x_train, y_train), (x_test, y_test) = get_test_data(num_train=500,
                                                         num_test=200,
                                                         input_shape=input_shape,
                                                         classification=True,
                                                         num_classes=num_classes)
    y_train = np_utils.to_categorical(y_train)
    y_test = np_utils.to_categorical(y_test)
    model = Sequential([
        Conv2D(filters=8, kernel_size=3,
               activation='relu',
               input_shape=input_shape),
        MaxPooling2D(pool_size=2),
        Conv2D(filters=4, kernel_size=(3, 3),
               activation='relu', padding='same'),
        GlobalAveragePooling2D(),
        Dense(num_classes, activation='softmax')
    ])
    model.compile(loss='categorical_crossentropy',
                  optimizer='rmsprop',
                  metrics=['accuracy'])
    tsb = callbacks.TensorBoard(log_dir=filepath, histogram_freq=1,
                                write_images=True, write_grads=True,
                                batch_size=16)
    cbks = [tsb]
    model.summary()
    history = model.fit(x_train, y_train, epochs=2, batch_size=16,
                        validation_data=(x_test, y_test),
                        callbacks=cbks,
                        verbose=0)
    assert os.path.isdir(filepath)
    shutil.rmtree(filepath)
    assert not tmpdir.listdir()
@keras_test
def test_CallbackValData():
    """Callbacks receive identical validation_data from fit and fit_generator."""
    np.random.seed(1337)
    (X_train, y_train), (X_test, y_test) = get_test_data(num_train=train_samples,
                                                         num_test=test_samples,
                                                         input_shape=(input_dim,),
                                                         classification=True,
                                                         num_classes=num_classes)
    y_test = np_utils.to_categorical(y_test)
    y_train = np_utils.to_categorical(y_train)
    model = Sequential()
    model.add(Dense(num_hidden, input_dim=input_dim, activation='relu'))
    model.add(Dense(num_classes, activation='softmax'))
    model.compile(loss='categorical_crossentropy',
                  optimizer='sgd',
                  metrics=['accuracy'])
    cbk = callbacks.LambdaCallback(on_train_end=lambda x: 1)
    model.fit(X_train, y_train, batch_size=batch_size,
              validation_data=(X_test, y_test), callbacks=[cbk], epochs=1)
    def data_generator(train):
        if train:
            max_batch_index = len(X_train) // batch_size
        else:
            max_batch_index = len(X_test) // batch_size
        i = 0
        while 1:
            if train:
                yield (X_train[i * batch_size: (i + 1) * batch_size],
                       y_train[i * batch_size: (i + 1) * batch_size])
            else:
                yield (X_test[i * batch_size: (i + 1) * batch_size],
                       y_test[i * batch_size: (i + 1) * batch_size])
            i += 1
            i = i % max_batch_index
    cbk2 = callbacks.LambdaCallback(on_train_end=lambda x: 1)
    model.fit_generator(data_generator(True), len(X_train), epochs=1,
                        validation_data=(X_test, y_test),
                        callbacks=[cbk2])
    # callback validation data should always have x, y, and sample weights
    assert len(cbk.validation_data) == len(cbk2.validation_data) == 3
    assert cbk.validation_data[0] is cbk2.validation_data[0]
    assert cbk.validation_data[1] is cbk2.validation_data[1]
    assert cbk.validation_data[2].shape == cbk2.validation_data[2].shape
@keras_test
def test_LambdaCallback():
    """LambdaCallback's on_train_end hook fires exactly when training ends."""
    np.random.seed(1337)
    (X_train, y_train), (X_test, y_test) = get_test_data(num_train=train_samples,
                                                         num_test=test_samples,
                                                         input_shape=(input_dim,),
                                                         classification=True,
                                                         num_classes=num_classes)
    y_test = np_utils.to_categorical(y_test)
    y_train = np_utils.to_categorical(y_train)
    model = Sequential()
    model.add(Dense(num_hidden, input_dim=input_dim, activation='relu'))
    model.add(Dense(num_classes, activation='softmax'))
    model.compile(loss='categorical_crossentropy',
                  optimizer='sgd',
                  metrics=['accuracy'])
    # Start an arbitrary process that should run during model training and be terminated after training has completed.
    def f():
        while True:
            pass
    p = multiprocessing.Process(target=f)
    p.start()
    cleanup_callback = callbacks.LambdaCallback(on_train_end=lambda logs: p.terminate())
    cbks = [cleanup_callback]
    model.fit(X_train, y_train, batch_size=batch_size,
              validation_data=(X_test, y_test), callbacks=cbks, epochs=5)
    p.join()
    # If on_train_end ran, the busy-loop process was terminated.
    assert not p.is_alive()
@keras_test
def test_TensorBoard_with_ReduceLROnPlateau(tmpdir):
    """Smoke-test that TensorBoard and ReduceLROnPlateau coexist.

    ReduceLROnPlateau injects an `lr` entry into the epoch logs; TensorBoard
    must serialize it without crashing, and log files must land in `tmpdir`.
    """
    import shutil
    np.random.seed(np.random.randint(1, 1e7))
    filepath = str(tmpdir / 'logs')
    (X_train, y_train), (X_test, y_test) = get_test_data(num_train=train_samples,
                                                         num_test=test_samples,
                                                         input_shape=(input_dim,),
                                                         classification=True,
                                                         num_classes=num_classes)
    y_test = np_utils.to_categorical(y_test)
    y_train = np_utils.to_categorical(y_train)
    model = Sequential()
    model.add(Dense(num_hidden, input_dim=input_dim, activation='relu'))
    model.add(Dense(num_classes, activation='softmax'))
    model.compile(loss='binary_crossentropy',
                  optimizer='sgd',
                  metrics=['accuracy'])
    cbks = [
        callbacks.ReduceLROnPlateau(
            monitor='val_loss',
            factor=0.5,
            patience=4,
            verbose=1),
        callbacks.TensorBoard(
            log_dir=filepath)]
    model.fit(X_train, y_train, batch_size=batch_size,
              validation_data=(X_test, y_test), callbacks=cbks, epochs=2)
    assert os.path.isdir(filepath)
    shutil.rmtree(filepath)
    assert not tmpdir.listdir()
@keras_test
def tests_RemoteMonitor():
    """RemoteMonitor posts epoch logs via requests.post (mocked here)."""
    (X_train, y_train), (X_test, y_test) = get_test_data(num_train=train_samples,
                                                         num_test=test_samples,
                                                         input_shape=(input_dim,),
                                                         classification=True,
                                                         num_classes=num_classes)
    y_test = np_utils.to_categorical(y_test)
    y_train = np_utils.to_categorical(y_train)
    model = Sequential()
    model.add(Dense(num_hidden, input_dim=input_dim, activation='relu'))
    model.add(Dense(num_classes, activation='softmax'))
    model.compile(loss='categorical_crossentropy',
                  optimizer='rmsprop',
                  metrics=['accuracy'])
    cbks = [callbacks.RemoteMonitor()]
    # Patch out the network call: the test only checks fit() completes.
    with patch('requests.post'):
        model.fit(X_train, y_train, batch_size=batch_size,
                  validation_data=(X_test, y_test), callbacks=cbks, epochs=1)
@keras_test
def tests_RemoteMonitorWithJsonPayload():
    """RemoteMonitor(send_as_json=True) posts a JSON body (requests mocked)."""
    (X_train, y_train), (X_test, y_test) = get_test_data(num_train=train_samples,
                                                         num_test=test_samples,
                                                         input_shape=(input_dim,),
                                                         classification=True,
                                                         num_classes=num_classes)
    y_test = np_utils.to_categorical(y_test)
    y_train = np_utils.to_categorical(y_train)
    model = Sequential()
    model.add(Dense(num_hidden, input_dim=input_dim, activation='relu'))
    model.add(Dense(num_classes, activation='softmax'))
    model.compile(loss='categorical_crossentropy',
                  optimizer='rmsprop',
                  metrics=['accuracy'])
    cbks = [callbacks.RemoteMonitor(send_as_json=True)]
    # Patch out the network call: the test only checks fit() completes.
    with patch('requests.post'):
        model.fit(X_train, y_train, batch_size=batch_size,
                  validation_data=(X_test, y_test), callbacks=cbks, epochs=1)
# Allow running this test module directly with `python <file>`.
if __name__ == '__main__':
    pytest.main([__file__])
| true
| true
|
f718891f5d70f5bc34c238ce47c933a0dbeff2c0
| 27,465
|
py
|
Python
|
twisted/conch/scripts/cftp.py
|
twonds/twisted
|
d6e270a465d371c3bed01bf369af497b77eb9f1e
|
[
"Unlicense",
"MIT"
] | 1
|
2021-01-27T19:11:21.000Z
|
2021-01-27T19:11:21.000Z
|
twisted/conch/scripts/cftp.py
|
twonds/twisted
|
d6e270a465d371c3bed01bf369af497b77eb9f1e
|
[
"Unlicense",
"MIT"
] | null | null | null |
twisted/conch/scripts/cftp.py
|
twonds/twisted
|
d6e270a465d371c3bed01bf369af497b77eb9f1e
|
[
"Unlicense",
"MIT"
] | 3
|
2017-01-04T01:24:15.000Z
|
2020-06-18T16:14:56.000Z
|
# -*- test-case-name: twisted.conch.test.test_cftp -*-
# Copyright (c) 2001-2009 Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Implementation module for the I{cftp} command.
"""
import os, sys, getpass, struct, tty, fcntl, stat
import fnmatch, pwd, time, glob
from twisted.conch.client import connect, default, options
from twisted.conch.ssh import connection, common
from twisted.conch.ssh import channel, filetransfer
from twisted.protocols import basic
from twisted.internet import reactor, stdio, defer, utils
from twisted.python import log, usage, failure
class ClientOptions(options.ConchOptions):
    """Command-line option parser for the I{cftp} client."""

    synopsis = """Usage: cftp [options] [user@]host
cftp [options] [user@]host[:dir[/]]
cftp [options] [user@]host[:file [localfile]]
"""

    optParameters = [
        ['buffersize', 'B', 32768, 'Size of the buffer to use for sending/receiving.'],
        ['batchfile', 'b', None, 'File to read commands from, or \'-\' for stdin.'],
        ['requests', 'R', 5, 'Number of requests to make before waiting for a reply.'],
        ['subsystem', 's', 'sftp', 'Subsystem/server program to connect to.']]
    zsh_altArgDescr = {"buffersize":"Size of send/receive buffer (default: 32768)"}
    #zsh_multiUse = ["foo", "bar"]
    #zsh_mutuallyExclusive = [("foo", "bar"), ("bar", "baz")]
    #zsh_actions = {"foo":'_files -g "*.foo"', "bar":"(one two three)"}
    #zsh_actionDescr = {"logfile":"log file name", "random":"random seed"}
    zsh_extras = ['2::localfile:{if [[ $words[1] == *:* ]]; then; _files; fi}']

    def parseArgs(self, host, localPath=None):
        """Split C{[user@]host[:remotePath]} and record the local path.

        @param host: positional host argument, optionally carrying a remote
            path after the first colon.
        @param localPath: optional local file name for a single-file copy.
        """
        self['remotePath'] = ''
        if ':' in host:
            host, self['remotePath'] = host.split(':', 1)
            # Bug fix: str.rstrip returns a new string; the original code
            # discarded the result, so a trailing '/' was never removed.
            self['remotePath'] = self['remotePath'].rstrip('/')
        self['host'] = host
        self['localPath'] = localPath
def run():
    """Entry point for the cftp command: parse argv, connect, run the reactor.

    NOTE: this is Python 2 code (`except E, u:` and `print` statements).
    """
#    import hotshot
#    prof = hotshot.Profile('cftp.prof')
#    prof.start()
    args = sys.argv[1:]
    if '-l' in args: # cvs is an idiot
        i = args.index('-l')
        args = args[i:i+2]+args
        del args[i+2:i+4]
    options = ClientOptions()
    try:
        options.parseOptions(args)
    except usage.UsageError, u:
        print 'ERROR: %s' % u
        sys.exit(1)
    if options['log']:
        # Log to stderr but keep user-visible output bound to stdout.
        realout = sys.stdout
        log.startLogging(sys.stderr)
        sys.stdout = realout
    else:
        log.discardLogs()
    doConnect(options)
    reactor.run()
#    prof.stop()
#    prof.close()
def handleError():
    """Record a fatal error, stop the reactor, log the failure and re-raise.

    Sets the module-level ``exitStatus`` to 2 so the process exits non-zero.
    """
    global exitStatus
    exitStatus = 2
    try:
        reactor.stop()
    # Narrowed from a bare `except:` which also swallowed SystemExit and
    # KeyboardInterrupt; the reactor may legitimately already be stopped.
    except Exception:
        pass
    log.err(failure.Failure())
    raise
def doConnect(options):
    """Open the SSH connection described by ``options``.

    Fills in defaults for user (current login) and port (22), then starts
    the conch connection; failures terminate the process via _ebExit.
    """
#    log.deferr = handleError # HACK
    if '@' in options['host']:
        options['user'], options['host'] = options['host'].split('@',1)
    host = options['host']
    if not options['user']:
        options['user'] = getpass.getuser()
    if not options['port']:
        options['port'] = 22
    else:
        options['port'] = int(options['port'])
    # NOTE(review): 'host' was already assigned above; this second read is
    # redundant but harmless.
    host = options['host']
    port = options['port']
    conn = SSHConnection()
    conn.options = options
    vhk = default.verifyHostKey
    uao = default.SSHUserAuthClient(options['user'], options, conn)
    connect.connect(host, port, options, vhk, uao).addErrback(_ebExit)
def _ebExit(f):
    """Print a connection failure message and stop the reactor.

    NOTE: Python 2 code (`print` statement).
    """
    #global exitStatus
    # Prefer the wrapped exception's own message when available.
    if hasattr(f.value, 'value'):
        s = f.value.value
    else:
        s = str(f)
    print s
    #exitStatus = "conch: exiting with error %s" % f
    try:
        reactor.stop()
    except: pass
def _ignore(*args): pass
class FileWrapper:
    """Proxy around a file object that records its total size.

    Construction seeks to the end of the wrapped file to measure it, and
    keeps a ``total`` counter (bytes transferred so far). Any attribute not
    defined here is delegated to the underlying file object.
    """

    def __init__(self, f):
        self.f = f
        self.total = 0.0
        # whence=2 is SEEK_END: position at EOF, then tell() yields the size.
        f.seek(0, 2)
        self.size = f.tell()

    def __getattr__(self, attr):
        # Fall through to the wrapped file for everything else.
        return getattr(self.f, attr)
class StdioClient(basic.LineReceiver):
_pwd = pwd
ps = 'cftp> '
delimiter = '\n'
    def __init__(self, client, f = None):
        """Wire the interactive shell to an SFTP client.

        @param client: the filetransfer client used for all remote operations.
        @param f: optional batch file of commands; when given, commands are
            read from it instead of interactively.
        """
        self.client = client
        self.currentDirectory = ''
        self.file = f
        # The progress bar only makes sense interactively (no batch file).
        self.useProgressBar = (not f and 1) or 0
    def connectionMade(self):
        """On connect, asynchronously resolve the remote working directory."""
        self.client.realPath('').addCallback(self._cbSetCurDir)
    def _cbSetCurDir(self, path):
        """Record the resolved remote directory and show the first prompt."""
        self.currentDirectory = path
        self._newLine()
    def lineReceived(self, line):
        """Handle one command line (typed, or read from the batch file)."""
        if self.client.transport.localClosed:
            return
        log.msg('got line %s' % repr(line))
        line = line.lstrip()
        if not line:
            self._newLine()
            return
        # In batch mode a leading '-' means "ignore errors for this command".
        if self.file and line.startswith('-'):
            self.ignoreErrors = 1
            line = line[1:]
        else:
            self.ignoreErrors = 0
        d = self._dispatchCommand(line)
        if d is not None:
            d.addCallback(self._cbCommand)
            d.addErrback(self._ebCommand)
    def _dispatchCommand(self, line):
        """Look up and invoke the cmd_* handler for ``line``.

        Returns the handler's Deferred, or None for an unknown command (an
        error is reported and a fresh prompt is emitted instead).
        """
        if ' ' in line:
            command, rest = line.split(' ', 1)
            rest = rest.lstrip()
        else:
            command, rest = line, ''
        # '!cmd' runs a local shell command via cmd_EXEC.
        if command.startswith('!'): # command
            f = self.cmd_EXEC
            rest = (command[1:] + ' ' + rest).strip()
        else:
            command = command.upper()
            log.msg('looking up cmd %s' % command)
            f = getattr(self, 'cmd_%s' % command, None)
        if f is not None:
            return defer.maybeDeferred(f, rest)
        else:
            self._ebCommand(failure.Failure(NotImplementedError(
                "No command called `%s'" % command)))
            self._newLine()
def _printFailure(self, f):
log.msg(f)
e = f.trap(NotImplementedError, filetransfer.SFTPError, OSError, IOError)
if e == NotImplementedError:
self.transport.write(self.cmd_HELP(''))
elif e == filetransfer.SFTPError:
self.transport.write("remote error %i: %s\n" %
(f.value.code, f.value.message))
elif e in (OSError, IOError):
self.transport.write("local error %i: %s\n" %
(f.value.errno, f.value.strerror))
def _newLine(self):
if self.client.transport.localClosed:
return
self.transport.write(self.ps)
self.ignoreErrors = 0
if self.file:
l = self.file.readline()
if not l:
self.client.transport.loseConnection()
else:
self.transport.write(l)
self.lineReceived(l.strip())
def _cbCommand(self, result):
if result is not None:
self.transport.write(result)
if not result.endswith('\n'):
self.transport.write('\n')
self._newLine()
def _ebCommand(self, f):
self._printFailure(f)
if self.file and not self.ignoreErrors:
self.client.transport.loseConnection()
self._newLine()
def cmd_CD(self, path):
path, rest = self._getFilename(path)
if not path.endswith('/'):
path += '/'
newPath = path and os.path.join(self.currentDirectory, path) or ''
d = self.client.openDirectory(newPath)
d.addCallback(self._cbCd)
d.addErrback(self._ebCommand)
return d
def _cbCd(self, directory):
directory.close()
d = self.client.realPath(directory.name)
d.addCallback(self._cbCurDir)
return d
def _cbCurDir(self, path):
self.currentDirectory = path
def cmd_CHGRP(self, rest):
grp, rest = rest.split(None, 1)
path, rest = self._getFilename(rest)
grp = int(grp)
d = self.client.getAttrs(path)
d.addCallback(self._cbSetUsrGrp, path, grp=grp)
return d
def cmd_CHMOD(self, rest):
mod, rest = rest.split(None, 1)
path, rest = self._getFilename(rest)
mod = int(mod, 8)
d = self.client.setAttrs(path, {'permissions':mod})
d.addCallback(_ignore)
return d
def cmd_CHOWN(self, rest):
usr, rest = rest.split(None, 1)
path, rest = self._getFilename(rest)
usr = int(usr)
d = self.client.getAttrs(path)
d.addCallback(self._cbSetUsrGrp, path, usr=usr)
return d
def _cbSetUsrGrp(self, attrs, path, usr=None, grp=None):
new = {}
new['uid'] = (usr is not None) and usr or attrs['uid']
new['gid'] = (grp is not None) and grp or attrs['gid']
d = self.client.setAttrs(path, new)
d.addCallback(_ignore)
return d
def cmd_GET(self, rest):
remote, rest = self._getFilename(rest)
if '*' in remote or '?' in remote: # wildcard
if rest:
local, rest = self._getFilename(rest)
if not os.path.isdir(local):
return "Wildcard get with non-directory target."
else:
local = ''
d = self._remoteGlob(remote)
d.addCallback(self._cbGetMultiple, local)
return d
if rest:
local, rest = self._getFilename(rest)
else:
local = os.path.split(remote)[1]
log.msg((remote, local))
lf = file(local, 'w', 0)
path = os.path.join(self.currentDirectory, remote)
d = self.client.openFile(path, filetransfer.FXF_READ, {})
d.addCallback(self._cbGetOpenFile, lf)
d.addErrback(self._ebCloseLf, lf)
return d
def _cbGetMultiple(self, files, local):
#if self._useProgressBar: # one at a time
# XXX this can be optimized for times w/o progress bar
return self._cbGetMultipleNext(None, files, local)
def _cbGetMultipleNext(self, res, files, local):
if isinstance(res, failure.Failure):
self._printFailure(res)
elif res:
self.transport.write(res)
if not res.endswith('\n'):
self.transport.write('\n')
if not files:
return
f = files.pop(0)[0]
lf = file(os.path.join(local, os.path.split(f)[1]), 'w', 0)
path = os.path.join(self.currentDirectory, f)
d = self.client.openFile(path, filetransfer.FXF_READ, {})
d.addCallback(self._cbGetOpenFile, lf)
d.addErrback(self._ebCloseLf, lf)
d.addBoth(self._cbGetMultipleNext, files, local)
return d
def _ebCloseLf(self, f, lf):
lf.close()
return f
def _cbGetOpenFile(self, rf, lf):
return rf.getAttrs().addCallback(self._cbGetFileSize, rf, lf)
def _cbGetFileSize(self, attrs, rf, lf):
if not stat.S_ISREG(attrs['permissions']):
rf.close()
lf.close()
return "Can't get non-regular file: %s" % rf.name
rf.size = attrs['size']
bufferSize = self.client.transport.conn.options['buffersize']
numRequests = self.client.transport.conn.options['requests']
rf.total = 0.0
dList = []
chunks = []
startTime = time.time()
for i in range(numRequests):
d = self._cbGetRead('', rf, lf, chunks, 0, bufferSize, startTime)
dList.append(d)
dl = defer.DeferredList(dList, fireOnOneErrback=1)
dl.addCallback(self._cbGetDone, rf, lf)
return dl
    def _getNextChunk(self, chunks):
        # Pick the next byte range to request.  C{chunks} is an ordered list
        # of (start, end) ranges already requested; an end of 'eof' (set by
        # _cbGetRead on EOFError) marks the end of the file.  Gaps between
        # requested ranges are re-requested first; otherwise a fresh
        # buffer-sized range is appended past the last requested byte.
        # Returns a (start, length) tuple, or None at EOF.
        end = 0
        for chunk in chunks:
            if end == 'eof':
                return # nothing more to get
            if end != chunk[0]:
                # Gap between the previous range and this one: fill it in.
                i = chunks.index(chunk)
                chunks.insert(i, (end, chunk[0]))
                return (end, chunk[0] - end)
            end = chunk[1]
        # No gaps: extend the transfer by one buffer-sized range.
        bufSize = int(self.client.transport.conn.options['buffersize'])
        chunks.append((end, end + bufSize))
        return (end, bufSize)
def _cbGetRead(self, data, rf, lf, chunks, start, size, startTime):
if data and isinstance(data, failure.Failure):
log.msg('get read err: %s' % data)
reason = data
reason.trap(EOFError)
i = chunks.index((start, start + size))
del chunks[i]
chunks.insert(i, (start, 'eof'))
elif data:
log.msg('get read data: %i' % len(data))
lf.seek(start)
lf.write(data)
if len(data) != size:
log.msg('got less than we asked for: %i < %i' %
(len(data), size))
i = chunks.index((start, start + size))
del chunks[i]
chunks.insert(i, (start, start + len(data)))
rf.total += len(data)
if self.useProgressBar:
self._printProgessBar(rf, startTime)
chunk = self._getNextChunk(chunks)
if not chunk:
return
else:
start, length = chunk
log.msg('asking for %i -> %i' % (start, start+length))
d = rf.readChunk(start, length)
d.addBoth(self._cbGetRead, rf, lf, chunks, start, length, startTime)
return d
def _cbGetDone(self, ignored, rf, lf):
log.msg('get done')
rf.close()
lf.close()
if self.useProgressBar:
self.transport.write('\n')
return "Transferred %s to %s" % (rf.name, lf.name)
def cmd_PUT(self, rest):
local, rest = self._getFilename(rest)
if '*' in local or '?' in local: # wildcard
if rest:
remote, rest = self._getFilename(rest)
path = os.path.join(self.currentDirectory, remote)
d = self.client.getAttrs(path)
d.addCallback(self._cbPutTargetAttrs, remote, local)
return d
else:
remote = ''
files = glob.glob(local)
return self._cbPutMultipleNext(None, files, remote)
if rest:
remote, rest = self._getFilename(rest)
else:
remote = os.path.split(local)[1]
lf = file(local, 'r')
path = os.path.join(self.currentDirectory, remote)
flags = filetransfer.FXF_WRITE|filetransfer.FXF_CREAT|filetransfer.FXF_TRUNC
d = self.client.openFile(path, flags, {})
d.addCallback(self._cbPutOpenFile, lf)
d.addErrback(self._ebCloseLf, lf)
return d
def _cbPutTargetAttrs(self, attrs, path, local):
if not stat.S_ISDIR(attrs['permissions']):
return "Wildcard put with non-directory target."
return self._cbPutMultipleNext(None, files, path)
def _cbPutMultipleNext(self, res, files, path):
if isinstance(res, failure.Failure):
self._printFailure(res)
elif res:
self.transport.write(res)
if not res.endswith('\n'):
self.transport.write('\n')
f = None
while files and not f:
try:
f = files.pop(0)
lf = file(f, 'r')
except:
self._printFailure(failure.Failure())
f = None
if not f:
return
name = os.path.split(f)[1]
remote = os.path.join(self.currentDirectory, path, name)
log.msg((name, remote, path))
flags = filetransfer.FXF_WRITE|filetransfer.FXF_CREAT|filetransfer.FXF_TRUNC
d = self.client.openFile(remote, flags, {})
d.addCallback(self._cbPutOpenFile, lf)
d.addErrback(self._ebCloseLf, lf)
d.addBoth(self._cbPutMultipleNext, files, path)
return d
def _cbPutOpenFile(self, rf, lf):
numRequests = self.client.transport.conn.options['requests']
if self.useProgressBar:
lf = FileWrapper(lf)
dList = []
chunks = []
startTime = time.time()
for i in range(numRequests):
d = self._cbPutWrite(None, rf, lf, chunks, startTime)
if d:
dList.append(d)
dl = defer.DeferredList(dList, fireOnOneErrback=1)
dl.addCallback(self._cbPutDone, rf, lf)
return dl
def _cbPutWrite(self, ignored, rf, lf, chunks, startTime):
chunk = self._getNextChunk(chunks)
start, size = chunk
lf.seek(start)
data = lf.read(size)
if self.useProgressBar:
lf.total += len(data)
self._printProgessBar(lf, startTime)
if data:
d = rf.writeChunk(start, data)
d.addCallback(self._cbPutWrite, rf, lf, chunks, startTime)
return d
else:
return
def _cbPutDone(self, ignored, rf, lf):
lf.close()
rf.close()
if self.useProgressBar:
self.transport.write('\n')
return 'Transferred %s to %s' % (lf.name, rf.name)
def cmd_LCD(self, path):
os.chdir(path)
def cmd_LN(self, rest):
linkpath, rest = self._getFilename(rest)
targetpath, rest = self._getFilename(rest)
linkpath, targetpath = map(
lambda x: os.path.join(self.currentDirectory, x),
(linkpath, targetpath))
return self.client.makeLink(linkpath, targetpath).addCallback(_ignore)
def cmd_LS(self, rest):
# possible lines:
# ls current directory
# ls name_of_file that file
# ls name_of_directory that directory
# ls some_glob_string current directory, globbed for that string
options = []
rest = rest.split()
while rest and rest[0] and rest[0][0] == '-':
opts = rest.pop(0)[1:]
for o in opts:
if o == 'l':
options.append('verbose')
elif o == 'a':
options.append('all')
rest = ' '.join(rest)
path, rest = self._getFilename(rest)
if not path:
fullPath = self.currentDirectory + '/'
else:
fullPath = os.path.join(self.currentDirectory, path)
d = self._remoteGlob(fullPath)
d.addCallback(self._cbDisplayFiles, options)
return d
def _cbDisplayFiles(self, files, options):
files.sort()
if 'all' not in options:
files = [f for f in files if not f[0].startswith('.')]
if 'verbose' in options:
lines = [f[1] for f in files]
else:
lines = [f[0] for f in files]
if not lines:
return None
else:
return '\n'.join(lines)
def cmd_MKDIR(self, path):
path, rest = self._getFilename(path)
path = os.path.join(self.currentDirectory, path)
return self.client.makeDirectory(path, {}).addCallback(_ignore)
def cmd_RMDIR(self, path):
path, rest = self._getFilename(path)
path = os.path.join(self.currentDirectory, path)
return self.client.removeDirectory(path).addCallback(_ignore)
def cmd_LMKDIR(self, path):
os.system("mkdir %s" % path)
def cmd_RM(self, path):
path, rest = self._getFilename(path)
path = os.path.join(self.currentDirectory, path)
return self.client.removeFile(path).addCallback(_ignore)
def cmd_LLS(self, rest):
os.system("ls %s" % rest)
def cmd_RENAME(self, rest):
oldpath, rest = self._getFilename(rest)
newpath, rest = self._getFilename(rest)
oldpath, newpath = map (
lambda x: os.path.join(self.currentDirectory, x),
(oldpath, newpath))
return self.client.renameFile(oldpath, newpath).addCallback(_ignore)
def cmd_EXIT(self, ignored):
self.client.transport.loseConnection()
cmd_QUIT = cmd_EXIT
def cmd_VERSION(self, ignored):
return "SFTP version %i" % self.client.version
def cmd_HELP(self, ignored):
return """Available commands:
cd path Change remote directory to 'path'.
chgrp gid path Change gid of 'path' to 'gid'.
chmod mode path Change mode of 'path' to 'mode'.
chown uid path Change uid of 'path' to 'uid'.
exit Disconnect from the server.
get remote-path [local-path] Get remote file.
help Get a list of available commands.
lcd path Change local directory to 'path'.
lls [ls-options] [path] Display local directory listing.
lmkdir path Create local directory.
ln linkpath targetpath Symlink remote file.
lpwd Print the local working directory.
ls [-l] [path] Display remote directory listing.
mkdir path Create remote directory.
progress Toggle progress bar.
put local-path [remote-path] Put local file.
pwd Print the remote working directory.
quit Disconnect from the server.
rename oldpath newpath Rename remote file.
rmdir path Remove remote directory.
rm path Remove remote file.
version Print the SFTP version.
? Synonym for 'help'.
"""
def cmd_PWD(self, ignored):
return self.currentDirectory
def cmd_LPWD(self, ignored):
return os.getcwd()
def cmd_PROGRESS(self, ignored):
self.useProgressBar = not self.useProgressBar
return "%ssing progess bar." % (self.useProgressBar and "U" or "Not u")
def cmd_EXEC(self, rest):
"""
Run C{rest} using the user's shell (or /bin/sh if they do not have
one).
"""
shell = self._pwd.getpwnam(getpass.getuser())[6]
if not shell:
shell = '/bin/sh'
if rest:
cmds = ['-c', rest]
return utils.getProcessOutput(shell, cmds, errortoo=1)
else:
os.system(shell)
# accessory functions
def _remoteGlob(self, fullPath):
log.msg('looking up %s' % fullPath)
head, tail = os.path.split(fullPath)
if '*' in tail or '?' in tail:
glob = 1
else:
glob = 0
if tail and not glob: # could be file or directory
# try directory first
d = self.client.openDirectory(fullPath)
d.addCallback(self._cbOpenList, '')
d.addErrback(self._ebNotADirectory, head, tail)
else:
d = self.client.openDirectory(head)
d.addCallback(self._cbOpenList, tail)
return d
def _cbOpenList(self, directory, glob):
files = []
d = directory.read()
d.addBoth(self._cbReadFile, files, directory, glob)
return d
def _ebNotADirectory(self, reason, path, glob):
d = self.client.openDirectory(path)
d.addCallback(self._cbOpenList, glob)
return d
def _cbReadFile(self, files, l, directory, glob):
if not isinstance(files, failure.Failure):
if glob:
l.extend([f for f in files if fnmatch.fnmatch(f[0], glob)])
else:
l.extend(files)
d = directory.read()
d.addBoth(self._cbReadFile, l, directory, glob)
return d
else:
reason = files
reason.trap(EOFError)
directory.close()
return l
    def _abbrevSize(self, size):
        # Render a byte count with a power-of-two unit suffix, e.g. '1.2MB'.
        # from http://mail.python.org/pipermail/python-list/1999-December/018395.html
        _abbrevs = [
            (1<<50L, 'PB'),
            (1<<40L, 'TB'),
            (1<<30L, 'GB'),
            (1<<20L, 'MB'),
            (1<<10L, 'kb'),
            (1, '')
            ]
        for factor, suffix in _abbrevs:
            if size > factor:
                break
        # NOTE(review): with two int operands, Python 2 '/' truncates, so the
        # '%.1f' would always end in '.0' -- callers appear to pass floats
        # (rf.total / lf.total start at 0.0); confirm.
        return '%.1f' % (size/factor) + suffix
    def _abbrevTime(self, t):
        # Format a duration in seconds as H:MM:SS, or MM:SS under an hour.
        if t > 3600: # 1 hour
            hours = int(t / 3600)
            t -= (3600 * hours)
            mins = int(t / 60)
            t -= (60 * mins)
            return "%i:%02i:%02i" % (hours, mins, t)
        else:
            mins = int(t/60)
            t -= (60 * mins)
            return "%02i:%02i" % (mins, t)
def _printProgessBar(self, f, startTime):
diff = time.time() - startTime
total = f.total
try:
winSize = struct.unpack('4H',
fcntl.ioctl(0, tty.TIOCGWINSZ, '12345679'))
except IOError:
winSize = [None, 80]
speed = total/diff
if speed:
timeLeft = (f.size - total) / speed
else:
timeLeft = 0
front = f.name
back = '%3i%% %s %sps %s ' % ((total/f.size)*100, self._abbrevSize(total),
self._abbrevSize(total/diff), self._abbrevTime(timeLeft))
spaces = (winSize[1] - (len(front) + len(back) + 1)) * ' '
self.transport.write('\r%s%s%s' % (front, spaces, back))
def _getFilename(self, line):
line.lstrip()
if not line:
return None, ''
if line[0] in '\'"':
ret = []
line = list(line)
try:
for i in range(1,len(line)):
c = line[i]
if c == line[0]:
return ''.join(ret), ''.join(line[i+1:]).lstrip()
elif c == '\\': # quoted character
del line[i]
if line[i] not in '\'"\\':
raise IndexError, "bad quote: \\%s" % line[i]
ret.append(line[i])
else:
ret.append(line[i])
except IndexError:
raise IndexError, "unterminated quote"
ret = line.split(None, 1)
if len(ret) == 1:
return ret[0], ''
else:
return ret
# '?' is not a valid identifier, so the HELP alias must be installed through
# the class __dict__ rather than with a normal def.
StdioClient.__dict__['cmd_?'] = StdioClient.cmd_HELP
class SSHConnection(connection.SSHConnection):
    # Once the SSH connection service is up, open the single session channel
    # that will carry the SFTP subsystem.
    def serviceStarted(self):
        self.openChannel(SSHSession())
class SSHSession(channel.SSHChannel):
    """SSH session channel that starts the SFTP subsystem and wires it to a
    L{StdioClient} reading commands from stdin (or a batch file)."""
    name = 'session'
    def channelOpen(self, foo):
        log.msg('session %s open' % self.id)
        # A subsystem name beginning with '/' is treated as a server-side
        # program path and run with an 'exec' request instead.
        if self.conn.options['subsystem'].startswith('/'):
            request = 'exec'
        else:
            request = 'subsystem'
        d = self.conn.sendRequest(self, request, \
            common.NS(self.conn.options['subsystem']), wantReply=1)
        d.addCallback(self._cbSubsystem)
        d.addErrback(_ebExit)
    def _cbSubsystem(self, result):
        # Subsystem accepted: attach the SFTP protocol and hook this channel's
        # incoming data straight into it.
        self.client = filetransfer.FileTransferClient()
        self.client.makeConnection(self)
        self.dataReceived = self.client.dataReceived
        f = None
        if self.conn.options['batchfile']:
            fn = self.conn.options['batchfile']
            if fn != '-':
                f = file(fn)
        # Drive the interactive client from standard input ('-' batchfile
        # also means stdin, hence f stays None in that case).
        self.stdio = stdio.StandardIO(StdioClient(self.client, f))
    def extReceived(self, t, data):
        # Pass the server's stderr stream through to our own stderr.
        if t==connection.EXTENDED_DATA_STDERR:
            log.msg('got %s stderr data' % len(data))
            sys.stderr.write(data)
            sys.stderr.flush()
    def eofReceived(self):
        log.msg('got eof')
        self.stdio.closeStdin()
    def closeReceived(self):
        log.msg('remote side closed %s' % self)
        self.conn.sendClose(self)
    def closed(self):
        # Channel fully closed: shut the program down.
        try:
            reactor.stop()
        except:
            pass
    def stopWriting(self):
        # Flow control: pause/resume reading from stdin as the channel's
        # write buffer fills and drains.
        self.stdio.pauseProducing()
    def startWriting(self):
        self.stdio.resumeProducing()
if __name__ == '__main__':
    # Running this module directly starts the cftp command-line client.
    run()
| 33.907407
| 99
| 0.548189
|
"""
Implementation module for the I{cftp} command.
"""
import os, sys, getpass, struct, tty, fcntl, stat
import fnmatch, pwd, time, glob
from twisted.conch.client import connect, default, options
from twisted.conch.ssh import connection, common
from twisted.conch.ssh import channel, filetransfer
from twisted.protocols import basic
from twisted.internet import reactor, stdio, defer, utils
from twisted.python import log, usage, failure
class ClientOptions(options.ConchOptions):
synopsis = """Usage: cftp [options] [user@]host
cftp [options] [user@]host[:dir[/]]
cftp [options] [user@]host[:file [localfile]]
"""
optParameters = [
['buffersize', 'B', 32768, 'Size of the buffer to use for sending/receiving.'],
['batchfile', 'b', None, 'File to read commands from, or \'-\' for stdin.'],
['requests', 'R', 5, 'Number of requests to make before waiting for a reply.'],
['subsystem', 's', 'sftp', 'Subsystem/server program to connect to.']]
zsh_altArgDescr = {"buffersize":"Size of send/receive buffer (default: 32768)"}
zsh_extras = ['2::localfile:{if [[ $words[1] == *:* ]]; then; _files; fi}']
def parseArgs(self, host, localPath=None):
self['remotePath'] = ''
if ':' in host:
host, self['remotePath'] = host.split(':', 1)
self['remotePath'].rstrip('/')
self['host'] = host
self['localPath'] = localPath
def run():
    """Entry point for the cftp command: parse argv, set up logging, connect
    and run the reactor until the session ends."""
    args = sys.argv[1:]
    if '-l' in args:
        # Move '-l user' (and its argument) to the front so the options
        # parser sees it before the host argument.
        i = args.index('-l')
        args = args[i:i+2]+args
        del args[i+2:i+4]
    options = ClientOptions()
    try:
        options.parseOptions(args)
    except usage.UsageError, u:
        print 'ERROR: %s' % u
        sys.exit(1)
    if options['log']:
        # Log to stderr but keep normal output on the real stdout.
        realout = sys.stdout
        log.startLogging(sys.stderr)
        sys.stdout = realout
    else:
        log.discardLogs()
    doConnect(options)
    reactor.run()
def handleError():
    """Record a fatal error, stop the reactor and re-raise.

    NOTE(review): the bare C{raise} re-raises the exception currently being
    handled -- this presumably is only called from inside an except block;
    confirm against callers.
    """
    global exitStatus
    exitStatus = 2
    try:
        reactor.stop()
    except: pass  # reactor may already be stopped
    log.err(failure.Failure())
    raise
def doConnect(options):
f '@' in options['host']:
options['user'], options['host'] = options['host'].split('@',1)
host = options['host']
if not options['user']:
options['user'] = getpass.getuser()
if not options['port']:
options['port'] = 22
else:
options['port'] = int(options['port'])
host = options['host']
port = options['port']
conn = SSHConnection()
conn.options = options
vhk = default.verifyHostKey
uao = default.SSHUserAuthClient(options['user'], options, conn)
connect.connect(host, port, options, vhk, uao).addErrback(_ebExit)
def _ebExit(f):
    """Errback of last resort: print the failure's message and stop the
    reactor so the process exits."""
    if hasattr(f.value, 'value'):
        # Conch-style errors carry the message in .value.value.
        s = f.value.value
    else:
        s = str(f)
    print s
    try:
        reactor.stop()
    except: pass  # reactor may already be stopped/stopping
# Callback sink: discards its arguments (used to squash Deferred results).
def _ignore(*args): pass
class FileWrapper:
    """Wrap a seekable file, recording its total size and the number of
    bytes transferred so far (for progress reporting); all other attribute
    access is delegated to the wrapped file."""
    def __init__(self, f):
        self.f = f
        self.total = 0.0
        # Seek to EOF (whence=2) to learn the file's length.
        f.seek(0, 2)
        self.size = f.tell()
    def __getattr__(self, attr):
        # Delegate everything else to the underlying file object.
        return getattr(self.f, attr)
class StdioClient(basic.LineReceiver):
_pwd = pwd
ps = 'cftp> '
delimiter = '\n'
def __init__(self, client, f = None):
self.client = client
self.currentDirectory = ''
self.file = f
self.useProgressBar = (not f and 1) or 0
def connectionMade(self):
self.client.realPath('').addCallback(self._cbSetCurDir)
def _cbSetCurDir(self, path):
self.currentDirectory = path
self._newLine()
def lineReceived(self, line):
if self.client.transport.localClosed:
return
log.msg('got line %s' % repr(line))
line = line.lstrip()
if not line:
self._newLine()
return
if self.file and line.startswith('-'):
self.ignoreErrors = 1
line = line[1:]
else:
self.ignoreErrors = 0
d = self._dispatchCommand(line)
if d is not None:
d.addCallback(self._cbCommand)
d.addErrback(self._ebCommand)
def _dispatchCommand(self, line):
if ' ' in line:
command, rest = line.split(' ', 1)
rest = rest.lstrip()
else:
command, rest = line, ''
if command.startswith('!'):
f = self.cmd_EXEC
rest = (command[1:] + ' ' + rest).strip()
else:
command = command.upper()
log.msg('looking up cmd %s' % command)
f = getattr(self, 'cmd_%s' % command, None)
if f is not None:
return defer.maybeDeferred(f, rest)
else:
self._ebCommand(failure.Failure(NotImplementedError(
"No command called `%s'" % command)))
self._newLine()
def _printFailure(self, f):
log.msg(f)
e = f.trap(NotImplementedError, filetransfer.SFTPError, OSError, IOError)
if e == NotImplementedError:
self.transport.write(self.cmd_HELP(''))
elif e == filetransfer.SFTPError:
self.transport.write("remote error %i: %s\n" %
(f.value.code, f.value.message))
elif e in (OSError, IOError):
self.transport.write("local error %i: %s\n" %
(f.value.errno, f.value.strerror))
def _newLine(self):
if self.client.transport.localClosed:
return
self.transport.write(self.ps)
self.ignoreErrors = 0
if self.file:
l = self.file.readline()
if not l:
self.client.transport.loseConnection()
else:
self.transport.write(l)
self.lineReceived(l.strip())
def _cbCommand(self, result):
if result is not None:
self.transport.write(result)
if not result.endswith('\n'):
self.transport.write('\n')
self._newLine()
def _ebCommand(self, f):
self._printFailure(f)
if self.file and not self.ignoreErrors:
self.client.transport.loseConnection()
self._newLine()
def cmd_CD(self, path):
path, rest = self._getFilename(path)
if not path.endswith('/'):
path += '/'
newPath = path and os.path.join(self.currentDirectory, path) or ''
d = self.client.openDirectory(newPath)
d.addCallback(self._cbCd)
d.addErrback(self._ebCommand)
return d
def _cbCd(self, directory):
directory.close()
d = self.client.realPath(directory.name)
d.addCallback(self._cbCurDir)
return d
def _cbCurDir(self, path):
self.currentDirectory = path
def cmd_CHGRP(self, rest):
grp, rest = rest.split(None, 1)
path, rest = self._getFilename(rest)
grp = int(grp)
d = self.client.getAttrs(path)
d.addCallback(self._cbSetUsrGrp, path, grp=grp)
return d
def cmd_CHMOD(self, rest):
mod, rest = rest.split(None, 1)
path, rest = self._getFilename(rest)
mod = int(mod, 8)
d = self.client.setAttrs(path, {'permissions':mod})
d.addCallback(_ignore)
return d
def cmd_CHOWN(self, rest):
usr, rest = rest.split(None, 1)
path, rest = self._getFilename(rest)
usr = int(usr)
d = self.client.getAttrs(path)
d.addCallback(self._cbSetUsrGrp, path, usr=usr)
return d
def _cbSetUsrGrp(self, attrs, path, usr=None, grp=None):
new = {}
new['uid'] = (usr is not None) and usr or attrs['uid']
new['gid'] = (grp is not None) and grp or attrs['gid']
d = self.client.setAttrs(path, new)
d.addCallback(_ignore)
return d
def cmd_GET(self, rest):
remote, rest = self._getFilename(rest)
if '*' in remote or '?' in remote: # wildcard
if rest:
local, rest = self._getFilename(rest)
if not os.path.isdir(local):
return "Wildcard get with non-directory target."
else:
local = ''
d = self._remoteGlob(remote)
d.addCallback(self._cbGetMultiple, local)
return d
if rest:
local, rest = self._getFilename(rest)
else:
local = os.path.split(remote)[1]
log.msg((remote, local))
lf = file(local, 'w', 0)
path = os.path.join(self.currentDirectory, remote)
d = self.client.openFile(path, filetransfer.FXF_READ, {})
d.addCallback(self._cbGetOpenFile, lf)
d.addErrback(self._ebCloseLf, lf)
return d
def _cbGetMultiple(self, files, local):
#if self._useProgressBar: # one at a time
# XXX this can be optimized for times w/o progress bar
return self._cbGetMultipleNext(None, files, local)
def _cbGetMultipleNext(self, res, files, local):
if isinstance(res, failure.Failure):
self._printFailure(res)
elif res:
self.transport.write(res)
if not res.endswith('\n'):
self.transport.write('\n')
if not files:
return
f = files.pop(0)[0]
lf = file(os.path.join(local, os.path.split(f)[1]), 'w', 0)
path = os.path.join(self.currentDirectory, f)
d = self.client.openFile(path, filetransfer.FXF_READ, {})
d.addCallback(self._cbGetOpenFile, lf)
d.addErrback(self._ebCloseLf, lf)
d.addBoth(self._cbGetMultipleNext, files, local)
return d
def _ebCloseLf(self, f, lf):
lf.close()
return f
def _cbGetOpenFile(self, rf, lf):
return rf.getAttrs().addCallback(self._cbGetFileSize, rf, lf)
def _cbGetFileSize(self, attrs, rf, lf):
if not stat.S_ISREG(attrs['permissions']):
rf.close()
lf.close()
return "Can't get non-regular file: %s" % rf.name
rf.size = attrs['size']
bufferSize = self.client.transport.conn.options['buffersize']
numRequests = self.client.transport.conn.options['requests']
rf.total = 0.0
dList = []
chunks = []
startTime = time.time()
for i in range(numRequests):
d = self._cbGetRead('', rf, lf, chunks, 0, bufferSize, startTime)
dList.append(d)
dl = defer.DeferredList(dList, fireOnOneErrback=1)
dl.addCallback(self._cbGetDone, rf, lf)
return dl
def _getNextChunk(self, chunks):
end = 0
for chunk in chunks:
if end == 'eof':
return
if end != chunk[0]:
i = chunks.index(chunk)
chunks.insert(i, (end, chunk[0]))
return (end, chunk[0] - end)
end = chunk[1]
bufSize = int(self.client.transport.conn.options['buffersize'])
chunks.append((end, end + bufSize))
return (end, bufSize)
def _cbGetRead(self, data, rf, lf, chunks, start, size, startTime):
if data and isinstance(data, failure.Failure):
log.msg('get read err: %s' % data)
reason = data
reason.trap(EOFError)
i = chunks.index((start, start + size))
del chunks[i]
chunks.insert(i, (start, 'eof'))
elif data:
log.msg('get read data: %i' % len(data))
lf.seek(start)
lf.write(data)
if len(data) != size:
log.msg('got less than we asked for: %i < %i' %
(len(data), size))
i = chunks.index((start, start + size))
del chunks[i]
chunks.insert(i, (start, start + len(data)))
rf.total += len(data)
if self.useProgressBar:
self._printProgessBar(rf, startTime)
chunk = self._getNextChunk(chunks)
if not chunk:
return
else:
start, length = chunk
log.msg('asking for %i -> %i' % (start, start+length))
d = rf.readChunk(start, length)
d.addBoth(self._cbGetRead, rf, lf, chunks, start, length, startTime)
return d
def _cbGetDone(self, ignored, rf, lf):
log.msg('get done')
rf.close()
lf.close()
if self.useProgressBar:
self.transport.write('\n')
return "Transferred %s to %s" % (rf.name, lf.name)
def cmd_PUT(self, rest):
local, rest = self._getFilename(rest)
if '*' in local or '?' in local:
if rest:
remote, rest = self._getFilename(rest)
path = os.path.join(self.currentDirectory, remote)
d = self.client.getAttrs(path)
d.addCallback(self._cbPutTargetAttrs, remote, local)
return d
else:
remote = ''
files = glob.glob(local)
return self._cbPutMultipleNext(None, files, remote)
if rest:
remote, rest = self._getFilename(rest)
else:
remote = os.path.split(local)[1]
lf = file(local, 'r')
path = os.path.join(self.currentDirectory, remote)
flags = filetransfer.FXF_WRITE|filetransfer.FXF_CREAT|filetransfer.FXF_TRUNC
d = self.client.openFile(path, flags, {})
d.addCallback(self._cbPutOpenFile, lf)
d.addErrback(self._ebCloseLf, lf)
return d
def _cbPutTargetAttrs(self, attrs, path, local):
if not stat.S_ISDIR(attrs['permissions']):
return "Wildcard put with non-directory target."
return self._cbPutMultipleNext(None, files, path)
def _cbPutMultipleNext(self, res, files, path):
if isinstance(res, failure.Failure):
self._printFailure(res)
elif res:
self.transport.write(res)
if not res.endswith('\n'):
self.transport.write('\n')
f = None
while files and not f:
try:
f = files.pop(0)
lf = file(f, 'r')
except:
self._printFailure(failure.Failure())
f = None
if not f:
return
name = os.path.split(f)[1]
remote = os.path.join(self.currentDirectory, path, name)
log.msg((name, remote, path))
flags = filetransfer.FXF_WRITE|filetransfer.FXF_CREAT|filetransfer.FXF_TRUNC
d = self.client.openFile(remote, flags, {})
d.addCallback(self._cbPutOpenFile, lf)
d.addErrback(self._ebCloseLf, lf)
d.addBoth(self._cbPutMultipleNext, files, path)
return d
def _cbPutOpenFile(self, rf, lf):
numRequests = self.client.transport.conn.options['requests']
if self.useProgressBar:
lf = FileWrapper(lf)
dList = []
chunks = []
startTime = time.time()
for i in range(numRequests):
d = self._cbPutWrite(None, rf, lf, chunks, startTime)
if d:
dList.append(d)
dl = defer.DeferredList(dList, fireOnOneErrback=1)
dl.addCallback(self._cbPutDone, rf, lf)
return dl
def _cbPutWrite(self, ignored, rf, lf, chunks, startTime):
chunk = self._getNextChunk(chunks)
start, size = chunk
lf.seek(start)
data = lf.read(size)
if self.useProgressBar:
lf.total += len(data)
self._printProgessBar(lf, startTime)
if data:
d = rf.writeChunk(start, data)
d.addCallback(self._cbPutWrite, rf, lf, chunks, startTime)
return d
else:
return
def _cbPutDone(self, ignored, rf, lf):
lf.close()
rf.close()
if self.useProgressBar:
self.transport.write('\n')
return 'Transferred %s to %s' % (lf.name, rf.name)
def cmd_LCD(self, path):
os.chdir(path)
def cmd_LN(self, rest):
linkpath, rest = self._getFilename(rest)
targetpath, rest = self._getFilename(rest)
linkpath, targetpath = map(
lambda x: os.path.join(self.currentDirectory, x),
(linkpath, targetpath))
return self.client.makeLink(linkpath, targetpath).addCallback(_ignore)
def cmd_LS(self, rest):
options = []
rest = rest.split()
while rest and rest[0] and rest[0][0] == '-':
opts = rest.pop(0)[1:]
for o in opts:
if o == 'l':
options.append('verbose')
elif o == 'a':
options.append('all')
rest = ' '.join(rest)
path, rest = self._getFilename(rest)
if not path:
fullPath = self.currentDirectory + '/'
else:
fullPath = os.path.join(self.currentDirectory, path)
d = self._remoteGlob(fullPath)
d.addCallback(self._cbDisplayFiles, options)
return d
def _cbDisplayFiles(self, files, options):
files.sort()
if 'all' not in options:
files = [f for f in files if not f[0].startswith('.')]
if 'verbose' in options:
lines = [f[1] for f in files]
else:
lines = [f[0] for f in files]
if not lines:
return None
else:
return '\n'.join(lines)
def cmd_MKDIR(self, path):
path, rest = self._getFilename(path)
path = os.path.join(self.currentDirectory, path)
return self.client.makeDirectory(path, {}).addCallback(_ignore)
def cmd_RMDIR(self, path):
path, rest = self._getFilename(path)
path = os.path.join(self.currentDirectory, path)
return self.client.removeDirectory(path).addCallback(_ignore)
def cmd_LMKDIR(self, path):
os.system("mkdir %s" % path)
def cmd_RM(self, path):
path, rest = self._getFilename(path)
path = os.path.join(self.currentDirectory, path)
return self.client.removeFile(path).addCallback(_ignore)
def cmd_LLS(self, rest):
os.system("ls %s" % rest)
def cmd_RENAME(self, rest):
oldpath, rest = self._getFilename(rest)
newpath, rest = self._getFilename(rest)
oldpath, newpath = map (
lambda x: os.path.join(self.currentDirectory, x),
(oldpath, newpath))
return self.client.renameFile(oldpath, newpath).addCallback(_ignore)
def cmd_EXIT(self, ignored):
self.client.transport.loseConnection()
cmd_QUIT = cmd_EXIT
def cmd_VERSION(self, ignored):
return "SFTP version %i" % self.client.version
def cmd_HELP(self, ignored):
return """Available commands:
cd path Change remote directory to 'path'.
chgrp gid path Change gid of 'path' to 'gid'.
chmod mode path Change mode of 'path' to 'mode'.
chown uid path Change uid of 'path' to 'uid'.
exit Disconnect from the server.
get remote-path [local-path] Get remote file.
help Get a list of available commands.
lcd path Change local directory to 'path'.
lls [ls-options] [path] Display local directory listing.
lmkdir path Create local directory.
ln linkpath targetpath Symlink remote file.
lpwd Print the local working directory.
ls [-l] [path] Display remote directory listing.
mkdir path Create remote directory.
progress Toggle progress bar.
put local-path [remote-path] Put local file.
pwd Print the remote working directory.
quit Disconnect from the server.
rename oldpath newpath Rename remote file.
rmdir path Remove remote directory.
rm path Remove remote file.
version Print the SFTP version.
? Synonym for 'help'.
"""
def cmd_PWD(self, ignored):
return self.currentDirectory
def cmd_LPWD(self, ignored):
return os.getcwd()
def cmd_PROGRESS(self, ignored):
self.useProgressBar = not self.useProgressBar
return "%ssing progess bar." % (self.useProgressBar and "U" or "Not u")
    def cmd_EXEC(self, rest):
        """
        Run C{rest} using the user's shell (or /bin/sh if they do not have
        one).
        """
        # Field 6 of the passwd entry is the user's login shell.
        shell = self._pwd.getpwnam(getpass.getuser())[6]
        if not shell:
            shell = '/bin/sh'
        if rest:
            cmds = ['-c', rest]
            # Capture the command's output (stderr too, via errortoo=1).
            return utils.getProcessOutput(shell, cmds, errortoo=1)
        else:
            # No command given: drop into an interactive shell.
            os.system(shell)
    # accessory functions
    def _remoteGlob(self, fullPath):
        """List remote files matching C{fullPath}, returning a Deferred.

        If the final path segment contains '*' or '?', the parent directory
        is listed and entries are filtered against the pattern; otherwise
        the path is first tried as a directory, falling back (via errback)
        to treating it as a file inside its parent.
        """
        log.msg('looking up %s' % fullPath)
        head, tail = os.path.split(fullPath)
        if '*' in tail or '?' in tail:
            glob = 1
        else:
            glob = 0
        if tail and not glob: # could be file or directory
            # try directory first
            d = self.client.openDirectory(fullPath)
            d.addCallback(self._cbOpenList, '')
            d.addErrback(self._ebNotADirectory, head, tail)
        else:
            d = self.client.openDirectory(head)
            d.addCallback(self._cbOpenList, tail)
        return d
    def _cbOpenList(self, directory, glob):
        """Start reading an opened remote directory, accumulating entries
        into a fresh list via _cbReadFile."""
        files = []
        d = directory.read()
        d.addBoth(self._cbReadFile, files, directory, glob)
        return d
    def _ebNotADirectory(self, reason, path, glob):
        """Fallback when a path was not a directory: list its parent and
        filter by the final segment instead."""
        d = self.client.openDirectory(path)
        d.addCallback(self._cbOpenList, glob)
        return d
    def _cbReadFile(self, files, l, directory, glob):
        """Accumulate directory entries until EOF.

        Called via addBoth, so C{files} is either a batch of entries or a
        Failure.  Entries are appended to C{l} (filtered by C{glob} when
        given, matching on the entry's filename at index 0); on EOFError
        the directory is closed and the accumulated list is returned.
        """
        if not isinstance(files, failure.Failure):
            if glob:
                l.extend([f for f in files if fnmatch.fnmatch(f[0], glob)])
            else:
                l.extend(files)
            d = directory.read()
            d.addBoth(self._cbReadFile, l, directory, glob)
            return d
        else:
            reason = files
            # Any failure other than EOF is re-raised by trap().
            reason.trap(EOFError)
            directory.close()
            return l
def _abbrevSize(self, size):
# from http://mail.python.org/pipermail/python-list/1999-December/018395.html
_abbrevs = [
(1<<50L, 'PB'),
(1<<40L, 'TB'),
(1<<30L, 'GB'),
(1<<20L, 'MB'),
(1<<10L, 'kb'),
(1, '')
]
for factor, suffix in _abbrevs:
if size > factor:
break
return '%.1f' % (size/factor) + suffix
def _abbrevTime(self, t):
if t > 3600: # 1 hour
hours = int(t / 3600)
t -= (3600 * hours)
mins = int(t / 60)
t -= (60 * mins)
return "%i:%02i:%02i" % (hours, mins, t)
else:
mins = int(t/60)
t -= (60 * mins)
return "%02i:%02i" % (mins, t)
    def _printProgessBar(self, f, startTime):
        """Redraw the one-line transfer progress bar for file wrapper C{f}.

        C{f} must expose C{name}, C{total} (bytes transferred so far) and
        C{size} (total bytes).  Terminal width is probed with TIOCGWINSZ,
        defaulting to 80 columns when fd 0 is not a tty.
        """
        diff = time.time() - startTime
        total = f.total
        try:
            winSize = struct.unpack('4H',
                fcntl.ioctl(0, tty.TIOCGWINSZ, '12345679'))
        except IOError:
            winSize = [None, 80]
        speed = total/diff
        if speed:
            timeLeft = (f.size - total) / speed
        else:
            timeLeft = 0
        front = f.name
        back = '%3i%% %s %sps %s ' % ((total/f.size)*100, self._abbrevSize(total),
            self._abbrevSize(total/diff), self._abbrevTime(timeLeft))
        spaces = (winSize[1] - (len(front) + len(back) + 1)) * ' '
        # Carriage return without newline so the bar overwrites itself.
        self.transport.write('\r%s%s%s' % (front, spaces, back))
def _getFilename(self, line):
line.lstrip()
if not line:
return None, ''
if line[0] in '\'"':
ret = []
line = list(line)
try:
for i in range(1,len(line)):
c = line[i]
if c == line[0]:
return ''.join(ret), ''.join(line[i+1:]).lstrip()
elif c == '\\': # quoted character
del line[i]
if line[i] not in '\'"\\':
raise IndexError, "bad quote: \\%s" % line[i]
ret.append(line[i])
else:
ret.append(line[i])
except IndexError:
raise IndexError, "unterminated quote"
ret = line.split(None, 1)
if len(ret) == 1:
return ret[0], ''
else:
return ret
# Register 'cmd_?' as an alias for cmd_HELP; '?' is not a valid identifier,
# so it cannot be created with a def statement.
# NOTE(review): assigning into a class __dict__ is rejected on new-style
# classes (mappingproxy is read-only) -- confirm StdioClient's base class;
# setattr(StdioClient, 'cmd_?', ...) would be the portable spelling.
StdioClient.__dict__['cmd_?'] = StdioClient.cmd_HELP
class SSHConnection(connection.SSHConnection):
    """SSH connection service that opens a single SFTP session channel."""
    def serviceStarted(self):
        self.openChannel(SSHSession())
class SSHSession(channel.SSHChannel):
    """Channel that runs the SFTP subsystem and bridges it to stdio."""
    name = 'session'
    def channelOpen(self, foo):
        """Request the SFTP subsystem (or exec an absolute server path)."""
        log.msg('session %s open' % self.id)
        # A subsystem option starting with '/' is treated as a server-side
        # binary to exec instead of a named subsystem.
        if self.conn.options['subsystem'].startswith('/'):
            request = 'exec'
        else:
            request = 'subsystem'
        d = self.conn.sendRequest(self, request, \
            common.NS(self.conn.options['subsystem']), wantReply=1)
        d.addCallback(self._cbSubsystem)
        d.addErrback(_ebExit)
    def _cbSubsystem(self, result):
        """Wire up the file-transfer protocol and the stdio command loop."""
        self.client = filetransfer.FileTransferClient()
        self.client.makeConnection(self)
        # Route incoming channel data straight into the SFTP client.
        self.dataReceived = self.client.dataReceived
        f = None
        if self.conn.options['batchfile']:
            fn = self.conn.options['batchfile']
            if fn != '-':
                # NOTE(review): Python 2-only builtin file(); open() on 3.
                f = file(fn)
        self.stdio = stdio.StandardIO(StdioClient(self.client, f))
    def extReceived(self, t, data):
        """Forward the server's stderr stream to the local stderr."""
        if t==connection.EXTENDED_DATA_STDERR:
            log.msg('got %s stderr data' % len(data))
            sys.stderr.write(data)
            sys.stderr.flush()
    def eofReceived(self):
        """Server sent EOF: stop feeding it stdin."""
        log.msg('got eof')
        self.stdio.closeStdin()
    def closeReceived(self):
        """Server closed its side; close ours too."""
        log.msg('remote side closed %s' % self)
        self.conn.sendClose(self)
    def closed(self):
        """Channel fully closed: stop the reactor (ignore if not running)."""
        try:
            reactor.stop()
        except:
            pass
    def stopWriting(self):
        # Backpressure from the channel: pause reading local stdin.
        self.stdio.pauseProducing()
    def startWriting(self):
        # Channel drained: resume reading local stdin.
        self.stdio.resumeProducing()
# Script entry point; run() is defined earlier in this file.
if __name__ == '__main__':
    run()
| false
| true
|
f7188a38996cf7c598da7499b124f3d25c63ff64
| 24
|
py
|
Python
|
trimesh/version.py
|
hroncok/trimesh
|
85c6af12f8bfdf7d3e6c0b8fa553142a9d4219fe
|
[
"MIT"
] | null | null | null |
trimesh/version.py
|
hroncok/trimesh
|
85c6af12f8bfdf7d3e6c0b8fa553142a9d4219fe
|
[
"MIT"
] | null | null | null |
trimesh/version.py
|
hroncok/trimesh
|
85c6af12f8bfdf7d3e6c0b8fa553142a9d4219fe
|
[
"MIT"
] | null | null | null |
__version__ = '2.35.24'
| 12
| 23
| 0.666667
|
__version__ = '2.35.24'
| true
| true
|
f7188a5d6697e456a89894a7330aa0af3fad00a2
| 146
|
py
|
Python
|
tests/filetwo.py
|
alexandrevicenzi/lazyconfig
|
a03aa0b92cf8f810a8652728d80dd0d792dd66ed
|
[
"MIT"
] | null | null | null |
tests/filetwo.py
|
alexandrevicenzi/lazyconfig
|
a03aa0b92cf8f810a8652728d80dd0d792dd66ed
|
[
"MIT"
] | null | null | null |
tests/filetwo.py
|
alexandrevicenzi/lazyconfig
|
a03aa0b92cf8f810a8652728d80dd0d792dd66ed
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import sys
sys.path.append('./')
from lazyconfig import lazyconfig
def get_name():
    """Return the 'name' attribute from the lazy configuration object."""
    return lazyconfig.config.name
| 12.166667
| 33
| 0.678082
|
import sys
sys.path.append('./')
from lazyconfig import lazyconfig
def get_name():
return lazyconfig.config.name
| true
| true
|
f7188bfb1008501bd297684979855b6ad8cfff58
| 2,432
|
py
|
Python
|
Framework/Sketch/Helpers/Metrices.py
|
Gruschwick/ECG_PLATFORM
|
4a1ee568e8593938a3b51c595d4834f861a6db6e
|
[
"MIT"
] | 5
|
2021-01-28T00:04:35.000Z
|
2022-03-05T05:35:10.000Z
|
Framework/Sketch/Helpers/Metrices.py
|
Gruschwick/ECG_PLATFORM
|
4a1ee568e8593938a3b51c595d4834f861a6db6e
|
[
"MIT"
] | null | null | null |
Framework/Sketch/Helpers/Metrices.py
|
Gruschwick/ECG_PLATFORM
|
4a1ee568e8593938a3b51c595d4834f861a6db6e
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Mon Mar 11 16:56:51 2019
@author: x
"""
import numpy as np
from collections import Counter
class MetricesConstants(object):
    """Namespace for peak-matching constants."""
    #qrs_cutoff_distance = 0.2
    # Maximum ref/predicted peak distance still counted as a match
    # (presumably seconds -- confirm units against callers).
    qrs_cutoff_distance = 0.120 #https://www.sciencedirect.com/science/article/abs/pii/S1746809417300216
def sample_to_time(samples, freq):
    """Convert sample indices to time given the sampling frequency."""
    seconds = samples / freq
    return seconds
def match_peaks( ref_peaks, pred_peaks, cutoff_distance = None):
    '''
    calc best matching between ref_peaks and pred_peaks with cutoff (error time distance no longer than cutoff_distance)
    [(ref_peaks[r], pred_peaks[c]) for r, c in zip(row_ind, col_ind)

    Returns a list of (ref_index, pred_index) pairs whose absolute
    distance is within cutoff_distance (defaults to
    MetricesConstants.qrs_cutoff_distance).
    '''
    from scipy.optimize import linear_sum_assignment
    assert np.all(ref_peaks >= 0), "positive time"
    assert np.all(pred_peaks >= 0), "positive time"
    if cutoff_distance is None:
        cutoff_distance = MetricesConstants.qrs_cutoff_distance
    max_ref_peaks = np.max(ref_peaks)
    len_ref_peaks = len(ref_peaks)
    max_pred_peaks = np.max(pred_peaks)
    len_pred_peaks = len(pred_peaks)
    max_len = max(len_ref_peaks, len_pred_peaks)
    max_peaks = max(max_ref_peaks, max_pred_peaks)
    # Sentinel cost far beyond any real distance; dummy padded entries can
    # never form a within-cutoff match.
    max_distance = max_peaks*10000
    # Pad both arrays to equal length: the assignment problem below needs a
    # square cost matrix / complete assignment.
    ref_peaks = np.pad(ref_peaks, ((0,max_len - len_ref_peaks),), 'constant', constant_values=(0, max_distance))
    pred_peaks = np.pad(pred_peaks, ((0,max_len - len_pred_peaks),), 'constant', constant_values=(0, max_distance))
    # Pairwise |ref - pred| cost matrix; pairs beyond the cutoff are forced
    # to the sentinel cost so the solver never prefers them.
    distance_matrix = np.abs(ref_peaks[:,np.newaxis] - pred_peaks[np.newaxis,:])
    distance_matrix[distance_matrix > cutoff_distance] = max_distance
    row_ind, col_ind= linear_sum_assignment(distance_matrix)
    # Keep only assignments that are genuine within-cutoff matches.
    matching_filtered = [(r,c) for r, c in zip(row_ind, col_ind) if distance_matrix[r,c] <= cutoff_distance]
    #ref_peaks[r], pred_peaks[c]
    return matching_filtered
def qrs_detection_scores( ref_peaks, pred_peaks, peaks_matching):
    """Return (mean error, error std, sensitivity TPR, precision PPV) for a
    peak matching produced by match_peaks."""
    errors = [ref_peaks[i] - pred_peaks[j] for i, j in peaks_matching]
    sensitivity = len(peaks_matching) / len(ref_peaks)
    precision = len(peaks_matching) / len(pred_peaks)
    return np.mean(errors), np.std(errors), sensitivity, precision
def qrs_detection_by_class(ref_peaks_class, peaks_matching):
    """Per-class detection rate plus reference/detected counts per class.

    NOTE(review): the first returned value is a *set* of (class, rate)
    tuples -- a dict comprehension ({k: ...}) looks intended; changing it
    would alter the return type, so confirm callers first.
    """
    ref_counts = Counter(ref_peaks_class)
    detected_counts = Counter(ref_peaks_class[r] for r, c in peaks_matching)
    return {(k, detected_counts.get(k,0)/ref_counts[k]) for k in ref_counts.keys()}, ref_counts, detected_counts
| 37.415385
| 123
| 0.713816
|
import numpy as np
from collections import Counter
class MetricesConstants(object):
qrs_cutoff_distance = 0.120
def sample_to_time(samples, freq):
return samples/freq
def match_peaks( ref_peaks, pred_peaks, cutoff_distance = None):
from scipy.optimize import linear_sum_assignment
assert np.all(ref_peaks >= 0), "positive time"
assert np.all(pred_peaks >= 0), "positive time"
if cutoff_distance is None:
cutoff_distance = MetricesConstants.qrs_cutoff_distance
max_ref_peaks = np.max(ref_peaks)
len_ref_peaks = len(ref_peaks)
max_pred_peaks = np.max(pred_peaks)
len_pred_peaks = len(pred_peaks)
max_len = max(len_ref_peaks, len_pred_peaks)
max_peaks = max(max_ref_peaks, max_pred_peaks)
max_distance = max_peaks*10000
ref_peaks = np.pad(ref_peaks, ((0,max_len - len_ref_peaks),), 'constant', constant_values=(0, max_distance))
pred_peaks = np.pad(pred_peaks, ((0,max_len - len_pred_peaks),), 'constant', constant_values=(0, max_distance))
distance_matrix = np.abs(ref_peaks[:,np.newaxis] - pred_peaks[np.newaxis,:])
distance_matrix[distance_matrix > cutoff_distance] = max_distance
row_ind, col_ind= linear_sum_assignment(distance_matrix)
matching_filtered = [(r,c) for r, c in zip(row_ind, col_ind) if distance_matrix[r,c] <= cutoff_distance]
return matching_filtered
def qrs_detection_scores( ref_peaks, pred_peaks, peaks_matching):
deltas = [(ref_peaks[r] - pred_peaks[c]) for r, c in peaks_matching]
tpr = len(peaks_matching)/len(ref_peaks)
ppv = len(peaks_matching)/len(pred_peaks)
return np.mean(deltas), np.std(deltas), tpr, ppv
def qrs_detection_by_class(ref_peaks_class, peaks_matching):
ref_counts = Counter(ref_peaks_class)
detected_counts = Counter(ref_peaks_class[r] for r, c in peaks_matching)
return {(k, detected_counts.get(k,0)/ref_counts[k]) for k in ref_counts.keys()}, ref_counts, detected_counts
| true
| true
|
f7188c282ded875e0b10619595c7c3e809117a5a
| 222
|
py
|
Python
|
old_code/keras_main.py
|
pgruening/dlbio
|
0c4e468bcd5d7e298fbecba13003bcae36889486
|
[
"MIT"
] | 1
|
2020-10-08T11:14:48.000Z
|
2020-10-08T11:14:48.000Z
|
old_code/keras_main.py
|
pgruening/dlbio
|
0c4e468bcd5d7e298fbecba13003bcae36889486
|
[
"MIT"
] | 5
|
2020-03-24T18:01:02.000Z
|
2022-03-12T00:17:24.000Z
|
old_code/keras_main.py
|
pgruening/dlbio
|
0c4e468bcd5d7e298fbecba13003bcae36889486
|
[
"MIT"
] | 1
|
2021-11-29T10:31:28.000Z
|
2021-11-29T10:31:28.000Z
|
from keras.utils.generic_utils import get_custom_objects
from main import IMain
class KerasMain(IMain):
    """Main variant that registers custom objects with Keras."""
    def init_costum_objects(self, costum_objects):
        # Make custom layers/metrics resolvable by name (e.g. on model load).
        get_custom_objects().update(
            costum_objects)
| 24.666667
| 56
| 0.752252
|
from keras.utils.generic_utils import get_custom_objects
from main import IMain
class KerasMain(IMain):
def init_costum_objects(self, costum_objects):
get_custom_objects().update(
costum_objects)
| true
| true
|
f7188d171f94cd5bbe951736476fdbef7da4879a
| 7,038
|
py
|
Python
|
source/conf.py
|
edgarriba/tutorials
|
781378818dde4b1e055e9b2d3cb8ea02d66a863e
|
[
"Apache-2.0"
] | 1
|
2021-05-03T06:42:35.000Z
|
2021-05-03T06:42:35.000Z
|
source/conf.py
|
edgarriba/tutorials
|
781378818dde4b1e055e9b2d3cb8ea02d66a863e
|
[
"Apache-2.0"
] | null | null | null |
source/conf.py
|
edgarriba/tutorials
|
781378818dde4b1e055e9b2d3cb8ea02d66a863e
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
#
# Configuration file for the Sphinx documentation builder.
#
# This file does only contain a selection of the most common options. For a
# full list see the documentation:
# http://www.sphinx-doc.org/en/master/config
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- Project information -----------------------------------------------------
project = 'Kornia Tutorials'
copyright = '2021, Kornia Authors'
author = 'Kornia Authors'
# The short X.Y version
version = ''
# The full version, including alpha/beta/rc tags
release = ''
# -- General configuration ---------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'nbsphinx',
'sphinx.ext.mathjax',
'sphinx.ext.githubpages',
]
exclude_patterns = ['_build', '**.ipynb_checkpoints']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = ['.rst', '.ipynb']
# The master toctree document.
master_doc = 'index'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
# Fixed: this assignment previously reset exclude_patterns to [], silently
# discarding the '_build' / notebook-checkpoint exclusions set earlier in
# this file.
exclude_patterns = ['_build', '**.ipynb_checkpoints']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = None
autosummary_generate = True
napolean_use_rtype = False
# -- Options for nbsphinx -----------------------------------------------------
# Execute notebooks before conversion: 'always', 'never', 'auto' (default)
# We execute all notebooks, exclude the slow ones using 'exclude_patterns'
nbsphinx_execute = 'never'
# Use this kernel instead of the one stored in the notebook metadata:
#nbsphinx_kernel_name = 'python3'
# List of arguments to be passed to the kernel that executes the notebooks:
# nbsphinx_execute_arguments = []
# If True, the build process is continued even if an exception occurs:
#nbsphinx_allow_errors = True
# Controls when a cell will time out (defaults to 30; use -1 for no timeout):
#nbsphinx_timeout = 180
# Default Pygments lexer for syntax highlighting in code cells:
#nbsphinx_codecell_lexer = 'ipython3'
# Width of input/output prompts used in CSS:
#nbsphinx_prompt_width = '8ex'
# If window is narrower than this, input/output prompts are on separate lines:
#nbsphinx_responsive_width = '700px'
# This is processed by Jinja2 and inserted before each notebook
nbsphinx_prolog = r"""
{% set docname = 'source/' + env.doc2path(env.docname, base=None) %}
.. only:: html
.. role:: raw-html(raw)
:format: html
.. nbinfo::
Interactive online version:
:raw-html:`<a href="https://colab.research.google.com/github/kornia/tutorials/blob/master/{{ docname }}" target="_blank" rel="noopener noreferrer><img alt="Open In Colab" src="https://colab.research.google.com/assets/colab-badge.svg" style="vertical-align:text-bottom"></a>`
__ https://github.com/kornia/tutorials/blob/
{{ env.config.release }}/{{ docname }}
"""
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# The default sidebars (for documents that don't match any pattern) are
# defined by theme itself. Builtin themes are using these templates by
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
# html_sidebars = {}
# -- Options for HTMLHelp output ---------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'KorniaTutorialsdoc'
# -- Options for LaTeX output ------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'KorniaTutorials.tex', 'Kornia Tutorials Documentation',
'Kornia Authors', 'manual'),
]
# -- Options for manual page output ------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'korniatutorials', 'Kornia Tutorials Documentation',
[author], 1)
]
# -- Options for Texinfo output ----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'KorniaTutorials', 'Kornia Tutorials Documentation',
author, 'KorniaTutorials', 'One line description of project.',
'Miscellaneous'),
]
# -- Options for Epub output -------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#
# epub_identifier = ''
# A unique identification for the text.
#
# epub_uid = ''
# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']
| 31.004405
| 282
| 0.66624
|
project = 'Kornia Tutorials'
copyright = '2021, Kornia Authors'
author = 'Kornia Authors'
version = ''
release = ''
extensions = [
'nbsphinx',
'sphinx.ext.mathjax',
'sphinx.ext.githubpages',
]
exclude_patterns = ['_build', '**.ipynb_checkpoints']
templates_path = ['_templates']
source_suffix = ['.rst', '.ipynb']
master_doc = 'index'
language = None
exclude_patterns = []
pygments_style = None
autosummary_generate = True
napolean_use_rtype = False
nbsphinx_execute = 'never'
nbsphinx_prolog = r"""
{% set docname = 'source/' + env.doc2path(env.docname, base=None) %}
.. only:: html
.. role:: raw-html(raw)
:format: html
.. nbinfo::
Interactive online version:
:raw-html:`<a href="https://colab.research.google.com/github/kornia/tutorials/blob/master/{{ docname }}" target="_blank" rel="noopener noreferrer><img alt="Open In Colab" src="https://colab.research.google.com/assets/colab-badge.svg" style="vertical-align:text-bottom"></a>`
__ https://github.com/kornia/tutorials/blob/
{{ env.config.release }}/{{ docname }}
"""
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# The default sidebars (for documents that don't match any pattern) are
# defined by theme itself. Builtin themes are using these templates by
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
# html_sidebars = {}
# -- Options for HTMLHelp output ---------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'KorniaTutorialsdoc'
# -- Options for LaTeX output ------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'KorniaTutorials.tex', 'Kornia Tutorials Documentation',
'Kornia Authors', 'manual'),
]
# -- Options for manual page output ------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'korniatutorials', 'Kornia Tutorials Documentation',
[author], 1)
]
# -- Options for Texinfo output ----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'KorniaTutorials', 'Kornia Tutorials Documentation',
author, 'KorniaTutorials', 'One line description of project.',
'Miscellaneous'),
]
# -- Options for Epub output -------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#
# epub_identifier = ''
# A unique identification for the text.
#
# epub_uid = ''
# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']
| true
| true
|
f7188d70a660f168ca65fa4e5633d1e3038f23fa
| 6,067
|
py
|
Python
|
venv/Lib/site-packages/IPython/lib/latextools.py
|
Kiiwi/Syssel
|
83705e3fd0edf40f09df950d5ce91c95586573f5
|
[
"BSD-3-Clause"
] | 1
|
2017-12-30T20:43:28.000Z
|
2017-12-30T20:43:28.000Z
|
venv/Lib/site-packages/IPython/lib/latextools.py
|
Kiiwi/Syssel
|
83705e3fd0edf40f09df950d5ce91c95586573f5
|
[
"BSD-3-Clause"
] | 7
|
2021-02-08T20:22:15.000Z
|
2022-03-11T23:19:41.000Z
|
venv/Lib/site-packages/IPython/lib/latextools.py
|
Kiiwi/Syssel
|
83705e3fd0edf40f09df950d5ce91c95586573f5
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
"""Tools for handling LaTeX."""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from io import BytesIO, open
from base64 import encodestring
import os
import tempfile
import shutil
import subprocess
from IPython.utils.process import find_cmd, FindCmdError
from IPython.config import get_config
from IPython.config.configurable import SingletonConfigurable
from IPython.utils.traitlets import List, Bool, Unicode
from IPython.utils.py3compat import cast_unicode, cast_unicode_py2 as u
class LaTeXTool(SingletonConfigurable):
    """An object to store configuration of the LaTeX tool."""
    def _config_default(self):
        # Traitlets hook: pull the active application config by default.
        return get_config()
    backends = List(
        Unicode, ["matplotlib", "dvipng"],
        help="Preferred backend to draw LaTeX math equations. "
        "Backends in the list are checked one by one and the first "
        "usable one is used.  Note that `matplotlib` backend "
        "is usable only for inline style equations.  To draw  "
        "display style equations, `dvipng` backend must be specified. ",
        # It is a List instead of Enum, to make configuration more
        # flexible.  For example, to use matplotlib mainly but dvipng
        # for display style, the default ["matplotlib", "dvipng"] can
        # be used.  To NOT use dvipng so that other repr such as
        # unicode pretty printing is used, you can use ["matplotlib"].
        config=True)
    use_breqn = Bool(
        True,
        help="Use breqn.sty to automatically break long equations. "
        "This configuration takes effect only for dvipng backend.",
        config=True)
    packages = List(
        ['amsmath', 'amsthm', 'amssymb', 'bm'],
        help="A list of packages to use for dvipng backend. "
        "'breqn' will be automatically appended when use_breqn=True.",
        config=True)
    preamble = Unicode(
        help="Additional preamble to use when generating LaTeX source "
        "for dvipng backend.",
        config=True)
def latex_to_png(s, encode=False, backend=None, wrap=False):
    """Render a LaTeX string to PNG.

    Parameters
    ----------
    s : text
        The raw string containing valid inline LaTeX.
    encode : bool, optional
        Should the PNG data base64 encoded to make it JSON'able.
    backend : {matplotlib, dvipng}
        Backend for producing PNG data.
    wrap : bool
        If true, Automatically wrap `s` as a LaTeX equation.

    None is returned when the backend cannot be used.
    """
    s = cast_unicode(s)
    allowed = LaTeXTool.instance().backends
    chosen = allowed[0] if backend is None else backend
    # An explicitly requested but disallowed backend yields None, matching
    # the "backend cannot be used" contract.
    if chosen not in allowed:
        return None
    renderers = {
        'matplotlib': latex_to_png_mpl,
        'dvipng': latex_to_png_dvipng,
    }
    if chosen not in renderers:
        raise ValueError('No such backend {0}'.format(chosen))
    png = renderers[chosen](s, wrap)
    if encode and png:
        png = encodestring(png)
    return png
def latex_to_png_mpl(s, wrap):
    """Render with matplotlib's mathtext; return None when matplotlib is
    not installed."""
    try:
        from matplotlib import mathtext
    except ImportError:
        return None
    # mpl mathtext doesn't support display math, force inline
    text = s.replace('$$', '$')
    if wrap:
        text = u'${0}$'.format(text)
    buf = BytesIO()
    mathtext.MathTextParser('bitmap').to_png(buf, text, fontsize=12)
    return buf.getvalue()
def latex_to_png_dvipng(s, wrap):
    """Render via the external latex + dvipng tools; None if either is
    missing from PATH.  The temporary work directory is always removed."""
    try:
        find_cmd('latex')
        find_cmd('dvipng')
    except FindCmdError:
        return None
    try:
        workdir = tempfile.mkdtemp()
        tmpfile = os.path.join(workdir, "tmp.tex")
        dvifile = os.path.join(workdir, "tmp.dvi")
        outfile = os.path.join(workdir, "tmp.png")
        with open(tmpfile, "w", encoding='utf8') as f:
            f.writelines(genelatex(s, wrap))
        with open(os.devnull, 'wb') as devnull:
            # Compile quietly; batchmode suppresses interactive prompts.
            subprocess.check_call(
                ["latex", "-halt-on-error", "-interaction", "batchmode", tmpfile],
                cwd=workdir, stdout=devnull, stderr=devnull)
            # -T tight crops to content; -x sets magnification; -z sets PNG
            # compression; background is transparent.
            subprocess.check_call(
                ["dvipng", "-T", "tight", "-x", "1500", "-z", "9",
                "-bg", "transparent", "-o", outfile, dvifile], cwd=workdir,
                stdout=devnull, stderr=devnull)
        with open(outfile, "rb") as f:
            return f.read()
    finally:
        shutil.rmtree(workdir)
def kpsewhich(filename):
    """Invoke kpsewhich command with an argument `filename`.

    Returns the tool's stripped stdout, or None when kpsewhich is not
    available on PATH.
    """
    try:
        find_cmd("kpsewhich")
    except FindCmdError:
        return None
    proc = subprocess.Popen(
        ["kpsewhich", filename],
        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, stderr = proc.communicate()
    return stdout.strip().decode('utf8', 'replace')
def genelatex(body, wrap):
    """Generate LaTeX document for dvipng backend.

    Yields the document line by line: preamble (with breqn appended when
    enabled and installed), then the body -- wrapped in dmath*/$$ as
    requested -- then the closing tag.
    """
    lt = LaTeXTool.instance()
    # breqn is only used when wrapping AND the style file is installed.
    breqn = wrap and lt.use_breqn and kpsewhich("breqn.sty")
    yield u(r'\documentclass{article}')
    packages = lt.packages
    if breqn:
        packages = packages + ['breqn']
    for pack in packages:
        yield u(r'\usepackage{{{0}}}'.format(pack))
    yield u(r'\pagestyle{empty}')
    if lt.preamble:
        yield lt.preamble
    yield u(r'\begin{document}')
    if breqn:
        yield u(r'\begin{dmath*}')
        yield body
        yield u(r'\end{dmath*}')
    elif wrap:
        yield u'$${0}$$'.format(body)
    else:
        yield body
    # Fixed: was the non-raw literal u'\end{document}' -- '\e' is an
    # invalid escape sequence (DeprecationWarning on Python 3).  The raw
    # string produces byte-identical output.
    yield u(r'\end{document}')
# HTML img template: %s slots are (base64 PNG payload, alt text).
# NOTE(review): the alt value is interpolated unquoted -- multi-word alt
# text produces invalid HTML attributes; confirm before changing callers.
_data_uri_template_png = u"""<img src="data:image/png;base64,%s" alt=%s />"""
def latex_to_html(s, alt='image'):
    """Render LaTeX to HTML with embedded PNG data using data URIs.

    Parameters
    ----------
    s : str
        The raw string containing valid inline LateX.
    alt : str
        The alt text to use for the HTML.

    Returns None when no backend could render the input.
    """
    # Fixed: latex_to_png may return None (no usable backend); the old
    # code called .decode('ascii') on the result unconditionally and
    # raised AttributeError instead of returning None.
    png_b64 = latex_to_png(s, encode=True)
    if png_b64:
        return _data_uri_template_png % (png_b64.decode('ascii'), alt)
| 30.954082
| 82
| 0.624691
|
from io import BytesIO, open
from base64 import encodestring
import os
import tempfile
import shutil
import subprocess
from IPython.utils.process import find_cmd, FindCmdError
from IPython.config import get_config
from IPython.config.configurable import SingletonConfigurable
from IPython.utils.traitlets import List, Bool, Unicode
from IPython.utils.py3compat import cast_unicode, cast_unicode_py2 as u
class LaTeXTool(SingletonConfigurable):
def _config_default(self):
return get_config()
backends = List(
Unicode, ["matplotlib", "dvipng"],
help="Preferred backend to draw LaTeX math equations. "
"Backends in the list are checked one by one and the first "
"usable one is used. Note that `matplotlib` backend "
"is usable only for inline style equations. To draw "
"display style equations, `dvipng` backend must be specified. ",
config=True)
use_breqn = Bool(
True,
help="Use breqn.sty to automatically break long equations. "
"This configuration takes effect only for dvipng backend.",
config=True)
packages = List(
['amsmath', 'amsthm', 'amssymb', 'bm'],
help="A list of packages to use for dvipng backend. "
"'breqn' will be automatically appended when use_breqn=True.",
config=True)
preamble = Unicode(
help="Additional preamble to use when generating LaTeX source "
"for dvipng backend.",
config=True)
def latex_to_png(s, encode=False, backend=None, wrap=False):
s = cast_unicode(s)
allowed_backends = LaTeXTool.instance().backends
if backend is None:
backend = allowed_backends[0]
if backend not in allowed_backends:
return None
if backend == 'matplotlib':
f = latex_to_png_mpl
elif backend == 'dvipng':
f = latex_to_png_dvipng
else:
raise ValueError('No such backend {0}'.format(backend))
bin_data = f(s, wrap)
if encode and bin_data:
bin_data = encodestring(bin_data)
return bin_data
def latex_to_png_mpl(s, wrap):
try:
from matplotlib import mathtext
except ImportError:
return None
s = s.replace('$$', '$')
if wrap:
s = u'${0}$'.format(s)
mt = mathtext.MathTextParser('bitmap')
f = BytesIO()
mt.to_png(f, s, fontsize=12)
return f.getvalue()
def latex_to_png_dvipng(s, wrap):
try:
find_cmd('latex')
find_cmd('dvipng')
except FindCmdError:
return None
try:
workdir = tempfile.mkdtemp()
tmpfile = os.path.join(workdir, "tmp.tex")
dvifile = os.path.join(workdir, "tmp.dvi")
outfile = os.path.join(workdir, "tmp.png")
with open(tmpfile, "w", encoding='utf8') as f:
f.writelines(genelatex(s, wrap))
with open(os.devnull, 'wb') as devnull:
subprocess.check_call(
["latex", "-halt-on-error", "-interaction", "batchmode", tmpfile],
cwd=workdir, stdout=devnull, stderr=devnull)
subprocess.check_call(
["dvipng", "-T", "tight", "-x", "1500", "-z", "9",
"-bg", "transparent", "-o", outfile, dvifile], cwd=workdir,
stdout=devnull, stderr=devnull)
with open(outfile, "rb") as f:
return f.read()
finally:
shutil.rmtree(workdir)
def kpsewhich(filename):
try:
find_cmd("kpsewhich")
proc = subprocess.Popen(
["kpsewhich", filename],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
(stdout, stderr) = proc.communicate()
return stdout.strip().decode('utf8', 'replace')
except FindCmdError:
pass
def genelatex(body, wrap):
lt = LaTeXTool.instance()
breqn = wrap and lt.use_breqn and kpsewhich("breqn.sty")
yield u(r'\documentclass{article}')
packages = lt.packages
if breqn:
packages = packages + ['breqn']
for pack in packages:
yield u(r'\usepackage{{{0}}}'.format(pack))
yield u(r'\pagestyle{empty}')
if lt.preamble:
yield lt.preamble
yield u(r'\begin{document}')
if breqn:
yield u(r'\begin{dmath*}')
yield body
yield u(r'\end{dmath*}')
elif wrap:
yield u'$${0}$$'.format(body)
else:
yield body
yield u'\end{document}'
# HTML template for an inline base64-encoded PNG image.
_data_uri_template_png = u"""<img src="data:image/png;base64,%s" alt=%s />"""
def latex_to_html(s, alt='image'):
    """Render LaTeX *s* to an HTML <img> tag with inline base64 PNG data.

    Returns None when no rendering backend produced an image.
    """
    bin_data = latex_to_png(s, encode=True)
    # latex_to_png returns None/empty when every backend fails; the old
    # code called .decode('ascii') unconditionally and raised
    # AttributeError in that case instead of returning None.
    if not bin_data:
        return None
    base64_data = bin_data.decode('ascii')
    return _data_uri_template_png % (base64_data, alt)
| true
| true
|
f7188e86396b3eca935897136cb622f3195ec895
| 2,702
|
py
|
Python
|
eval/eval_OTB.py
|
Existever/PyCFTrackers
|
3221e47aecca40de21ad9be875b2f8d960b4e09c
|
[
"MIT"
] | null | null | null |
eval/eval_OTB.py
|
Existever/PyCFTrackers
|
3221e47aecca40de21ad9be875b2f8d960b4e09c
|
[
"MIT"
] | null | null | null |
eval/eval_OTB.py
|
Existever/PyCFTrackers
|
3221e47aecca40de21ad9be875b2f8d960b4e09c
|
[
"MIT"
] | null | null | null |
import argparse
import glob
from os.path import join, realpath, dirname
from tqdm import tqdm
from multiprocessing import Pool
from lib.pysot.datasets import OTBDataset
from lib.pysot.evaluation import OPEBenchmark
from lib.pysot.visualization import draw_success_precision
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='VOT Evaluation')
    parser.add_argument('--dataset', type=str, default='OTB50', help='dataset name')
    parser.add_argument('--result_dir', type=str, default='test/OTB100', help='tracker result root')
    parser.add_argument('--tracker_prefix', type=str, default='test', help='tracker prefix')
    parser.add_argument('--show_video_level', action='store_true')
    parser.add_argument('--num', type=int, help='number of processes to eval', default=8)
    # NOTE(review): argparse type=bool is truthy for any non-empty string
    # ('--vis False' is True); left as-is to preserve the CLI contract.
    parser.add_argument('--vis', type=bool, default=True)
    args = parser.parse_args()
    root = join(realpath(dirname(__file__)), '../dataset/OTB100')
    tracker_dir = args.result_dir
    # Discover tracker result folders and keep only the folder names.
    trackers = glob.glob(join(tracker_dir, args.tracker_prefix+'*'))
    trackers = [t.split('/')[-1] for t in trackers]
    # NOTE(review): hard-coded override kept from the original script; it
    # defeats --tracker_prefix discovery above.  (A second, longer dead
    # override list that was immediately clobbered has been removed.)
    trackers = ['STRCF', 'KCF', 'ECO']
    print(trackers)
    assert len(trackers) > 0
    args.num = min(args.num, len(trackers))
    if 'OTB' in args.dataset:
        dataset = OTBDataset(args.dataset, root)
        dataset.set_tracker(tracker_dir, trackers)
        benchmark = OPEBenchmark(dataset)
        # Success (overlap) evaluation, one tracker per worker process.
        success_ret = {}
        with Pool(processes=args.num) as pool:
            for ret in tqdm(pool.imap_unordered(benchmark.eval_success,
                trackers), desc='eval success', total=len(trackers), ncols=100):
                success_ret.update(ret)
        # Precision (center error) evaluation.  ncols was 2 in the
        # original, which collapses the progress bar to nothing; use the
        # same width as the success pass.
        precision_ret = {}
        with Pool(processes=args.num) as pool:
            for ret in tqdm(pool.imap_unordered(benchmark.eval_precision,
                trackers), desc='eval precision', total=len(trackers), ncols=100):
                precision_ret.update(ret)
        benchmark.show_result(success_ret, precision_ret,
                show_video_level=args.show_video_level)
        if args.vis:
            # One success/precision plot per dataset attribute group.
            for attr, videos in dataset.attr.items():
                draw_success_precision(success_ret,
                                       name=dataset.name,
                                       videos=videos,
                                       attr=attr,
                                       precision_ret=precision_ret)
| 49.127273
| 198
| 0.620651
|
import argparse
import glob
from os.path import join, realpath, dirname
from tqdm import tqdm
from multiprocessing import Pool
from lib.pysot.datasets import OTBDataset
from lib.pysot.evaluation import OPEBenchmark
from lib.pysot.visualization import draw_success_precision
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='VOT Evaluation')
parser.add_argument('--dataset', type=str, default='OTB50',help='dataset name')
parser.add_argument('--result_dir', type=str, default='test/OTB100',help='tracker result root')
parser.add_argument('--tracker_prefix', type=str,default='test', help='tracker prefix')
parser.add_argument('--show_video_level', action='store_true')
parser.add_argument('--num', type=int, help='number of processes to eval', default=8)
parser.add_argument('--vis',type=bool,default=True)
args = parser.parse_args()
root = join(realpath(dirname(__file__)), '../dataset/OTB100')
tracker_dir = args.result_dir
trackers = glob.glob(join(tracker_dir, args.tracker_prefix+'*'))
trackers = [t.split('/')[-1] for t in trackers]
trackers=['MCCTH-Staple','MKCFup-LP','MKCFup','CSRDCF-LP','DSST-LP','LDES','SAMF','Staple-CA','OPENCV-CSRDCF','DCF','MOSSE','KCF','CSK','Staple','DSST','CN','DAT','ECO-HC','ECO','BACF','CSRDCF']
trackers = ['STRCF', 'KCF', 'ECO']
print(trackers)
assert len(trackers) > 0
args.num = min(args.num, len(trackers))
if 'OTB' in args.dataset:
dataset = OTBDataset(args.dataset, root)
dataset.set_tracker(tracker_dir, trackers)
benchmark = OPEBenchmark(dataset)
success_ret = {}
with Pool(processes=args.num) as pool:
for ret in tqdm(pool.imap_unordered(benchmark.eval_success,
trackers), desc='eval success', total=len(trackers), ncols=100):
success_ret.update(ret)
precision_ret = {}
with Pool(processes=args.num) as pool:
for ret in tqdm(pool.imap_unordered(benchmark.eval_precision,
trackers), desc='eval precision', total=len(trackers), ncols=2):
precision_ret.update(ret)
benchmark.show_result(success_ret, precision_ret,
show_video_level=args.show_video_level)
if args.vis:
for attr, videos in dataset.attr.items():
draw_success_precision(success_ret,
name=dataset.name,
videos=videos,
attr=attr,
precision_ret=precision_ret)
| true
| true
|
f7188f40cc5531a2407a8ee908641bbb4f109aeb
| 6,566
|
py
|
Python
|
starthinker/task/cm_to_dv/run.py
|
arbrown/starthinker
|
1a14664fb1a8f2a757b100363ea8958833b7754c
|
[
"Apache-2.0"
] | 138
|
2018-11-28T21:42:44.000Z
|
2022-03-30T17:26:35.000Z
|
starthinker/task/cm_to_dv/run.py
|
arbrown/starthinker
|
1a14664fb1a8f2a757b100363ea8958833b7754c
|
[
"Apache-2.0"
] | 36
|
2019-02-19T18:33:20.000Z
|
2022-01-24T18:02:44.000Z
|
starthinker/task/cm_to_dv/run.py
|
arbrown/starthinker
|
1a14664fb1a8f2a757b100363ea8958833b7754c
|
[
"Apache-2.0"
] | 54
|
2018-12-06T05:47:32.000Z
|
2022-02-21T22:01:01.000Z
|
###########################################################################
#
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
###########################################################################
from starthinker.util import has_values
from starthinker.util.data import get_rows
from starthinker.task.cm_to_dv.cm_account import cm_account_clear
from starthinker.task.cm_to_dv.cm_account import cm_account_load
from starthinker.task.cm_to_dv.cm_advertiser import cm_advertiser_clear
from starthinker.task.cm_to_dv.cm_advertiser import cm_advertiser_load
from starthinker.task.cm_to_dv.cm_campaign import cm_campaign_clear
from starthinker.task.cm_to_dv.cm_campaign import cm_campaign_load
from starthinker.task.cm_to_dv.cm_placement import cm_placement_clear
from starthinker.task.cm_to_dv.cm_placement import cm_placement_load
from starthinker.task.cm_to_dv.cm_placement_group import cm_placement_group_clear
from starthinker.task.cm_to_dv.cm_placement_group import cm_placement_group_load
from starthinker.task.cm_to_dv.cm_profile import cm_profile_clear
from starthinker.task.cm_to_dv.cm_profile import cm_profile_load
from starthinker.task.cm_to_dv.cm_site import cm_site_clear
from starthinker.task.cm_to_dv.cm_site import cm_site_load
from starthinker.task.cm_to_dv.dv_advertiser import dv_advertiser_clear
from starthinker.task.cm_to_dv.dv_advertiser import dv_advertiser_load
from starthinker.task.cm_to_dv.dv_algorithm import dv_algorithm_clear
from starthinker.task.cm_to_dv.dv_algorithm import dv_algorithm_load
from starthinker.task.cm_to_dv.dv_campaign import dv_campaign_clear
from starthinker.task.cm_to_dv.dv_campaign import dv_campaign_load
from starthinker.task.cm_to_dv.dv_insertion_order import dv_insertion_order_clear
from starthinker.task.cm_to_dv.dv_insertion_order import dv_insertion_order_load
from starthinker.task.cm_to_dv.dv_line_item import dv_line_item_clear
from starthinker.task.cm_to_dv.dv_line_item import dv_line_item_load
from starthinker.task.cm_to_dv.dv_partner import dv_partner_clear
from starthinker.task.cm_to_dv.dv_partner import dv_partner_load
from starthinker.task.cm_to_dv.preview_io import preview_io_clear
from starthinker.task.cm_to_dv.preview_io import preview_io_load
from starthinker.task.cm_to_dv.preview_io import preview_io_insert
from starthinker.task.cm_to_dv.preview_li import preview_li_clear
from starthinker.task.cm_to_dv.preview_li import preview_li_load
from starthinker.task.cm_to_dv.preview_li import preview_li_insert
from starthinker.task.cm_to_dv.log import log_clear
from starthinker.task.cm_to_dv.log import log_clear
def _sheet_filter_is_empty(config, task, tab):
    """Return True when column A (below the header row) of *tab* in the
    task's sheet contains no filter values."""
    return not has_values(get_rows(
        config,
        task['auth_sheets'],
        { 'sheets': {
            'sheet': task['sheet'],
            'tab': tab,
            'header': False,
            'range': 'A2:A'
        }}
    ))
def cm_to_dv(config, task):
    """Dispatch one cm_to_dv recipe command.

    task['command'] selects the action:
      Clear   - wipe every CM/DV sheet tab, previews, and the log.
      Load    - progressively populate CM then DV tabs: each stage only
                runs while the previous stage's filter column is empty.
      Preview - clear the log and build IO/LI previews.
      Insert  - clear the log and insert the previewed IOs/LIs.
    """
    print('COMMAND:', task['command'])
    if task['command'] == 'Clear':
        dv_line_item_clear(config, task)
        dv_insertion_order_clear(config, task)
        dv_campaign_clear(config, task)
        dv_advertiser_clear(config, task)
        dv_algorithm_clear(config, task)
        dv_partner_clear(config, task)
        cm_profile_clear(config, task)
        cm_account_clear(config, task)
        cm_advertiser_clear(config, task)
        cm_campaign_clear(config, task)
        cm_placement_clear(config, task)
        cm_placement_group_clear(config, task)
        cm_site_clear(config, task)
        preview_io_clear(config, task)
        preview_li_clear(config, task)
        log_clear(config, task)
    elif task['command'] == 'Load':
        # CM side: load the first entity level whose filter is not yet
        # filled in; once all filters exist, load placements/sites.
        if _sheet_filter_is_empty(config, task, 'CM Profiles'):
            print('CM Profile Load')
            cm_profile_load(config, task)
        elif _sheet_filter_is_empty(config, task, 'CM Accounts'):
            cm_account_load(config, task)
        elif _sheet_filter_is_empty(config, task, 'CM Advertisers'):
            print('CM Advertiser Load')
            cm_advertiser_load(config, task)
        elif _sheet_filter_is_empty(config, task, 'CM Campaigns'):
            print('CM Campaigns Load')
            cm_campaign_load(config, task)
        else:
            print('CM Placement Load')
            cm_placement_load(config, task)
            cm_placement_group_load(config, task)
            cm_site_load(config, task)
        # DV side: same progressive pattern, independent of the CM side.
        if _sheet_filter_is_empty(config, task, 'DV Partners'):
            print('DV Partner Load')
            dv_partner_load(config, task)
        elif _sheet_filter_is_empty(config, task, 'DV Advertisers'):
            print('DV Advertiser Load')
            dv_advertiser_load(config, task)
        else:
            print('DV Campaign / IO / LI Load')
            dv_algorithm_load(config, task)
            dv_campaign_load(config, task)
            dv_insertion_order_load(config, task)
            dv_line_item_load(config, task)
    elif task['command'] == 'Preview':
        log_clear(config, task)
        preview_io_load(config, task)
        preview_li_load(config, task)
    elif task['command'] == 'Insert':
        log_clear(config, task)
        preview_io_insert(config, task)
        preview_li_insert(config, task)
| 32.029268
| 81
| 0.700579
|
insert(config, task)
| true
| true
|
f7188fdf67798b6b524e83b8873542b335c4a5b4
| 8,649
|
py
|
Python
|
acq4/devices/PatchStar/patchstar.py
|
tropp/ACQ4
|
792e05e99cedfc175593d200aeabecd6fa6304ce
|
[
"MIT"
] | null | null | null |
acq4/devices/PatchStar/patchstar.py
|
tropp/ACQ4
|
792e05e99cedfc175593d200aeabecd6fa6304ce
|
[
"MIT"
] | null | null | null |
acq4/devices/PatchStar/patchstar.py
|
tropp/ACQ4
|
792e05e99cedfc175593d200aeabecd6fa6304ce
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import time
import numpy as np
from PyQt4 import QtGui, QtCore
from ..Stage import Stage, MoveFuture, StageInterface
from acq4.drivers.PatchStar import PatchStar as PatchStarDriver
from acq4.util.Mutex import Mutex
from acq4.util.Thread import Thread
from acq4.pyqtgraph import debug, ptime, SpinBox
class PatchStar(Stage):
    """
    A Scientifica PatchStar manipulator.

    Configuration options:
    port: <serial port> # eg. 'COM1' or '/dev/ttyACM0'
    scale: optional per-axis scale factors converting device units to
        metres; defaults to (1e-7, 1e-7, 1e-7)  # assumes 100 nm device
        units — TODO confirm against driver
    """
    def __init__(self, man, config, name):
        # Serial port and unit scaling come from the device config.
        self.port = config.pop('port')
        self.scale = config.pop('scale', (1e-7, 1e-7, 1e-7))
        self.dev = PatchStarDriver(self.port)
        # Future for the most recently issued move, or None.
        self._lastMove = None
        # Global abort must stop this manipulator immediately.
        man.sigAbortAll.connect(self.stop)
        Stage.__init__(self, man, config, name)
        # clear cached position for this device and re-read to generate an initial position update
        self._lastPos = None
        self.getPosition(refresh=True)
        # Default rotary-controller speed: 3 mm per turn.
        self.setUserSpeed(3e-3)
        # Set scaling for each axis
        self.dev.send('UUX 6.4')
        self.dev.send('UUY 6.4')
        self.dev.send('UUZ 6.4')
        # makes 1 roe turn == 1 second movement for any speed
        self.dev.send('JS 200')
        # Set approach angle
        # NOTE(review): self.pitch is assumed to be set by the Stage base
        # class from config — confirm.
        self.dev.send('ANGLE %f' % self.pitch)
        self.dev.send('APPROACH 0')
        # thread for polling position changes
        self.monitor = MonitorThread(self)
        self.monitor.start()
    def capabilities(self):
        """Return a structure describing the capabilities of this device"""
        if 'capabilities' in self.config:
            return self.config['capabilities']
        else:
            # Defaults: readable and settable on all three axes, no
            # software limit support.
            return {
                'getPos': (True, True, True),
                'setPos': (True, True, True),
                'limits': (False, False, False),
            }
    def stop(self):
        """Stop the manipulator immediately.
        """
        with self.lock:
            self.dev.stop()
            # Mark any in-flight move future as interrupted.
            if self._lastMove is not None:
                self._lastMove._stopped()
            self._lastMove = None
    def setUserSpeed(self, v):
        """Set the speed of the rotary controller (m/turn).
        """
        self.userSpeed = v
        # Convert metres to device units using the X-axis scale.
        self.dev.setSpeed(v / self.scale[0])
    def _getPosition(self):
        # Called by superclass when user requests position refresh
        with self.lock:
            # Device units -> metres for each axis.
            pos = self.dev.getPos()
            pos = [pos[i] * self.scale[i] for i in (0, 1, 2)]
            if pos != self._lastPos:
                self._lastPos = pos
                emit = True
            else:
                emit = False
        if emit:
            # don't emit signal while locked
            self.posChanged(pos)
        return pos
    def targetPosition(self):
        """Return the final position of the current move, or the current
        position if no move is in progress."""
        with self.lock:
            if self._lastMove is None or self._lastMove.isDone():
                return self.getPosition()
            else:
                return self._lastMove.targetPos
    def quit(self):
        # Stop the polling thread before shutting the stage down.
        self.monitor.stop()
        Stage.quit(self)
    def _move(self, abs, rel, speed, linear):
        # Called by superclass to execute a move request; returns a
        # MoveFuture tracking the move.
        with self.lock:
            # Only one move at a time: cancel any outstanding move first.
            if self._lastMove is not None and not self._lastMove.isDone():
                self.stop()
            pos = self._toAbsolutePosition(abs, rel)
            self._lastMove = PatchStarMoveFuture(self, pos, speed, self.userSpeed)
            return self._lastMove
    def deviceInterface(self, win):
        # GUI panel for this device (see PatchStarGUI below).
        return PatchStarGUI(self, win)
class MonitorThread(Thread):
    """Thread to poll for manipulator position changes.

    Polls the stage's _getPosition() on an adaptive interval: quickly
    right after a detected move, backing off exponentially toward the
    configured maximum while the stage is idle.
    """
    def __init__(self, dev):
        self.dev = dev
        self.lock = Mutex(recursive=True)
        self.stopped = False
        # Maximum polling interval (seconds); adjustable via setInterval.
        self.interval = 0.3
        Thread.__init__(self)
    def start(self):
        self.stopped = False
        Thread.start(self)
    def stop(self):
        """Ask the polling loop to exit (thread-safe)."""
        with self.lock:
            self.stopped = True
    def setInterval(self, i):
        """Set the maximum polling interval in seconds (thread-safe)."""
        with self.lock:
            self.interval = i
    def run(self):
        minInterval = 100e-3
        interval = minInterval
        # Initialize before the loop so the except handler below can never
        # see an unbound name; the original only assigned maxInterval
        # inside the try, so an early error raised NameError in the
        # handler's sleep instead of recovering.
        maxInterval = self.interval
        lastPos = None
        while True:
            try:
                with self.lock:
                    if self.stopped:
                        break
                    maxInterval = self.interval
                pos = self.dev._getPosition()  # this causes sigPositionChanged to be emitted
                if pos != lastPos:
                    # if there was a change, then loop more rapidly for a short time.
                    interval = minInterval
                    lastPos = pos
                else:
                    # No change: back off exponentially up to the cap.
                    interval = min(maxInterval, interval*2)
                time.sleep(interval)
            except:
                debug.printExc('Error in PatchStar monitor thread:')
                time.sleep(maxInterval)
class PatchStarMoveFuture(MoveFuture):
    """Provides access to a move-in-progress on a PatchStar manipulator.
    """
    def __init__(self, dev, pos, speed, userSpeed):
        """Begin a move to *pos* (metres, absolute).

        speed : number (m/s), or 'fast' (1e-3) / 'slow' (1e-6)
        userSpeed : rotary-controller speed to restore immediately after
            the programmed move has been issued.
        """
        MoveFuture.__init__(self, dev, pos, speed)
        self._interrupted = False
        # Fixed: original misspelled this attribute '_errorMSg', so
        # errorMessage() raised AttributeError until a move failed.
        self._errorMsg = None
        self._finished = False
        # Metres -> integer device units.
        pos = (np.array(pos) / np.array(self.dev.scale)).astype(int)
        if speed == 'fast':
            speed = 1e-3
        elif speed == 'slow':
            speed = 1e-6
        with self.dev.dev.lock:
            self.dev.dev.moveTo(pos, speed / self.dev.scale[0])
            # reset to user speed immediately after starting move
            # (the move itself will run with the previous speed)
            self.dev.dev.setSpeed(userSpeed / self.dev.scale[0])
    def wasInterrupted(self):
        """Return True if the move was interrupted before completing.
        """
        return self._interrupted
    def isDone(self):
        """Return True if the move is complete.
        """
        return self._getStatus() != 0
    def _getStatus(self):
        # check status of move unless we already know it is complete.
        # 0: still moving; 1: finished successfully; -1: finished unsuccessfully
        if self._finished:
            if self._interrupted:
                return -1
            else:
                return 1
        if self.dev.dev.isMoving():
            # Still moving
            return 0
        # Stopped moving: did we reach the target (within 1 um)?
        pos = self.dev._getPosition()
        if ((np.array(pos) - np.array(self.targetPos))**2).sum()**0.5 < 1e-6:
            # reached target
            self._finished = True
            return 1
        else:
            # missed
            self._finished = True
            self._interrupted = True
            self._errorMsg = "Move did not complete."
            return -1
    def _stopped(self):
        # Called when the manipulator is stopped, possibly interrupting this move.
        status = self._getStatus()
        if status == 1:
            # finished; ignore stop
            return
        elif status == -1:
            self._errorMsg = "Move was interrupted before completion."
        elif status == 0:
            # not actually stopped! This should not happen.
            raise RuntimeError("Interrupted move but manipulator is still running!")
        else:
            raise Exception("Unknown status: %s" % status)
    def errorMessage(self):
        """Return a description of the failure, or None if none occurred."""
        return self._errorMsg
class PatchStarGUI(StageInterface):
    """Device-interface panel adding PatchStar rotary-controller
    controls (speed spinner, per-axis direction reversal) below the
    standard Stage GUI."""
    def __init__(self, dev, win):
        StageInterface.__init__(self, dev, win)
        # Insert patchstar-specific controls into GUI
        self.psGroup = QtGui.QGroupBox('PatchStar Rotary Controller')
        self.layout.addWidget(self.psGroup, self.nextRow, 0, 1, 2)
        self.nextRow += 1
        self.psLayout = QtGui.QGridLayout()
        self.psGroup.setLayout(self.psLayout)
        # Rotary speed in metres per knob turn.
        self.speedLabel = QtGui.QLabel('Speed')
        self.speedSpin = SpinBox(value=self.dev.userSpeed, suffix='m/turn', siPrefix=True, dec=True, limits=[1e-6, 10e-3])
        self.revXBtn = QtGui.QPushButton('Reverse X')
        self.revYBtn = QtGui.QPushButton('Reverse Y')
        self.revZBtn = QtGui.QPushButton('Reverse Z')
        self.psLayout.addWidget(self.speedLabel, 0, 0)
        self.psLayout.addWidget(self.speedSpin, 0, 1)
        self.psLayout.addWidget(self.revXBtn, 1, 1)
        self.psLayout.addWidget(self.revYBtn, 2, 1)
        self.psLayout.addWidget(self.revZBtn, 3, 1)
        # JDX/JDY/JDZ reverse the rotary controller's axis directions.
        self.revXBtn.clicked.connect(lambda: self.dev.dev.send('JDX'))
        self.revYBtn.clicked.connect(lambda: self.dev.dev.send('JDY'))
        self.revZBtn.clicked.connect(lambda: self.dev.dev.send('JDZ'))
        # NOTE(review): PatchStar defines setUserSpeed(), not
        # setDefaultSpeed() — unless the Stage base class provides
        # setDefaultSpeed, this slot raises AttributeError when the
        # spinner changes.  Verify against the Stage base class.
        self.speedSpin.valueChanged.connect(lambda v: self.dev.setDefaultSpeed(v))
| 33.01145
| 122
| 0.575327
|
import time
import numpy as np
from PyQt4 import QtGui, QtCore
from ..Stage import Stage, MoveFuture, StageInterface
from acq4.drivers.PatchStar import PatchStar as PatchStarDriver
from acq4.util.Mutex import Mutex
from acq4.util.Thread import Thread
from acq4.pyqtgraph import debug, ptime, SpinBox
class PatchStar(Stage):
def __init__(self, man, config, name):
self.port = config.pop('port')
self.scale = config.pop('scale', (1e-7, 1e-7, 1e-7))
self.dev = PatchStarDriver(self.port)
self._lastMove = None
man.sigAbortAll.connect(self.stop)
Stage.__init__(self, man, config, name)
self._lastPos = None
self.getPosition(refresh=True)
self.setUserSpeed(3e-3)
self.dev.send('UUX 6.4')
self.dev.send('UUY 6.4')
self.dev.send('UUZ 6.4')
self.dev.send('JS 200')
self.dev.send('ANGLE %f' % self.pitch)
self.dev.send('APPROACH 0')
self.monitor = MonitorThread(self)
self.monitor.start()
def capabilities(self):
if 'capabilities' in self.config:
return self.config['capabilities']
else:
return {
'getPos': (True, True, True),
'setPos': (True, True, True),
'limits': (False, False, False),
}
def stop(self):
with self.lock:
self.dev.stop()
if self._lastMove is not None:
self._lastMove._stopped()
self._lastMove = None
def setUserSpeed(self, v):
self.userSpeed = v
self.dev.setSpeed(v / self.scale[0])
def _getPosition(self):
with self.lock:
pos = self.dev.getPos()
pos = [pos[i] * self.scale[i] for i in (0, 1, 2)]
if pos != self._lastPos:
self._lastPos = pos
emit = True
else:
emit = False
if emit:
self.posChanged(pos)
return pos
def targetPosition(self):
with self.lock:
if self._lastMove is None or self._lastMove.isDone():
return self.getPosition()
else:
return self._lastMove.targetPos
def quit(self):
self.monitor.stop()
Stage.quit(self)
def _move(self, abs, rel, speed, linear):
with self.lock:
if self._lastMove is not None and not self._lastMove.isDone():
self.stop()
pos = self._toAbsolutePosition(abs, rel)
self._lastMove = PatchStarMoveFuture(self, pos, speed, self.userSpeed)
return self._lastMove
def deviceInterface(self, win):
return PatchStarGUI(self, win)
class MonitorThread(Thread):
def __init__(self, dev):
self.dev = dev
self.lock = Mutex(recursive=True)
self.stopped = False
self.interval = 0.3
Thread.__init__(self)
def start(self):
self.stopped = False
Thread.start(self)
def stop(self):
with self.lock:
self.stopped = True
def setInterval(self, i):
with self.lock:
self.interval = i
def run(self):
minInterval = 100e-3
interval = minInterval
lastPos = None
while True:
try:
with self.lock:
if self.stopped:
break
maxInterval = self.interval
pos = self.dev._getPosition() # this causes sigPositionChanged to be emitted
if pos != lastPos:
# if there was a change, then loop more rapidly for a short time.
interval = minInterval
lastPos = pos
else:
interval = min(maxInterval, interval*2)
time.sleep(interval)
except:
debug.printExc('Error in PatchStar monitor thread:')
time.sleep(maxInterval)
class PatchStarMoveFuture(MoveFuture):
def __init__(self, dev, pos, speed, userSpeed):
MoveFuture.__init__(self, dev, pos, speed)
self._interrupted = False
self._errorMSg = None
self._finished = False
pos = (np.array(pos) / np.array(self.dev.scale)).astype(int)
if speed == 'fast':
speed = 1e-3
elif speed == 'slow':
speed = 1e-6
with self.dev.dev.lock:
self.dev.dev.moveTo(pos, speed / self.dev.scale[0])
# reset to user speed immediately after starting move
# (the move itself will run with the previous speed)
self.dev.dev.setSpeed(userSpeed / self.dev.scale[0])
def wasInterrupted(self):
return self._interrupted
def isDone(self):
return self._getStatus() != 0
def _getStatus(self):
# check status of move unless we already know it is complete.
# 0: still moving; 1: finished successfully; -1: finished unsuccessfully
if self._finished:
if self._interrupted:
return -1
else:
return 1
if self.dev.dev.isMoving():
# Still moving
return 0
# did we reach target?
pos = self.dev._getPosition()
if ((np.array(pos) - np.array(self.targetPos))**2).sum()**0.5 < 1e-6:
# reached target
self._finished = True
return 1
else:
# missed
self._finished = True
self._interrupted = True
self._errorMsg = "Move did not complete."
return -1
def _stopped(self):
# Called when the manipulator is stopped, possibly interrupting this move.
status = self._getStatus()
if status == 1:
# finished; ignore stop
return
elif status == -1:
self._errorMsg = "Move was interrupted before completion."
elif status == 0:
# not actually stopped! This should not happen.
raise RuntimeError("Interrupted move but manipulator is still running!")
else:
raise Exception("Unknown status: %s" % status)
def errorMessage(self):
return self._errorMsg
class PatchStarGUI(StageInterface):
def __init__(self, dev, win):
StageInterface.__init__(self, dev, win)
# Insert patchstar-specific controls into GUI
self.psGroup = QtGui.QGroupBox('PatchStar Rotary Controller')
self.layout.addWidget(self.psGroup, self.nextRow, 0, 1, 2)
self.nextRow += 1
self.psLayout = QtGui.QGridLayout()
self.psGroup.setLayout(self.psLayout)
self.speedLabel = QtGui.QLabel('Speed')
self.speedSpin = SpinBox(value=self.dev.userSpeed, suffix='m/turn', siPrefix=True, dec=True, limits=[1e-6, 10e-3])
self.revXBtn = QtGui.QPushButton('Reverse X')
self.revYBtn = QtGui.QPushButton('Reverse Y')
self.revZBtn = QtGui.QPushButton('Reverse Z')
self.psLayout.addWidget(self.speedLabel, 0, 0)
self.psLayout.addWidget(self.speedSpin, 0, 1)
self.psLayout.addWidget(self.revXBtn, 1, 1)
self.psLayout.addWidget(self.revYBtn, 2, 1)
self.psLayout.addWidget(self.revZBtn, 3, 1)
self.revXBtn.clicked.connect(lambda: self.dev.dev.send('JDX'))
self.revYBtn.clicked.connect(lambda: self.dev.dev.send('JDY'))
self.revZBtn.clicked.connect(lambda: self.dev.dev.send('JDZ'))
self.speedSpin.valueChanged.connect(lambda v: self.dev.setDefaultSpeed(v))
| true
| true
|
f7188fe85522ebf6c7d8d5ad84760673a34d5f14
| 3,440
|
py
|
Python
|
plusportals/client.py
|
DhruvBisla/PlusPortalsAPI
|
606d145f1a61c474907db5b034af5c887783882a
|
[
"MIT"
] | 4
|
2021-02-16T23:25:08.000Z
|
2022-01-04T01:11:39.000Z
|
plusportals/client.py
|
DhruvBisla/PlusPortalsAPI
|
606d145f1a61c474907db5b034af5c887783882a
|
[
"MIT"
] | 2
|
2021-12-17T15:27:46.000Z
|
2021-12-18T04:21:48.000Z
|
plusportals/client.py
|
DhruvBisla/PlusPortalsAPI
|
606d145f1a61c474907db5b034af5c887783882a
|
[
"MIT"
] | 2
|
2021-08-01T01:39:45.000Z
|
2021-12-17T15:22:51.000Z
|
import os
import json
import requests
from typing import Optional
from . import credentials
from . import info
from . import session
class Client(session.Session):
    """PlusPortals API client: manages credentials, the login session,
    and fetching/printing grades per marking period."""
    # Credential fields are class-level so every Client shares one login.
    _SCHOOL_NAME : str = None
    _EMAIL : str = None
    _ID : int = None
    _PASSWORD : str = None
    markingPeriods : list = []
    # True when a credentials.json cache file sits next to this module.
    hasCachedCredentials : bool = (os.path.isfile(os.path.join((os.path.dirname(__file__)), 'credentials.json')))
    def __init__(self, cacheCredentials : Optional[bool] = False, schoolName : Optional[str] = None, email : Optional[str] = None, ID : Optional[int] = None, password : Optional[str] = None):
        """Log in, optionally persisting the credentials to the cache
        file first.  Cached credentials (when present) take precedence
        over the arguments."""
        if cacheCredentials: Client.setCredentials(schoolName, email, ID, password)
        else:
            Client._SCHOOL_NAME = schoolName
            Client._EMAIL = email
            Client._ID = ID
            Client._PASSWORD = password
        if Client.hasCachedCredentials:
            Client._SCHOOL_NAME = credentials.getCredential('schoolName')
            Client._EMAIL = credentials.getCredential('email')
            Client._ID = credentials.getCredential('ID')
            Client._PASSWORD = credentials.getCredential('password')
        super().__init__(Client._SCHOOL_NAME, Client._EMAIL, Client._PASSWORD)
        Client.markingPeriods = self.getMarkingPeriods()
        self.hasGetGrades : bool = False
        self.grades : list = []
    def reset(self) -> None:
        """Clear session cookies and re-fetch login details."""
        self.session.cookies.clear()
        self.getDetails()
    @classmethod
    def setCredentials(cls, schoolName: str, email: str, ID: int, password: str) -> None:
        """Persist credentials to the cache file and mark them available."""
        credentials.setCredentials(schoolName, email, ID, password)
        Client.hasCachedCredentials = True
    def getGrades(self) -> list:
        """Fetch grades for every marking period into self.grades.

        Returns the list of HTTP status codes, one per marking period
        (possibly partial if a request failed mid-way).
        """
        # Original guard compared against None, but the default is [] so
        # it never refetched — and discarded the result when it did.
        if not Client.markingPeriods:
            Client.markingPeriods = self.getMarkingPeriods()
        specHeaders = {
            '__requestverificationtoken': '{}'.format(self.requestVerificationToken),
            'cookie': '__cfduid={}; ppschoollink={}; __RequestVerificationToken={}; _pps=-480; ASP.NET_SessionId={}; emailoption={}; UGUID={}; ppusername={}; .ASPXAUTH={}'.format(self.session.cookies.get_dict().get('__cfduid'), Client._SCHOOL_NAME, self.session.cookies.get_dict().get('__RequestVerificationToken'), self.session.cookies.get_dict().get('ASP.NET_SessionId'), self.session.cookies.get_dict().get('emailoption'), self.session.cookies.get_dict().get('UGUID'), self.session.cookies.get_dict().get('ppusername'), self.session.cookies.get_dict().get('.ASPXAUTH'))
        }
        # Initialize before the try so the assignments below can never
        # see unbound names if a request fails.
        agrades : list = []
        responses : list = []  # HTTP status codes (int), not Response objects
        try:
            for period in Client.markingPeriods:
                response = (self.session.post(info.GRADES(period), headers=dict(info.BASE_HEADERS, **specHeaders)))
                agrades.append(json.loads(response.content.decode('utf-8')))
                responses.append(response.status_code)
        except Exception:
            # Best-effort: keep whatever was fetched, as the original did,
            # but no longer swallow KeyboardInterrupt/SystemExit.
            print("Information provided was incorrect; Login was not successful.")
        self.grades = agrades
        self.hasGetGrades = True
        return responses
    def printGrades(self, markingPeriod : int) -> None:
        """Print course grades for the 1-indexed *markingPeriod*."""
        if not self.hasGetGrades:
            self.getGrades()
        mgrades = self.grades[markingPeriod-1]
        for course in mgrades["Data"]:
            # Strips a 12-character suffix from the course name —
            # presumably a section/ID tag; TODO confirm it is fixed-width.
            print("{}'s grade is {}".format(course.get("CourseName")[:(len(course.get("CourseName")))-12], course.get("Average")))
| 51.343284
| 572
| 0.659593
|
import os
import json
import requests
from typing import Optional
from . import credentials
from . import info
from . import session
class Client(session.Session):
_SCHOOL_NAME : str = None
_EMAIL : str = None
_ID : int = None
_PASSWORD : str = None
markingPeriods : list = []
hasCachedCredentials : bool = (os.path.isfile(os.path.join((os.path.dirname(__file__)), 'credentials.json')))
def __init__(self, cacheCredentials : Optional[bool] = False, schoolName : Optional[str] = None, email : Optional[str] = None, ID : Optional[int] = None, password : Optional[str] = None):
if cacheCredentials: Client.setCredentials(schoolName, email, ID, password)
else:
Client._SCHOOL_NAME = schoolName
Client._EMAIL = email
Client._ID = ID
Client._PASSWORD = password
if Client.hasCachedCredentials:
Client._SCHOOL_NAME = credentials.getCredential('schoolName')
Client._EMAIL = credentials.getCredential('email')
Client._ID = credentials.getCredential('ID')
Client._PASSWORD = credentials.getCredential('password')
super().__init__(Client._SCHOOL_NAME, Client._EMAIL, Client._PASSWORD)
Client.markingPeriods = self.getMarkingPeriods()
self.hasGetGrades : bool = False
self.grades : list[dict] = []
def reset(self) -> None:
self.session.cookies.clear()
self.getDetails()
@classmethod
def setCredentials(cls, schoolName: str, email: str, ID: int, password: str) -> None:
credentials.setCredentials(schoolName, email, ID, password)
Client.hasCachedCredentials = True
def getGrades(self) -> requests.Response:
None if (Client.markingPeriods is not None) else self.getMarkingPeriods()
specHeaders = {
'__requestverificationtoken': '{}'.format(self.requestVerificationToken),
'cookie': '__cfduid={}; ppschoollink={}; __RequestVerificationToken={}; _pps=-480; ASP.NET_SessionId={}; emailoption={}; UGUID={}; ppusername={}; .ASPXAUTH={}'.format(self.session.cookies.get_dict().get('__cfduid'), Client._SCHOOL_NAME, self.session.cookies.get_dict().get('__RequestVerificationToken'), self.session.cookies.get_dict().get('ASP.NET_SessionId'), self.session.cookies.get_dict().get('emailoption'), self.session.cookies.get_dict().get('UGUID'), self.session.cookies.get_dict().get('ppusername'), self.session.cookies.get_dict().get('.ASPXAUTH'))
}
try:
agrades : list[dict] = []
responses : list[requests.Response.status_code] = []
for i in range(len(Client.markingPeriods)):
response = (self.session.post(info.GRADES(self.markingPeriods[i]), headers=dict(info.BASE_HEADERS, **specHeaders)))
agrades.append(json.loads(response.content.decode('utf-8')))
responses.append(response.status_code)
except:
print("Information provided was incorrect; Login was not successful.")
self.grades = agrades
self.hasGetGrades = True
return responses
def printGrades(self, markingPeriod : int) -> None:
None if (self.hasGetGrades) else self.getGrades()
mgrades = self.grades[markingPeriod-1]
for i in mgrades["Data"]:
print("{}'s grade is {}".format(i.get("CourseName")[:(len(i.get("CourseName")))-12],i.get("Average")))
| true
| true
|
f71890129fc28aa45ba1867efbaf6e4c01b12e1b
| 5,309
|
py
|
Python
|
agent/DQN_agent.py
|
JiaXingBinggan/MSRL
|
fcc8b06eb1938a78549868b27f2962cb47b3d866
|
[
"Apache-2.0"
] | null | null | null |
agent/DQN_agent.py
|
JiaXingBinggan/MSRL
|
fcc8b06eb1938a78549868b27f2962cb47b3d866
|
[
"Apache-2.0"
] | null | null | null |
agent/DQN_agent.py
|
JiaXingBinggan/MSRL
|
fcc8b06eb1938a78549868b27f2962cb47b3d866
|
[
"Apache-2.0"
] | null | null | null |
import numpy as np
import mindspore
from mindspore import context, ops, Tensor, nn
from mindspore.common.parameter import Parameter, ParameterTuple
import copy
context.set_context(mode=context.PYNATIVE_MODE, device_target="CPU")
_update_op = ops.MultitypeFuncGraph("update_op")
@_update_op.register("Tensor", "Tensor")
def _parameter_update(policy_param, target_param):
    """Copy one policy-network parameter tensor into the corresponding
    target-network parameter (applied over parameter tuples via
    ops.HyperMap with the _update_op graph)."""
    assign = ops.Assign()
    output = assign(target_param, policy_param)
    return output
class DQN(nn.Cell):
    """Q-network: a two-layer MLP mapping state features to per-action
    Q-values."""
    neuron_nums = 16  # width of the single hidden layer
    def __init__(self, n_features, n_actions):
        super(DQN, self).__init__()
        hidden = self.neuron_nums
        self.net = nn.SequentialCell(
            nn.Dense(n_features, hidden),
            nn.ReLU(),
            nn.Dense(hidden, n_actions),
        )
    def construct(self, s):
        """Return Q-values for each action given the state batch *s*."""
        return self.net(s)
class PolicyNetWithLossCell(nn.Cell):
    """Bundle the Q-network with its loss function so one cell returns the training loss."""

    def __init__(self, backbone, loss_fn):
        super(PolicyNetWithLossCell, self).__init__(auto_prefix=False)
        self._backbone = backbone
        self._loss_fn = loss_fn
        self.gather = ops.GatherD()

    def construct(self, x, a0, label):
        """Loss between Q(x) gathered at the taken actions *a0* and the TD target *label*."""
        q_all = self._backbone(x)
        q_taken = self.gather(q_all, 1, a0)
        return self._loss_fn(q_taken, label)
# Deep Q Network off-policy
class DeepQNetwork:
    """Vanilla DQN agent: replay buffer, epsilon-greedy policy and a
    periodically-synced frozen target network (MindSpore backend)."""

    def __init__(
            self,
            n_actions,
            n_features,
            learning_rate=0.01,
            reward_decay=0.9,
            e_greedy=0.9,
            replace_target_iter=300,
            memory_size=500,
            batch_size=3,
            e_greedy_increment=None,
    ):
        self.n_actions = n_actions
        self.n_features = n_features
        self.lr = learning_rate
        self.gamma = reward_decay
        self.epsilon_max = e_greedy
        self.replace_target_iter = replace_target_iter
        self.memory_size = memory_size
        self.batch_size = batch_size
        self.epsilon_increment = e_greedy_increment
        # start annealing from 0 only when an increment schedule is supplied
        self.epsilon = 0 if e_greedy_increment is not None else self.epsilon_max
        # total learning step
        self.learn_step_counter = 0
        # initialize zero memory, one row per transition [s, a, r, s_]
        self.memory = np.zeros((self.memory_size, n_features * 2 + 2))
        self.eval_net = DQN(self.n_features, self.n_actions)
        self.target_net = copy.deepcopy(self.eval_net)
        self.policy_param = ParameterTuple(
            self.eval_net.get_parameters())
        self.target_param = ParameterTuple(
            self.target_net.get_parameters())
        if not hasattr(self, 'memory_counter'):
            self.memory_counter = 0
        loss_func = nn.MSELoss()
        opt = nn.Adam(self.eval_net.trainable_params(), learning_rate=self.lr)
        loss_q_net = PolicyNetWithLossCell(self.eval_net, loss_func)
        self.policy_network_train = nn.TrainOneStepCell(loss_q_net, opt)
        self.policy_network_train.set_train(mode=True)
        self.hyper_map = ops.HyperMap()
        self.cost_his = []  # per-step training losses, for plot_cost()

    def store_transition(self, transition):
        """Write one packed [s, a, r, s_] row into the ring-buffer replay memory."""
        index = self.memory_counter % self.memory_size
        self.memory[index, :] = transition
        self.memory_counter += 1

    def reset_epsilon(self, epsilon):
        """Manually override the current exploration rate."""
        self.epsilon = epsilon

    def choose_action(self, observation):
        """Epsilon-greedy action selection for a single observation vector."""
        observation = Tensor(observation[np.newaxis, :], mindspore.float32)
        if np.random.uniform() < self.epsilon:
            self.eval_net.set_train(mode=False)
            action_v = self.eval_net(observation)
            # NOTE(review): np.argmax on a mindspore Tensor relies on implicit
            # numpy conversion — confirm it equals action_v.asnumpy().argmax()
            action = np.argmax(action_v)
        else:
            action = np.random.randint(0, self.n_actions)
        return action

    def update_param(self):
        """Hard-copy every eval-net parameter into the target net."""
        assign_result = self.hyper_map(
            _update_op,
            self.policy_param,
            self.target_param
        )
        return assign_result

    def learn(self):
        """Run one TD-learning step on a sampled minibatch; returns the loss.

        Bug fix: the original always sampled with ``replace=False``, so
        ``np.random.choice`` raised ValueError whenever fewer than
        ``batch_size`` transitions had been stored; we now sample with
        replacement in that (warm-up) case only.
        """
        # periodically sync the frozen target network
        if self.learn_step_counter % self.replace_target_iter == 0:
            self.update_param()
        n_stored = min(self.memory_counter, self.memory_size)
        sample_index = np.random.choice(
            n_stored, size=self.batch_size, replace=n_stored < self.batch_size)
        batch_memory = Tensor(self.memory[sample_index, :], mindspore.float32)
        # unpack the transition row back into (s, a, r, s_)
        b_s = batch_memory[:, :self.n_features]
        b_a = ops.ExpandDims()(batch_memory[:, self.n_features], 1).astype(mindspore.int32)
        b_r = ops.ExpandDims()(batch_memory[:, self.n_features + 1], 1)
        b_s_ = batch_memory[:, -self.n_features:]
        # TD target: r + gamma * max_a' Q_target(s', a')
        q_next = self.target_net(b_s_).max(axis=1)
        q_target = b_r + self.gamma * q_next
        loss = self.policy_network_train(b_s, b_a, q_target)
        self.cost_his.append(round(float(np.mean(loss.asnumpy())), 3))
        # anneal epsilon towards epsilon_max
        self.epsilon = self.epsilon + self.epsilon_increment if self.epsilon < self.epsilon_max else self.epsilon_max
        self.learn_step_counter += 1
        return loss

    def plot_cost(self):
        """Plot the recorded per-step training losses."""
        import matplotlib.pyplot as plt
        plt.plot(np.arange(len(self.cost_his)), self.cost_his)
        plt.ylabel('Cost')
        plt.xlabel('training steps')
        plt.show()
| 32.175758
| 117
| 0.638727
|
import numpy as np
import mindspore
from mindspore import context, ops, Tensor, nn
from mindspore.common.parameter import Parameter, ParameterTuple
import copy
context.set_context(mode=context.PYNATIVE_MODE, device_target="CPU")
_update_op = ops.MultitypeFuncGraph("update_op")
@_update_op.register("Tensor", "Tensor")
def _parameter_update(policy_param, target_param):
assign = ops.Assign()
output = assign(target_param, policy_param)
return output
class DQN(nn.Cell):
neuron_nums = 16
def __init__(self, n_features, n_actions):
super(DQN, self).__init__()
self.net = nn.SequentialCell(
nn.Dense(n_features, self.neuron_nums),
nn.ReLU(),
nn.Dense(self.neuron_nums, n_actions),
)
def construct(self, s):
return self.net(s)
class PolicyNetWithLossCell(nn.Cell):
def __init__(self, backbone, loss_fn):
super(PolicyNetWithLossCell,
self).__init__(auto_prefix=False)
self._backbone = backbone
self._loss_fn = loss_fn
self.gather = ops.GatherD()
def construct(self, x, a0, label):
out = self._backbone(x)
out = self.gather(out, 1, a0)
loss = self._loss_fn(out, label)
return loss
class DeepQNetwork:
def __init__(
self,
n_actions,
n_features,
learning_rate=0.01,
reward_decay=0.9,
e_greedy=0.9,
replace_target_iter=300,
memory_size=500,
batch_size=3,
e_greedy_increment=None,
):
self.n_actions = n_actions
self.n_features = n_features
self.lr = learning_rate
self.gamma = reward_decay
self.epsilon_max = e_greedy
self.replace_target_iter = replace_target_iter
self.memory_size = memory_size
self.batch_size = batch_size
self.epsilon_increment = e_greedy_increment
self.epsilon = 0 if e_greedy_increment is not None else self.epsilon_max
self.learn_step_counter = 0
self.memory = np.zeros((self.memory_size, n_features * 2 + 2))
self.eval_net = DQN(self.n_features, self.n_actions)
self.target_net = copy.deepcopy(self.eval_net)
self.policy_param = ParameterTuple(
self.eval_net.get_parameters())
self.target_param = ParameterTuple(
self.target_net.get_parameters())
if not hasattr(self, 'memory_counter'):
self.memory_counter = 0
loss_func = nn.MSELoss()
opt = nn.Adam(self.eval_net.trainable_params(), learning_rate=self.lr)
loss_q_net = PolicyNetWithLossCell(self.eval_net, loss_func)
self.policy_network_train = nn.TrainOneStepCell(loss_q_net, opt)
self.policy_network_train.set_train(mode=True)
self.hyper_map = ops.HyperMap()
self.cost_his = []
def store_transition(self, transition):
index = self.memory_counter % self.memory_size
self.memory[index, :] = transition
self.memory_counter += 1
def reset_epsilon(self, epsilon):
self.epsilon = epsilon
def choose_action(self, observation):
observation = Tensor(observation[np.newaxis, :], mindspore.float32)
if np.random.uniform() < self.epsilon:
self.eval_net.set_train(mode=False)
action_v = self.eval_net(observation)
action = np.argmax(action_v)
else:
action = np.random.randint(0, self.n_actions)
return action
def update_param(self):
assign_result = self.hyper_map(
_update_op,
self.policy_param,
self.target_param
)
return assign_result
def learn(self):
if self.learn_step_counter % self.replace_target_iter == 0:
self.update_param()
if self.memory_counter > self.memory_size:
sample_index = np.random.choice(self.memory_size, size=self.batch_size, replace=False)
else:
sample_index = np.random.choice(self.memory_counter, size=self.batch_size, replace=False)
batch_memory = Tensor(self.memory[sample_index, :], mindspore.float32)
b_s = batch_memory[:, :self.n_features]
b_a = ops.ExpandDims()(batch_memory[:, self.n_features], 1).astype(mindspore.int32)
b_r = ops.ExpandDims()(batch_memory[:, self.n_features + 1], 1)
b_s_ = batch_memory[:, -self.n_features:]
q_next = self.target_net(b_s_).max(axis=1)
q_target = b_r + self.gamma * q_next
loss = self.policy_network_train(b_s, b_a, q_target)
self.cost_his.append(round(float(np.mean(loss.asnumpy())), 3))
self.epsilon = self.epsilon + self.epsilon_increment if self.epsilon < self.epsilon_max else self.epsilon_max
self.learn_step_counter += 1
return loss
def plot_cost(self):
import matplotlib.pyplot as plt
plt.plot(np.arange(len(self.cost_his)), self.cost_his)
plt.ylabel('Cost')
plt.xlabel('training steps')
plt.show()
| true
| true
|
f718913f5c1602a006b860d8a1473f9c5e8ad63a
| 1,114
|
py
|
Python
|
base/base_train.py
|
AndersDHenriksen/Tensorflow-Project-Template
|
32dfeaaf1243587af4ceb7b378c135092ddb9258
|
[
"Apache-2.0"
] | null | null | null |
base/base_train.py
|
AndersDHenriksen/Tensorflow-Project-Template
|
32dfeaaf1243587af4ceb7b378c135092ddb9258
|
[
"Apache-2.0"
] | null | null | null |
base/base_train.py
|
AndersDHenriksen/Tensorflow-Project-Template
|
32dfeaaf1243587af4ceb7b378c135092ddb9258
|
[
"Apache-2.0"
] | 1
|
2018-07-09T03:01:18.000Z
|
2018-07-09T03:01:18.000Z
|
import tensorflow as tf
class BaseTrain:
    """Skeleton trainer: owns the session/model/data and drives the epoch loop.

    Subclasses implement train_epoch() and train_step().
    """

    def __init__(self, sess, model, data, config, logger):
        self.sess = sess
        self.model = model
        self.data = data
        self.config = config
        self.logger = logger
        # initialise all TF variables unless a checkpoint was already restored
        self.init = tf.group(tf.global_variables_initializer(), tf.local_variables_initializer())
        if not self.model.is_loaded:
            self.sess.run(self.init)

    def train(self):
        """Run epochs from the model's persisted epoch counter up to num_epochs."""
        first_epoch = self.model.cur_epoch_tensor.eval(self.sess)
        for _epoch in range(first_epoch, self.config.num_epochs + 1, 1):
            self.train_epoch()
            self.sess.run(self.model.increment_cur_epoch_tensor)

    def train_epoch(self):
        """Subclass hook: loop over one epoch's iterations, calling train_step
        and emitting any summaries."""
        raise NotImplementedError

    def train_step(self):
        """Subclass hook: run one session step and return metrics to summarize."""
        raise NotImplementedError
| 31.828571
| 107
| 0.630162
|
import tensorflow as tf
class BaseTrain:
def __init__(self, sess, model, data, config, logger):
self.model = model
self.logger = logger
self.config = config
self.sess = sess
self.data = data
self.init = tf.group(tf.global_variables_initializer(), tf.local_variables_initializer())
if not self.model.is_loaded:
self.sess.run(self.init)
def train(self):
for cur_epoch in range(self.model.cur_epoch_tensor.eval(self.sess), self.config.num_epochs + 1, 1):
self.train_epoch()
self.sess.run(self.model.increment_cur_epoch_tensor)
def train_epoch(self):
raise NotImplementedError
def train_step(self):
raise NotImplementedError
| true
| true
|
f71891d9166196c7b6043b3a9276fa1418b86155
| 10,866
|
py
|
Python
|
modules/nmt.py
|
tjuwlz/MachineTranslation
|
6e6fc757060ccd076e0ab4313562b1c34892fc60
|
[
"Apache-2.0"
] | 1
|
2019-09-26T08:23:20.000Z
|
2019-09-26T08:23:20.000Z
|
modules/nmt.py
|
LindgeW/MachineTranslation
|
7335c7e95d2ca23ca7e26c45d4b8b13e2ce96704
|
[
"Apache-2.0"
] | null | null | null |
modules/nmt.py
|
LindgeW/MachineTranslation
|
7335c7e95d2ca23ca7e26c45d4b8b13e2ce96704
|
[
"Apache-2.0"
] | null | null | null |
from datautil.dataloader import batch_iter
import torch.nn.functional as F
import torch.optim as optim
import torch.nn.utils as nn_utils
import time
import torch
import numpy as np
from config.Const import *
class NMT(object):
    """Seq2seq machine translation wrapper bundling an encoder and a decoder.

    Provides training (with scheduled teacher forcing), beam-search
    evaluation, and BLEU scoring utilities.
    """
    def __init__(self, encoder, decoder):
        super(NMT, self).__init__()
        self.encoder = encoder
        self.decoder = decoder
    def summary(self):
        """Print both sub-modules for quick inspection."""
        print('encoder:', self.encoder)
        print('decoder:', self.decoder)
    # Train for a single epoch; returns the accumulated loss divided by the
    # number of training pairs.
    def train(self, train_pairs, enc_optimizer, dec_optimizer, args, src_vocab, tgt_vocab):
        train_loss = 0
        for src_batch, tgt_batch in batch_iter(train_pairs, args, src_vocab, tgt_vocab):
            loss = 0
            # enc_out: (batch_size, seq_len, hidden_size * nb_directions)
            # enc_hidden: (num_layers * nb_directions, batch_size, hidden_size)
            enc_out, enc_hidden = self.encoder(src_batch.src_idxs, mask=src_batch.non_pad_mask)
            self.encoder.zero_grad()
            self.decoder.zero_grad()
            dec_hidden = enc_hidden
            # first decoder input is the sentence-start token row
            dec_input = tgt_batch.src_idxs[0].unsqueeze(1)
            if np.random.uniform(0, 1) <= args.teacher_force:
                # teacher forcing: feed the ground-truth token as the next input
                for i in range(1, tgt_batch.src_idxs.size(0)):
                    dec_out, dec_hidden = self.decoder(dec_input, dec_hidden, enc_out)
                    # zero the hidden state at padded positions
                    dec_hidden *= tgt_batch.non_pad_mask[i].unsqueeze(1).repeat(1, dec_hidden.size(-1))
                    loss += self.calc_loss(dec_out, tgt_batch.src_idxs[i])
                    train_loss += loss.data.item()
                    dec_input = tgt_batch.src_idxs[i].unsqueeze(1)
            else:
                # free running: feed the network's own prediction as the next input
                for i in range(1, tgt_batch.src_idxs.size(0)):
                    dec_out, dec_hidden = self.decoder(dec_input, dec_hidden, enc_out)
                    dec_hidden *= tgt_batch.non_pad_mask[i].unsqueeze(1).repeat(1, dec_hidden.size(-1))
                    loss += self.calc_loss(dec_out, tgt_batch.src_idxs[i])
                    train_loss += loss.data.item()
                    _, top_i = dec_out.data.topk(1)
                    dec_input = top_i  # (batch_size, 1)
            loss.backward()
            nn_utils.clip_grad_norm_(filter(lambda p: p.requires_grad, self.encoder.parameters()), max_norm=5.0)
            nn_utils.clip_grad_norm_(filter(lambda p: p.requires_grad, self.decoder.parameters()), max_norm=5.0)
            enc_optimizer.step()
            dec_optimizer.step()
        return train_loss / len(train_pairs)
    # Full training driver: runs args.epoch epochs with per-epoch LR decay.
    def train_iter(self, train_pairs, args, src_vocab, tgt_vocab):
        self.encoder.train()
        self.decoder.train()
        enc_optimizer = optim.Adam(filter(lambda p: p.requires_grad, self.encoder.parameters()), lr=args.lr)
        dec_optimizer = optim.Adam(filter(lambda p: p.requires_grad, self.decoder.parameters()), lr=args.lr)
        enc_lr_scheduler = optim.lr_scheduler.LambdaLR(enc_optimizer, lambda ep: max(0.95**ep, 1e-4))
        dec_lr_scheduler = optim.lr_scheduler.LambdaLR(dec_optimizer, lambda ep: max(0.95**ep, 1e-4))
        # enc_lr_scheduler = optim.lr_scheduler.LambdaLR(enc_optimizer, lambda ep: max(1 - 0.75 * ep / args.epoch, 1e-4))
        # dec_lr_scheduler = optim.lr_scheduler.LambdaLR(dec_optimizer, lambda ep: max(1 - 0.75 * ep / args.epoch, 1e-4))
        for i in range(args.epoch):
            enc_lr_scheduler.step()
            dec_lr_scheduler.step()
            t1 = time.time()
            train_loss = self.train(train_pairs, enc_optimizer, dec_optimizer, args, src_vocab, tgt_vocab)
            t2 = time.time()
            print('[Epoch %d] train loss: %.3f' % (i+1, train_loss))
            print('encoder lr:', enc_lr_scheduler.get_lr())
            print('decoder lr:', dec_lr_scheduler.get_lr())
            print('time cost: %.2fs' % (t2 - t1))
    def calc_loss(self, pred, tgt):
        # NLL loss; index 0 is the padding id and is ignored
        return F.nll_loss(pred, tgt, ignore_index=0)
    # def evaluate(self, test_pairs, args, src_vocab, tgt_vocab):
    #     self.encoder.eval()
    #     self.decoder.eval()
    #     pred_wds, tgt_wds = [], []
    #     for src_batch, tgt_batch in batch_iter(test_pairs, args, src_vocab, tgt_vocab):
    #         batch_pred_wds, batch_tgt_wds = [], []
    #         enc_out, enc_hidden = self.encoder(src_batch.src_idxs, mask=src_batch.non_pad_mask)
    #
    #         dec_hidden = enc_hidden
    #         dec_input = tgt_batch.src_idxs[0]
    #         for i in range(1, tgt_batch.src_idxs.size(0)):
    #             dec_out, dec_hidden = self.decoder(dec_input, dec_hidden, enc_out)
    #
    #             dec_hidden *= tgt_batch.non_pad_mask[i].unsqueeze(1).repeat(1, dec_hidden.size(-1))
    #             tgt_idxs = tgt_batch.src_idxs[i]
    #             # greedy search
    #             pred_idxs = dec_out.data.argmax(dim=1)
    #             batch_pred_wds.append(tgt_vocab.index2word(pred_idxs.tolist()))
    #             batch_tgt_wds.append(tgt_vocab.index2word(tgt_idxs.tolist()))
    #             dec_input = pred_idxs
    #
    #         pred_wds.extend(self.extract_valid(np.asarray(batch_pred_wds).T.tolist()))
    #         tgt_wds.extend(self.extract_valid(np.asarray(batch_tgt_wds).T.tolist()))
    #
    #     print('BLEU:', self.corpus_bleu(pred_wds, tgt_wds))
    # beam search
    '''
    Procedure (with beam size = 3 as an example):
    1. take the 3 highest-probability words from the output at step t1
    2. feed each of the 3 words chosen at step t-1 as the input at step t
    3. accumulate the sequence score (sum of log-likelihoods of the chosen
       words) and keep the 3 best-scoring candidates
    4. repeat steps 2-3 until the maximum length (or <eos>) is reached
    '''
    def evaluate(self, test_pairs, args, src_vocab, tgt_vocab):
        """Beam-search decoding over the test set (decoded words not yet collected)."""
        self.encoder.eval()
        self.decoder.eval()
        # pred_wds, tgt_wds = [], []
        for src_batch, tgt_batch in batch_iter(test_pairs, args, src_vocab, tgt_vocab):
            # batch_pred_wds, batch_tgt_wds = [], []
            enc_out, enc_hidden = self.encoder(src_batch.src_idxs, mask=src_batch.non_pad_mask)
            # running cumulative scores of the kept beams
            seq_len, batch_size = tgt_batch.src_idxs.size()
            # (bz, beam_size)
            hist_score = torch.zeros((batch_size, args.beam_size), device=args.device)
            # (beam_size, bz, vocab_size)
            beam_score = torch.zeros((args.beam_size, batch_size, tgt_vocab.vocab_size), device=args.device)
            # (max_len, bz, beam_size) — indexed by time step first
            best_paths = torch.zeros((MAX_LEN, batch_size, args.beam_size), device=args.device)
            dec_hidden = enc_hidden
            dec_input = tgt_batch.src_idxs[0].unsqueeze(1)
            for i in range(1, min(MAX_LEN, seq_len)):
                if i == 1:
                    # dec_input: (bz, 1)
                    # dec_out: (bz, vocab_size)
                    dec_out, dec_hidden = self.decoder(dec_input, dec_hidden, enc_out)
                    dec_hidden *= tgt_batch.non_pad_mask[i].unsqueeze(1).repeat(1, dec_hidden.size(-1))
                    # (bz, beam_size)
                    top_prob, top_idxs = dec_out.data.topk(args.beam_size, dim=1)
                    hist_score = top_prob
                    best_paths[i] = top_idxs
                    # (bz, beam_size)
                    dec_input = top_idxs
                else:
                    # dec_input: (bz, beam_size) -> (beam_size, bz)
                    dec_input = dec_input.transpose(0, 1)
                    for j in range(args.beam_size):
                        # dec_out: (bz, vocab_size)
                        dec_out, dec_hidden = self.decoder(dec_input[j].unsqueeze(1), dec_hidden, enc_out)
                        dec_hidden *= tgt_batch.non_pad_mask[i].unsqueeze(1).repeat(1, dec_hidden.size(-1))
                        beam_score[j] = dec_out
                    # (bz, beam_size, 1) -> (bz, beam_size, vocab_size)
                    # NOTE(review): in-place += on an expand()ed view may error
                    # in PyTorch — confirm this path runs, or use a fresh tensor
                    hist_score = hist_score.unsqueeze(-1).expand((-1, -1, tgt_vocab.vocab_size))
                    hist_score += beam_score.transpose(0, 1)  # (bz, beam_size, vocab_size)
                    # (bz, beam_size * vocab_size)
                    hist_score = hist_score.reshape((batch_size, -1))
                    # (bz, beam_size)
                    top_prob, top_idxs = hist_score.topk(args.beam_size, dim=1)
                    hist_score = top_prob
                    top_idxs %= tgt_vocab.vocab_size
                    best_paths[i] = top_idxs
                    dec_input = top_idxs
            # pred_wds.extend(self.extract_valid(np.asarray(batch_pred_wds).T.tolist()))
            # tgt_wds.extend(self.extract_valid(np.asarray(batch_tgt_wds).T.tolist()))
    # Strip the padding/tail of each sequence: keep tokens up to the first EOS.
    def extract_valid(self, seqs: list):
        return list(map(lambda x: x[:x.index(EOS)] if EOS in x else x, seqs))
    # Count clipped n-gram matches between candidate and reference.
    def count_ngram(self, cand: list, ref: list, n=1) -> int:
        assert len(cand) != 0 and len(ref) != 0
        total_count = 0
        for i in range(len(cand) - n + 1):
            cand_count, ref_count = 1, 0
            ngram = cand[i: i + n]
            # count occurrences of this n-gram in the candidate translation
            for j in range(i + n, len(cand) - n + 1):
                if ngram == cand[j: j + n]:
                    cand_count += 1
            # count occurrences of this n-gram in the reference translation
            for k in range(len(ref) - n + 1):
                if ngram == ref[k: k + n]:
                    ref_count += 1
            total_count += min(cand_count, ref_count)
        return total_count
    # Sentence-level BLEU in [0, 1]; higher is better.
    def sentence_bleu(self, cand: list, ref: list, N=4) -> float:
        '''
        :param cand: sentence_tokens
        :param ref: sentence_tokens
        :return:
        '''
        assert len(cand) != 0 and len(ref) != 0
        # n ranges over [1, N] for the n-gram precisions
        res = 0
        cand_len, ref_len = len(cand), len(ref)
        for n in range(1, N+1):
            cand_gram = max(0, cand_len - n + 1)
            res += 0.25 * np.log(self.count_ngram(cand, ref, n) / cand_gram)
        # brevity penalty for candidates shorter than the reference
        # bp = np.exp(1 - max(1., len(ref) / len(cand)))
        return np.exp(res + min(0., 1 - ref_len / cand_len))
    # Corpus-level BLEU (note: not an average of per-sentence BLEU scores).
    def corpus_bleu(self, cands: list, refs: list, N=4) -> float:
        '''
        :param cands: [sentence_tokens1, sentence_tokens2]
        :param refs: [sentence_tokens1, sentence_tokens2]
        :return:
        '''
        assert len(cands) != 0 and len(cands) == len(refs)
        ref_len, cand_len = 0, 0
        for cand, ref in zip(cands, refs):
            ref_len += len(ref)
            cand_len += len(cand)
        res = 0
        for n in range(1, N+1):
            n_match, n_grams = 0, 0
            for cand, ref in zip(cands, refs):
                n_match += self.count_ngram(cand, ref, n)
                n_grams += max(0, len(cand) - n + 1)
            res += 0.25 * np.log(n_match / n_grams + 1e-8)
        return np.exp(res + min(0., 1 - ref_len / cand_len))
| 44.716049
| 121
| 0.571323
|
from datautil.dataloader import batch_iter
import torch.nn.functional as F
import torch.optim as optim
import torch.nn.utils as nn_utils
import time
import torch
import numpy as np
from config.Const import *
class NMT(object):
def __init__(self, encoder, decoder):
super(NMT, self).__init__()
self.encoder = encoder
self.decoder = decoder
def summary(self):
print('encoder:', self.encoder)
print('decoder:', self.decoder)
def train(self, train_pairs, enc_optimizer, dec_optimizer, args, src_vocab, tgt_vocab):
train_loss = 0
for src_batch, tgt_batch in batch_iter(train_pairs, args, src_vocab, tgt_vocab):
loss = 0
enc_out, enc_hidden = self.encoder(src_batch.src_idxs, mask=src_batch.non_pad_mask)
self.encoder.zero_grad()
self.decoder.zero_grad()
dec_hidden = enc_hidden
dec_input = tgt_batch.src_idxs[0].unsqueeze(1)
if np.random.uniform(0, 1) <= args.teacher_force:
for i in range(1, tgt_batch.src_idxs.size(0)):
dec_out, dec_hidden = self.decoder(dec_input, dec_hidden, enc_out)
dec_hidden *= tgt_batch.non_pad_mask[i].unsqueeze(1).repeat(1, dec_hidden.size(-1))
loss += self.calc_loss(dec_out, tgt_batch.src_idxs[i])
train_loss += loss.data.item()
dec_input = tgt_batch.src_idxs[i].unsqueeze(1)
else:
for i in range(1, tgt_batch.src_idxs.size(0)):
dec_out, dec_hidden = self.decoder(dec_input, dec_hidden, enc_out)
dec_hidden *= tgt_batch.non_pad_mask[i].unsqueeze(1).repeat(1, dec_hidden.size(-1))
loss += self.calc_loss(dec_out, tgt_batch.src_idxs[i])
train_loss += loss.data.item()
_, top_i = dec_out.data.topk(1)
dec_input = top_i
loss.backward()
nn_utils.clip_grad_norm_(filter(lambda p: p.requires_grad, self.encoder.parameters()), max_norm=5.0)
nn_utils.clip_grad_norm_(filter(lambda p: p.requires_grad, self.decoder.parameters()), max_norm=5.0)
enc_optimizer.step()
dec_optimizer.step()
return train_loss / len(train_pairs)
def train_iter(self, train_pairs, args, src_vocab, tgt_vocab):
self.encoder.train()
self.decoder.train()
enc_optimizer = optim.Adam(filter(lambda p: p.requires_grad, self.encoder.parameters()), lr=args.lr)
dec_optimizer = optim.Adam(filter(lambda p: p.requires_grad, self.decoder.parameters()), lr=args.lr)
enc_lr_scheduler = optim.lr_scheduler.LambdaLR(enc_optimizer, lambda ep: max(0.95**ep, 1e-4))
dec_lr_scheduler = optim.lr_scheduler.LambdaLR(dec_optimizer, lambda ep: max(0.95**ep, 1e-4))
for i in range(args.epoch):
enc_lr_scheduler.step()
dec_lr_scheduler.step()
t1 = time.time()
train_loss = self.train(train_pairs, enc_optimizer, dec_optimizer, args, src_vocab, tgt_vocab)
t2 = time.time()
print('[Epoch %d] train loss: %.3f' % (i+1, train_loss))
print('encoder lr:', enc_lr_scheduler.get_lr())
print('decoder lr:', dec_lr_scheduler.get_lr())
print('time cost: %.2fs' % (t2 - t1))
def calc_loss(self, pred, tgt):
return F.nll_loss(pred, tgt, ignore_index=0)
def evaluate(self, test_pairs, args, src_vocab, tgt_vocab):
self.encoder.eval()
self.decoder.eval()
for src_batch, tgt_batch in batch_iter(test_pairs, args, src_vocab, tgt_vocab):
enc_out, enc_hidden = self.encoder(src_batch.src_idxs, mask=src_batch.non_pad_mask)
seq_len, batch_size = tgt_batch.src_idxs.size()
hist_score = torch.zeros((batch_size, args.beam_size), device=args.device)
beam_score = torch.zeros((args.beam_size, batch_size, tgt_vocab.vocab_size), device=args.device)
best_paths = torch.zeros((MAX_LEN, batch_size, args.beam_size), device=args.device)
dec_hidden = enc_hidden
dec_input = tgt_batch.src_idxs[0].unsqueeze(1)
for i in range(1, min(MAX_LEN, seq_len)):
if i == 1:
dec_out, dec_hidden = self.decoder(dec_input, dec_hidden, enc_out)
dec_hidden *= tgt_batch.non_pad_mask[i].unsqueeze(1).repeat(1, dec_hidden.size(-1))
top_prob, top_idxs = dec_out.data.topk(args.beam_size, dim=1)
hist_score = top_prob
best_paths[i] = top_idxs
dec_input = top_idxs
else:
dec_input = dec_input.transpose(0, 1)
for j in range(args.beam_size):
dec_out, dec_hidden = self.decoder(dec_input[j].unsqueeze(1), dec_hidden, enc_out)
dec_hidden *= tgt_batch.non_pad_mask[i].unsqueeze(1).repeat(1, dec_hidden.size(-1))
beam_score[j] = dec_out
hist_score = hist_score.unsqueeze(-1).expand((-1, -1, tgt_vocab.vocab_size))
hist_score += beam_score.transpose(0, 1)
hist_score = hist_score.reshape((batch_size, -1))
top_prob, top_idxs = hist_score.topk(args.beam_size, dim=1)
hist_score = top_prob
top_idxs %= tgt_vocab.vocab_size
best_paths[i] = top_idxs
dec_input = top_idxs
def extract_valid(self, seqs: list):
return list(map(lambda x: x[:x.index(EOS)] if EOS in x else x, seqs))
def count_ngram(self, cand: list, ref: list, n=1) -> int:
assert len(cand) != 0 and len(ref) != 0
total_count = 0
for i in range(len(cand) - n + 1):
cand_count, ref_count = 1, 0
ngram = cand[i: i + n]
for j in range(i + n, len(cand) - n + 1):
if ngram == cand[j: j + n]:
cand_count += 1
for k in range(len(ref) - n + 1):
if ngram == ref[k: k + n]:
ref_count += 1
total_count += min(cand_count, ref_count)
return total_count
def sentence_bleu(self, cand: list, ref: list, N=4) -> float:
assert len(cand) != 0 and len(ref) != 0
res = 0
cand_len, ref_len = len(cand), len(ref)
for n in range(1, N+1):
cand_gram = max(0, cand_len - n + 1)
res += 0.25 * np.log(self.count_ngram(cand, ref, n) / cand_gram)
return np.exp(res + min(0., 1 - ref_len / cand_len))
def corpus_bleu(self, cands: list, refs: list, N=4) -> float:
assert len(cands) != 0 and len(cands) == len(refs)
ref_len, cand_len = 0, 0
for cand, ref in zip(cands, refs):
ref_len += len(ref)
cand_len += len(cand)
res = 0
for n in range(1, N+1):
n_match, n_grams = 0, 0
for cand, ref in zip(cands, refs):
n_match += self.count_ngram(cand, ref, n)
n_grams += max(0, len(cand) - n + 1)
res += 0.25 * np.log(n_match / n_grams + 1e-8)
return np.exp(res + min(0., 1 - ref_len / cand_len))
| true
| true
|
f71892180c36d626c06032d591d63adab692cc84
| 8,517
|
py
|
Python
|
indigo-web/cdmi/models.py
|
pericles-project/ERMR
|
99e19c476c813632d0508cdef65b4683e36f8e43
|
[
"Apache-2.0"
] | null | null | null |
indigo-web/cdmi/models.py
|
pericles-project/ERMR
|
99e19c476c813632d0508cdef65b4683e36f8e43
|
[
"Apache-2.0"
] | null | null | null |
indigo-web/cdmi/models.py
|
pericles-project/ERMR
|
99e19c476c813632d0508cdef65b4683e36f8e43
|
[
"Apache-2.0"
] | null | null | null |
""""CDMI Models
Copyright 2015 Archive Analytics Solutions - University of Liverpool
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import mimetypes
from collections import OrderedDict
from indigo.models.collection import Collection
class CDMIContainer(object):
    """Expose an Indigo Collection through CDMI container field accessors."""

    def __init__(self, indigo_container, api_root):
        self.collection = indigo_container
        self.api_root = api_root

    def get_capabilitiesURI(self):
        """Mandatory URI to the capabilities for the object"""
        return u'{0}/cdmi_capabilities/container{1}'.format(
            self.api_root, self.collection.path)

    def get_children(self, range=None):
        """Mandatory - Names of the children objects in the container object."""
        containers, dataobjects = self.collection.get_child()
        # container children are rendered with a trailing slash, CDMI-style
        names = [u"{}/".format(child) for child in containers] + dataobjects
        if range:
            # CDMI ranges ("first-last") are inclusive at both ends
            first, last = (int(part) for part in range.split("-", 1))
            return names[first:last + 1]
        return names

    def get_childrenrange(self):
        """Mandatory - The children of the container expressed as a range"""
        containers, dataobjects = self.collection.get_child()
        total = len(containers) + len(dataobjects)
        if total == 0:
            return "0-0"
        return "{}-{}".format(0, total - 1)

    def get_completionStatus(self):
        """Mandatory - A string indicating if the object is still in the
        process of being created or updated by another operation,"""
        status = self.collection.get_metadata_key("cdmi_completionStatus")
        if not status:
            status = "Complete"
        return status

    def get_domainURI(self):
        """Mandatory URI of the owning domain"""
        return '{0}/cdmi_domains/indigo/'.format(self.api_root)

    def get_metadata(self):
        """CDMI metadata merged with the ACL metadata of the collection."""
        merged = self.collection.get_cdmi_metadata()
        merged.update(self.collection.get_acl_metadata())
        return merged

    def get_objectID(self):
        """Mandatory object ID of the object"""
        return self.collection.uuid

    def get_objectName(self):
        """Conditional name of the object
        We don't support objects only accessible by ID so this is mandatory"""
        return self.collection.name

    def get_objectType(self):
        """Mandatory Object type"""
        return "application/cdmi-container"

    def get_parentID(self):
        """Conditional Object ID of the parent container object
        We don't support objects only accessible by ID so this is mandatory"""
        if self.collection.is_root:
            parent = Collection.find(u"/")
        else:
            parent = Collection.find(self.collection.container)
        return parent.uuid

    def get_parentURI(self):
        """Conditional URI for the parent object
        We don't support objects only accessible by ID so this is mandatory"""
        # CDMI containers carry a trailing '/'; Indigo paths do not (except
        # for the root)
        parent_path = self.collection.container
        if parent_path != '/' and parent_path != "null":
            parent_path = u"{}/".format(parent_path)
        return u"{}".format(parent_path)

    def get_path(self):
        """Full Indigo path of the collection."""
        return self.collection.path

    def get_percentComplete(self):
        """Optional - Indicate the percentage of completion as a numeric
        integer value from 0 through 100. 100 if the completionStatus is
        'Complete'"""
        pct = self.collection.get_metadata_key("cdmi_percentComplete")
        if not pct:
            pct = "100"
        return pct
class CDMIResource(object):
    """Expose an Indigo Resource through CDMI data-object field accessors."""

    def __init__(self, indigo_resource, api_root):
        self.resource = indigo_resource
        self.api_root = api_root

    def chunk_content(self):
        """Iterate the resource content in chunks."""
        return self.resource.chunk_content()

    def get_capabilitiesURI(self):
        """Mandatory URI to the capabilities for the object"""
        return (u'{0}/cdmi_capabilities/dataobject{1}'
                ''.format(self.api_root, self.resource.path)
                )

    def get_completionStatus(self):
        """Mandatory - A string indicating if the object is still in the
        process of being created or updated by another operation,"""
        val = self.resource.get_metadata_key("cdmi_completionStatus")
        if not val:
            val = "Complete"
        return val

    def get_domainURI(self):
        """Mandatory URI of the owning domain"""
        return ('{0}/cdmi_domains/indigo/'.format(self.api_root))

    def get_length(self):
        """Size of the resource in bytes."""
        return self.resource.size

    def get_metadata(self):
        """CDMI metadata merged with the ACL metadata of the resource."""
        md = self.resource.get_cdmi_metadata()
        md.update(self.resource.get_acl_metadata())
        return md

    def get_mimetype(self):
        """Stored mimetype if set, otherwise a guess from the name, else binary."""
        if self.resource.get_mimetype():
            return self.resource.get_mimetype()
        # Give best guess at mimetype
        mimetype = mimetypes.guess_type(self.resource.name)
        if mimetype[0]:
            return mimetype[0]
        else:
            # Interpret as binary data
            return 'application/octet-stream'

    def get_objectID(self):
        """Mandatory object ID of the object"""
        return self.resource.uuid

    def get_objectName(self):
        """Conditional name of the object
        We don't support objects only accessible by ID so this is mandatory"""
        return self.resource.get_name()

    def get_objectType(self):
        """Mandatory Object type"""
        return "application/cdmi-object"

    def get_parentID(self):
        """Conditional Object ID of the parent container object
        We don't support objects only accessible by ID so this is mandatory"""
        parent = Collection.find(self.resource.container)
        return parent.uuid

    def get_parentURI(self):
        """Conditional URI for the parent object
        We don't support objects only accessible by ID so this is mandatory"""
        # A container in CDMI has a '/' at the end but we don't (except for the
        # root)
        parent_path = self.resource.container
        if parent_path != '/':
            parent_path = u"{}/".format(parent_path)
        return u"{}".format(parent_path)

    def get_path(self):
        """Full Indigo path of the resource."""
        return self.resource.path

    def get_percentComplete(self):
        """Optional - Indicate the percentage of completion as a numeric
        integer value from 0 through 100. 100 if the completionStatus is
        'Complete'"""
        val = self.resource.get_metadata_key("cdmi_percentComplete")
        if not val:
            val = "100"
        return val

    def get_reference(self):
        """URL of the referenced object."""
        return self.resource.url

    def get_url(self):
        """URL of the resource."""
        return self.resource.url

    def get_value(self, range=None):
        """Return the resource content, optionally restricted to a CDMI range.

        Bug fix: the previous implementation called ``get_driver(...)``, a
        name never imported or defined in this module, so it always raised
        NameError; the content is now streamed through the resource itself,
        consistent with ``chunk_content`` above.
        """
        # TODO: Improve that for large files. Check what CDMI recommends
        # for stream access
        res = ''.join(self.resource.chunk_content())
        if range:
            start, stop = (int(el) for el in range.split("-", 1))
            # map the inclusive CDMI range value to a python slice index
            stop += 1
        else:
            start = 0
            stop = len(res)
        return res[start:stop]

    def get_valuerange(self):
        """Mandatory - The range of bytes of the data object to be returned in
        the value field"""
        return "0-{}".format(self.resource.size-1)

    def get_valuetransferencoding(self):
        """Mandatory - The value transfer encoding used for the data object
        value"""
        return "utf-8"

    def is_reference(self):
        """Check if the resource is a reference"""
        return self.resource.is_reference
| 34.481781
| 80
| 0.633909
|
import mimetypes
from collections import OrderedDict
from indigo.models.collection import Collection
class CDMIContainer(object):
def __init__(self, indigo_container, api_root):
self.collection = indigo_container
self.api_root = api_root
def get_capabilitiesURI(self):
return (u'{0}/cdmi_capabilities/container{1}'
''.format(self.api_root, self.collection.path)
)
def get_children(self, range=None):
child_c , child_r = self.collection.get_child()
child_c = [ u"{}/".format(c) for c in child_c ]
res = child_c + child_r
if range:
start, stop = ( int(el) for el in range.split("-", 1))
stop += 1
else:
start = 0
stop = len(res)
return res[start:stop]
def get_childrenrange(self):
child_container , child_dataobject = self.collection.get_child()
nb_child = len(child_container) + len(child_dataobject)
if nb_child != 0:
return "{}-{}".format(0, nb_child-1)
else:
return "0-0"
def get_completionStatus(self):
val = self.collection.get_metadata_key("cdmi_completionStatus")
if not val:
val = "Complete"
return val
def get_domainURI(self):
return ('{0}/cdmi_domains/indigo/'.format(self.api_root))
def get_metadata(self):
md = self.collection.get_cdmi_metadata()
md.update(self.collection.get_acl_metadata())
return md
def get_objectID(self):
return self.collection.uuid
def get_objectName(self):
return self.collection.name
def get_objectType(self):
return "application/cdmi-container"
def get_parentID(self):
parent_path = self.collection.container
if self.collection.is_root:
parent_path = u"/"
parent = Collection.find(parent_path)
return parent.uuid
def get_parentURI(self):
# root)
parent_path = self.collection.container
if parent_path != '/' and parent_path != "null":
parent_path = u"{}/".format(parent_path)
return u"{}".format(parent_path)
def get_path(self):
return self.collection.path
def get_percentComplete(self):
val = self.collection.get_metadata_key("cdmi_percentComplete")
if not val:
val = "100"
return val
class CDMIResource(object):
def __init__(self, indigo_resource, api_root):
self.resource = indigo_resource
self.api_root = api_root
def chunk_content(self):
return self.resource.chunk_content()
def get_capabilitiesURI(self):
return (u'{0}/cdmi_capabilities/dataobject{1}'
''.format(self.api_root, self.resource.path)
)
def get_completionStatus(self):
val = self.resource.get_metadata_key("cdmi_completionStatus")
if not val:
val = "Complete"
return val
def get_domainURI(self):
return ('{0}/cdmi_domains/indigo/'.format(self.api_root))
def get_length(self):
return self.resource.size
def get_metadata(self):
md = self.resource.get_cdmi_metadata()
md.update(self.resource.get_acl_metadata())
return md
def get_mimetype(self):
if self.resource.get_mimetype():
return self.resource.get_mimetype()
# Give best guess at mimetype
mimetype = mimetypes.guess_type(self.resource.name)
if mimetype[0]:
return mimetype[0]
else:
# Interpret as binary data
return 'application/octet-stream'
def get_objectID(self):
return self.resource.uuid
def get_objectName(self):
return self.resource.get_name()
def get_objectType(self):
return "application/cdmi-object"
def get_parentID(self):
parent = Collection.find(self.resource.container)
return parent.uuid
def get_parentURI(self):
# A container in CDMI has a '/' at the end but we don't (except for the
parent_path = self.resource.container
if parent_path != '/':
parent_path = u"{}/".format(parent_path)
return u"{}".format(parent_path)
def get_path(self):
return self.resource.path
def get_percentComplete(self):
val = self.resource.get_metadata_key("cdmi_percentComplete")
if not val:
val = "100"
return val
def get_reference(self):
return self.resource.url
def get_url(self):
return self.resource.url
def get_value(self, range=None):
driver = get_driver(self.resource.url)
data = []
for chk in driver.chunk_content():
data.append(chk)
res = ''.join([s for s in data])
if range:
start, stop = (int(el) for el in range.split("-", 1))
stop += 1
else:
start = 0
stop = len(res)
return res[start:stop]
def get_valuerange(self):
return "0-{}".format(self.resource.size-1)
def get_valuetransferencoding(self):
return "utf-8"
def is_reference(self):
return self.resource.is_reference
| true
| true
|
f7189242314d212cfcbc3ba03b7ee8ad651c0080
| 6,401
|
py
|
Python
|
tests/test_slot.py
|
glichtner/fhir.resources
|
94896d8f8a0b7dd69253762aab968f4fd6eb69a0
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_slot.py
|
glichtner/fhir.resources
|
94896d8f8a0b7dd69253762aab968f4fd6eb69a0
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_slot.py
|
glichtner/fhir.resources
|
94896d8f8a0b7dd69253762aab968f4fd6eb69a0
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Profile: http://hl7.org/fhir/StructureDefinition/Slot
Release: R5
Version: 4.5.0
Build ID: 0d95498
Last updated: 2021-04-03T00:34:11.075+00:00
"""
from pydantic.validators import bytes_validator # noqa: F401
from fhir.resources import fhirtypes # noqa: F401
from fhir.resources import slot
def impl_slot_1(inst):
    """Assert the field values expected from the slot-example.json fixture."""
    assert inst.appointmentType.coding[0].code == "WALKIN"
    assert (
        inst.appointmentType.coding[0].display
        == "A previously unscheduled walk-in visit"
    )
    assert (
        inst.appointmentType.coding[0].system
        == "http://terminology.hl7.org/CodeSystem/v2-0276"
    )
    assert inst.comment == (
        "Assessments should be performed before requesting "
        "appointments in this slot."
    )
    assert inst.end == fhirtypes.Instant.validate("2013-12-25T09:30:00Z")
    assert inst.id == "example"
    assert inst.meta.tag[0].code == "HTEST"
    assert inst.meta.tag[0].display == "test health data"
    assert (
        inst.meta.tag[0].system == "http://terminology.hl7.org/CodeSystem/v3-ActReason"
    )
    assert inst.schedule.reference == "Schedule/example"
    assert inst.serviceCategory[0].coding[0].code == "17"
    assert inst.serviceCategory[0].coding[0].display == "General Practice"
    assert inst.serviceType[0].coding[0].code == "57"
    assert inst.serviceType[0].coding[0].display == "Immunization"
    assert inst.specialty[0].coding[0].code == "408480009"
    assert inst.specialty[0].coding[0].display == "Clinical immunology"
    assert inst.start == fhirtypes.Instant.validate("2013-12-25T09:15:00Z")
    assert inst.status == "free"
    assert inst.text.status == "generated"
def test_slot_1(base_settings):
    """No. 1 tests collection for Slot.
    Test File: slot-example.json
    """
    source = base_settings["unittest_data_dir"] / "slot-example.json"
    parsed = slot.Slot.parse_file(
        source, content_type="application/json", encoding="utf-8"
    )
    assert parsed.resource_type == "Slot"
    impl_slot_1(parsed)
    # Round-trip: serialise to a dict and rebuild the resource from it.
    payload = parsed.dict()
    assert payload["resourceType"] == "Slot"
    impl_slot_1(slot.Slot(**payload))
def impl_slot_2(inst):
    """Assert the field values expected from the slot-example-tentative.json fixture."""
    assert inst.comment == "Dr Careful is out of the office"
    assert inst.end == fhirtypes.Instant.validate("2013-12-25T10:00:00Z")
    assert inst.id == "2"
    assert inst.meta.tag[0].code == "HTEST"
    assert inst.meta.tag[0].display == "test health data"
    assert (
        inst.meta.tag[0].system == "http://terminology.hl7.org/CodeSystem/v3-ActReason"
    )
    assert inst.schedule.reference == "Schedule/example"
    assert inst.serviceCategory[0].coding[0].code == "17"
    assert inst.serviceCategory[0].coding[0].display == "General Practice"
    assert inst.start == fhirtypes.Instant.validate("2013-12-25T09:45:00Z")
    assert inst.status == "busy-tentative"
    assert inst.text.status == "generated"
def test_slot_2(base_settings):
    """No. 2 tests collection for Slot.
    Test File: slot-example-tentative.json
    """
    source = base_settings["unittest_data_dir"] / "slot-example-tentative.json"
    parsed = slot.Slot.parse_file(
        source, content_type="application/json", encoding="utf-8"
    )
    assert parsed.resource_type == "Slot"
    impl_slot_2(parsed)
    # Round-trip: serialise to a dict and rebuild the resource from it.
    payload = parsed.dict()
    assert payload["resourceType"] == "Slot"
    impl_slot_2(slot.Slot(**payload))
def impl_slot_3(inst):
    """Assert the field values expected from the slot-example-unavailable.json fixture."""
    assert inst.comment == "Dr Careful is out of the office"
    assert inst.end == fhirtypes.Instant.validate("2013-12-25T09:45:00Z")
    assert inst.id == "3"
    assert inst.meta.tag[0].code == "HTEST"
    assert inst.meta.tag[0].display == "test health data"
    assert (
        inst.meta.tag[0].system == "http://terminology.hl7.org/CodeSystem/v3-ActReason"
    )
    assert inst.schedule.reference == "Schedule/example"
    assert inst.serviceCategory[0].coding[0].code == "17"
    assert inst.serviceCategory[0].coding[0].display == "General Practice"
    assert inst.start == fhirtypes.Instant.validate("2013-12-25T09:30:00Z")
    assert inst.status == "busy-unavailable"
    assert inst.text.status == "generated"
def test_slot_3(base_settings):
    """No. 3 tests collection for Slot.
    Test File: slot-example-unavailable.json
    """
    source = base_settings["unittest_data_dir"] / "slot-example-unavailable.json"
    parsed = slot.Slot.parse_file(
        source, content_type="application/json", encoding="utf-8"
    )
    assert parsed.resource_type == "Slot"
    impl_slot_3(parsed)
    # Round-trip: serialise to a dict and rebuild the resource from it.
    payload = parsed.dict()
    assert payload["resourceType"] == "Slot"
    impl_slot_3(slot.Slot(**payload))
def impl_slot_4(inst):
    """Assert the field values expected from the slot-example-busy.json fixture."""
    assert inst.comment == (
        "Assessments should be performed before requesting "
        "appointments in this slot."
    )
    assert inst.end == fhirtypes.Instant.validate("2013-12-25T09:15:00Z")
    assert inst.id == "1"
    assert inst.identifier[0].system == "http://example.org/identifiers/slots"
    assert inst.identifier[0].value == "123132"
    assert inst.meta.tag[0].code == "HTEST"
    assert inst.meta.tag[0].display == "test health data"
    assert (
        inst.meta.tag[0].system == "http://terminology.hl7.org/CodeSystem/v3-ActReason"
    )
    assert inst.overbooked is True
    assert inst.schedule.reference == "Schedule/example"
    assert inst.serviceCategory[0].coding[0].code == "17"
    assert inst.serviceCategory[0].coding[0].display == "General Practice"
    assert inst.start == fhirtypes.Instant.validate("2013-12-25T09:00:00Z")
    assert inst.status == "busy"
    assert inst.text.status == "generated"
def test_slot_4(base_settings):
    """No. 4 tests collection for Slot.
    Test File: slot-example-busy.json
    """
    source = base_settings["unittest_data_dir"] / "slot-example-busy.json"
    parsed = slot.Slot.parse_file(
        source, content_type="application/json", encoding="utf-8"
    )
    assert parsed.resource_type == "Slot"
    impl_slot_4(parsed)
    # Round-trip: serialise to a dict and rebuild the resource from it.
    payload = parsed.dict()
    assert payload["resourceType"] == "Slot"
    impl_slot_4(slot.Slot(**payload))
| 35.17033
| 87
| 0.675051
|
from pydantic.validators import bytes_validator
from fhir.resources import fhirtypes
from fhir.resources import slot
def impl_slot_1(inst):
assert inst.appointmentType.coding[0].code == "WALKIN"
assert (
inst.appointmentType.coding[0].display
== "A previously unscheduled walk-in visit"
)
assert (
inst.appointmentType.coding[0].system
== "http://terminology.hl7.org/CodeSystem/v2-0276"
)
assert inst.comment == (
"Assessments should be performed before requesting "
"appointments in this slot."
)
assert inst.end == fhirtypes.Instant.validate("2013-12-25T09:30:00Z")
assert inst.id == "example"
assert inst.meta.tag[0].code == "HTEST"
assert inst.meta.tag[0].display == "test health data"
assert (
inst.meta.tag[0].system == "http://terminology.hl7.org/CodeSystem/v3-ActReason"
)
assert inst.schedule.reference == "Schedule/example"
assert inst.serviceCategory[0].coding[0].code == "17"
assert inst.serviceCategory[0].coding[0].display == "General Practice"
assert inst.serviceType[0].coding[0].code == "57"
assert inst.serviceType[0].coding[0].display == "Immunization"
assert inst.specialty[0].coding[0].code == "408480009"
assert inst.specialty[0].coding[0].display == "Clinical immunology"
assert inst.start == fhirtypes.Instant.validate("2013-12-25T09:15:00Z")
assert inst.status == "free"
assert inst.text.status == "generated"
def test_slot_1(base_settings):
filename = base_settings["unittest_data_dir"] / "slot-example.json"
inst = slot.Slot.parse_file(
filename, content_type="application/json", encoding="utf-8"
)
assert "Slot" == inst.resource_type
impl_slot_1(inst)
data = inst.dict()
assert "Slot" == data["resourceType"]
inst2 = slot.Slot(**data)
impl_slot_1(inst2)
def impl_slot_2(inst):
assert inst.comment == "Dr Careful is out of the office"
assert inst.end == fhirtypes.Instant.validate("2013-12-25T10:00:00Z")
assert inst.id == "2"
assert inst.meta.tag[0].code == "HTEST"
assert inst.meta.tag[0].display == "test health data"
assert (
inst.meta.tag[0].system == "http://terminology.hl7.org/CodeSystem/v3-ActReason"
)
assert inst.schedule.reference == "Schedule/example"
assert inst.serviceCategory[0].coding[0].code == "17"
assert inst.serviceCategory[0].coding[0].display == "General Practice"
assert inst.start == fhirtypes.Instant.validate("2013-12-25T09:45:00Z")
assert inst.status == "busy-tentative"
assert inst.text.status == "generated"
def test_slot_2(base_settings):
filename = base_settings["unittest_data_dir"] / "slot-example-tentative.json"
inst = slot.Slot.parse_file(
filename, content_type="application/json", encoding="utf-8"
)
assert "Slot" == inst.resource_type
impl_slot_2(inst)
data = inst.dict()
assert "Slot" == data["resourceType"]
inst2 = slot.Slot(**data)
impl_slot_2(inst2)
def impl_slot_3(inst):
assert inst.comment == "Dr Careful is out of the office"
assert inst.end == fhirtypes.Instant.validate("2013-12-25T09:45:00Z")
assert inst.id == "3"
assert inst.meta.tag[0].code == "HTEST"
assert inst.meta.tag[0].display == "test health data"
assert (
inst.meta.tag[0].system == "http://terminology.hl7.org/CodeSystem/v3-ActReason"
)
assert inst.schedule.reference == "Schedule/example"
assert inst.serviceCategory[0].coding[0].code == "17"
assert inst.serviceCategory[0].coding[0].display == "General Practice"
assert inst.start == fhirtypes.Instant.validate("2013-12-25T09:30:00Z")
assert inst.status == "busy-unavailable"
assert inst.text.status == "generated"
def test_slot_3(base_settings):
filename = base_settings["unittest_data_dir"] / "slot-example-unavailable.json"
inst = slot.Slot.parse_file(
filename, content_type="application/json", encoding="utf-8"
)
assert "Slot" == inst.resource_type
impl_slot_3(inst)
data = inst.dict()
assert "Slot" == data["resourceType"]
inst2 = slot.Slot(**data)
impl_slot_3(inst2)
def impl_slot_4(inst):
assert inst.comment == (
"Assessments should be performed before requesting "
"appointments in this slot."
)
assert inst.end == fhirtypes.Instant.validate("2013-12-25T09:15:00Z")
assert inst.id == "1"
assert inst.identifier[0].system == "http://example.org/identifiers/slots"
assert inst.identifier[0].value == "123132"
assert inst.meta.tag[0].code == "HTEST"
assert inst.meta.tag[0].display == "test health data"
assert (
inst.meta.tag[0].system == "http://terminology.hl7.org/CodeSystem/v3-ActReason"
)
assert inst.overbooked is True
assert inst.schedule.reference == "Schedule/example"
assert inst.serviceCategory[0].coding[0].code == "17"
assert inst.serviceCategory[0].coding[0].display == "General Practice"
assert inst.start == fhirtypes.Instant.validate("2013-12-25T09:00:00Z")
assert inst.status == "busy"
assert inst.text.status == "generated"
def test_slot_4(base_settings):
filename = base_settings["unittest_data_dir"] / "slot-example-busy.json"
inst = slot.Slot.parse_file(
filename, content_type="application/json", encoding="utf-8"
)
assert "Slot" == inst.resource_type
impl_slot_4(inst)
data = inst.dict()
assert "Slot" == data["resourceType"]
inst2 = slot.Slot(**data)
impl_slot_4(inst2)
| true
| true
|
f71892bb0c58c579cec15c6e116e3bf81ee58e49
| 1,959
|
py
|
Python
|
celestial/client/rootfs/__init__.py
|
ams-tech/celestial
|
0c4c264563fe79d6838a1c40a1d114c1d6fcf23f
|
[
"MIT"
] | null | null | null |
celestial/client/rootfs/__init__.py
|
ams-tech/celestial
|
0c4c264563fe79d6838a1c40a1d114c1d6fcf23f
|
[
"MIT"
] | null | null | null |
celestial/client/rootfs/__init__.py
|
ams-tech/celestial
|
0c4c264563fe79d6838a1c40a1d114c1d6fcf23f
|
[
"MIT"
] | null | null | null |
import os
import shlex
import subprocess

from celestial.strings import Filesystems
from celestial.client.system import cmdline
def get_fs_types(path):
    """
    Fetch a list of possible filesystem types

    :param path: path of the device node / image file to probe with blkid
    :return: a list of strings with the possible filesystem type, else None
    """
    if not os.path.exists(path):
        return None
    # Quote the path before interpolating it into the bash command line, so a
    # path containing spaces or shell metacharacters cannot break the command
    # or inject arbitrary shell code.
    quoted_path = shlex.quote(path)
    output = subprocess.check_output(
        ['''(eval $(blkid {} | awk ' {{ print $3 }} '); echo $TYPE)'''.format(quoted_path)],
        shell=True,
        executable='/bin/bash').decode().rstrip()
    if output == "":
        retval = []
    elif output == Filesystems.EXT2:
        # ext3 filesystems misidentify as ext2. Consider both as possible outputs
        retval = [Filesystems.EXT2, Filesystems.EXT3]
    else:
        retval = [output]
    return retval
def install(rootfs_file, device_node, block_size_kb=10, expected_fs=Filesystems.NONE):
    """
    Install rootfs_file into device_node

    :param rootfs_file: path of the filesystem image to copy
    :param device_node: target device node for dd's output
    :param block_size_kb: dd block size, in kilobytes
    :param expected_fs: filesystem type the image must have; None skips the check
    :return: the CompletedProcess returned by subprocess.run for dd
    :raises ValueError: if the image's detected filesystem types do not
        include expected_fs
    """
    if expected_fs is not None:
        fs_types = get_fs_types(rootfs_file)
        # get_fs_types() returns None for a missing file; treat that as a
        # mismatch instead of letting `in None` raise TypeError.
        if fs_types is None or expected_fs not in fs_types:
            # The previous message interpolated the file name where its type
            # belonged; report the detected types instead.
            raise ValueError("rootfs_file is type {}, expected {}".format(fs_types, expected_fs))
    result = subprocess.run([
        'dd',
        'if={}'.format(rootfs_file),
        'of={}'.format(device_node),
        'bs={}K'.format(block_size_kb)
    ])
    return result
def get_boot_device(cmdline_file="/proc/cmdline"):
    """Return the kernel's "root" parameter read from *cmdline_file*.

    :param cmdline_file: location of the command-line file we booted with
    :return: the value of the "root" parameter
    """
    root_device = cmdline.get_parameter("root", cmdline_file)
    return root_device
def set_boot_device(boot_device, cmdline_file="/boot/cmdline"):
    """Point the "root" parameter of *cmdline_file* at *boot_device*.

    :param boot_device: device to record as the boot root
    :param cmdline_file: location of the boot partition's command-line file
    :return: None
    """
    cmdline.set_parameter("root", boot_device, cmdline_file)
| 30.609375
| 100
| 0.654926
|
import os
import subprocess
from celestial.strings import Filesystems
from celestial.client.system import cmdline
def get_fs_types(path):
if not os.path.exists(path):
return None
output = subprocess.check_output(
['''(eval $(blkid {} | awk ' {{ print $3 }} '); echo $TYPE)'''.format(path)],
shell=True,
executable='/bin/bash').decode().rstrip()
if output == "":
retval = []
elif output == Filesystems.EXT2:
retval = [Filesystems.EXT2, Filesystems.EXT3]
else:
retval = [output]
return retval
def install(rootfs_file, device_node, block_size_kb=10, expected_fs=Filesystems.NONE):
if expected_fs is not None:
fs_types = get_fs_types(rootfs_file)
if expected_fs not in fs_types:
raise ValueError("rootfs_file is type {}, expected {}".format(rootfs_file, expected_fs))
result = subprocess.run([
'dd',
'if={}'.format(rootfs_file),
'of={}'.format(device_node),
'bs={}K'.format(block_size_kb)
])
return result
def get_boot_device(cmdline_file="/proc/cmdline"):
return cmdline.get_parameter("root", cmdline_file)
def set_boot_device(boot_device, cmdline_file="/boot/cmdline"):
cmdline.set_parameter("root", boot_device, cmdline_file)
| true
| true
|
f71892da316822a589e48a3fac5a0c42deab2e4e
| 22
|
py
|
Python
|
raster/tester.py
|
xiaoyingpu/gis
|
44c2ef2e604f6547e5bd29aa991e5930342adaba
|
[
"MIT"
] | 1
|
2019-08-20T13:29:42.000Z
|
2019-08-20T13:29:42.000Z
|
raster/tester.py
|
xiaoyingpu/gis
|
44c2ef2e604f6547e5bd29aa991e5930342adaba
|
[
"MIT"
] | null | null | null |
raster/tester.py
|
xiaoyingpu/gis
|
44c2ef2e604f6547e5bd29aa991e5930342adaba
|
[
"MIT"
] | null | null | null |
def test():
    # NOTE(review): `ds` is not defined or imported anywhere in this module;
    # calling test() will raise NameError unless `ds` is injected into the
    # module globals elsewhere — confirm the intended data source.
    print(ds)
| 11
| 11
| 0.636364
|
def test():
print(ds)
| true
| true
|
f718935aaa88e73d7b6202df20fac852419becfe
| 759
|
py
|
Python
|
scripts/prepare_syllable_counts.py
|
voberoi/pysyllables
|
e1950ac306975f5d197bca7ad5ed9b0c680b0fb2
|
[
"MIT"
] | null | null | null |
scripts/prepare_syllable_counts.py
|
voberoi/pysyllables
|
e1950ac306975f5d197bca7ad5ed9b0c680b0fb2
|
[
"MIT"
] | 3
|
2020-03-24T17:17:49.000Z
|
2021-02-02T22:15:36.000Z
|
scripts/prepare_syllable_counts.py
|
voberoi/pysyllables
|
e1950ac306975f5d197bca7ad5ed9b0c680b0fb2
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
import sys
import codecs
def main():
    """Print "<word> <syllable-count>" lines for a CMU-dict style file.

    The input path is taken from sys.argv[1]. A syllable is counted for
    every phoneme carrying a trailing stress digit (e.g. AH0, OW1).
    Comment lines starting with ";;;" and blank lines are skipped; output
    is lower-cased and sorted by word.
    """
    filepath = sys.argv[1]
    # Use a context manager so the file handle is closed deterministically;
    # the previous open(...).read() leaked it.
    with codecs.open(filepath, encoding="iso-8859-1") as handle:
        lines = handle.read().split("\n")

    syllable_counts = {}
    for line in lines:
        if line.startswith(";;;") or len(line) == 0 or line.isspace():
            continue
        word, phonemes = line.split(maxsplit=1)
        # A trailing digit marks a stressed/unstressed vowel phoneme,
        # i.e. one syllable.
        syllable_counts[word.lower()] = sum(
            1 for phoneme in phonemes.split() if phoneme[-1].isdigit()
        )

    for word in sorted(syllable_counts):
        print(word + " " + str(syllable_counts[word]))
if __name__ == "__main__":
    # Script entry point: expects the pronunciation dictionary path as argv[1].
    main()
| 26.172414
| 75
| 0.59025
|
import sys
import codecs
def main():
syllable_counts = {}
filepath = sys.argv[1]
lines = codecs.open(filepath, encoding="iso-8859-1").read().split("\n")
for line in lines:
if line.startswith(";;;") or len(line) == 0 or line.isspace():
continue
word, phonemes = line.split(maxsplit=1)
word = word.lower()
syllable_count = 0
for phoneme in phonemes.split():
if phoneme[-1].isdigit():
syllable_count += 1
syllable_counts[word] = syllable_count
for word in sorted(syllable_counts.keys()):
syllable_count = syllable_counts[word]
print(word + " " + str(syllable_count))
if __name__ == "__main__":
main()
| true
| true
|
f7189442bf63f4b7c1154480aea820c9a1a2688c
| 13,745
|
py
|
Python
|
startrek/script_mixins.py
|
drinkfalconpunch/star-trek
|
33c8155f94f11832d925a733b1f3ed6eecdcb31e
|
[
"MIT"
] | 1
|
2019-08-21T18:56:42.000Z
|
2019-08-21T18:56:42.000Z
|
startrek/script_mixins.py
|
drinkfalconpunch/star-trek
|
33c8155f94f11832d925a733b1f3ed6eecdcb31e
|
[
"MIT"
] | 2
|
2021-03-31T19:22:55.000Z
|
2021-06-02T00:17:04.000Z
|
startrek/script_mixins.py
|
drinkfalconpunch/star-trek
|
33c8155f94f11832d925a733b1f3ed6eecdcb31e
|
[
"MIT"
] | null | null | null |
import itertools
import re
from abc import ABCMeta, abstractmethod
from collections import deque
from pathlib import Path
from typing import List
from startrek.exceptions import ScriptException
from startrek.utils import pairwise
# Placeholder name for omitted script sections.
# NOTE(review): the parsers below use the string literal 'OMITTED' directly
# rather than this constant — confirm whether it is meant to be referenced.
OMITTED = 'OMITTED'
class ScriptBase(metaclass=ABCMeta):
    """Common plumbing for the per-series script parsers.

    Stores the raw script text (given directly or loaded from a path) plus
    series/season/episode bookkeeping, and declares the parsing interface
    subclasses must implement.
    """

    def __init__(self, script_text=None, script_path=None, series_name=None,
                 season_number=0, episode_number=0):
        if script_text:
            self.script = script_text
            self.script_path = None
        elif script_path:
            self.script = self._get_script_path_contents(script_path)
            self.script_path = script_path
        else:
            raise ScriptException('No valid script.')
        self.series_name = series_name
        self.season_number = season_number
        self.episode_number = episode_number
        # Filled in lazily by the parsing methods of subclasses.
        self.dialogue = None
        self.characters = None

    @abstractmethod
    def extract_dialogue_from_script(self, remove_blank_lines=False):
        pass

    @abstractmethod
    def section_headers(self):
        pass

    @abstractmethod
    def sectioned_script(self):
        pass

    @staticmethod
    def _get_script_path_contents(script_path):
        """Return the text content of the script at *script_path*.

        :raises ScriptException: if the path does not exist.
        """
        if isinstance(script_path, str):
            script_path = Path(script_path)
        if not script_path.exists():
            raise ScriptException(f'Invalid script path: {script_path}')
        # read_text() closes the file deterministically; the previous
        # open(script_path, 'r').read() leaked the handle.
        return script_path.read_text()

    @staticmethod
    def separate_dialogue(block):
        pass

    def _script_to_lines(self):
        # NOTE(review): if self.script is a plain str, iterating it yields
        # single characters, not lines — confirm the intended input type.
        return [line for line in self.script]
class ScriptBlocks(ScriptBase):
    """Parser for scripts laid out as numbered scene blocks (uppercase
    character names on their own line, dialogue beneath them)."""
    # TODO: Rename functions/attributes.
    SECTION_HEADER = ''
    ACT = ['ACT']
    END = ['END OF']
    NUMBERS = ['ONE', 'TWO', 'THREE', 'FOUR', 'FIVE', 'SIX', 'SEVEN', 'EIGHT', 'NINE', 'TEN']
    SKIPS = ['THE END', 'END OF TEASER', 'FADE OUT', 'FADE OUT.']
    # Pre-build the stage-direction phrases ("END OF ACT ONE", "ACT TWO", ...)
    # that must never be mistaken for character names.
    for combo in itertools.product(END, ACT, NUMBERS):
        SKIPS.append(' '.join(combo))
    for combo in itertools.product(ACT, NUMBERS):
        SKIPS.append(' '.join(combo))
    _regex_section_number = r'^\d+[a-zA-Z]?'
    regex_section_number = re.compile(_regex_section_number)
    # regex to get everything between two brackets if it is the only thing in the line.
    _regex_header = r'^\d+[a-zA-Z]?\s*(.+)$'
    regex_header = re.compile(_regex_header)
    # regex to match line starting with capitalized words with a colon, signifying character dialogue.
    _regex_character = r"^\s*([A-Z-.'\"() ]+)\s*$"
    regex_character = re.compile(_regex_character)
    # # regex to match everything after the character name, colon, and space
    # _regex_dialogue_line = r'^[A-Z]{1,}.+:\s*(.+)'
    # regex_dialogue_line = re.compile(_regex_dialogue_line)

    def get_characters(self):
        """Collect the set of character names found in the dialogue,
        stripping parentheticals, quotes and trailing dots; caches the
        result on self.characters."""
        if not self.dialogue:
            self.extract_dialogue_from_script()
        characters = set()
        for line in self.dialogue:
            matches = re.findall(self.regex_character, line.strip())
            if matches:
                for match in matches:
                    # Yay random corner cases!
                    if match in self.SKIPS:
                        continue
                    parens = match.find('(')
                    quote = match.find('\'')
                    if parens != -1:
                        match = match[:parens - 1]
                    if quote != -1:
                        match = match[:quote]
                    if match.endswith('.'):
                        while match.endswith('.'):
                            match = match[:-1]
                    if match.startswith('('):
                        continue
                    if match.endswith(')'):
                        continue
                    characters.add(match.replace('"', ''))
        self.characters = characters
        return characters

    def _iterate_lines_words(self, string, remove_blank_lines=True):
        """Yield (stripped line, word list) pairs from a string or a list
        of lines, optionally skipping blank lines."""
        if isinstance(string, list):
            string_list = string
        else:
            string_list = string.splitlines()
        for line in string_list:
            line = line.strip()
            words = line.split()
            if remove_blank_lines:
                if line:
                    yield line, words
            else:
                yield line, words

    @staticmethod
    def _separate_dialogue_block(block):
        """Split a block of lines into {index: {'name', 'text'}} entries,
        attributing text to the preceding all-uppercase speaker line.
        Leading non-uppercase lines are stored under the name 'None'."""
        if not block:
            return  # dict(name='None', text='')
        block = deque(block)
        temp = ''
        # Check if any initial lines are text and save them.
        while True:
            if not block:
                break
            line = block.popleft()
            if not line.isupper():
                # First line is dialogue/text
                temp = f"{temp} {line}"
            else:
                block.appendleft(line)
                break
        dialogue = {}
        dialogue[0] = dict(name='None', text=temp.strip())
        name = ''
        text = ''
        index = 1
        for line in block:
            if line.isupper():
                if name == line:
                    continue
                else:
                    if name:
                        dialogue[index] = dict(name=name, text=text.strip())
                        name = line
                        text = ''
                        index += 1
                    else:
                        name = line
                        continue
            else:
                text = f"{text} {line}"
        return dialogue

    def combine_lines_by_character(self):
        """Return the sectioned script with each section's 'part' replaced
        by the per-character dialogue dict from _separate_dialogue_block."""
        sectioned = self.sectioned_script()
        for number, section in sectioned.items():
            section['part'] = self._separate_dialogue_block(section['part'])
        return sectioned

    def _iterate_dialogue_dict(self):
        pass

    @staticmethod
    def replace_character_names(dialogue, characters):
        """Normalise speaker names in a sectioned-dialogue dict to their
        canonical form: the first entry of *characters* contained in the
        raw name wins."""
        # Dict[str, Dict[str, Dict[int, Dict[str, str]]]].
        characters.add('None')
        for section, content in dialogue.items():
            parts = content['part']
            for section, stuff in parts.items():
                name = stuff['name']
                for check in characters:
                    if check in name:
                        stuff['name'] = check
                        break
        return dialogue

    def extract_dialogue_from_script(self, remove_blank_lines=True):
        """Trim the front matter up to the first numbered line, strip page
        headers, and cache the remaining lines on self.dialogue."""
        script = deque(self.script.splitlines())
        # Iterate through the lines until a number is found as the first character.
        while True:
            line = script.popleft()
            words = line.split()
            # Skip blank lines or a corner case where 2ND REV. FINAL DRAFT is in the script.
            if not words or any(x in words for x in ('REV', 'REV.', 'FINAL', 'OMITTED')):
                continue
            if words[0][0].isdigit():
                # Put it back and break. Runs in O(1) time.
                script.appendleft(line)
                break
        if remove_blank_lines:
            script = list(filter(None, script))
        # Strip out the white space in lines with text
        script = [s.lstrip() for s in script]
        # Remove page header lines and lines with OMITTED in between section numbers
        dialogue = list(filter(lambda line: line[:len(self.SECTION_HEADER)] != self.SECTION_HEADER, script))
        # dialogue = list(filter(lambda line: 'OMITTED' not in line or line[0][0].isdigit(), dialogue))
        self.dialogue = dialogue

    def _number_header_from_line(self, line):
        """Split a header line into (section number, remaining header text)."""
        line = line.split()
        return line[0], ' '.join(line[1:])

    def get_between_indices(self, s, begin, end):
        """Return the slice of *s* from begin (inclusive) to end (exclusive)."""
        return s[begin:end]

    def section_headers(self):
        '''Returns the section headers from a block of dialogue and their
        respective line numbers in said block.'''
        if not self.dialogue:
            self.extract_dialogue_from_script()
        sections = {}
        indices = []
        _regex_number = r'^\d{1,3}?[a-zA-Z]{0,1}'
        regex_number = re.compile(_regex_number)
        for index, line in enumerate(self.dialogue):
            words = line.split()
            if not words:
                continue
            try:
                int(words[0][0])
                number = words[0]
                name = " ".join(words[1:]).replace(':', '')
                if not name:
                    name = 'OMITTED'
                # Corner case check if year is in section number
                # if not re.findall(regex_number, number):
                # print(number, re.findall(regex_number, number))
                if len(number) > 3 and number[3].isdigit():
                    continue
                # Check for same section number
                if number in sections.keys():
                    sections[number].append(name)
                else:
                    sections[number] = [name]
                indices.append(index)
            # NOTE(review): bare except silently skips any line whose first
            # token is not numeric — consider narrowing to (ValueError, IndexError).
            except:
                continue
        setattr(self, 'section_names', sections)
        setattr(self, 'header_indices', indices)

    def sectioned_script(self):
        """Return {section number: {'header', 'part'}} built from the
        dialogue lines between consecutive header indices."""
        if not self.dialogue:
            self.extract_dialogue_from_script()
        if not hasattr(self, 'header_indices'):
            self.section_headers()
        sections = {}
        index_pairs = pairwise(self.header_indices)
        for pair in index_pairs:
            part = self.get_between_indices(self.dialogue, *pair)
            head = part.pop(0)
            number, header = self._number_header_from_line(head)
            sections[number] = dict(header=header, part=part)
        return sections
class ScriptLines(ScriptBase):
    """Parser for scripts formatted as "CHARACTER: dialogue" lines with
    bracketed scene headers."""
    # regex to get everything between two brackets if it is the only thing in the line.
    _regex_header = r'^\[([^\]]+?)\]$'
    regex_header = re.compile(_regex_header)
    # regex to match line starting with capitalized words with a colon, signifying character dialogue.
    _regex_character = r'^([A-Z]{1,}.+):'
    regex_character = re.compile(_regex_character)
    # regex to match everything after the character name, colon, and space
    _regex_dialogue_line = r'^[A-Z]{1,}.+:\s*(.+)'
    regex_dialogue_line = re.compile(_regex_dialogue_line)

    def extract_dialogue_from_script(self, remove_blank_lines=False):
        """Trim the front matter up to the first numbered line, strip page
        headers and OMITTED lines, cache and return the dialogue lines.

        NOTE(review): this reads self.SECTION_HEADER, which neither this
        class nor ScriptBase defines — confirm subclasses provide it."""
        script = deque(self.script.split('\n'))
        # Iterate through the lines until a number is found as the first character.
        while True:
            line = script.popleft()
            words = line.split()
            # Skip blank lines
            if not words:
                continue
            if words[0][0].isdigit():
                # Put it back and break. Runs in O(1) time.
                script.appendleft(line)
                break
        if remove_blank_lines:
            script = list(filter(None, script))
        # Strip out the white space in lines with text
        script = [s.lstrip() for s in script]
        # Remove page header lines and lines with OMITTED in between section numbers
        dialogue = list(filter(lambda line: line[:len(self.SECTION_HEADER)] != self.SECTION_HEADER, script))
        dialogue = list(filter(lambda line: 'OMITTED' not in line or line[0][0].isdigit(), dialogue))
        self.dialogue = dialogue
        return dialogue

    def section_headers(self):
        '''Returns the section headers from a block of dialogue and their
        respective line numbers in said block.'''
        if not self.script:
            raise AttributeError('')  # 'Dialogue not found. Script.extract_entire_dialogue()')
        sections = {}
        indices = []
        for index, line in enumerate(self.script):
            words = line.split()
            if not words:
                continue
            try:
                int(words[0][0])
                number = words[0]
                name = " ".join(words[1:])
                if not name:
                    name = 'OMITTED'
                # Check for same section number
                if number in sections.keys():
                    sections[number].append(name)
                else:
                    sections[number] = [name]
                indices.append(index)
            # NOTE(review): bare except silently skips any line whose first
            # token is not numeric — consider narrowing to (ValueError, IndexError).
            except:
                continue
        return sections, indices

    def sectioned_script(self):
        """Group lines under their numbered section headers.

        NOTE(review): deque(self.script) iterates self.script directly; if
        self.script is a plain str this yields single characters — confirm
        the expected input type here."""
        script = deque(self.script)
        sections = {}
        section_number = 0
        while len(script) > 0:
            line = script.popleft().split()
            if line[0][0].isdigit():
                # Check for duplicate section number
                if line[0] == section_number:
                    continue
                else:
                    section_number = line[0]
                    sections[section_number] = {}
                    sections[section_number]['section_header'] = ' '.join(line[1:])
                    sections[section_number]['text'] = []
            else:
                sections[section_number]['text'].append(' '.join(line))
        return sections

    def _check_header(self, line: str, starts_with='(', ends_with=')'):
        """Return True when the stripped line is wrapped by the given
        start/end markers (a parenthesised stage direction by default)."""
        if line.strip().startswith(starts_with) and line.strip().endswith(ends_with):
            return True
        return False
class ScriptTNG(ScriptBlocks):
    """Script class for The Next Generation."""
    # Page-header lines starting with this prefix are filtered out by
    # ScriptBlocks.extract_dialogue_from_script().
    SECTION_HEADER = 'STAR TREK'
    pass
class ScriptDeepSpaceNine(ScriptBlocks):
    """Script class for Deep Space Nine."""
    # Page-header lines starting with this prefix are filtered out by
    # ScriptBlocks.extract_dialogue_from_script().
    SECTION_HEADER = 'DEEP SPACE'
    pass
class ScriptEnterprise(ScriptLines):
    """Script class for Enterprise."""
    # NOTE(review): ScriptLines.extract_dialogue_from_script() reads
    # self.SECTION_HEADER, which neither this class nor ScriptLines defines —
    # confirm whether a header prefix is missing here.
    pass
class ScriptTOS(ScriptLines):
    """Script class for The Original Series."""
    # NOTE(review): ScriptLines.extract_dialogue_from_script() reads
    # self.SECTION_HEADER, which neither this class nor ScriptLines defines —
    # confirm whether a header prefix is missing here.
    pass
class ScriptVoyager(ScriptLines):
"""Script class for Voyager."""
pass
| 34.276808
| 108
| 0.555838
|
import itertools
import re
from abc import ABCMeta, abstractmethod
from collections import deque
from pathlib import Path
from typing import List
from startrek.exceptions import ScriptException
from startrek.utils import pairwise
OMITTED = 'OMITTED'
class ScriptBase(metaclass=ABCMeta):
    """Abstract base for episode-script parsers.

    Holds the raw script text plus episode metadata and declares the
    parsing interface implemented by the layout-specific subclasses.
    """

    def __init__(self, script_text=None, script_path=None, series_name=None,
                 season_number=0, episode_number=0):
        # Exactly one of script_text / script_path must supply the script.
        if script_text:
            self.script = script_text
            self.script_path = None
        elif script_path:
            self.script = self._get_script_path_contents(script_path)
            self.script_path = script_path
        else:
            raise ScriptException('No valid script.')
        self.series_name = series_name
        self.season_number = season_number
        self.episode_number = episode_number
        # Populated lazily by the subclass parsing methods.
        self.dialogue = None
        self.characters = None

    @abstractmethod
    def extract_dialogue_from_script(self, remove_blank_lines=False):
        """Strip headers/boilerplate and store the dialogue lines."""

    @abstractmethod
    def section_headers(self):
        """Locate the numbered section headers in the script."""

    @abstractmethod
    def sectioned_script(self):
        """Split the script into its numbered sections."""

    @staticmethod
    def _get_script_path_contents(script_path):
        """Read and return the text of the script file at *script_path*.

        Raises ScriptException when the path does not exist.
        """
        if isinstance(script_path, str):
            script_path = Path(script_path)
        if not script_path.exists():
            raise ScriptException(f'Invalid script path: {script_path}')
        # Path.read_text() closes the file; the previous
        # open(script_path, 'r').read() leaked the file handle.
        return script_path.read_text()

    @staticmethod
    def separate_dialogue(block):
        """Subclass hook; the base implementation does nothing."""

    def _script_to_lines(self):
        # NOTE(review): when self.script is a plain string this yields
        # single characters, not lines -- confirm callers pass a list.
        return [line for line in self.script]
class ScriptBlocks(ScriptBase):
SECTION_HEADER = ''
ACT = ['ACT']
END = ['END OF']
NUMBERS = ['ONE', 'TWO', 'THREE', 'FOUR', 'FIVE', 'SIX', 'SEVEN', 'EIGHT', 'NINE', 'TEN']
SKIPS = ['THE END', 'END OF TEASER', 'FADE OUT', 'FADE OUT.']
for combo in itertools.product(END, ACT, NUMBERS):
SKIPS.append(' '.join(combo))
for combo in itertools.product(ACT, NUMBERS):
SKIPS.append(' '.join(combo))
_regex_section_number = r'^\d+[a-zA-Z]?'
regex_section_number = re.compile(_regex_section_number)
_regex_header = r'^\d+[a-zA-Z]?\s*(.+)$'
regex_header = re.compile(_regex_header)
_regex_character = r"^\s*([A-Z-.'\"() ]+)\s*$"
regex_character = re.compile(_regex_character)
# # regex to match everything after the character name, colon, and space
# _regex_dialogue_line = r'^[A-Z]{1,}.+:\s*(.+)'
# regex_dialogue_line = re.compile(_regex_dialogue_line)
def get_characters(self):
if not self.dialogue:
self.extract_dialogue_from_script()
characters = set()
for line in self.dialogue:
matches = re.findall(self.regex_character, line.strip())
if matches:
for match in matches:
# Yay random corner cases!
if match in self.SKIPS:
continue
parens = match.find('(')
quote = match.find('\'')
if parens != -1:
match = match[:parens - 1]
if quote != -1:
match = match[:quote]
if match.endswith('.'):
while match.endswith('.'):
match = match[:-1]
if match.startswith('('):
continue
if match.endswith(')'):
continue
characters.add(match.replace('"', ''))
self.characters = characters
return characters
def _iterate_lines_words(self, string, remove_blank_lines=True):
if isinstance(string, list):
string_list = string
else:
string_list = string.splitlines()
for line in string_list:
line = line.strip()
words = line.split()
if remove_blank_lines:
if line:
yield line, words
else:
yield line, words
@staticmethod
def _separate_dialogue_block(block):
if not block:
return
block = deque(block)
temp = ''
while True:
if not block:
break
line = block.popleft()
if not line.isupper():
temp = f"{temp} {line}"
else:
block.appendleft(line)
break
dialogue = {}
dialogue[0] = dict(name='None', text=temp.strip())
name = ''
text = ''
index = 1
for line in block:
if line.isupper():
if name == line:
continue
else:
if name:
dialogue[index] = dict(name=name, text=text.strip())
name = line
text = ''
index += 1
else:
name = line
continue
else:
text = f"{text} {line}"
return dialogue
def combine_lines_by_character(self):
sectioned = self.sectioned_script()
for number, section in sectioned.items():
section['part'] = self._separate_dialogue_block(section['part'])
return sectioned
def _iterate_dialogue_dict(self):
pass
@staticmethod
def replace_character_names(dialogue, characters):
characters.add('None')
for section, content in dialogue.items():
parts = content['part']
for section, stuff in parts.items():
name = stuff['name']
for check in characters:
if check in name:
stuff['name'] = check
break
return dialogue
def extract_dialogue_from_script(self, remove_blank_lines=True):
script = deque(self.script.splitlines())
while True:
line = script.popleft()
words = line.split()
if not words or any(x in words for x in ('REV', 'REV.', 'FINAL', 'OMITTED')):
continue
if words[0][0].isdigit():
script.appendleft(line)
break
if remove_blank_lines:
script = list(filter(None, script))
script = [s.lstrip() for s in script]
dialogue = list(filter(lambda line: line[:len(self.SECTION_HEADER)] != self.SECTION_HEADER, script))
self.dialogue = dialogue
def _number_header_from_line(self, line):
line = line.split()
return line[0], ' '.join(line[1:])
def get_between_indices(self, s, begin, end):
    """Return the slice of *s* from *begin* (inclusive) to *end* (exclusive)."""
    window = slice(begin, end)
    return s[window]
def section_headers(self):
    """Scan self.dialogue for numbered section-header lines.

    Stores the results on the instance: self.section_names maps section
    number -> list of header names (':' removed), and
    self.header_indices lists the positions of those lines.
    """
    if not self.dialogue:
        self.extract_dialogue_from_script()
    sections = {}
    indices = []
    # (Removed a locally compiled regex that was never used.)
    for index, line in enumerate(self.dialogue):
        words = line.split()
        if not words:
            continue
        try:
            # Header lines start with a digit; int() raises ValueError
            # for every other line, which we skip.
            int(words[0][0])
            number = words[0]
            name = " ".join(words[1:]).replace(':', '')
            if not name:
                name = 'OMITTED'
            # Tokens with four or more digits (e.g. years) are not
            # section numbers.
            if len(number) > 3 and number[3].isdigit():
                continue
            if number in sections:
                sections[number].append(name)
            else:
                sections[number] = [name]
            indices.append(index)
        except ValueError:
            # Narrowed from a bare 'except:' that hid real errors.
            continue
    self.section_names = sections
    self.header_indices = indices
def sectioned_script(self):
if not self.dialogue:
self.extract_dialogue_from_script()
if not hasattr(self, 'header_indices'):
self.section_headers()
sections = {}
index_pairs = pairwise(self.header_indices)
for pair in index_pairs:
part = self.get_between_indices(self.dialogue, *pair)
head = part.pop(0)
number, header = self._number_header_from_line(head)
sections[number] = dict(header=header, part=part)
return sections
class ScriptLines(ScriptBase):
_regex_header = r'^\[([^\]]+?)\]$'
regex_header = re.compile(_regex_header)
_regex_character = r'^([A-Z]{1,}.+):'
regex_character = re.compile(_regex_character)
_regex_dialogue_line = r'^[A-Z]{1,}.+:\s*(.+)'
regex_dialogue_line = re.compile(_regex_dialogue_line)
def extract_dialogue_from_script(self, remove_blank_lines=False):
script = deque(self.script.split('\n'))
while True:
line = script.popleft()
words = line.split()
if not words:
continue
if words[0][0].isdigit():
script.appendleft(line)
break
if remove_blank_lines:
script = list(filter(None, script))
script = [s.lstrip() for s in script]
dialogue = list(filter(lambda line: line[:len(self.SECTION_HEADER)] != self.SECTION_HEADER, script))
dialogue = list(filter(lambda line: 'OMITTED' not in line or line[0][0].isdigit(), dialogue))
self.dialogue = dialogue
return dialogue
def section_headers(self):
    """Return (sections, indices) for the numbered headers in self.script.

    sections maps section number -> list of header names; indices lists
    the positions of those header lines.  Raises AttributeError when
    self.script is empty/unset.
    """
    if not self.script:
        # Previously raised AttributeError('') with no message.
        raise AttributeError('Script not loaded; cannot extract section headers.')
    sections = {}
    indices = []
    for index, line in enumerate(self.script):
        words = line.split()
        if not words:
            continue
        try:
            # Header lines begin with a digit; ValueError skips the rest.
            int(words[0][0])
            number = words[0]
            name = " ".join(words[1:])
            if not name:
                name = 'OMITTED'
            if number in sections:
                sections[number].append(name)
            else:
                sections[number] = [name]
            indices.append(index)
        except (ValueError, IndexError):
            # Narrowed from a bare 'except:' that swallowed all errors.
            continue
    return sections, indices
def sectioned_script(self):
script = deque(self.script)
sections = {}
section_number = 0
while len(script) > 0:
line = script.popleft().split()
if line[0][0].isdigit():
if line[0] == section_number:
continue
else:
section_number = line[0]
sections[section_number] = {}
sections[section_number]['section_header'] = ' '.join(line[1:])
sections[section_number]['text'] = []
else:
sections[section_number]['text'].append(' '.join(line))
return sections
def _check_header(self, line: str, starts_with='(', ends_with=')'):
if line.strip().startswith(starts_with) and line.strip().endswith(ends_with):
return True
return False
class ScriptTNG(ScriptBlocks):
SECTION_HEADER = 'STAR TREK'
pass
class ScriptDeepSpaceNine(ScriptBlocks):
SECTION_HEADER = 'DEEP SPACE'
pass
class ScriptEnterprise(ScriptLines):
pass
class ScriptTOS(ScriptLines):
pass
class ScriptVoyager(ScriptLines):
pass
| true
| true
|
f7189717d2883a50d191f134c319e5e2e641ca0c
| 71
|
py
|
Python
|
ddtools/_version.py
|
Jyyin333/DMTools
|
42fed226ffc6291bc8c8438eea49b8488fb692d6
|
[
"MIT"
] | null | null | null |
ddtools/_version.py
|
Jyyin333/DMTools
|
42fed226ffc6291bc8c8438eea49b8488fb692d6
|
[
"MIT"
] | null | null | null |
ddtools/_version.py
|
Jyyin333/DMTools
|
42fed226ffc6291bc8c8438eea49b8488fb692d6
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__version__ = '1.0.0'
| 17.75
| 24
| 0.549296
|
__version__ = '1.0.0'
| true
| true
|
f71897df774811c2c7e969c6d67c5191a419a861
| 2,722
|
py
|
Python
|
experimentmanager/utils.py
|
sciexpem/sciexpem
|
6de9a8039356588a5e817f0fa6bafd948220fc8f
|
[
"MIT"
] | null | null | null |
experimentmanager/utils.py
|
sciexpem/sciexpem
|
6de9a8039356588a5e817f0fa6bafd948220fc8f
|
[
"MIT"
] | 3
|
2019-05-10T14:57:30.000Z
|
2021-06-10T21:14:21.000Z
|
experimentmanager/utils.py
|
sciexpem/sciexpem
|
6de9a8039356588a5e817f0fa6bafd948220fc8f
|
[
"MIT"
] | 1
|
2020-09-11T09:16:15.000Z
|
2020-09-11T09:16:15.000Z
|
from . import models
import pandas as pd
import zipfile
import os
import re
# def curve_io_formatter(x_column, y_columns, y_names, x_axis, y_axis, log=False):
# curves = []
# for index, output in enumerate(y_columns):
# curve = {round(float(k), 3): round(float(v), 3) for k, v in zip(x_column, output)}
# curves.append({"name": y_names[index], "data": curve})
# return {"curves": curves, "x_axis": x_axis, "y_axis": y_axis, "log": log}
excel_colunn_pattern = re.compile("(?P<name>[A-Za-z0-9_/]*)[ \t]+\[(?P<units>[(A-Za-z0-9_/)]*)\]")
def curve_io_formatter(curves, x_axis, y_axis, logY=False):
    """Bundle plotting data into the dict payload expected by the front end."""
    payload = dict(curves=curves, x_axis=x_axis, y_axis=y_axis, logY=logY)
    return payload
def extract_experiment_table(exp_id, units_row=False, units_brackets=True, reorder=True):
    """Assemble the data columns of an experiment into a pandas DataFrame.

    Parameters
    ----------
    exp_id : primary key of the Experiment whose DataColumns are fetched.
    units_row : when True, prepend each column's unit string as its first row.
    units_brackets : when True, rename columns to 'name [units]'.
    reorder : when True, move 'temperature' to the first column for the
        reactor/experiment-type combinations checked below.
    """
    dc = models.DataColumn.objects.filter(experiment_id=exp_id)
    # dict: name -> (units, data); species columns are named by the
    # comma-joined species list instead of d.name.
    column_names_units_data = {d.name if d.species is None else ",".join(d.species): (d.units, d.data) for d in dc}
    column_names = list(column_names_units_data.keys())
    # we can freely reorder names
    if reorder:
        e = models.Experiment.objects.get(pk=exp_id)
        if (
                e.reactor == "shock tube" and e.experiment_type == "ignition delay measurement") or e.reactor == "stirred reactor":
            # NOTE(review): assumes a 'temperature' column exists for these
            # experiment types -- list.remove raises ValueError otherwise.
            column_names.remove("temperature")
            column_names.insert(0, "temperature")
    # units and data are taken as a consequence of the reordered names
    column_units = [column_names_units_data[cn][0] for cn in column_names]
    column_data = [[float(i) for i in column_names_units_data[cn][1]] for cn in
                   column_names]  # decimal to float (for excel seeing it as a number)
    if units_row:
        column_data = [[i] + j for i, j in zip(column_units, column_data)]
    if units_brackets:
        column_names = ["{} [{}]".format(i, j) for i, j in zip(column_names, column_units)]
    r = pd.DataFrame(dict(zip(column_names, column_data)))
    return r
def zip_folders(f, folders, zipname, remove_trailing=""):
    """Recursively write the contents of *folders* into a zip archive.

    Parameters
    ----------
    f : path or file object accepted by zipfile.ZipFile, opened for writing.
    folders : iterable of directory paths to walk.
    zipname : unused; kept only for backward compatibility with callers.
    remove_trailing : path prefix stripped from each archived name.
    """
    with zipfile.ZipFile(f, 'w') as myzip:
        for fp in folders:
            for root, dirs, files in os.walk(fp):
                # 'filename' used to be named 'f', shadowing the archive
                # argument of the enclosing function.
                for filename in files:
                    full_path = os.path.join(root, filename)
                    new_name = os.path.relpath(full_path, remove_trailing)
                    myzip.write(full_path, arcname=new_name)
def check_data_excel(df):
    """Validate an uploaded experiment table.

    Returns False when the dataframe contains any NaN, or when any
    column header does not match the 'name [units]' pattern
    (excel_colunn_pattern); True otherwise.
    """
    # Reject tables with missing values.
    if df.isnull().values.any():
        return False
    # Every column header must parse as 'name [units]'.  (The old loop
    # also built an unused 'columns_extracted' list.)
    return all(excel_colunn_pattern.match(column) for column in df.columns)
| 34.025
| 131
| 0.640705
|
from . import models
import pandas as pd
import zipfile
import os
import re
excel_colunn_pattern = re.compile("(?P<name>[A-Za-z0-9_/]*)[ \t]+\[(?P<units>[(A-Za-z0-9_/)]*)\]")
def curve_io_formatter(curves, x_axis, y_axis, logY=False):
return {"curves": curves, "x_axis": x_axis, "y_axis": y_axis, "logY": logY}
def extract_experiment_table(exp_id, units_row=False, units_brackets=True, reorder=True):
dc = models.DataColumn.objects.filter(experiment_id=exp_id)
column_names_units_data = {d.name if d.species is None else ",".join(d.species): (d.units, d.data) for d in dc}
column_names = list(column_names_units_data.keys())
if reorder:
e = models.Experiment.objects.get(pk=exp_id)
if (
e.reactor == "shock tube" and e.experiment_type == "ignition delay measurement") or e.reactor == "stirred reactor":
column_names.remove("temperature")
column_names.insert(0, "temperature")
column_units = [column_names_units_data[cn][0] for cn in column_names]
column_data = [[float(i) for i in column_names_units_data[cn][1]] for cn in
column_names]
if units_row:
column_data = [[i] + j for i, j in zip(column_units, column_data)]
if units_brackets:
column_names = ["{} [{}]".format(i, j) for i, j in zip(column_names, column_units)]
r = pd.DataFrame(dict(zip(column_names, column_data)))
return r
def zip_folders(f, folders, zipname, remove_trailing=""):
with zipfile.ZipFile(f, 'w') as myzip:
for fp in folders:
for root, dirs, files in os.walk(fp):
for f in files:
new_name = os.path.relpath(os.path.join(root,f), remove_trailing)
myzip.write(os.path.join(root, f), arcname=new_name)
def check_data_excel(df):
has_nan = df.isnull().values.any()
if has_nan:
return False
columns = df.columns
columns_extracted = []
for column in columns:
p = excel_colunn_pattern.match(column)
if not p:
return False
return True
| true
| true
|
f71898c3ed083524faabeea56c687bae2ca86d8e
| 807
|
py
|
Python
|
src/python/pants/backend/python/rules/setup_py_util_test.py
|
mpopenko-exos/pants
|
47d27037c8b13291fc9023e56ddd1b1defdf1b8e
|
[
"Apache-2.0"
] | null | null | null |
src/python/pants/backend/python/rules/setup_py_util_test.py
|
mpopenko-exos/pants
|
47d27037c8b13291fc9023e56ddd1b1defdf1b8e
|
[
"Apache-2.0"
] | 1
|
2018-09-04T17:37:34.000Z
|
2018-09-04T19:42:58.000Z
|
src/python/pants/backend/python/rules/setup_py_util_test.py
|
mpopenko-exos/pants
|
47d27037c8b13291fc9023e56ddd1b1defdf1b8e
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from pants.backend.python.rules.setup_py_util import distutils_repr
testdata = {
'foo': 'bar',
'baz': {
'qux': [123, 456],
'quux': ('abc', b'xyz'),
'corge': {1, 2, 3}
},
'various_strings': [
"x'y",
'aaa\nbbb'
]
}
expected = """
{
'foo': 'bar',
'baz': {
'qux': [
123,
456,
],
'quux': (
'abc',
'xyz',
),
'corge': {
1,
2,
3,
},
},
'various_strings': [
'x\\\'y',
\"\"\"aaa\nbbb\"\"\",
],
}
""".strip()
def test_distutils_repr():
assert expected == distutils_repr(testdata)
| 16.469388
| 67
| 0.448575
|
from pants.backend.python.rules.setup_py_util import distutils_repr
testdata = {
'foo': 'bar',
'baz': {
'qux': [123, 456],
'quux': ('abc', b'xyz'),
'corge': {1, 2, 3}
},
'various_strings': [
"x'y",
'aaa\nbbb'
]
}
expected = """
{
'foo': 'bar',
'baz': {
'qux': [
123,
456,
],
'quux': (
'abc',
'xyz',
),
'corge': {
1,
2,
3,
},
},
'various_strings': [
'x\\\'y',
\"\"\"aaa\nbbb\"\"\",
],
}
""".strip()
def test_distutils_repr():
assert expected == distutils_repr(testdata)
| true
| true
|
f71898d420e214a47c23384a2b7b0302f44ef350
| 175
|
py
|
Python
|
paginas/admin.py
|
DSheridanmt/Safety-Life
|
522578858f8e063e14d0274de008c345ef2c0a75
|
[
"MIT"
] | null | null | null |
paginas/admin.py
|
DSheridanmt/Safety-Life
|
522578858f8e063e14d0274de008c345ef2c0a75
|
[
"MIT"
] | null | null | null |
paginas/admin.py
|
DSheridanmt/Safety-Life
|
522578858f8e063e14d0274de008c345ef2c0a75
|
[
"MIT"
] | null | null | null |
from django.contrib import admin

# Import the model classes to expose in the admin site.
from .models import Publicacao, Tag

# Make both models manageable through the Django admin interface.
for model in (Publicacao, Tag):
    admin.site.register(model)
| 19.444444
| 35
| 0.805714
|
from django.contrib import admin
from .models import Publicacao, Tag
admin.site.register(Publicacao)
admin.site.register(Tag)
| true
| true
|
f7189913fe684be000682693e8c8998c0035fdb1
| 5,622
|
py
|
Python
|
2parser/sample.py
|
formalabstracts/CNL-CIC
|
c857ee0d52b4ba91dd06a51c8f9f3ec2749ca0eb
|
[
"MIT"
] | 14
|
2019-06-27T16:34:39.000Z
|
2021-01-07T18:13:04.000Z
|
2parser/sample.py
|
formalabstracts/CNL-CIC
|
c857ee0d52b4ba91dd06a51c8f9f3ec2749ca0eb
|
[
"MIT"
] | 8
|
2019-10-17T06:09:51.000Z
|
2020-03-25T15:51:32.000Z
|
2parser/sample.py
|
formalabstracts/CNL-CIC
|
c857ee0d52b4ba91dd06a51c8f9f3ec2749ca0eb
|
[
"MIT"
] | 17
|
2019-06-27T16:34:53.000Z
|
2020-08-15T01:30:32.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Feb 16 05:48:26 2021
@author: thales
Generate random samples from parsers
"""
from numpy.random import (poisson , binomial, randint)
from tokenlib import (Item , Etok, mk_stream)
import lib
import state
def bernoulli(p):
    """Draw a single Bernoulli sample: 1 with probability *p*, else 0."""
    single_trial = 1
    return binomial(single_trial, p)
def ran(ls):
    """Return a uniformly random element of the non-empty list *ls*.

    Raises TypeError when *ls* is empty (or otherwise falsy).
    """
    if not ls:
        # The old body had an unreachable 'return ls' after this raise.
        raise TypeError(f'ran, expected nonempty list {ls}')
    return ls[randint(0, len(ls))]
def mk_tok(v):
    """Tokenise the string *v* and return its first token.

    Raises IndexError with a descriptive message when *v* produces no
    tokens.  Other tokenizer errors now propagate unchanged; the old
    bare 'except' re-labelled every failure as IndexError.
    """
    toks = mk_stream(v)
    try:
        return toks.stream[0]
    except IndexError:
        raise IndexError(f'List index out of range. Empty list mk_tok({v})')
def mk_toks(vs):
toks = mk_stream(vs)
return toks.stream
def next_token():
return mk_tok('blah')
def none():
return None
def add_sample(self, other):
    """Compose two sampler objects into a sampler of (left, right) pairs."""
    def sample():
        try:
            acc1 = self.sample()
            acc2 = other.sample()
            return (acc1, acc2)
        except AttributeError as ex:
            # Chain the original error instead of discarding it, so the
            # real failure site stays visible in the traceback.
            raise AttributeError(f'MyAttributeError {other}') from ex
    return sample
def or_sample(self,other):
def sample():
if bernoulli(0.5):
return self.sample()
return other.sample()
return sample
def treat_sample(self, treatment):
    """Return a sampler that post-processes self's samples with *treatment*."""
    def sample():
        raw = self.sample()
        return treatment(raw)
    return sample
def some(self, sep, m):
    """Return a sampler that produces *m* samples of *self*.

    With a separator sampler, the result interleaves m-1 separator
    samples: [s, sep, s, sep, ..., s].  Without one, it is a plain list
    of exactly m samples -- the old code used range(0, m-1) and returned
    only m-1, so e.g. atleast(n) could yield fewer than n items.
    """
    def sample():
        if sep:
            if m == 0:
                return []
            return lib.flatten((self.sample(), sep.sample()) for _ in range(0, m - 1)) + [self.sample()]
        # Fixed off-by-one: range(0, m) yields exactly m samples.
        return [self.sample() for _ in range(0, m)]
    return sample
def plus(self,sep):
return some(self,sep,1 + poisson(0.5))
def many(self,sep):
return some(self,sep,0 + poisson(0.5))
def atleast(self,n):
return some(self,None,n + poisson(0.5))
def possibly(self):
def sample():
if state.state.include_possibly:
return self.sample()
if bernoulli(0.5):
return self.sample()
return None
return sample
def if_test(self,p):
def sample():
iteration_limit = 10 # arbitrary limit
for _ in range(0,iteration_limit):
acc = self.sample() # randomized guess
if p(acc):
return acc
return next_token() # give up on test
return sample
def if_value(v):
def sample():
return mk_tok(v)
return sample
def if_rawvalue(v):
return if_value(v)
def type_sample(ty:str):
"""
>>> type_sample('WORD')
'...'
"""
d = {'STRING': ['"'+s+'"' for s in 'hello world so little time'.split()],
'CONTROLSEQ':['\\'+s for s in 'alpha beta gamma delta sum prod deg circ ast lneg times rtimes'.split()],
'DECIMAL':['3.14','2.718','1.0','4.96'],
'INTEGER': [str(i) for i in range(0,10)] ,
'SYMBOL':['<','>','!=','+','-','*','^'],
'SYMBOL_QED':[r'\qed'],
'MAPSTO':[r'\mapsto'],
'MID':[r'\mid'],
'TMID':[r'\tmid'],
'ASSIGN':[':='],
'ARROW':[r'\to'],
'BLANK':['_'],
'ALT':['|'],
'PERIOD':['.'],
'COLON':[':'],
'APPLYSUB':[r'\sub'],
'COERCION': [r'\^'],
'LAMBDA':[r'\lambda'],
'PITY':[r'\Pity'],
'QUANTIFIER':[r'\forall',r'\exists'],
'VAR':[ f'{x}{n}' for x in 'b c x y z u v w'.split() for n in range(0,5)],
'WORD':"""estimate equation solution expression inequality random sample
mean pair ordered function evaluate order operation property divisible
exponent base multiple square common prime form factorization point
plane line angle ray parallel intersecting perpendicular regular
polygon degree circle diameter chord similar congruent symmetry
leg triangle scalene equilateral trapezoid rotation transformation
translation polyhedron integer positive opposite value origin
coordinate area circumference word number blah part""".split(),
'ATOMIC_IDENTIFIER':'foo_bar bar3 foo22 sin_ cos_ atan2 ceil_ comb_ fabs_ factorial_ floor_ gcd_ sqrt_ log2 log10 pow_ '.split(),
'HIERARCHICAL_IDENTIFIER':['math.pi','math.ceil','math.abs'],
'FIELD_ACCESSOR':['.assoc','.distrib'],
'UNKNOWN':['?'],
'TEX_ERROR':[r'\error']
}
return ran(d[ty])
def if_types(tys):
"""
>>> if_types(['WORD','INTEGER','DECIMAL'])()
LexToken(...)
"""
def sample():
ty = ran(tys)
return mk_tok(type_sample(ty))
return sample
def all_sample(prs):
    """Return a sampler that draws one sample from every parser, in order."""
    def sample():
        results = []
        for parser in prs:
            results.append(parser.sample())
        return results
    return sample
def first(prs):
    """Return a sampler that samples one randomly chosen parser from *prs*.

    Produces None when *prs* is empty.
    """
    def sample():
        if prs:
            chosen = prs[randint(0, len(prs))]
            return chosen.sample()
        return None
    return sample
#def lazy_call(pr):
# def sample():
# return pr().sample()
# return sample
def first_word(ss):
#DEBUG if not(ss):
# raise IndexError(f'Index out of range, split first_word({ss})')
s = ran(ss.split())
def sample():
return mk_tok(s)
return sample
def word_net_string(wn):
s = ran([k for k in wn])
if not s:
return ''
return s + ' ' + word_net_string(wn[s])
def word_net(wn):
def sample():
s = word_net_string(wn)
return mk_toks(s)
return sample
if __name__ == "__main__":
import doctest
doctest.testmod(optionflags=doctest.ELLIPSIS | doctest.NORMALIZE_WHITESPACE)
# doctest.testmod(verbose=True, optionflags=doctest.ELLIPSIS | doctest.NORMALIZE_WHITESPACE)
# doctest.testmod()
| 26.394366
| 138
| 0.574173
|
from numpy.random import (poisson , binomial, randint)
from tokenlib import (Item , Etok, mk_stream)
import lib
import state
def bernoulli(p):
return binomial(1,p)
def ran(ls):
if not ls:
raise TypeError(f'ran, expected nonempty list {ls}')
return ls
return ls[randint(0,len(ls))]
def mk_tok(v):
toks = mk_stream(v)
try:
return toks.stream[0]
except:
raise IndexError(f'List index out of range. Empty list mk_tok({v})')
def mk_toks(vs):
toks = mk_stream(vs)
return toks.stream
def next_token():
return mk_tok('blah')
def none():
return None
def add_sample(self,other):
def sample():
try:
acc1 = self.sample()
acc2 = other.sample()
return (acc1,acc2)
except AttributeError as ex:
raise AttributeError(f'MyAttributeError {other}')
return sample
def or_sample(self,other):
def sample():
if bernoulli(0.5):
return self.sample()
return other.sample()
return sample
def treat_sample(self,treatment):
def sample():
return treatment(self.sample())
return sample
def some(self,sep,m):
def sample():
if sep:
if m==0:
return []
return lib.flatten((self.sample(),sep.sample()) for _ in range(0,m-1))+[self.sample()]
return [self.sample() for _ in range(0,m-1)]
return sample
def plus(self,sep):
return some(self,sep,1 + poisson(0.5))
def many(self,sep):
return some(self,sep,0 + poisson(0.5))
def atleast(self,n):
return some(self,None,n + poisson(0.5))
def possibly(self):
def sample():
if state.state.include_possibly:
return self.sample()
if bernoulli(0.5):
return self.sample()
return None
return sample
def if_test(self,p):
def sample():
iteration_limit = 10
for _ in range(0,iteration_limit):
acc = self.sample()
if p(acc):
return acc
return next_token()
return sample
def if_value(v):
def sample():
return mk_tok(v)
return sample
def if_rawvalue(v):
return if_value(v)
def type_sample(ty:str):
d = {'STRING': ['"'+s+'"' for s in 'hello world so little time'.split()],
'CONTROLSEQ':['\\'+s for s in 'alpha beta gamma delta sum prod deg circ ast lneg times rtimes'.split()],
'DECIMAL':['3.14','2.718','1.0','4.96'],
'INTEGER': [str(i) for i in range(0,10)] ,
'SYMBOL':['<','>','!=','+','-','*','^'],
'SYMBOL_QED':[r'\qed'],
'MAPSTO':[r'\mapsto'],
'MID':[r'\mid'],
'TMID':[r'\tmid'],
'ASSIGN':[':='],
'ARROW':[r'\to'],
'BLANK':['_'],
'ALT':['|'],
'PERIOD':['.'],
'COLON':[':'],
'APPLYSUB':[r'\sub'],
'COERCION': [r'\^'],
'LAMBDA':[r'\lambda'],
'PITY':[r'\Pity'],
'QUANTIFIER':[r'\forall',r'\exists'],
'VAR':[ f'{x}{n}' for x in 'b c x y z u v w'.split() for n in range(0,5)],
'WORD':"""estimate equation solution expression inequality random sample
mean pair ordered function evaluate order operation property divisible
exponent base multiple square common prime form factorization point
plane line angle ray parallel intersecting perpendicular regular
polygon degree circle diameter chord similar congruent symmetry
leg triangle scalene equilateral trapezoid rotation transformation
translation polyhedron integer positive opposite value origin
coordinate area circumference word number blah part""".split(),
'ATOMIC_IDENTIFIER':'foo_bar bar3 foo22 sin_ cos_ atan2 ceil_ comb_ fabs_ factorial_ floor_ gcd_ sqrt_ log2 log10 pow_ '.split(),
'HIERARCHICAL_IDENTIFIER':['math.pi','math.ceil','math.abs'],
'FIELD_ACCESSOR':['.assoc','.distrib'],
'UNKNOWN':['?'],
'TEX_ERROR':[r'\error']
}
return ran(d[ty])
def if_types(tys):
def sample():
ty = ran(tys)
return mk_tok(type_sample(ty))
return sample
def all_sample(prs):
def sample():
return [p.sample() for p in prs]
return sample
def first(prs):
def sample():
if not prs:
return None
i = randint(0,len(prs))
return prs[i].sample()
return sample
def first_word(ss):
s = ran(ss.split())
def sample():
return mk_tok(s)
return sample
def word_net_string(wn):
s = ran([k for k in wn])
if not s:
return ''
return s + ' ' + word_net_string(wn[s])
def word_net(wn):
def sample():
s = word_net_string(wn)
return mk_toks(s)
return sample
if __name__ == "__main__":
import doctest
doctest.testmod(optionflags=doctest.ELLIPSIS | doctest.NORMALIZE_WHITESPACE)
| true
| true
|
f7189aca9151d09325fd0e73daa10f100e064973
| 4,598
|
py
|
Python
|
onpolicy/envs/mpe/scenarios/simple_push.py
|
zoeyuchao/onpolicy-release
|
c2cb64e59c5b1f21cce022db76c378b396fd480e
|
[
"MIT"
] | 1
|
2021-07-04T08:08:30.000Z
|
2021-07-04T08:08:30.000Z
|
onpolicy/envs/mpe/scenarios/simple_push.py
|
zoeyuchao/onpolicy-release
|
c2cb64e59c5b1f21cce022db76c378b396fd480e
|
[
"MIT"
] | 1
|
2021-06-11T15:28:11.000Z
|
2021-06-11T15:28:11.000Z
|
onpolicy/envs/mpe/scenarios/simple_push.py
|
zoeyuchao/onpolicy-release
|
c2cb64e59c5b1f21cce022db76c378b396fd480e
|
[
"MIT"
] | 1
|
2021-05-17T02:00:18.000Z
|
2021-05-17T02:00:18.000Z
|
import numpy as np
from onpolicy.envs.mpe.core import World, Agent, Landmark
from onpolicy.envs.mpe.scenario import BaseScenario
import random
#
# # the non-ensemble version of <ensemble_push>
#
#
class Scenario(BaseScenario):
    """Simple-push MPE scenario.

    One adversary agent is rewarded for staying near a goal landmark
    while the good agents' distance to that goal is large; good agents
    are rewarded for being close to the goal.
    """

    def make_world(self, args):
        """Create the World: agents (the first one adversarial) plus landmarks."""
        world = World()
        world.world_length = args.episode_length
        # set any world properties first
        world.dim_c = 2  # dimensionality of the communication channel
        num_agents = args.num_agents
        num_adversaries = 1
        num_landmarks = args.num_landmarks
        # add agents; indices below num_adversaries are adversaries
        world.agents = [Agent() for i in range(num_agents)]
        for i, agent in enumerate(world.agents):
            agent.name = 'agent %d' % i
            agent.collide = True
            agent.silent = True
            if i < num_adversaries:
                agent.adversary = True
            else:
                agent.adversary = False
        # add landmarks (immovable, non-colliding)
        world.landmarks = [Landmark() for i in range(num_landmarks)]
        for i, landmark in enumerate(world.landmarks):
            landmark.name = 'landmark %d' % i
            landmark.collide = False
            landmark.movable = False
        # make initial conditions
        self.reset_world(world)
        return world

    def reset_world(self, world):
        """Randomise landmark colors, the shared goal, and all initial states."""
        # random properties for landmarks
        for i, landmark in enumerate(world.landmarks):
            landmark.color = np.array([0.1, 0.1, 0.1])
            # NOTE(review): color has three channels, so this indexing
            # assumes at most two landmarks (i + 1 <= 2) -- confirm that
            # num_landmarks is never larger.
            landmark.color[i + 1] += 0.8
            landmark.index = i
        # set goal landmark (the same goal is assigned to every agent)
        goal = np.random.choice(world.landmarks)
        for i, agent in enumerate(world.agents):
            agent.goal_a = goal
            agent.color = np.array([0.25, 0.25, 0.25])
            if agent.adversary:
                agent.color = np.array([0.75, 0.25, 0.25])
            else:
                # tint good agents with the goal landmark's color channel
                j = goal.index
                agent.color[j + 1] += 0.5
        # set random initial states
        for agent in world.agents:
            agent.state.p_pos = np.random.uniform(-1, +1, world.dim_p)
            agent.state.p_vel = np.zeros(world.dim_p)
            agent.state.c = np.zeros(world.dim_c)
        for i, landmark in enumerate(world.landmarks):
            landmark.state.p_pos = 0.8 * np.random.uniform(-1, +1, world.dim_p)
            landmark.state.p_vel = np.zeros(world.dim_p)

    def reward(self, agent, world):
        """Dispatch to the adversary or good-agent reward."""
        return self.adversary_reward(agent, world) if agent.adversary else self.agent_reward(agent, world)

    def agent_reward(self, agent, world):
        """Negative Euclidean distance from the agent to its goal landmark."""
        return -np.sqrt(np.sum(np.square(agent.state.p_pos - agent.goal_a.state.p_pos)))

    def adversary_reward(self, agent, world):
        """Reward for keeping good agents far from the goal while staying close to it."""
        # distance of each good agent to its goal; reward grows with the nearest one
        agent_dist = [np.sqrt(np.sum(np.square(a.state.p_pos - a.goal_a.state.p_pos))) for a in world.agents if not a.adversary]
        pos_rew = min(agent_dist)
        # penalise the adversary's own distance to the goal landmark
        neg_rew = np.sqrt(np.sum(np.square(agent.goal_a.state.p_pos - agent.state.p_pos)))
        return pos_rew - neg_rew

    def observation(self, agent, world):
        """Build this agent's observation vector (positions are agent-relative)."""
        # get positions of all entities in this agent's reference frame
        entity_pos = []
        for entity in world.landmarks:  # world.entities:
            entity_pos.append(entity.state.p_pos - agent.state.p_pos)
        # entity colors
        entity_color = []
        for entity in world.landmarks:  # world.entities:
            entity_color.append(entity.color)
        # communication of all other agents
        comm = []
        other_pos = []
        for other in world.agents:
            if other is agent: continue
            comm.append(other.state.c)
            other_pos.append(other.state.p_pos - agent.state.p_pos)
        if not agent.adversary:
            # good agents additionally observe the goal offset and their own color
            return np.concatenate([agent.state.p_vel] + [agent.goal_a.state.p_pos - agent.state.p_pos] + [agent.color] + entity_pos + entity_color + other_pos)
        else:
            # the adversary does not observe which landmark is the goal
            return np.concatenate([agent.state.p_vel] + entity_pos + other_pos)
| 43.377358
| 159
| 0.61157
|
import numpy as np
from onpolicy.envs.mpe.core import World, Agent, Landmark
from onpolicy.envs.mpe.scenario import BaseScenario
import random
e_world(self, args):
world = World()
world.world_length = args.episode_length
world.dim_c = 2
num_agents = args.num_agents
num_adversaries = 1
num_landmarks = args.num_landmarks
world.agents = [Agent() for i in range(num_agents)]
for i, agent in enumerate(world.agents):
agent.name = 'agent %d' % i
agent.collide = True
agent.silent = True
if i < num_adversaries:
agent.adversary = True
else:
agent.adversary = False
world.landmarks = [Landmark() for i in range(num_landmarks)]
for i, landmark in enumerate(world.landmarks):
landmark.name = 'landmark %d' % i
landmark.collide = False
landmark.movable = False
self.reset_world(world)
return world
def reset_world(self, world):
for i, landmark in enumerate(world.landmarks):
landmark.color = np.array([0.1, 0.1, 0.1])
landmark.color[i + 1] += 0.8
landmark.index = i
goal = np.random.choice(world.landmarks)
for i, agent in enumerate(world.agents):
agent.goal_a = goal
agent.color = np.array([0.25, 0.25, 0.25])
if agent.adversary:
agent.color = np.array([0.75, 0.25, 0.25])
else:
j = goal.index
agent.color[j + 1] += 0.5
for agent in world.agents:
agent.state.p_pos = np.random.uniform(-1, +1, world.dim_p)
agent.state.p_vel = np.zeros(world.dim_p)
agent.state.c = np.zeros(world.dim_c)
for i, landmark in enumerate(world.landmarks):
landmark.state.p_pos = 0.8 * np.random.uniform(-1, +1, world.dim_p)
landmark.state.p_vel = np.zeros(world.dim_p)
def reward(self, agent, world):
return self.adversary_reward(agent, world) if agent.adversary else self.agent_reward(agent, world)
def agent_reward(self, agent, world):
return -np.sqrt(np.sum(np.square(agent.state.p_pos - agent.goal_a.state.p_pos)))
def adversary_reward(self, agent, world):
agent_dist = [np.sqrt(np.sum(np.square(a.state.p_pos - a.goal_a.state.p_pos))) for a in world.agents if not a.adversary]
pos_rew = min(agent_dist)
neg_rew = np.sqrt(np.sum(np.square(agent.goal_a.state.p_pos - agent.state.p_pos)))
return pos_rew - neg_rew
def observation(self, agent, world):
entity_pos = []
for entity in world.landmarks: # world.entities:
entity_pos.append(entity.state.p_pos - agent.state.p_pos)
# entity colors
entity_color = []
for entity in world.landmarks: # world.entities:
entity_color.append(entity.color)
# communication of all other agents
comm = []
other_pos = []
for other in world.agents:
if other is agent: continue
comm.append(other.state.c)
other_pos.append(other.state.p_pos - agent.state.p_pos)
if not agent.adversary:
return np.concatenate([agent.state.p_vel] + [agent.goal_a.state.p_pos - agent.state.p_pos] + [agent.color] + entity_pos + entity_color + other_pos)
else:
#other_pos = list(reversed(other_pos)) if random.uniform(0,1) > 0.5 else other_pos # randomize position of other agents in adversary network
return np.concatenate([agent.state.p_vel] + entity_pos + other_pos)
| true
| true
|
f7189bcea8006e2f10ec06aeeee0afb685dda826
| 15,186
|
py
|
Python
|
pysnmp/GBOND-TDIM-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 11
|
2021-02-02T16:27:16.000Z
|
2021-08-31T06:22:49.000Z
|
pysnmp/GBOND-TDIM-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 75
|
2021-02-24T17:30:31.000Z
|
2021-12-08T00:01:18.000Z
|
pysnmp/GBOND-TDIM-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 10
|
2019-04-30T05:51:36.000Z
|
2022-02-16T03:33:41.000Z
|
#
# PySNMP MIB module GBOND-TDIM-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/GBOND-TDIM-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 19:05:16 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
# Pull base ASN.1/SMI types and macros out of already-loaded MIB modules via
# pysnmp's MIB builder. This is pysmi-generated code: these calls play the
# role of imports, not ordinary Python imports.
OctetString, Integer, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "OctetString", "Integer", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueRangeConstraint, ConstraintsUnion, SingleValueConstraint, ValueSizeConstraint, ConstraintsIntersection = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueRangeConstraint", "ConstraintsUnion", "SingleValueConstraint", "ValueSizeConstraint", "ConstraintsIntersection")
gBondMIB, = mibBuilder.importSymbols("GBOND-MIB", "gBondMIB")
InterfaceIndex, ifIndex = mibBuilder.importSymbols("IF-MIB", "InterfaceIndex", "ifIndex")
ObjectGroup, ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ObjectGroup", "ModuleCompliance", "NotificationGroup")
Counter32, Gauge32, Unsigned32, NotificationType, Integer32, iso, MibScalar, MibTable, MibTableRow, MibTableColumn, ModuleIdentity, IpAddress, ObjectIdentity, TimeTicks, Bits, MibIdentifier, Counter64 = mibBuilder.importSymbols("SNMPv2-SMI", "Counter32", "Gauge32", "Unsigned32", "NotificationType", "Integer32", "iso", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "ModuleIdentity", "IpAddress", "ObjectIdentity", "TimeTicks", "Bits", "MibIdentifier", "Counter64")
TextualConvention, DisplayString, TruthValue = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString", "TruthValue")
# Module identity for GBOND-TDIM-MIB, rooted at OID 1.3.6.1.2.1.211.3.
gBondTdimMIB = ModuleIdentity((1, 3, 6, 1, 2, 1, 211, 3))
gBondTdimMIB.setRevisions(('2007-04-29 00:00',))
# Descriptive texts are attached only when the builder was told to load them.
if mibBuilder.loadTexts: gBondTdimMIB.setLastUpdated('200704290000Z')
if mibBuilder.loadTexts: gBondTdimMIB.setOrganization('IETF ADSL MIB Working Group')
# Top-level sub-trees: objects, conformance, and the per-port branch.
gBondTdimObjects = MibIdentifier((1, 3, 6, 1, 2, 1, 211, 3, 1))
gBondTdimConformance = MibIdentifier((1, 3, 6, 1, 2, 1, 211, 3, 2))
gBondTdimPort = MibIdentifier((1, 3, 6, 1, 2, 1, 211, 3, 1, 1))
class GBondTdimServiceIndex(TextualConvention, Unsigned32):
    # Textual convention for a TDIM service index: an unsigned integer
    # restricted to 1..60 and displayed as a plain decimal number.
    status = 'current'
    displayHint = 'd'
    subtypeSpec = Unsigned32.subtypeSpec + ValueRangeConstraint(1, 60)
# --- Notifications: service up/down traps, each carrying the service ifIndex
# --- and its operational state.
gBondTdimPortNotifications = MibIdentifier((1, 3, 6, 1, 2, 1, 211, 3, 1, 1, 0))
gBondTdimServiceUp = NotificationType((1, 3, 6, 1, 2, 1, 211, 3, 1, 1, 0, 1)).setObjects(("GBOND-TDIM-MIB", "gBondTdimServiceIfIdx"), ("GBOND-TDIM-MIB", "gBondTdimServiceOperState"))
if mibBuilder.loadTexts: gBondTdimServiceUp.setStatus('current')
gBondTdimServiceDown = NotificationType((1, 3, 6, 1, 2, 1, 211, 3, 1, 1, 0, 2)).setObjects(("GBOND-TDIM-MIB", "gBondTdimServiceIfIdx"), ("GBOND-TDIM-MIB", "gBondTdimServiceOperState"))
if mibBuilder.loadTexts: gBondTdimServiceDown.setStatus('current')
# --- Per-port configuration table (indexed by ifIndex): writable FEC
# --- parameters and the trap-enable flag.
gBondTdimPortConfTable = MibTable((1, 3, 6, 1, 2, 1, 211, 3, 1, 1, 1), )
if mibBuilder.loadTexts: gBondTdimPortConfTable.setStatus('current')
gBondTdimPortConfEntry = MibTableRow((1, 3, 6, 1, 2, 1, 211, 3, 1, 1, 1, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: gBondTdimPortConfEntry.setStatus('current')
gBondTdimFecAdminState = MibTableColumn((1, 3, 6, 1, 2, 1, 211, 3, 1, 1, 1, 1, 1), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: gBondTdimFecAdminState.setStatus('current')
gBondTdimFecWordSize = MibTableColumn((1, 3, 6, 1, 2, 1, 211, 3, 1, 1, 1, 1, 2), Unsigned32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(20, 255), ))).setUnits('octets').setMaxAccess("readwrite")
if mibBuilder.loadTexts: gBondTdimFecWordSize.setStatus('current')
gBondTdimFecRedundancySize = MibTableColumn((1, 3, 6, 1, 2, 1, 211, 3, 1, 1, 1, 1, 3), Unsigned32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(2, 2), ValueRangeConstraint(4, 4), ValueRangeConstraint(8, 8), ValueRangeConstraint(16, 16), ValueRangeConstraint(20, 20), ))).setUnits('octets').setMaxAccess("readwrite")
if mibBuilder.loadTexts: gBondTdimFecRedundancySize.setStatus('current')
gBondTdimFecInterleaverType = MibTableColumn((1, 3, 6, 1, 2, 1, 211, 3, 1, 1, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("none", 0), ("block", 1), ("convolution", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: gBondTdimFecInterleaverType.setStatus('current')
gBondTdimFecInterleaverDepth = MibTableColumn((1, 3, 6, 1, 2, 1, 211, 3, 1, 1, 1, 1, 5), Unsigned32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(1, 1), ValueRangeConstraint(2, 2), ValueRangeConstraint(3, 3), ValueRangeConstraint(4, 4), ValueRangeConstraint(6, 6), ValueRangeConstraint(8, 8), ValueRangeConstraint(12, 12), ValueRangeConstraint(16, 16), ValueRangeConstraint(24, 24), ValueRangeConstraint(32, 32), ValueRangeConstraint(48, 48), ValueRangeConstraint(96, 96), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: gBondTdimFecInterleaverDepth.setStatus('current')
gBondTdimServiceUpDownEnable = MibTableColumn((1, 3, 6, 1, 2, 1, 211, 3, 1, 1, 1, 1, 6), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: gBondTdimServiceUpDownEnable.setStatus('current')
# --- Per-port capability table (read-only): what FEC settings the hardware
# --- supports; mirrors the configuration columns above.
gBondTdimPortCapabilityTable = MibTable((1, 3, 6, 1, 2, 1, 211, 3, 1, 1, 2), )
if mibBuilder.loadTexts: gBondTdimPortCapabilityTable.setStatus('current')
gBondTdimPortCapabilityEntry = MibTableRow((1, 3, 6, 1, 2, 1, 211, 3, 1, 1, 2, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: gBondTdimPortCapabilityEntry.setStatus('current')
gBondTdimFecSupported = MibTableColumn((1, 3, 6, 1, 2, 1, 211, 3, 1, 1, 2, 1, 1), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: gBondTdimFecSupported.setStatus('current')
gBondTdimFecMaxWordSize = MibTableColumn((1, 3, 6, 1, 2, 1, 211, 3, 1, 1, 2, 1, 2), Unsigned32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(20, 255), ))).setUnits('octets').setMaxAccess("readonly")
if mibBuilder.loadTexts: gBondTdimFecMaxWordSize.setStatus('current')
gBondTdimFecMaxRedundancySize = MibTableColumn((1, 3, 6, 1, 2, 1, 211, 3, 1, 1, 2, 1, 3), Unsigned32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(2, 2), ValueRangeConstraint(4, 4), ValueRangeConstraint(8, 8), ValueRangeConstraint(16, 16), ValueRangeConstraint(20, 20), ))).setUnits('octets').setMaxAccess("readonly")
if mibBuilder.loadTexts: gBondTdimFecMaxRedundancySize.setStatus('current')
gBondTdimFecInterleaverTypeSupported = MibTableColumn((1, 3, 6, 1, 2, 1, 211, 3, 1, 1, 2, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("none", 0), ("block", 1), ("convolution", 2), ("blockConvolution", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: gBondTdimFecInterleaverTypeSupported.setStatus('current')
gBondTdimFecMaxInterleaverDepth = MibTableColumn((1, 3, 6, 1, 2, 1, 211, 3, 1, 1, 2, 1, 5), Unsigned32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(1, 1), ValueRangeConstraint(2, 2), ValueRangeConstraint(3, 3), ValueRangeConstraint(4, 4), ValueRangeConstraint(6, 6), ValueRangeConstraint(8, 8), ValueRangeConstraint(12, 12), ValueRangeConstraint(16, 16), ValueRangeConstraint(24, 24), ValueRangeConstraint(32, 32), ValueRangeConstraint(48, 48), ValueRangeConstraint(96, 96), ))).setMaxAccess("readonly")
if mibBuilder.loadTexts: gBondTdimFecMaxInterleaverDepth.setStatus('current')
# --- Per-port status table (read-only): CRC error counters and fault bits.
gBondTdimPortStatusTable = MibTable((1, 3, 6, 1, 2, 1, 211, 3, 1, 1, 3), )
if mibBuilder.loadTexts: gBondTdimPortStatusTable.setStatus('current')
gBondTdimPortStatusEntry = MibTableRow((1, 3, 6, 1, 2, 1, 211, 3, 1, 1, 3, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: gBondTdimPortStatusEntry.setStatus('current')
gBondTdimCrc4Errors = MibTableColumn((1, 3, 6, 1, 2, 1, 211, 3, 1, 1, 3, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: gBondTdimCrc4Errors.setStatus('current')
gBondTdimCrc6Errors = MibTableColumn((1, 3, 6, 1, 2, 1, 211, 3, 1, 1, 3, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: gBondTdimCrc6Errors.setStatus('current')
gBondTdimCrc8Errors = MibTableColumn((1, 3, 6, 1, 2, 1, 211, 3, 1, 1, 3, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: gBondTdimCrc8Errors.setStatus('current')
gBondTdimFltStatus = MibTableColumn((1, 3, 6, 1, 2, 1, 211, 3, 1, 1, 3, 1, 4), Bits().clone(namedValues=NamedValues(("serviceDown", 0), ("wrongConfig", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: gBondTdimFltStatus.setStatus('current')
# --- TDIM service table, indexed by the service's own index (1..60).
gBondTdimServiceTable = MibTable((1, 3, 6, 1, 2, 1, 211, 3, 1, 1, 4), )
if mibBuilder.loadTexts: gBondTdimServiceTable.setStatus('current')
gBondTdimServiceEntry = MibTableRow((1, 3, 6, 1, 2, 1, 211, 3, 1, 1, 4, 1), ).setIndexNames((0, "GBOND-TDIM-MIB", "gBondTdimServiceIdx"))
if mibBuilder.loadTexts: gBondTdimServiceEntry.setStatus('current')
gBondTdimServiceIdx = MibTableColumn((1, 3, 6, 1, 2, 1, 211, 3, 1, 1, 4, 1, 1), GBondTdimServiceIndex())
if mibBuilder.loadTexts: gBondTdimServiceIdx.setStatus('current')
gBondTdimServiceIfIdx = MibTableColumn((1, 3, 6, 1, 2, 1, 211, 3, 1, 1, 4, 1, 2), InterfaceIndex()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: gBondTdimServiceIfIdx.setStatus('current')
gBondTdimServiceType = MibTableColumn((1, 3, 6, 1, 2, 1, 211, 3, 1, 1, 4, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10))).clone(namedValues=NamedValues(("ds1", 0), ("e1", 1), ("nxds0", 2), ("nxe0", 3), ("ds3", 4), ("e3", 5), ("clock", 6), ("ethernet", 7), ("atm", 8), ("gfpNoFCS", 9), ("gfp", 10)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: gBondTdimServiceType.setStatus('current')
gBondTdimServiceSize = MibTableColumn((1, 3, 6, 1, 2, 1, 211, 3, 1, 1, 4, 1, 4), Unsigned32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(20, 255), ))).setUnits('octets').setMaxAccess("readwrite")
if mibBuilder.loadTexts: gBondTdimServiceSize.setStatus('current')
gBondTdimServiceOperState = MibTableColumn((1, 3, 6, 1, 2, 1, 211, 3, 1, 1, 4, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("up", 1), ("down", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: gBondTdimServiceOperState.setStatus('current')
# --- Conformance: object groups, notification group and the compliance
# --- statement. The pysmi version guard only applies setStatus on builders
# --- new enough to support it on group objects.
gBondTdimGroups = MibIdentifier((1, 3, 6, 1, 2, 1, 211, 3, 2, 1))
gBondTdimCompliances = MibIdentifier((1, 3, 6, 1, 2, 1, 211, 3, 2, 2))
gBondTdimBasicGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 211, 3, 2, 1, 1)).setObjects(("GBOND-TDIM-MIB", "gBondTdimCrc4Errors"), ("GBOND-TDIM-MIB", "gBondTdimCrc6Errors"), ("GBOND-TDIM-MIB", "gBondTdimCrc8Errors"), ("GBOND-TDIM-MIB", "gBondTdimFecSupported"), ("GBOND-TDIM-MIB", "gBondTdimServiceIfIdx"), ("GBOND-TDIM-MIB", "gBondTdimServiceType"), ("GBOND-TDIM-MIB", "gBondTdimServiceSize"), ("GBOND-TDIM-MIB", "gBondTdimServiceOperState"), ("GBOND-TDIM-MIB", "gBondTdimServiceUpDownEnable"), ("GBOND-TDIM-MIB", "gBondTdimFltStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    gBondTdimBasicGroup = gBondTdimBasicGroup.setStatus('current')
gBondTdimFecGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 211, 3, 2, 1, 2)).setObjects(("GBOND-TDIM-MIB", "gBondTdimFecSupported"), ("GBOND-TDIM-MIB", "gBondTdimFecAdminState"), ("GBOND-TDIM-MIB", "gBondTdimFecWordSize"), ("GBOND-TDIM-MIB", "gBondTdimFecRedundancySize"), ("GBOND-TDIM-MIB", "gBondTdimFecInterleaverType"), ("GBOND-TDIM-MIB", "gBondTdimFecInterleaverDepth"), ("GBOND-TDIM-MIB", "gBondTdimFecMaxWordSize"), ("GBOND-TDIM-MIB", "gBondTdimFecMaxRedundancySize"), ("GBOND-TDIM-MIB", "gBondTdimFecInterleaverTypeSupported"), ("GBOND-TDIM-MIB", "gBondTdimFecMaxInterleaverDepth"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    gBondTdimFecGroup = gBondTdimFecGroup.setStatus('current')
gBondTdimAlarmConfGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 211, 3, 2, 1, 3)).setObjects(("GBOND-TDIM-MIB", "gBondTdimServiceUpDownEnable"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    gBondTdimAlarmConfGroup = gBondTdimAlarmConfGroup.setStatus('current')
gBondTdimNotificationGroup = NotificationGroup((1, 3, 6, 1, 2, 1, 211, 3, 2, 1, 4)).setObjects(("GBOND-TDIM-MIB", "gBondTdimServiceUp"), ("GBOND-TDIM-MIB", "gBondTdimServiceDown"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    gBondTdimNotificationGroup = gBondTdimNotificationGroup.setStatus('current')
gBondTdimCompliance = ModuleCompliance((1, 3, 6, 1, 2, 1, 211, 3, 2, 2, 1)).setObjects(("GBOND-TDIM-MIB", "gBondTdimBasicGroup"), ("GBOND-TDIM-MIB", "gBondTdimAlarmConfGroup"), ("GBOND-TDIM-MIB", "gBondTdimNotificationGroup"), ("GBOND-TDIM-MIB", "gBondTdimFecGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    gBondTdimCompliance = gBondTdimCompliance.setStatus('current')
# Export every defined symbol so dependent MIB modules can import them.
mibBuilder.exportSymbols("GBOND-TDIM-MIB", gBondTdimFecInterleaverTypeSupported=gBondTdimFecInterleaverTypeSupported, gBondTdimPortConfTable=gBondTdimPortConfTable, gBondTdimFecMaxRedundancySize=gBondTdimFecMaxRedundancySize, gBondTdimServiceUp=gBondTdimServiceUp, gBondTdimPortConfEntry=gBondTdimPortConfEntry, gBondTdimPortCapabilityEntry=gBondTdimPortCapabilityEntry, GBondTdimServiceIndex=GBondTdimServiceIndex, gBondTdimNotificationGroup=gBondTdimNotificationGroup, gBondTdimServiceTable=gBondTdimServiceTable, gBondTdimFecGroup=gBondTdimFecGroup, gBondTdimServiceIdx=gBondTdimServiceIdx, gBondTdimFecMaxInterleaverDepth=gBondTdimFecMaxInterleaverDepth, gBondTdimServiceType=gBondTdimServiceType, gBondTdimFltStatus=gBondTdimFltStatus, gBondTdimServiceUpDownEnable=gBondTdimServiceUpDownEnable, gBondTdimFecSupported=gBondTdimFecSupported, gBondTdimServiceSize=gBondTdimServiceSize, gBondTdimFecMaxWordSize=gBondTdimFecMaxWordSize, gBondTdimPort=gBondTdimPort, gBondTdimFecInterleaverDepth=gBondTdimFecInterleaverDepth, gBondTdimMIB=gBondTdimMIB, gBondTdimConformance=gBondTdimConformance, gBondTdimGroups=gBondTdimGroups, gBondTdimCrc8Errors=gBondTdimCrc8Errors, gBondTdimBasicGroup=gBondTdimBasicGroup, gBondTdimPortCapabilityTable=gBondTdimPortCapabilityTable, gBondTdimServiceEntry=gBondTdimServiceEntry, gBondTdimAlarmConfGroup=gBondTdimAlarmConfGroup, gBondTdimPortStatusEntry=gBondTdimPortStatusEntry, gBondTdimFecInterleaverType=gBondTdimFecInterleaverType, gBondTdimObjects=gBondTdimObjects, gBondTdimPortStatusTable=gBondTdimPortStatusTable, gBondTdimServiceIfIdx=gBondTdimServiceIfIdx, gBondTdimServiceDown=gBondTdimServiceDown, PYSNMP_MODULE_ID=gBondTdimMIB, gBondTdimCompliance=gBondTdimCompliance, gBondTdimCompliances=gBondTdimCompliances, gBondTdimFecRedundancySize=gBondTdimFecRedundancySize, gBondTdimFecWordSize=gBondTdimFecWordSize, gBondTdimCrc4Errors=gBondTdimCrc4Errors, gBondTdimCrc6Errors=gBondTdimCrc6Errors, gBondTdimServiceOperState=gBondTdimServiceOperState, 
gBondTdimPortNotifications=gBondTdimPortNotifications, gBondTdimFecAdminState=gBondTdimFecAdminState)
| 140.611111
| 2,094
| 0.766298
|
OctetString, Integer, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "OctetString", "Integer", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueRangeConstraint, ConstraintsUnion, SingleValueConstraint, ValueSizeConstraint, ConstraintsIntersection = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueRangeConstraint", "ConstraintsUnion", "SingleValueConstraint", "ValueSizeConstraint", "ConstraintsIntersection")
gBondMIB, = mibBuilder.importSymbols("GBOND-MIB", "gBondMIB")
InterfaceIndex, ifIndex = mibBuilder.importSymbols("IF-MIB", "InterfaceIndex", "ifIndex")
ObjectGroup, ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ObjectGroup", "ModuleCompliance", "NotificationGroup")
Counter32, Gauge32, Unsigned32, NotificationType, Integer32, iso, MibScalar, MibTable, MibTableRow, MibTableColumn, ModuleIdentity, IpAddress, ObjectIdentity, TimeTicks, Bits, MibIdentifier, Counter64 = mibBuilder.importSymbols("SNMPv2-SMI", "Counter32", "Gauge32", "Unsigned32", "NotificationType", "Integer32", "iso", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "ModuleIdentity", "IpAddress", "ObjectIdentity", "TimeTicks", "Bits", "MibIdentifier", "Counter64")
TextualConvention, DisplayString, TruthValue = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString", "TruthValue")
gBondTdimMIB = ModuleIdentity((1, 3, 6, 1, 2, 1, 211, 3))
gBondTdimMIB.setRevisions(('2007-04-29 00:00',))
if mibBuilder.loadTexts: gBondTdimMIB.setLastUpdated('200704290000Z')
if mibBuilder.loadTexts: gBondTdimMIB.setOrganization('IETF ADSL MIB Working Group')
gBondTdimObjects = MibIdentifier((1, 3, 6, 1, 2, 1, 211, 3, 1))
gBondTdimConformance = MibIdentifier((1, 3, 6, 1, 2, 1, 211, 3, 2))
gBondTdimPort = MibIdentifier((1, 3, 6, 1, 2, 1, 211, 3, 1, 1))
class GBondTdimServiceIndex(TextualConvention, Unsigned32):
status = 'current'
displayHint = 'd'
subtypeSpec = Unsigned32.subtypeSpec + ValueRangeConstraint(1, 60)
gBondTdimPortNotifications = MibIdentifier((1, 3, 6, 1, 2, 1, 211, 3, 1, 1, 0))
gBondTdimServiceUp = NotificationType((1, 3, 6, 1, 2, 1, 211, 3, 1, 1, 0, 1)).setObjects(("GBOND-TDIM-MIB", "gBondTdimServiceIfIdx"), ("GBOND-TDIM-MIB", "gBondTdimServiceOperState"))
if mibBuilder.loadTexts: gBondTdimServiceUp.setStatus('current')
gBondTdimServiceDown = NotificationType((1, 3, 6, 1, 2, 1, 211, 3, 1, 1, 0, 2)).setObjects(("GBOND-TDIM-MIB", "gBondTdimServiceIfIdx"), ("GBOND-TDIM-MIB", "gBondTdimServiceOperState"))
if mibBuilder.loadTexts: gBondTdimServiceDown.setStatus('current')
gBondTdimPortConfTable = MibTable((1, 3, 6, 1, 2, 1, 211, 3, 1, 1, 1), )
if mibBuilder.loadTexts: gBondTdimPortConfTable.setStatus('current')
gBondTdimPortConfEntry = MibTableRow((1, 3, 6, 1, 2, 1, 211, 3, 1, 1, 1, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: gBondTdimPortConfEntry.setStatus('current')
gBondTdimFecAdminState = MibTableColumn((1, 3, 6, 1, 2, 1, 211, 3, 1, 1, 1, 1, 1), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: gBondTdimFecAdminState.setStatus('current')
gBondTdimFecWordSize = MibTableColumn((1, 3, 6, 1, 2, 1, 211, 3, 1, 1, 1, 1, 2), Unsigned32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(20, 255), ))).setUnits('octets').setMaxAccess("readwrite")
if mibBuilder.loadTexts: gBondTdimFecWordSize.setStatus('current')
gBondTdimFecRedundancySize = MibTableColumn((1, 3, 6, 1, 2, 1, 211, 3, 1, 1, 1, 1, 3), Unsigned32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(2, 2), ValueRangeConstraint(4, 4), ValueRangeConstraint(8, 8), ValueRangeConstraint(16, 16), ValueRangeConstraint(20, 20), ))).setUnits('octets').setMaxAccess("readwrite")
if mibBuilder.loadTexts: gBondTdimFecRedundancySize.setStatus('current')
gBondTdimFecInterleaverType = MibTableColumn((1, 3, 6, 1, 2, 1, 211, 3, 1, 1, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("none", 0), ("block", 1), ("convolution", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: gBondTdimFecInterleaverType.setStatus('current')
gBondTdimFecInterleaverDepth = MibTableColumn((1, 3, 6, 1, 2, 1, 211, 3, 1, 1, 1, 1, 5), Unsigned32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(1, 1), ValueRangeConstraint(2, 2), ValueRangeConstraint(3, 3), ValueRangeConstraint(4, 4), ValueRangeConstraint(6, 6), ValueRangeConstraint(8, 8), ValueRangeConstraint(12, 12), ValueRangeConstraint(16, 16), ValueRangeConstraint(24, 24), ValueRangeConstraint(32, 32), ValueRangeConstraint(48, 48), ValueRangeConstraint(96, 96), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: gBondTdimFecInterleaverDepth.setStatus('current')
gBondTdimServiceUpDownEnable = MibTableColumn((1, 3, 6, 1, 2, 1, 211, 3, 1, 1, 1, 1, 6), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: gBondTdimServiceUpDownEnable.setStatus('current')
gBondTdimPortCapabilityTable = MibTable((1, 3, 6, 1, 2, 1, 211, 3, 1, 1, 2), )
if mibBuilder.loadTexts: gBondTdimPortCapabilityTable.setStatus('current')
gBondTdimPortCapabilityEntry = MibTableRow((1, 3, 6, 1, 2, 1, 211, 3, 1, 1, 2, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: gBondTdimPortCapabilityEntry.setStatus('current')
gBondTdimFecSupported = MibTableColumn((1, 3, 6, 1, 2, 1, 211, 3, 1, 1, 2, 1, 1), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: gBondTdimFecSupported.setStatus('current')
gBondTdimFecMaxWordSize = MibTableColumn((1, 3, 6, 1, 2, 1, 211, 3, 1, 1, 2, 1, 2), Unsigned32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(20, 255), ))).setUnits('octets').setMaxAccess("readonly")
if mibBuilder.loadTexts: gBondTdimFecMaxWordSize.setStatus('current')
gBondTdimFecMaxRedundancySize = MibTableColumn((1, 3, 6, 1, 2, 1, 211, 3, 1, 1, 2, 1, 3), Unsigned32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(2, 2), ValueRangeConstraint(4, 4), ValueRangeConstraint(8, 8), ValueRangeConstraint(16, 16), ValueRangeConstraint(20, 20), ))).setUnits('octets').setMaxAccess("readonly")
if mibBuilder.loadTexts: gBondTdimFecMaxRedundancySize.setStatus('current')
gBondTdimFecInterleaverTypeSupported = MibTableColumn((1, 3, 6, 1, 2, 1, 211, 3, 1, 1, 2, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("none", 0), ("block", 1), ("convolution", 2), ("blockConvolution", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: gBondTdimFecInterleaverTypeSupported.setStatus('current')
gBondTdimFecMaxInterleaverDepth = MibTableColumn((1, 3, 6, 1, 2, 1, 211, 3, 1, 1, 2, 1, 5), Unsigned32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(1, 1), ValueRangeConstraint(2, 2), ValueRangeConstraint(3, 3), ValueRangeConstraint(4, 4), ValueRangeConstraint(6, 6), ValueRangeConstraint(8, 8), ValueRangeConstraint(12, 12), ValueRangeConstraint(16, 16), ValueRangeConstraint(24, 24), ValueRangeConstraint(32, 32), ValueRangeConstraint(48, 48), ValueRangeConstraint(96, 96), ))).setMaxAccess("readonly")
if mibBuilder.loadTexts: gBondTdimFecMaxInterleaverDepth.setStatus('current')
gBondTdimPortStatusTable = MibTable((1, 3, 6, 1, 2, 1, 211, 3, 1, 1, 3), )
if mibBuilder.loadTexts: gBondTdimPortStatusTable.setStatus('current')
gBondTdimPortStatusEntry = MibTableRow((1, 3, 6, 1, 2, 1, 211, 3, 1, 1, 3, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: gBondTdimPortStatusEntry.setStatus('current')
gBondTdimCrc4Errors = MibTableColumn((1, 3, 6, 1, 2, 1, 211, 3, 1, 1, 3, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: gBondTdimCrc4Errors.setStatus('current')
gBondTdimCrc6Errors = MibTableColumn((1, 3, 6, 1, 2, 1, 211, 3, 1, 1, 3, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: gBondTdimCrc6Errors.setStatus('current')
gBondTdimCrc8Errors = MibTableColumn((1, 3, 6, 1, 2, 1, 211, 3, 1, 1, 3, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: gBondTdimCrc8Errors.setStatus('current')
gBondTdimFltStatus = MibTableColumn((1, 3, 6, 1, 2, 1, 211, 3, 1, 1, 3, 1, 4), Bits().clone(namedValues=NamedValues(("serviceDown", 0), ("wrongConfig", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: gBondTdimFltStatus.setStatus('current')
gBondTdimServiceTable = MibTable((1, 3, 6, 1, 2, 1, 211, 3, 1, 1, 4), )
if mibBuilder.loadTexts: gBondTdimServiceTable.setStatus('current')
gBondTdimServiceEntry = MibTableRow((1, 3, 6, 1, 2, 1, 211, 3, 1, 1, 4, 1), ).setIndexNames((0, "GBOND-TDIM-MIB", "gBondTdimServiceIdx"))
if mibBuilder.loadTexts: gBondTdimServiceEntry.setStatus('current')
gBondTdimServiceIdx = MibTableColumn((1, 3, 6, 1, 2, 1, 211, 3, 1, 1, 4, 1, 1), GBondTdimServiceIndex())
if mibBuilder.loadTexts: gBondTdimServiceIdx.setStatus('current')
gBondTdimServiceIfIdx = MibTableColumn((1, 3, 6, 1, 2, 1, 211, 3, 1, 1, 4, 1, 2), InterfaceIndex()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: gBondTdimServiceIfIdx.setStatus('current')
gBondTdimServiceType = MibTableColumn((1, 3, 6, 1, 2, 1, 211, 3, 1, 1, 4, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10))).clone(namedValues=NamedValues(("ds1", 0), ("e1", 1), ("nxds0", 2), ("nxe0", 3), ("ds3", 4), ("e3", 5), ("clock", 6), ("ethernet", 7), ("atm", 8), ("gfpNoFCS", 9), ("gfp", 10)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: gBondTdimServiceType.setStatus('current')
gBondTdimServiceSize = MibTableColumn((1, 3, 6, 1, 2, 1, 211, 3, 1, 1, 4, 1, 4), Unsigned32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(20, 255), ))).setUnits('octets').setMaxAccess("readwrite")
if mibBuilder.loadTexts: gBondTdimServiceSize.setStatus('current')
gBondTdimServiceOperState = MibTableColumn((1, 3, 6, 1, 2, 1, 211, 3, 1, 1, 4, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("up", 1), ("down", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: gBondTdimServiceOperState.setStatus('current')
gBondTdimGroups = MibIdentifier((1, 3, 6, 1, 2, 1, 211, 3, 2, 1))
gBondTdimCompliances = MibIdentifier((1, 3, 6, 1, 2, 1, 211, 3, 2, 2))
gBondTdimBasicGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 211, 3, 2, 1, 1)).setObjects(("GBOND-TDIM-MIB", "gBondTdimCrc4Errors"), ("GBOND-TDIM-MIB", "gBondTdimCrc6Errors"), ("GBOND-TDIM-MIB", "gBondTdimCrc8Errors"), ("GBOND-TDIM-MIB", "gBondTdimFecSupported"), ("GBOND-TDIM-MIB", "gBondTdimServiceIfIdx"), ("GBOND-TDIM-MIB", "gBondTdimServiceType"), ("GBOND-TDIM-MIB", "gBondTdimServiceSize"), ("GBOND-TDIM-MIB", "gBondTdimServiceOperState"), ("GBOND-TDIM-MIB", "gBondTdimServiceUpDownEnable"), ("GBOND-TDIM-MIB", "gBondTdimFltStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
gBondTdimBasicGroup = gBondTdimBasicGroup.setStatus('current')
gBondTdimFecGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 211, 3, 2, 1, 2)).setObjects(("GBOND-TDIM-MIB", "gBondTdimFecSupported"), ("GBOND-TDIM-MIB", "gBondTdimFecAdminState"), ("GBOND-TDIM-MIB", "gBondTdimFecWordSize"), ("GBOND-TDIM-MIB", "gBondTdimFecRedundancySize"), ("GBOND-TDIM-MIB", "gBondTdimFecInterleaverType"), ("GBOND-TDIM-MIB", "gBondTdimFecInterleaverDepth"), ("GBOND-TDIM-MIB", "gBondTdimFecMaxWordSize"), ("GBOND-TDIM-MIB", "gBondTdimFecMaxRedundancySize"), ("GBOND-TDIM-MIB", "gBondTdimFecInterleaverTypeSupported"), ("GBOND-TDIM-MIB", "gBondTdimFecMaxInterleaverDepth"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
gBondTdimFecGroup = gBondTdimFecGroup.setStatus('current')
gBondTdimAlarmConfGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 211, 3, 2, 1, 3)).setObjects(("GBOND-TDIM-MIB", "gBondTdimServiceUpDownEnable"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
gBondTdimAlarmConfGroup = gBondTdimAlarmConfGroup.setStatus('current')
gBondTdimNotificationGroup = NotificationGroup((1, 3, 6, 1, 2, 1, 211, 3, 2, 1, 4)).setObjects(("GBOND-TDIM-MIB", "gBondTdimServiceUp"), ("GBOND-TDIM-MIB", "gBondTdimServiceDown"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
gBondTdimNotificationGroup = gBondTdimNotificationGroup.setStatus('current')
gBondTdimCompliance = ModuleCompliance((1, 3, 6, 1, 2, 1, 211, 3, 2, 2, 1)).setObjects(("GBOND-TDIM-MIB", "gBondTdimBasicGroup"), ("GBOND-TDIM-MIB", "gBondTdimAlarmConfGroup"), ("GBOND-TDIM-MIB", "gBondTdimNotificationGroup"), ("GBOND-TDIM-MIB", "gBondTdimFecGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
gBondTdimCompliance = gBondTdimCompliance.setStatus('current')
mibBuilder.exportSymbols("GBOND-TDIM-MIB", gBondTdimFecInterleaverTypeSupported=gBondTdimFecInterleaverTypeSupported, gBondTdimPortConfTable=gBondTdimPortConfTable, gBondTdimFecMaxRedundancySize=gBondTdimFecMaxRedundancySize, gBondTdimServiceUp=gBondTdimServiceUp, gBondTdimPortConfEntry=gBondTdimPortConfEntry, gBondTdimPortCapabilityEntry=gBondTdimPortCapabilityEntry, GBondTdimServiceIndex=GBondTdimServiceIndex, gBondTdimNotificationGroup=gBondTdimNotificationGroup, gBondTdimServiceTable=gBondTdimServiceTable, gBondTdimFecGroup=gBondTdimFecGroup, gBondTdimServiceIdx=gBondTdimServiceIdx, gBondTdimFecMaxInterleaverDepth=gBondTdimFecMaxInterleaverDepth, gBondTdimServiceType=gBondTdimServiceType, gBondTdimFltStatus=gBondTdimFltStatus, gBondTdimServiceUpDownEnable=gBondTdimServiceUpDownEnable, gBondTdimFecSupported=gBondTdimFecSupported, gBondTdimServiceSize=gBondTdimServiceSize, gBondTdimFecMaxWordSize=gBondTdimFecMaxWordSize, gBondTdimPort=gBondTdimPort, gBondTdimFecInterleaverDepth=gBondTdimFecInterleaverDepth, gBondTdimMIB=gBondTdimMIB, gBondTdimConformance=gBondTdimConformance, gBondTdimGroups=gBondTdimGroups, gBondTdimCrc8Errors=gBondTdimCrc8Errors, gBondTdimBasicGroup=gBondTdimBasicGroup, gBondTdimPortCapabilityTable=gBondTdimPortCapabilityTable, gBondTdimServiceEntry=gBondTdimServiceEntry, gBondTdimAlarmConfGroup=gBondTdimAlarmConfGroup, gBondTdimPortStatusEntry=gBondTdimPortStatusEntry, gBondTdimFecInterleaverType=gBondTdimFecInterleaverType, gBondTdimObjects=gBondTdimObjects, gBondTdimPortStatusTable=gBondTdimPortStatusTable, gBondTdimServiceIfIdx=gBondTdimServiceIfIdx, gBondTdimServiceDown=gBondTdimServiceDown, PYSNMP_MODULE_ID=gBondTdimMIB, gBondTdimCompliance=gBondTdimCompliance, gBondTdimCompliances=gBondTdimCompliances, gBondTdimFecRedundancySize=gBondTdimFecRedundancySize, gBondTdimFecWordSize=gBondTdimFecWordSize, gBondTdimCrc4Errors=gBondTdimCrc4Errors, gBondTdimCrc6Errors=gBondTdimCrc6Errors, gBondTdimServiceOperState=gBondTdimServiceOperState, 
gBondTdimPortNotifications=gBondTdimPortNotifications, gBondTdimFecAdminState=gBondTdimFecAdminState)
| true
| true
|
f7189c274d57bd65a3dbaf3a87aaaf696023de37
| 56,687
|
py
|
Python
|
src/_pytest/fixtures.py
|
blueyed/pytest
|
2b52e24a9fe013a043c36e3df3d62b4b4f6348f1
|
[
"MIT"
] | 3
|
2019-11-26T02:30:12.000Z
|
2020-04-15T17:49:07.000Z
|
src/_pytest/fixtures.py
|
blueyed/pytest
|
2b52e24a9fe013a043c36e3df3d62b4b4f6348f1
|
[
"MIT"
] | 59
|
2019-10-22T04:34:22.000Z
|
2021-11-27T18:23:11.000Z
|
src/_pytest/fixtures.py
|
blueyed/pytest
|
2b52e24a9fe013a043c36e3df3d62b4b4f6348f1
|
[
"MIT"
] | 1
|
2019-11-14T16:47:19.000Z
|
2019-11-14T16:47:19.000Z
|
import functools
import inspect
import itertools
import sys
import warnings
from collections import defaultdict
from collections import deque
from typing import Dict
from typing import List
from typing import Optional
from typing import Tuple
import attr
import py.path
import _pytest
from _pytest._code.code import FormattedExcinfo
from _pytest._code.code import TerminalRepr
from _pytest._code.source import getfslineno
from _pytest.compat import _format_args
from _pytest.compat import _PytestWrapper
from _pytest.compat import get_real_func
from _pytest.compat import get_real_method
from _pytest.compat import getfuncargnames
from _pytest.compat import getimfunc
from _pytest.compat import getlocation
from _pytest.compat import is_generator
from _pytest.compat import NOTSET
from _pytest.compat import order_preserving_dict
from _pytest.compat import safe_getattr
from _pytest.compat import TYPE_CHECKING
from _pytest.deprecated import FIXTURE_POSITIONAL_ARGUMENTS
from _pytest.deprecated import FUNCARGNAMES
from _pytest.mark import ParameterSet
from _pytest.outcomes import fail
from _pytest.outcomes import TEST_OUTCOME
if TYPE_CHECKING:
from typing import Type
from typing_extensions import Literal
from _pytest import nodes
from _pytest._io import TerminalWriter
from _pytest.main import Session
from _pytest.runner import _RuntestPhase
_Scope = Literal["session", "package", "module", "class", "function"]
@attr.s(frozen=True)
class PseudoFixtureDef:
    """Minimal stand-in for a FixtureDef, used for the special "request"
    fixture (see FixtureRequest._get_active_fixturedef)."""

    # (result, cache_key, exc_info) tuple, mirroring FixtureDef.cached_result
    cached_result = attr.ib()
    # scope string; the "request" pseudo-fixture is created with "function"
    scope = attr.ib()
def pytest_sessionstart(session: "Session"):
    """Populate the module-level scope-name -> node-class mapping and
    attach a FixtureManager to the session.

    The imports are performed here rather than at module import time,
    presumably to avoid circular imports between _pytest modules.
    """
    import _pytest.python
    import _pytest.nodes

    # NOTE(review): "session" relies on _pytest.main being reachable as an
    # attribute of the already-imported _pytest package -- confirm.
    scopename2class.update(
        {
            "package": _pytest.python.Package,
            "class": _pytest.python.Class,
            "module": _pytest.python.Module,
            "function": _pytest.nodes.Item,
            "session": _pytest.main.Session,
        }
    )
    session._fixturemanager = FixtureManager(session)
# scope name -> collection node class; filled in by pytest_sessionstart
# once the plugin modules are importable.
scopename2class = {}  # type: Dict[str, Type[nodes.Node]]

# For each scope name, the FixtureRequest attribute names that are valid in
# that scope.  Each narrower scope extends the wider one's tuple.
scope2props = dict(session=())  # type: Dict[str, Tuple[str, ...]]
scope2props["package"] = ("fspath",)
scope2props["module"] = ("fspath", "module")
scope2props["class"] = scope2props["module"] + ("cls",)
scope2props["instance"] = scope2props["class"] + ("instance",)
scope2props["function"] = scope2props["instance"] + ("function", "keywords")
def scopeproperty(name=None, doc=None):
    """Decorator factory: turn a method into a property that is only
    readable when the request's current scope provides the attribute
    (per the module-level ``scope2props`` table)."""

    def wrap(fget):
        display_name = name or fget.__name__

        def getter(self):
            # Guard clause: refuse access outside the allowed scopes.
            if fget.__name__ not in scope2props[self.scope]:
                raise AttributeError(
                    "{} not available in {}-scoped context".format(
                        display_name, self.scope
                    )
                )
            return fget(self)

        return property(getter, None, None, fget.__doc__)

    return wrap
def get_scope_package(node, fixturedef):
    """Walk up from *node* looking for the Package whose ``__init__.py``
    matches the fixture's base id; fall back to the session."""
    import pytest

    target_nodeid = "{}/{}".format(fixturedef.baseid, "__init__.py")
    candidate = node
    # Stop at the first exact Package whose nodeid matches, or when the
    # parent chain runs out.
    while candidate and not (
        type(candidate) is pytest.Package and candidate.nodeid == target_nodeid
    ):
        candidate = candidate.parent
    return node.session if candidate is None else candidate
def get_scope_node(node, scope):
    """Return the ancestor collection node of *node* matching *scope*
    (may be None when no such ancestor exists)."""
    try:
        node_cls = scopename2class[scope]
    except KeyError:
        raise ValueError("unknown scope") from None
    return node.getparent(node_cls)
def add_funcarg_pseudo_fixture_def(collector, metafunc, fixturemanager):
    """Create artificial FixtureDefs for directly parametrized funcargs so
    that test execution can rely on a FixtureDef existing for every
    argument."""
    # this function will transform all collected calls to a functions
    # if they use direct funcargs (i.e. direct parametrization)
    # because we want later test execution to be able to rely on
    # an existing FixtureDef structure for all arguments.
    # XXX we can probably avoid this algorithm if we modify CallSpec2
    # to directly care for creating the fixturedefs within its methods.
    if not metafunc._calls[0].funcargs:
        return  # this function call does not have direct parametrization
    # collect funcargs of all callspecs into a list of values
    arg2params = {}
    arg2scope = {}
    for callspec in metafunc._calls:
        for argname, argvalue in callspec.funcargs.items():
            assert argname not in callspec.params
            # move the direct value into params and record its index
            callspec.params[argname] = argvalue
            arg2params_list = arg2params.setdefault(argname, [])
            callspec.indices[argname] = len(arg2params_list)
            arg2params_list.append(argvalue)
            if argname not in arg2scope:
                # default to function scope when no scope was requested
                scopenum = callspec._arg2scopenum.get(argname, scopenum_function)
                arg2scope[argname] = scopes[scopenum]
        callspec.funcargs.clear()
    # register artificial FixtureDef's so that later at test execution
    # time we can rely on a proper FixtureDef to exist for fixture setup.
    arg2fixturedefs = metafunc._arg2fixturedefs
    for argname, valuelist in arg2params.items():
        # if we have a scope that is higher than function we need
        # to make sure we only ever create an according fixturedef on
        # a per-scope basis. We thus store and cache the fixturedef on the
        # node related to the scope.
        scope = arg2scope[argname]
        node = None
        if scope != "function":
            node = get_scope_node(collector, scope)
            if node is None:
                assert scope == "class" and isinstance(collector, _pytest.python.Module)
                # use module-level collector for class-scope (for now)
                node = collector
        if node and argname in node._name2pseudofixturedef:
            # reuse the per-scope cached pseudo fixturedef
            arg2fixturedefs[argname] = [node._name2pseudofixturedef[argname]]
        else:
            fixturedef = FixtureDef(
                fixturemanager,
                "",
                argname,
                get_direct_param_fixture_func,
                arg2scope[argname],
                valuelist,
                False,
                False,
            )
            arg2fixturedefs[argname] = [fixturedef]
            if node is not None:
                node._name2pseudofixturedef[argname] = fixturedef
def getfixturemarker(obj):
    """Return *obj*'s fixture marker, or None when it is absent or when
    reading the attribute raises a test-outcome exception."""
    try:
        marker = getattr(obj, "_pytestfixturefunction", None)
    except TEST_OUTCOME:
        # Some objects raise on attribute access (e.g. flask's `request`
        # proxy); those are certainly not fixture functions.
        return None
    return marker
def get_parametrized_fixture_keys(item, scopenum):
    """ return list of keys for all parametrized arguments which match
    the specified scope. """
    assert scopenum < scopenum_function  # function
    try:
        cs = item.callspec
    except AttributeError:
        # item is not parametrized at all
        pass
    else:
        # cs.indices.items() is random order of argnames. Need to
        # sort this so that different calls to
        # get_parametrized_fixture_keys will be deterministic.
        for argname, param_index in sorted(cs.indices.items()):
            if cs._arg2scopenum[argname] != scopenum:
                continue
            # each key identifies one parametrized resource instance; wider
            # scopes need less identifying context in the key
            if scopenum == 0:  # session
                key = (argname, param_index)
            elif scopenum == 1:  # package
                key = (argname, param_index, item.fspath.dirpath())
            elif scopenum == 2:  # module
                key = (argname, param_index, item.fspath)
            elif scopenum == 3:  # class
                key = (argname, param_index, item.fspath, item.cls)
            yield key
# algorithm for sorting on a per-parametrized resource setup basis
# it is called for scopenum==0 (session) first and performs sorting
# down to the lower scopes such as to minimize number of "high scope"
# setups and teardowns
def reorder_items(items):
    """Reorder test *items* so that items sharing a higher-scoped
    parametrized resource run adjacently, minimizing repeated
    setup/teardown (see the comment block above)."""
    # per scope: item -> ordered set (dict keys) of its argkeys
    argkeys_cache = {}
    # per scope: argkey -> deque of items using that argkey
    items_by_argkey = {}
    for scopenum in range(0, scopenum_function):
        argkeys_cache[scopenum] = d = {}
        items_by_argkey[scopenum] = item_d = defaultdict(deque)
        for item in items:
            keys = order_preserving_dict.fromkeys(
                get_parametrized_fixture_keys(item, scopenum)
            )
            if keys:
                d[item] = keys
                for key in keys:
                    item_d[key].append(item)
    # items are carried as ordered dict keys throughout the recursion
    items = order_preserving_dict.fromkeys(items)
    return list(reorder_items_atscope(items, argkeys_cache, items_by_argkey, 0))
def fix_cache_order(item, argkeys_cache, items_by_argkey):
    """Re-queue *item* at the front of every per-argkey deque it belongs
    to, for every scope (used when an item is pulled forward during
    reordering)."""
    for scope_idx in range(scopenum_function):
        cached_keys = argkeys_cache[scope_idx].get(item, [])
        for argkey in cached_keys:
            items_by_argkey[scope_idx][argkey].appendleft(item)
def reorder_items_atscope(items, argkeys_cache, items_by_argkey, scopenum):
    """Recursively reorder *items* (an ordered dict of item -> None) by
    grouping items that share an argkey of the given scope, then recurse
    into the next-narrower scope for the remainder."""
    # base case: function scope reached, or too few items for grouping to help
    if scopenum >= scopenum_function or len(items) < 3:
        return items
    ignore = set()
    items_deque = deque(items)
    items_done = order_preserving_dict()
    scoped_items_by_argkey = items_by_argkey[scopenum]
    scoped_argkeys_cache = argkeys_cache[scopenum]
    while items_deque:
        no_argkey_group = order_preserving_dict()
        slicing_argkey = None
        while items_deque:
            item = items_deque.popleft()
            if item in items_done or item in no_argkey_group:
                continue
            # argkeys of this item not yet handled at this scope
            argkeys = order_preserving_dict.fromkeys(
                k for k in scoped_argkeys_cache.get(item, []) if k not in ignore
            )
            if not argkeys:
                no_argkey_group[item] = None
            else:
                slicing_argkey, _ = argkeys.popitem()
                # we don't have to remove relevant items from later in the deque because they'll just be ignored
                matching_items = [
                    i for i in scoped_items_by_argkey[slicing_argkey] if i in items
                ]
                for i in reversed(matching_items):
                    fix_cache_order(i, argkeys_cache, items_by_argkey)
                    items_deque.appendleft(i)
                break
        if no_argkey_group:
            # recurse: order the argkey-free items by the next narrower scope
            no_argkey_group = reorder_items_atscope(
                no_argkey_group, argkeys_cache, items_by_argkey, scopenum + 1
            )
            for item in no_argkey_group:
                items_done[item] = None
        ignore.add(slicing_argkey)
    return items_done
def fillfixtures(function):
    """ fill missing funcargs for a test function. """
    try:
        request = function._request
    except AttributeError:
        # XXX this special code path is only expected to execute
        # with the oejskit plugin. It uses classes with funcargs
        # and we thus have to work a bit to allow this.
        fm = function.session._fixturemanager
        fi = fm.getfixtureinfo(function.parent, function.obj, None)
        function._fixtureinfo = fi
        request = function._request = FixtureRequest(function)
        request._fillfixtures()
        # prune out funcargs for jstests
        newfuncargs = {}
        for name in fi.argnames:
            newfuncargs[name] = function.funcargs[name]
        function.funcargs = newfuncargs
    else:
        # normal path: the item already carries a FixtureRequest
        request._fillfixtures()
def get_direct_param_fixture_func(request):
    """Fixture function used for direct parametrization: simply hand back
    the current parameter value."""
    current_param = request.param
    return current_param
@attr.s(slots=True)
class FuncFixtureInfo:
    """Fixture information for one test function: the argument names it
    requests plus the resolved fixture name closure."""

    # original function argument names
    argnames = attr.ib(type=tuple)
    # argnames that function immediately requires. These include argnames +
    # fixture names specified via usefixtures and via autouse=True in fixture
    # definitions.
    initialnames = attr.ib(type=tuple)
    # transitive closure of fixture names the function needs
    names_closure = attr.ib()  # type: List[str]
    # fixture name -> list of FixtureDefs; the last entry's argnames are
    # the ones consulted when pruning
    name2fixturedefs = attr.ib()  # type: Dict[str, List[FixtureDef]]

    def prune_dependency_tree(self):
        """Recompute names_closure from initialnames and name2fixturedefs
        Can only reduce names_closure, which means that the new closure will
        always be a subset of the old one. The order is preserved.
        This method is needed because direct parametrization may shadow some
        of the fixtures that were included in the originally built dependency
        tree. In this way the dependency tree can get pruned, and the closure
        of argnames may get reduced.
        """
        closure = set()
        working_set = set(self.initialnames)
        while working_set:
            argname = working_set.pop()
            # argname may be smth not included in the original names_closure,
            # in which case we ignore it. This currently happens with pseudo
            # FixtureDefs which wrap 'get_direct_param_fixture_func(request)'.
            # So they introduce the new dependency 'request' which might have
            # been missing in the original tree (closure).
            if argname not in closure and argname in self.names_closure:
                closure.add(argname)
                if argname in self.name2fixturedefs:
                    working_set.update(self.name2fixturedefs[argname][-1].argnames)
        # keep the original relative ordering of surviving names; mutate in
        # place so existing references observe the pruned list
        self.names_closure[:] = sorted(closure, key=self.names_closure.index)
class FixtureRequest:
    """ A request for a fixture from a test or fixture function.
    A request object gives access to the requesting test context
    and has an optional ``param`` attribute in case
    the fixture is parametrized indirectly.
    """

    def __init__(self, pyfuncitem):
        self._pyfuncitem = pyfuncitem
        #: fixture for which this request is being performed
        self.fixturename = None
        #: Scope string, one of "function", "class", "module", "session"
        self.scope = "function"
        # fixtures resolved so far for this request
        self._fixture_defs = {}  # type: Dict[str, FixtureDef]
        fixtureinfo = pyfuncitem._fixtureinfo
        self._arg2fixturedefs = fixtureinfo.name2fixturedefs.copy()
        self._arg2index = {}
        self._fixturemanager = pyfuncitem.session._fixturemanager
        self._phase = None  # type: Optional[_RuntestPhase]

    @property
    def fixturenames(self):
        """names of all active fixtures in this request"""
        result = list(self._pyfuncitem._fixtureinfo.names_closure)
        # include dynamically resolved fixtures (getfixturevalue) as well
        result.extend(set(self._fixture_defs).difference(result))
        return result

    @property
    def funcargnames(self):
        """ alias attribute for ``fixturenames`` for pre-2.3 compatibility"""
        warnings.warn(FUNCARGNAMES, stacklevel=2)
        return self.fixturenames

    @property
    def node(self):
        """ underlying collection node (depends on current request scope)"""
        return self._getscopeitem(self.scope)

    def _getnextfixturedef(self, argname):
        """Return the next (less specific) FixtureDef for *argname*,
        raising FixtureLookupError when exhausted or unknown."""
        fixturedefs = self._arg2fixturedefs.get(argname, None)
        if fixturedefs is None:
            # we arrive here because of a dynamic call to
            # getfixturevalue(argname) usage which was naturally
            # not known at parsing/collection time
            parentid = self._pyfuncitem.parent.nodeid
            fixturedefs = self._fixturemanager.getfixturedefs(argname, parentid)
            self._arg2fixturedefs[argname] = fixturedefs
        # fixturedefs list is immutable so we maintain a decreasing index
        index = self._arg2index.get(argname, 0) - 1
        if fixturedefs is None or (-index > len(fixturedefs)):
            raise FixtureLookupError(argname, self)
        self._arg2index[argname] = index
        return fixturedefs[index]

    @property
    def config(self):
        """ the pytest config object associated with this request. """
        return self._pyfuncitem.config

    @scopeproperty()
    def function(self):
        """ test function object if the request has a per-function scope. """
        return self._pyfuncitem.obj

    @scopeproperty("class")
    def cls(self):
        """ class (can be None) where the test function was collected. """
        clscol = self._pyfuncitem.getparent(_pytest.python.Class)
        if clscol:
            return clscol.obj

    @property
    def instance(self):
        """ instance (can be None) on which test function was collected. """
        # unittest support hack, see _pytest.unittest.TestCaseFunction
        try:
            return self._pyfuncitem._testcase
        except AttributeError:
            function = getattr(self, "function", None)
            return getattr(function, "__self__", None)

    @scopeproperty()
    def module(self):
        """ python module object where the test function was collected. """
        return self._pyfuncitem.getparent(_pytest.python.Module).obj

    @scopeproperty()
    def fspath(self) -> py.path.local:
        """ the file system path of the test module which collected this test. """
        return self._pyfuncitem.fspath  # type: ignore[no-any-return]

    @property
    def keywords(self):
        """ keywords/markers dictionary for the underlying node. """
        return self.node.keywords

    @property
    def session(self):
        """ pytest session object. """
        return self._pyfuncitem.session

    def addfinalizer(self, finalizer):
        """ add finalizer/teardown function to be called after the
        last test within the requesting test context finished
        execution. """
        # XXX usually this method is shadowed by fixturedef specific ones
        self._addfinalizer(finalizer, scope=self.scope)

    def _addfinalizer(self, finalizer, scope):
        colitem = self._getscopeitem(scope)
        self._pyfuncitem.session._setupstate.addfinalizer(
            finalizer=finalizer, colitem=colitem
        )

    def applymarker(self, marker):
        """ Apply a marker to a single test function invocation.
        This method is useful if you don't want to have a keyword/marker
        on all function invocations.
        :arg marker: a :py:class:`_pytest.mark.MarkDecorator` object
        created by a call to ``pytest.mark.NAME(...)``.
        """
        self.node.add_marker(marker)

    def raiseerror(self, msg):
        """ raise a FixtureLookupError with the given message. """
        raise self._fixturemanager.FixtureLookupError(None, self, msg)

    def _fillfixtures(self):
        # resolve every fixture in the closure that is not yet in funcargs
        item = self._pyfuncitem
        fixturenames = getattr(item, "fixturenames", self.fixturenames)
        for argname in fixturenames:
            if argname not in item.funcargs:
                item.funcargs[argname] = self.getfixturevalue(argname)

    def getfixturevalue(self, argname):
        """ Dynamically run a named fixture function.
        Declaring fixtures via function argument is recommended where possible.
        But if you can only decide whether to use another fixture at test
        setup time, you may use this function to retrieve it inside a fixture
        or test function body.
        """
        return self._get_active_fixturedef(argname).cached_result[0]

    def _get_active_fixturedef(self, argname):
        """Return the FixtureDef for *argname*, computing and caching its
        value on first access."""
        try:
            return self._fixture_defs[argname]
        except KeyError:
            try:
                fixturedef = self._getnextfixturedef(argname)
            except FixtureLookupError:
                if argname == "request":
                    # the "request" fixture is this object itself
                    cached_result = (self, [0], None)
                    scope = "function"
                    return PseudoFixtureDef(cached_result, scope)
                raise
        # remove indent to prevent the python3 exception
        # from leaking into the call
        self._compute_fixture_value(fixturedef)
        self._fixture_defs[argname] = fixturedef
        return fixturedef

    def _get_fixturestack(self):
        # walk the SubRequest chain outward, collecting FixtureDefs,
        # then reverse so the outermost fixture comes first
        current = self
        values = []
        while 1:
            fixturedef = getattr(current, "_fixturedef", None)
            if fixturedef is None:
                values.reverse()
                return values
            values.append(fixturedef)
            current = current._parent_request

    def _compute_fixture_value(self, fixturedef: "FixtureDef") -> None:
        """
        Creates a SubRequest based on "self" and calls the execute method of the given fixturedef object. This will
        force the FixtureDef object to throw away any previous results and compute a new fixture value, which
        will be stored into the FixtureDef object itself.
        """
        # prepare a subrequest object before calling fixture function
        # (latter managed by fixturedef)
        argname = fixturedef.argname
        funcitem = self._pyfuncitem
        scope = fixturedef.scope
        try:
            param = funcitem.callspec.getparam(argname)
        except (AttributeError, ValueError):
            # not parametrized for this test (or no callspec at all)
            param = NOTSET
            param_index = 0
            has_params = fixturedef.params is not None
            fixtures_not_supported = getattr(funcitem, "nofuncargs", False)
            if has_params and fixtures_not_supported:
                msg = (
                    "{name} does not support fixtures, maybe unittest.TestCase subclass?\n"
                    "Node id: {nodeid}\n"
                    "Function type: {typename}"
                ).format(
                    name=funcitem.name,
                    nodeid=funcitem.nodeid,
                    typename=type(funcitem).__name__,
                )
                fail(msg, pytrace=False)
            if has_params:
                # a parametrized fixture was requested without a parameter:
                # report where the offending request originates
                frame = inspect.stack()[3]
                frameinfo = inspect.getframeinfo(frame[0])
                source_path = py.path.local(frameinfo.filename)
                source_lineno = frameinfo.lineno
                rel_source_path = source_path.relto(funcitem.config.rootdir)
                if rel_source_path:
                    source_path_str = rel_source_path
                else:
                    source_path_str = str(source_path)
                msg = (
                    "The requested fixture has no parameter defined for test:\n"
                    " {}\n\n"
                    "Requested fixture '{}' defined in:\n{}"
                    "\n\nRequested here:\n{}:{}".format(
                        funcitem.nodeid,
                        fixturedef.argname,
                        getlocation(fixturedef.func, funcitem.config.rootdir),
                        source_path_str,
                        source_lineno,
                    )
                )
                fail(msg, pytrace=False)
        else:
            param_index = funcitem.callspec.indices[argname]
            # if a parametrize invocation set a scope it will override
            # the static scope defined with the fixture function
            paramscopenum = funcitem.callspec._arg2scopenum.get(argname)
            if paramscopenum is not None:
                scope = scopes[paramscopenum]
        subrequest = SubRequest(self, scope, param, param_index, fixturedef)
        # check if a higher-level scoped fixture accesses a lower level one
        subrequest._check_scope(argname, self.scope, scope)
        try:
            # call the fixture function
            fixturedef.execute(request=subrequest)
        finally:
            self._schedule_finalizers(fixturedef, subrequest)

    def _schedule_finalizers(self, fixturedef, subrequest):
        # if fixture function failed it might have registered finalizers
        self.session._setupstate.addfinalizer(
            functools.partial(fixturedef.finish, request=subrequest), subrequest.node
        )

    def _check_scope(self, argname, invoking_scope, requested_scope):
        if argname == "request":
            return
        if scopemismatch(invoking_scope, requested_scope):
            # try to report something helpful
            lines = self._factorytraceback()
            fail(
                "ScopeMismatch: You tried to access the %r scoped "
                "fixture %r with a %r scoped request object, "
                "involved factories\n%s"
                % ((requested_scope, argname, invoking_scope, "\n".join(lines))),
                pytrace=False,
            )

    def _factorytraceback(self):
        # one "path:lineno: def name(args)" line per fixture in the stack
        lines = []
        for fixturedef in self._get_fixturestack():
            factory = fixturedef.func
            fs, lineno = getfslineno(factory)
            p = self._pyfuncitem.session.fspath.bestrelpath(fs)
            args = _format_args(factory)
            lines.append("%s:%d: def %s%s" % (p, lineno + 1, factory.__name__, args))
        return lines

    def _getscopeitem(self, scope):
        if scope == "function":
            # this might also be a non-function Item despite its attribute name
            return self._pyfuncitem
        if scope == "package":
            node = get_scope_package(self._pyfuncitem, self._fixturedef)
        else:
            node = get_scope_node(self._pyfuncitem, scope)
        if node is None and scope == "class":
            # fallback to function item itself
            node = self._pyfuncitem
        assert node, 'Could not obtain a node for scope "{}" for function {!r}'.format(
            scope, self._pyfuncitem
        )
        return node

    def __repr__(self):
        return "<FixtureRequest for {!r} _phase={}>".format(self.node, self._phase)
class SubRequest(FixtureRequest):
    """ a sub request for handling getting a fixture from a
    test function/fixture. """

    def __init__(
        self,
        request: "FixtureRequest",
        scope: "_Scope",
        param,
        param_index: int,
        fixturedef: "FixtureDef",
    ) -> None:
        self._parent_request = request
        self.fixturename = fixturedef.argname
        # only expose .param when the fixture is actually parametrized
        if param is not NOTSET:
            self.param = param
        self.param_index = param_index
        self.scope = scope
        self._fixturedef = fixturedef
        # share the parent request's resolution state
        self._pyfuncitem = request._pyfuncitem
        self._fixture_defs = request._fixture_defs
        self._arg2fixturedefs = request._arg2fixturedefs
        self._arg2index = request._arg2index
        self._fixturemanager = request._fixturemanager

    def __repr__(self):
        return "<SubRequest {!r} for {!r} _phase={}>".format(
            self.fixturename, self._pyfuncitem, self._phase
        )

    @property
    def _phase(self) -> "Optional[_RuntestPhase]":  # type: ignore[override]
        # delegate to the root request's phase
        return self._parent_request._phase

    def addfinalizer(self, finalizer):
        # unlike FixtureRequest.addfinalizer, attach to this fixture's
        # own FixtureDef so teardown runs with the fixture's teardown
        self._fixturedef.addfinalizer(finalizer)

    def _schedule_finalizers(self, fixturedef, subrequest):
        # if the executing fixturedef was not explicitly requested in the argument list (via
        # getfixturevalue inside the fixture call) then ensure this fixture def will be finished
        # first
        if fixturedef.argname not in self.fixturenames:
            fixturedef.addfinalizer(
                functools.partial(self._fixturedef.finish, request=self)
            )
        super()._schedule_finalizers(fixturedef, subrequest)
# Scope names ordered widest -> narrowest; index comparisons in
# scopemismatch/scope2index and the scopenum_* values rely on this order.
scopes = "session package module class function".split()
scopenum_function = scopes.index("function")
def scopemismatch(currentscope, newscope):
    """True when *newscope* is narrower than *currentscope* (i.e. a
    wider-scoped context is trying to use a narrower-scoped fixture)."""
    new_index = scopes.index(newscope)
    current_index = scopes.index(currentscope)
    return new_index > current_index
def scope2index(scope, descr, where=None):
    """Look up the index of ``scope`` and raise a descriptive value error
    if not defined.
    """
    if scope in scopes:
        return scopes.index(scope)
    # unknown scope: abort collection with a helpful message
    fail(
        "{} {}got an unexpected scope value '{}'".format(
            descr, "from {} ".format(where) if where else "", scope
        ),
        pytrace=False,
    )
class FixtureLookupError(LookupError):
    """ could not return a requested Fixture (missing or invalid). """

    def __init__(self, argname, request, msg=None):
        self.argname = argname
        self.request = request
        self.fixturestack = request._get_fixturestack()
        self.msg = msg

    def formatrepr(self) -> "FixtureLookupErrorRepr":
        """Build a terminal representation of this lookup failure, with a
        short source excerpt for each involved fixture."""
        tblines = []  # type: List[str]
        addline = tblines.append
        stack = [self.request._pyfuncitem.obj]
        stack.extend(map(lambda x: x.func, self.fixturestack))
        msg = self.msg
        if msg is not None:
            # the last fixture raise an error, let's present
            # it at the requesting side
            stack = stack[:-1]
        for function in stack:
            fspath, lineno = getfslineno(function)
            try:
                lines, _ = inspect.getsourcelines(get_real_func(function))
            except (OSError, IndexError, TypeError):
                error_msg = "file %s, line %s: source code not available"
                addline(error_msg % (fspath, lineno + 1))
            else:
                addline("file {}, line {}".format(fspath, lineno + 1))
                # show source up to and including the "def" line
                for i, line in enumerate(lines):
                    line = line.rstrip()
                    addline(" " + line)
                    if line.lstrip().startswith("def"):
                        break
        if msg is None:
            # no explicit message: compute available fixtures for a hint
            fm = self.request._fixturemanager
            available = set()
            parentid = self.request._pyfuncitem.parent.nodeid
            for name, fixturedefs in fm._arg2fixturedefs.items():
                faclist = list(fm._matchfactories(fixturedefs, parentid))
                if faclist:
                    available.add(name)
            if self.argname in available:
                msg = " recursive dependency involving fixture '{}' detected".format(
                    self.argname
                )
            else:
                msg = "fixture '{}' not found".format(self.argname)
            msg += "\n available fixtures: {}".format(", ".join(sorted(available)))
            msg += "\n use 'pytest --fixtures [testpath]' for help on them."
        # NOTE(review): fspath/lineno are bound by the last loop iteration and
        # would be unbound if ``stack`` were empty -- confirm that cannot occur.
        return FixtureLookupErrorRepr(fspath, lineno, tblines, msg, self.argname)
class FixtureLookupErrorRepr(TerminalRepr):
    """Terminal representation for a FixtureLookupError: traceback-style
    lines followed by the highlighted error message and location."""

    def __init__(self, filename, firstlineno, tblines, errorstring, argname):
        self.tblines = tblines
        self.errorstring = errorstring
        self.filename = filename
        self.firstlineno = firstlineno
        self.argname = argname

    def toterminal(self, tw: "TerminalWriter") -> None:
        # tw.line("FixtureLookupError: %s" %(self.argname), red=True)
        for tbline in self.tblines:
            tw.line(tbline.rstrip())
        lines = self.errorstring.split("\n")
        if lines:
            # first line gets the fail marker, subsequent ones the flow marker
            tw.line(
                "{} {}".format(FormattedExcinfo.fail_marker, lines[0].strip()),
                red=True,
            )
            for line in lines[1:]:
                tw.line(
                    "{} {}".format(FormattedExcinfo.flow_marker, line.strip()),
                    red=True,
                )
        tw.line()
        tw.line("%s:%d" % (self.filename, self.firstlineno + 1))
def fail_fixturefunc(fixturefunc, msg):
    """Fail the test run with *msg*, including the fixture function's
    source and location in the message."""
    fs, lineno = getfslineno(fixturefunc)
    location = "{}:{}".format(fs, lineno + 1)
    source = _pytest._code.Source(fixturefunc)
    fail(msg + ":\n\n" + str(source.indent()) + "\n" + location, pytrace=False)
def call_fixture_func(fixturefunc, request, kwargs):
    """Invoke the fixture function; for generator (yield) fixtures, run to
    the first yield and register the teardown as a finalizer."""
    if not is_generator(fixturefunc):
        return fixturefunc(**kwargs)
    generator = fixturefunc(**kwargs)
    result = next(generator)
    request.addfinalizer(
        functools.partial(_teardown_yield_fixture, fixturefunc, generator)
    )
    return result
def _teardown_yield_fixture(fixturefunc, it):
"""Executes the teardown of a fixture function by advancing the iterator after the
yield and ensure the iteration ends (if not it means there is more than one yield in the function)"""
try:
next(it)
except StopIteration:
pass
else:
fail_fixturefunc(
fixturefunc, "yield_fixture function has more than one 'yield'"
)
def _eval_scope_callable(scope_callable, fixture_name, config):
try:
result = scope_callable(fixture_name=fixture_name, config=config)
except Exception:
raise TypeError(
"Error evaluating {} while defining fixture '{}'.\n"
"Expected a function with the signature (*, fixture_name, config)".format(
scope_callable, fixture_name
)
)
if not isinstance(result, str):
fail(
"Expected {} to return a 'str' while defining fixture '{}', but it returned:\n"
"{!r}".format(scope_callable, fixture_name, result),
pytrace=False,
)
return result
class FixtureDef:
    """ A container for a factory definition. """

    def __init__(
        self,
        fixturemanager,
        baseid,
        argname,
        func,
        scope,
        params,
        unittest=False,
        ids=None,
    ):
        self._fixturemanager = fixturemanager
        # collection-node id where the fixture was defined ("" for pseudo fixtures)
        self.baseid = baseid or ""
        self.has_location = baseid is not None
        # the fixture factory function
        self.func = func
        # the name by which tests request this fixture
        self.argname = argname
        if callable(scope):
            # dynamic scope: resolve it now via the user-supplied callable
            scope = _eval_scope_callable(scope, argname, fixturemanager.config)
        self.scope = scope
        self.scopenum = scope2index(
            scope or "function",
            descr="Fixture '{}'".format(func.__name__),
            where=baseid,
        )
        self.params = params
        # argument names the factory itself requires
        self.argnames = getfuncargnames(func, name=argname, is_method=unittest)
        self.unittest = unittest
        self.ids = ids
        # (result, cache_key, exc_info) of the last execution, or None
        self.cached_result = None
        self._finalizers = []

    def addfinalizer(self, finalizer):
        self._finalizers.append(finalizer)

    def finish(self, request):
        """Run all registered finalizers (LIFO) and invalidate the cached
        result; the first finalizer exception is re-raised."""
        exc = None
        try:
            while self._finalizers:
                try:
                    func = self._finalizers.pop()
                    func()
                except BaseException as e:
                    # XXX Only first exception will be seen by user,
                    # ideally all should be reported.
                    if exc is None:
                        exc = e
            if exc:
                raise exc
        finally:
            hook = self._fixturemanager.session.gethookproxy(request.node.fspath)
            hook.pytest_fixture_post_finalizer(fixturedef=self, request=request)
            # even if finalization fails, we invalidate
            # the cached fixture value and remove
            # all finalizers because they may be bound methods which will
            # keep instances alive
            self.cached_result = None
            self._finalizers = []

    def execute(self, request):
        # get required arguments and register our own finish()
        # with their finalization
        for argname in self.argnames:
            fixturedef = request._get_active_fixturedef(argname)
            if argname != "request":
                fixturedef.addfinalizer(functools.partial(self.finish, request=request))
        my_cache_key = self.cache_key(request)
        if self.cached_result is not None:
            result, cache_key, err = self.cached_result
            # note: comparison with `==` can fail (or be expensive) for e.g.
            # numpy arrays (#6497)
            if my_cache_key is cache_key:
                if err is not None:
                    # re-raise the cached failure with its original traceback
                    _, val, tb = err
                    raise val.with_traceback(tb)
                else:
                    return result
            # we have a previous but differently parametrized fixture instance
            # so we need to tear it down before creating a new one
            self.finish(request)
            assert self.cached_result is None
        hook = self._fixturemanager.session.gethookproxy(request.node.fspath)
        return hook.pytest_fixture_setup(fixturedef=self, request=request)

    def cache_key(self, request):
        # compared by identity in execute(): the param when parametrized,
        # otherwise the parameter index
        return request.param_index if not hasattr(request, "param") else request.param

    def __repr__(self):
        return "<FixtureDef argname={!r} scope={!r} baseid={!r}>".format(
            self.argname, self.scope, self.baseid
        )
def resolve_fixture_function(fixturedef, request):
    """Gets the actual callable that can be called to obtain the fixture value, dealing with unittest-specific
    instances and bound methods.
    """
    fixturefunc = fixturedef.func
    if fixturedef.unittest:
        if request.instance is not None:
            # bind the unbound method to the TestCase instance
            fixturefunc = fixturedef.func.__get__(request.instance)
    else:
        # the fixture function needs to be bound to the actual
        # request.instance so that code working with "fixturedef" behaves
        # as expected.
        if request.instance is not None:
            # handle the case where fixture is defined not in a test class, but some other class
            # (for example a plugin class with a fixture), see #2270
            if hasattr(fixturefunc, "__self__") and not isinstance(
                request.instance, fixturefunc.__self__.__class__
            ):
                return fixturefunc
            fixturefunc = getimfunc(fixturedef.func)
            if fixturefunc != fixturedef.func:
                # rebind the plain function onto the test instance
                fixturefunc = fixturefunc.__get__(request.instance)
    return fixturefunc
def pytest_fixture_setup(fixturedef, request):
    """ Execution of fixture setup. """
    kwargs = {}
    for argname in fixturedef.argnames:
        # dependencies were set up by FixtureDef.execute; reuse cached results
        fixdef = request._get_active_fixturedef(argname)
        assert fixdef.cached_result is not None
        result, arg_cache_key, exc = fixdef.cached_result
        request._check_scope(argname, request.scope, fixdef.scope)
        kwargs[argname] = result
    fixturefunc = resolve_fixture_function(fixturedef, request)
    my_cache_key = fixturedef.cache_key(request)
    try:
        result = call_fixture_func(fixturefunc, request, kwargs)
    except TEST_OUTCOME:
        # cache the failure so repeated requests re-raise instead of re-running
        fixturedef.cached_result = (None, my_cache_key, sys.exc_info())
        raise
    fixturedef.cached_result = (result, my_cache_key, None)
    return result
def _ensure_immutable_ids(ids):
if ids is None:
return
if callable(ids):
return ids
return tuple(ids)
def wrap_function_to_error_out_if_called_directly(function, fixture_marker):
    """Wrap the given fixture function so we can raise an error about it being called directly,
    instead of used as an argument in a test function.
    """
    message = (
        'Fixture "{name}" called directly. Fixtures are not meant to be called directly,\n'
        "but are created automatically when test functions request them as parameters.\n"
        "See https://docs.pytest.org/en/latest/fixture.html for more information about fixtures, and\n"
        "https://docs.pytest.org/en/latest/deprecations.html#calling-fixtures-directly about how to update your code."
    ).format(name=fixture_marker.name or function.__name__)

    # the wrapper keeps the original's metadata but always fails when called
    @functools.wraps(function)
    def result(*args, **kwargs):
        fail(message, pytrace=False)

    # keep reference to the original function in our own custom attribute so we don't unwrap
    # further than this point and lose useful wrappings like @mock.patch (#3774)
    result.__pytest_wrapped__ = _PytestWrapper(function)
    return result
@attr.s(frozen=True)
class FixtureFunctionMarker:
    """Marker attached to a function by the @fixture decorator, recording
    the fixture's configuration."""

    scope = attr.ib()
    params = attr.ib(
        type=Optional[Tuple[object, ...]],
        converter=attr.converters.optional(tuple),
    )
    autouse = attr.ib(default=False)
    # Ignore type because of https://github.com/python/mypy/issues/6172.
    ids = attr.ib(default=None, converter=_ensure_immutable_ids)  # type: ignore
    name = attr.ib(default=None)

    def __call__(self, function):
        """Validate *function*, wrap it against direct calls, and attach
        this marker as its ``_pytestfixturefunction`` attribute."""
        if inspect.isclass(function):
            raise ValueError("class fixtures not supported (maybe in the future)")
        if getattr(function, "_pytestfixturefunction", False):
            raise ValueError(
                "fixture is being applied more than once to the same function"
            )
        function = wrap_function_to_error_out_if_called_directly(function, self)
        name = self.name or function.__name__
        if name == "request":
            location = getlocation(function)
            fail(
                "'request' is a reserved word for fixtures, use another name:\n {}".format(
                    location
                ),
                pytrace=False,
            )
        function._pytestfixturefunction = self
        return function
FIXTURE_ARGS_ORDER = ("scope", "params", "autouse", "ids", "name")
def _parse_fixture_args(callable_or_scope, *args, **kwargs):
arguments = {
"scope": "function",
"params": None,
"autouse": False,
"ids": None,
"name": None,
}
kwargs = {
key: value for key, value in kwargs.items() if arguments.get(key) != value
}
fixture_function = None
if isinstance(callable_or_scope, str):
args = list(args)
args.insert(0, callable_or_scope)
else:
fixture_function = callable_or_scope
positionals = set()
for positional, argument_name in zip(args, FIXTURE_ARGS_ORDER):
arguments[argument_name] = positional
positionals.add(argument_name)
duplicated_kwargs = {kwarg for kwarg in kwargs.keys() if kwarg in positionals}
if duplicated_kwargs:
raise TypeError(
"The fixture arguments are defined as positional and keyword: {}. "
"Use only keyword arguments.".format(", ".join(duplicated_kwargs))
)
if positionals:
warnings.warn(FIXTURE_POSITIONAL_ARGUMENTS, stacklevel=2)
arguments.update(kwargs)
return fixture_function, arguments
def fixture(
    callable_or_scope=None,
    *args,
    scope="function",
    params=None,
    autouse=False,
    ids=None,
    name=None
):
    """Decorator to mark a fixture factory function.
    This decorator can be used, with or without parameters, to define a
    fixture function.
    The name of the fixture function can later be referenced to cause its
    invocation ahead of running tests: test
    modules or classes can use the ``pytest.mark.usefixtures(fixturename)``
    marker.
    Test functions can directly use fixture names as input
    arguments in which case the fixture instance returned from the fixture
    function will be injected.
    Fixtures can provide their values to test functions using ``return`` or ``yield``
    statements. When using ``yield`` the code block after the ``yield`` statement is executed
    as teardown code regardless of the test outcome, and must yield exactly once.
    :arg scope: the scope for which this fixture is shared, one of
                ``"function"`` (default), ``"class"``, ``"module"``,
                ``"package"`` or ``"session"`` (``"package"`` is considered **experimental**
                at this time).
                This parameter may also be a callable which receives ``(fixture_name, config)``
                as parameters, and must return a ``str`` with one of the values mentioned above.
                See :ref:`dynamic scope` in the docs for more information.
    :arg params: an optional list of parameters which will cause multiple
                invocations of the fixture function and all of the tests
                using it.
                The current parameter is available in ``request.param``.
    :arg autouse: if True, the fixture func is activated for all tests that
                can see it.  If False (the default) then an explicit
                reference is needed to activate the fixture.
    :arg ids: list of string ids each corresponding to the params
                so that they are part of the test id. If no ids are provided
                they will be generated automatically from the params.
    :arg name: the name of the fixture. This defaults to the name of the
                decorated function. If a fixture is used in the same module in
                which it is defined, the function name of the fixture will be
                shadowed by the function arg that requests the fixture; one way
                to resolve this is to name the decorated function
                ``fixture_<fixturename>`` and then use
                ``@pytest.fixture(name='<fixturename>')``.
    """
    if params is not None:
        params = list(params)
    # Normalize the two invocation styles (``@fixture`` vs ``@fixture(...)``)
    # plus the deprecated positional-argument form.
    fixture_function, arguments = _parse_fixture_args(
        callable_or_scope,
        *args,
        scope=scope,
        params=params,
        autouse=autouse,
        ids=ids,
        name=name,
    )
    scope = arguments.get("scope")
    params = arguments.get("params")
    autouse = arguments.get("autouse")
    ids = arguments.get("ids")
    name = arguments.get("name")
    if fixture_function and params is None and autouse is False:
        # direct decoration: apply the marker immediately (ids are irrelevant
        # here because there are no params).
        return FixtureFunctionMarker(scope, params, autouse, name=name)(
            fixture_function
        )
    # Decorator-factory form: return the marker to be applied later.
    return FixtureFunctionMarker(scope, params, autouse, ids=ids, name=name)
def yield_fixture(
    callable_or_scope=None,
    *args,
    scope="function",
    params=None,
    autouse=False,
    ids=None,
    name=None
):
    """Deprecated alias for :py:func:`pytest.fixture`.

    .. deprecated:: 3.0
        Use :py:func:`pytest.fixture` directly instead.
    """
    # Simply delegate; yield-style fixtures are handled by fixture() itself.
    forwarded = dict(scope=scope, params=params, autouse=autouse, ids=ids, name=name)
    return fixture(callable_or_scope, *args, **forwarded)
# Default marker applied to legacy fixture factories (presumably the
# deprecated ``pytest_funcarg__`` prefix convention — verify at call sites).
defaultfuncargprefixmarker = fixture()
@fixture(scope="session")
def pytestconfig(request):
    """Session-scoped fixture that returns the :class:`_pytest.config.Config` object.
    Example::
        def test_foo(pytestconfig):
            if pytestconfig.getoption("verbose") > 0:
                ...
    """
    # One shared config per session, reached through the request fixture.
    return request.config
def pytest_addoption(parser):
    """Register the ``usefixtures`` ini option (project-wide default fixtures)."""
    parser.addini(
        "usefixtures",
        type="args",
        default=[],
        help="list of default fixtures to be used with this project",
    )
class FixtureManager:
    """
    pytest fixtures definitions and information is stored and managed
    from this class.
    During collection fm.parsefactories() is called multiple times to parse
    fixture function definitions into FixtureDef objects and internal
    data structures.
    During collection of test functions, metafunc-mechanics instantiate
    a FuncFixtureInfo object which is cached per node/func-name.
    This FuncFixtureInfo object is later retrieved by Function nodes
    which themselves offer a fixturenames attribute.
    The FuncFixtureInfo object holds information about fixtures and FixtureDefs
    relevant for a particular function. An initial list of fixtures is
    assembled like this:
    - ini-defined usefixtures
    - autouse-marked fixtures along the collection chain up from the function
    - usefixtures markers at module/class/function level
    - test function funcargs
    Subsequently the funcfixtureinfo.fixturenames attribute is computed
    as the closure of the fixtures needed to setup the initial fixtures,
    i. e. fixtures needed by fixture functions themselves are appended
    to the fixturenames list.
    Upon the test-setup phases all fixturenames are instantiated, retrieved
    by a lookup of their FuncFixtureInfo.
    """
    FixtureLookupError = FixtureLookupError
    FixtureLookupErrorRepr = FixtureLookupErrorRepr
    def __init__(self, session):
        self.session = session
        self.config = session.config
        # fixture name -> list of FixtureDefs; later entries are more
        # specific (closer to the test) and therefore take precedence.
        self._arg2fixturedefs = {}
        self._holderobjseen = set()
        # (base nodeid, autouse fixture names) pairs, seeded from the ini.
        self._nodeid_and_autousenames = [("", self.config.getini("usefixtures"))]
        session.config.pluginmanager.register(self, "funcmanage")
    def _get_direct_parametrize_args(self, node):
        """This function returns all the direct parametrization
        arguments of a node, so we don't mistake them for fixtures
        Check https://github.com/pytest-dev/pytest/issues/5036
        This things are done later as well when dealing with parametrization
        so this could be improved
        """
        parametrize_argnames = []
        for marker in node.iter_markers(name="parametrize"):
            if not marker.kwargs.get("indirect", False):
                try:
                    p_argnames, _ = ParameterSet._parse_parametrize_args(
                        *marker.args, **marker.kwargs
                    )
                except TypeError:
                    pass
                else:
                    parametrize_argnames.extend(p_argnames)
        return parametrize_argnames
    def getfixtureinfo(self, node, func, cls, funcargs=True):
        """Build the FuncFixtureInfo for test function *func* collected at *node*."""
        if funcargs and not getattr(node, "nofuncargs", False):
            argnames = getfuncargnames(func, name=node.name, cls=cls)
        else:
            argnames = ()
        # usefixtures markers contribute names in addition to the funcargs.
        usefixtures = itertools.chain.from_iterable(
            mark.args for mark in node.iter_markers(name="usefixtures")
        )
        initialnames = tuple(usefixtures) + argnames
        fm = node.session._fixturemanager
        initialnames, names_closure, arg2fixturedefs = fm.getfixtureclosure(
            initialnames, node, ignore_args=self._get_direct_parametrize_args(node)
        )
        return FuncFixtureInfo(argnames, initialnames, names_closure, arg2fixturedefs)
    def pytest_plugin_registered(self, plugin):
        """Parse fixture factories from a newly registered plugin/conftest."""
        nodeid = None
        try:
            p = py.path.local(plugin.__file__).realpath()
        except AttributeError:
            pass
        else:
            from _pytest import nodes
            # construct the base nodeid which is later used to check
            # what fixtures are visible for particular tests (as denoted
            # by their test id)
            if p.basename.startswith("conftest.py"):
                nodeid = p.dirpath().relto(self.config.rootdir)
                if p.sep != nodes.SEP:
                    nodeid = nodeid.replace(p.sep, nodes.SEP)
        self.parsefactories(plugin, nodeid)
    def _getautousenames(self, nodeid):
        """ return a tuple of fixture names to be used. """
        autousenames = []
        for baseid, basenames in self._nodeid_and_autousenames:
            if nodeid.startswith(baseid):
                if baseid:
                    i = len(baseid)
                    nextchar = nodeid[i : i + 1]
                    # only match whole nodeid components, not name prefixes
                    if nextchar and nextchar not in ":/":
                        continue
                autousenames.extend(basenames)
        return autousenames
    def getfixtureclosure(self, fixturenames, parentnode, ignore_args=()):
        # collect the closure of all fixtures , starting with the given
        # fixturenames as the initial set. As we have to visit all
        # factory definitions anyway, we also return an arg2fixturedefs
        # mapping so that the caller can reuse it and does not have
        # to re-discover fixturedefs again for each fixturename
        # (discovering matching fixtures for a given name/node is expensive)
        parentid = parentnode.nodeid
        fixturenames_closure = self._getautousenames(parentid)
        def merge(otherlist):
            for arg in otherlist:
                if arg not in fixturenames_closure:
                    fixturenames_closure.append(arg)
        merge(fixturenames)
        # at this point, fixturenames_closure contains what we call "initialnames",
        # which is a set of fixturenames the function immediately requests. We
        # need to return it as well, so save this.
        initialnames = tuple(fixturenames_closure)
        arg2fixturedefs = {}
        lastlen = -1
        # fixed-point iteration: keep merging the argnames of discovered
        # fixturedefs until the closure stops growing.
        while lastlen != len(fixturenames_closure):
            lastlen = len(fixturenames_closure)
            for argname in fixturenames_closure:
                if argname in ignore_args:
                    continue
                if argname in arg2fixturedefs:
                    continue
                fixturedefs = self.getfixturedefs(argname, parentid)
                if fixturedefs:
                    arg2fixturedefs[argname] = fixturedefs
                    merge(fixturedefs[-1].argnames)
        def sort_by_scope(arg_name):
            try:
                fixturedefs = arg2fixturedefs[arg_name]
            except KeyError:
                return scopes.index("function")
            else:
                return fixturedefs[-1].scopenum
        # broader-scoped fixtures come first (sort is stable).
        fixturenames_closure.sort(key=sort_by_scope)
        return initialnames, fixturenames_closure, arg2fixturedefs
    def pytest_generate_tests(self, metafunc):
        """Parametrize tests that request a parametrized fixture, unless the
        same argname was already parametrized directly on the test."""
        for argname in metafunc.fixturenames:
            faclist = metafunc._arg2fixturedefs.get(argname)
            if faclist:
                fixturedef = faclist[-1]
                if fixturedef.params is not None:
                    markers = list(metafunc.definition.iter_markers("parametrize"))
                    for parametrize_mark in markers:
                        if "argnames" in parametrize_mark.kwargs:
                            argnames = parametrize_mark.kwargs["argnames"]
                        else:
                            argnames = parametrize_mark.args[0]
                        if not isinstance(argnames, (tuple, list)):
                            argnames = [
                                x.strip() for x in argnames.split(",") if x.strip()
                            ]
                        if argname in argnames:
                            break
                    else:
                        metafunc.parametrize(
                            argname,
                            fixturedef.params,
                            indirect=True,
                            scope=fixturedef.scope,
                            ids=fixturedef.ids,
                        )
            else:
                continue # will raise FixtureLookupError at setup time
    def pytest_collection_modifyitems(self, items):
        # separate parametrized setups
        items[:] = reorder_items(items)
    def parsefactories(self, node_or_obj, nodeid=NOTSET, unittest=False):
        """Scan *node_or_obj* for fixture factories and register FixtureDefs."""
        if nodeid is not NOTSET:
            holderobj = node_or_obj
        else:
            holderobj = node_or_obj.obj
            nodeid = node_or_obj.nodeid
        if holderobj in self._holderobjseen:
            return
        self._holderobjseen.add(holderobj)
        autousenames = []
        for name in dir(holderobj):
            # The attribute can be an arbitrary descriptor, so the attribute
            # access below can raise. safe_getattr() ignores such exceptions.
            obj = safe_getattr(holderobj, name, None)
            marker = getfixturemarker(obj)
            if not isinstance(marker, FixtureFunctionMarker):
                # magic globals with __getattr__ might have got us a wrong
                # fixture attribute
                continue
            if marker.name:
                name = marker.name
            # during fixture definition we wrap the original fixture function
            # to issue a warning if called directly, so here we unwrap it in order to not emit the warning
            # when pytest itself calls the fixture function
            obj = get_real_method(obj, holderobj)
            fixture_def = FixtureDef(
                self,
                nodeid,
                name,
                obj,
                marker.scope,
                marker.params,
                unittest=unittest,
                ids=marker.ids,
            )
            faclist = self._arg2fixturedefs.setdefault(name, [])
            if fixture_def.has_location:
                faclist.append(fixture_def)
            else:
                # fixturedefs with no location are at the front
                # so this inserts the current fixturedef after the
                # existing fixturedefs from external plugins but
                # before the fixturedefs provided in conftests.
                i = len([f for f in faclist if not f.has_location])
                faclist.insert(i, fixture_def)
            if marker.autouse:
                autousenames.append(name)
        if autousenames:
            self._nodeid_and_autousenames.append((nodeid or "", autousenames))
    def getfixturedefs(self, argname, nodeid):
        """
        Gets a list of fixtures which are applicable to the given node id.
        :param str argname: name of the fixture to search for
        :param str nodeid: full node id of the requesting test.
        :return: list[FixtureDef]
        """
        try:
            fixturedefs = self._arg2fixturedefs[argname]
        except KeyError:
            return None
        return tuple(self._matchfactories(fixturedefs, nodeid))
    def _matchfactories(self, fixturedefs, nodeid):
        """Yield only the fixturedefs whose baseid is an ancestor of *nodeid*."""
        from _pytest import nodes
        for fixturedef in fixturedefs:
            if nodes.ischildnode(fixturedef.baseid, nodeid):
                yield fixturedef
# NOTE(review): the four lines here ("| 37.590849", "| 118", "| 0.622647",
# "|") were dataset-export metadata residue separating two concatenated
# chunks of this file; commented out so the file stays parseable.
import functools
import inspect
import itertools
import sys
import warnings
from collections import defaultdict
from collections import deque
from typing import Dict
from typing import List
from typing import Optional
from typing import Tuple
import attr
import py.path
import _pytest
from _pytest._code.code import FormattedExcinfo
from _pytest._code.code import TerminalRepr
from _pytest._code.source import getfslineno
from _pytest.compat import _format_args
from _pytest.compat import _PytestWrapper
from _pytest.compat import get_real_func
from _pytest.compat import get_real_method
from _pytest.compat import getfuncargnames
from _pytest.compat import getimfunc
from _pytest.compat import getlocation
from _pytest.compat import is_generator
from _pytest.compat import NOTSET
from _pytest.compat import order_preserving_dict
from _pytest.compat import safe_getattr
from _pytest.compat import TYPE_CHECKING
from _pytest.deprecated import FIXTURE_POSITIONAL_ARGUMENTS
from _pytest.deprecated import FUNCARGNAMES
from _pytest.mark import ParameterSet
from _pytest.outcomes import fail
from _pytest.outcomes import TEST_OUTCOME
if TYPE_CHECKING:
from typing import Type
from typing_extensions import Literal
from _pytest import nodes
from _pytest._io import TerminalWriter
from _pytest.main import Session
from _pytest.runner import _RuntestPhase
_Scope = Literal["session", "package", "module", "class", "function"]
@attr.s(frozen=True)
class PseudoFixtureDef:
    """Minimal FixtureDef stand-in used for the built-in ``request`` fixture."""
    cached_result = attr.ib()
    scope = attr.ib()
def pytest_sessionstart(session: "Session"):
    """Fill the scope-name -> node-class table and install the session's
    FixtureManager.

    Imports happen here, not at module level, to avoid import cycles
    between _pytest.fixtures and the collector modules.
    """
    import _pytest.python
    import _pytest.nodes
    # Fix: import _pytest.main explicitly instead of relying on the
    # ``main`` attribute having been set on the ``_pytest`` package by an
    # earlier, unrelated ``import _pytest.main`` elsewhere.
    import _pytest.main
    scopename2class.update(
        {
            "package": _pytest.python.Package,
            "class": _pytest.python.Class,
            "module": _pytest.python.Module,
            "function": _pytest.nodes.Item,
            "session": _pytest.main.Session,
        }
    )
    session._fixturemanager = FixtureManager(session)
# scope name -> collector node class; populated by pytest_sessionstart
# (the collector classes cannot be imported at module load time).
scopename2class = {}
# Which FixtureRequest attributes are legal to read within each scope;
# consumed by scopeproperty() below to raise AttributeError elsewhere.
scope2props = dict(session=())
scope2props["package"] = ("fspath",)
scope2props["module"] = ("fspath", "module")
scope2props["class"] = scope2props["module"] + ("cls",)
scope2props["instance"] = scope2props["class"] + ("instance",)
scope2props["function"] = scope2props["instance"] + ("function", "keywords")
def scopeproperty(name=None, doc=None):
    """Return a decorator that turns a FixtureRequest accessor into a
    property which is readable only in the scopes listing it in
    ``scope2props``; in other scopes it raises AttributeError.

    :param name: display name used in the error message (defaults to the
        accessor's ``__name__``).
    :param doc: docstring for the property; defaults to the accessor's
        docstring.  (Fix: this parameter was previously accepted but
        silently ignored.)
    """
    def decoratescope(func):
        scopename = name or func.__name__
        def provide(self):
            if func.__name__ in scope2props[self.scope]:
                return func(self)
            raise AttributeError(
                "{} not available in {}-scoped context".format(scopename, self.scope)
            )
        # honor an explicitly supplied doc; fall back to the accessor's own.
        return property(provide, None, None, doc if doc is not None else func.__doc__)
    return decoratescope
def get_scope_package(node, fixturedef):
    """Walk up from *node* to the Package collector whose ``__init__.py``
    nodeid matches *fixturedef*'s baseid; fall back to the session."""
    import pytest
    wanted_nodeid = "{}/{}".format(fixturedef.baseid, "__init__.py")
    candidate = node
    while candidate:
        if type(candidate) is pytest.Package and candidate.nodeid == wanted_nodeid:
            break
        candidate = candidate.parent
    if candidate is None:
        return node.session
    return candidate
def get_scope_node(node, scope):
    """Return the ancestor collector of *node* matching *scope* (or None)."""
    if scope not in scopename2class:
        raise ValueError("unknown scope")
    return node.getparent(scopename2class[scope])
def add_funcarg_pseudo_fixture_def(collector, metafunc, fixturemanager):
    """Convert direct funcarg values in the callspecs into params backed by
    pseudo FixtureDefs, so fixture setup can treat them uniformly."""
    if not metafunc._calls[0].funcargs:
        return
    arg2params = {}
    arg2scope = {}
    for callspec in metafunc._calls:
        for argname, argvalue in callspec.funcargs.items():
            assert argname not in callspec.params
            # move the direct value into params and record its index
            callspec.params[argname] = argvalue
            arg2params_list = arg2params.setdefault(argname, [])
            callspec.indices[argname] = len(arg2params_list)
            arg2params_list.append(argvalue)
            if argname not in arg2scope:
                scopenum = callspec._arg2scopenum.get(argname, scopenum_function)
                arg2scope[argname] = scopes[scopenum]
        callspec.funcargs.clear()
    # register artificial FixtureDef's so that later at test execution
    # time we can rely on a proper FixtureDef to exist for fixture setup.
    arg2fixturedefs = metafunc._arg2fixturedefs
    for argname, valuelist in arg2params.items():
        # if we have a scope that is higher than function we need
        # to make sure we only ever create an according fixturedef on
        # a per-scope basis. We thus store and cache the fixturedef on the
        # node related to the scope.
        scope = arg2scope[argname]
        node = None
        if scope != "function":
            node = get_scope_node(collector, scope)
            if node is None:
                assert scope == "class" and isinstance(collector, _pytest.python.Module)
                # use module-level collector for class-scope (for now)
                node = collector
        if node and argname in node._name2pseudofixturedef:
            arg2fixturedefs[argname] = [node._name2pseudofixturedef[argname]]
        else:
            fixturedef = FixtureDef(
                fixturemanager,
                "",
                argname,
                get_direct_param_fixture_func,
                arg2scope[argname],
                valuelist,
                False,
                False,
            )
            arg2fixturedefs[argname] = [fixturedef]
            if node is not None:
                node._name2pseudofixturedef[argname] = fixturedef
def getfixturemarker(obj):
    """Return the FixtureFunctionMarker stored on *obj*, or None.

    Attribute access is guarded because some objects (e.g. flask's
    ``request`` proxy) raise test-outcome exceptions when touched; such
    objects are clearly not fixture functions.
    """
    marker = None
    try:
        marker = getattr(obj, "_pytestfixturefunction", None)
    except TEST_OUTCOME:
        pass
    return marker
def get_parametrized_fixture_keys(item, scopenum):
    """ return list of keys for all parametrized arguments which match
    the specified scope. """
    assert scopenum < scopenum_function
    try:
        cs = item.callspec
    except AttributeError:
        pass
    else:
        for argname, param_index in sorted(cs.indices.items()):
            if cs._arg2scopenum[argname] != scopenum:
                continue
            # scopenum indexes into scopes: 0=session, 1=package, 2=module,
            # 3=class; narrower scopes add more components to the key.
            if scopenum == 0:
                key = (argname, param_index)
            elif scopenum == 1:
                key = (argname, param_index, item.fspath.dirpath())
            elif scopenum == 2:
                key = (argname, param_index, item.fspath)
            elif scopenum == 3:
                key = (argname, param_index, item.fspath, item.cls)
            yield key
def reorder_items(items):
    """Reorder test items so that items sharing the same parametrized
    higher-than-function-scope fixture keys are grouped together."""
    # per-scope caches: item -> its argkeys, and argkey -> items using it
    argkeys_cache = {}
    items_by_argkey = {}
    for scopenum in range(0, scopenum_function):
        argkeys_cache[scopenum] = d = {}
        items_by_argkey[scopenum] = item_d = defaultdict(deque)
        for item in items:
            keys = order_preserving_dict.fromkeys(
                get_parametrized_fixture_keys(item, scopenum)
            )
            if keys:
                d[item] = keys
                for key in keys:
                    item_d[key].append(item)
    items = order_preserving_dict.fromkeys(items)
    return list(reorder_items_atscope(items, argkeys_cache, items_by_argkey, 0))
def fix_cache_order(item, argkeys_cache, items_by_argkey):
    """Re-front *item* in every per-argkey deque after it was pulled forward."""
    for scope_idx in range(scopenum_function):
        per_item_keys = argkeys_cache[scope_idx]
        per_key_items = items_by_argkey[scope_idx]
        for argkey in per_item_keys.get(item, []):
            per_key_items[argkey].appendleft(item)
def reorder_items_atscope(items, argkeys_cache, items_by_argkey, scopenum):
    """Recursively group *items* by shared argkeys at *scopenum*, then refine
    the keyless groups at the next (narrower) scope."""
    if scopenum >= scopenum_function or len(items) < 3:
        return items
    ignore = set()
    items_deque = deque(items)
    items_done = order_preserving_dict()
    scoped_items_by_argkey = items_by_argkey[scopenum]
    scoped_argkeys_cache = argkeys_cache[scopenum]
    while items_deque:
        no_argkey_group = order_preserving_dict()
        slicing_argkey = None
        while items_deque:
            item = items_deque.popleft()
            if item in items_done or item in no_argkey_group:
                continue
            argkeys = order_preserving_dict.fromkeys(
                k for k in scoped_argkeys_cache.get(item, []) if k not in ignore
            )
            if not argkeys:
                no_argkey_group[item] = None
            else:
                # pull all items sharing the chosen argkey to the front and
                # restart the inner scan from them
                slicing_argkey, _ = argkeys.popitem()
                matching_items = [
                    i for i in scoped_items_by_argkey[slicing_argkey] if i in items
                ]
                for i in reversed(matching_items):
                    fix_cache_order(i, argkeys_cache, items_by_argkey)
                    items_deque.appendleft(i)
                break
        if no_argkey_group:
            # keyless items at this scope are ordered by the narrower scope
            no_argkey_group = reorder_items_atscope(
                no_argkey_group, argkeys_cache, items_by_argkey, scopenum + 1
            )
            for item in no_argkey_group:
                items_done[item] = None
        ignore.add(slicing_argkey)
    return items_done
def fillfixtures(function):
    """ fill missing funcargs for a test function. """
    try:
        request = function._request
    except AttributeError:
        # XXX this special code path is only expected to execute
        # with the oejskit plugin.  It uses classes with funcargs
        # and we thus have to work a bit to allow this.
        fm = function.session._fixturemanager
        fi = fm.getfixtureinfo(function.parent, function.obj, None)
        function._fixtureinfo = fi
        request = function._request = FixtureRequest(function)
        request._fillfixtures()
        # prune out funcargs for jstests
        newfuncargs = {}
        for name in fi.argnames:
            newfuncargs[name] = function.funcargs[name]
        function.funcargs = newfuncargs
    else:
        request._fillfixtures()
def get_direct_param_fixture_func(request):
    """Fixture function backing direct (pseudo-fixture) parametrization."""
    value = request.param
    return value
@attr.s(slots=True)
class FuncFixtureInfo:
    """Fixture-related information for a test function, cached per node/name.

    ``names_closure``/``name2fixturedefs`` describe the transitive closure of
    fixtures the function needs, including fixtures needed by its fixtures.
    """
    # fixture names the function requests directly as arguments
    argnames = attr.ib(type=tuple)
    # argnames plus names from usefixtures markers and autouse fixtures
    initialnames = attr.ib(type=tuple)
    names_closure = attr.ib()
    name2fixturedefs = attr.ib()
    def prune_dependency_tree(self):
        """Recompute names_closure from initialnames, dropping names that are
        no longer reachable while keeping the original relative order."""
        closure = set()
        working_set = set(self.initialnames)
        while working_set:
            argname = working_set.pop()
            if argname not in closure and argname in self.names_closure:
                closure.add(argname)
                if argname in self.name2fixturedefs:
                    working_set.update(self.name2fixturedefs[argname][-1].argnames)
        self.names_closure[:] = sorted(closure, key=self.names_closure.index)
class FixtureRequest:
    """A request for a fixture from a test or fixture function.

    Instances are passed to fixture functions as the ``request`` argument and
    give access to the requesting test context (node, config, scope, ...).
    """
    def __init__(self, pyfuncitem):
        self._pyfuncitem = pyfuncitem
        # name of the fixture currently being resolved (None for the test itself)
        self.fixturename = None
        # the scope this request operates in
        self.scope = "function"
        self._fixture_defs = {}
        fixtureinfo = pyfuncitem._fixtureinfo
        self._arg2fixturedefs = fixtureinfo.name2fixturedefs.copy()
        # per-argname override depth, used to stack overridden fixtures
        self._arg2index = {}
        self._fixturemanager = pyfuncitem.session._fixturemanager
        self._phase = None
    @property
    def fixturenames(self):
        """names of all active fixtures in this request."""
        result = list(self._pyfuncitem._fixtureinfo.names_closure)
        # include fixtures pulled in dynamically via getfixturevalue()
        result.extend(set(self._fixture_defs).difference(result))
        return result
    @property
    def funcargnames(self):
        """alias for ``fixturenames``, deprecated."""
        warnings.warn(FUNCARGNAMES, stacklevel=2)
        return self.fixturenames
    @property
    def node(self):
        """ underlying collection node (depends on current request scope)"""
        return self._getscopeitem(self.scope)
    def _getnextfixturedef(self, argname):
        """Return the next FixtureDef for *argname*, walking the override stack."""
        fixturedefs = self._arg2fixturedefs.get(argname, None)
        if fixturedefs is None:
            # we arrive here because of a dynamic call to
            # getfixturevalue(argname) usage which was naturally
            # not known at parsing/collection time
            parentid = self._pyfuncitem.parent.nodeid
            fixturedefs = self._fixturemanager.getfixturedefs(argname, parentid)
            self._arg2fixturedefs[argname] = fixturedefs
        # negative index walks from the most specific definition backwards
        index = self._arg2index.get(argname, 0) - 1
        if fixturedefs is None or (-index > len(fixturedefs)):
            raise FixtureLookupError(argname, self)
        self._arg2index[argname] = index
        return fixturedefs[index]
    @property
    def config(self):
        """the pytest config object associated with this request."""
        return self._pyfuncitem.config
    @scopeproperty()
    def function(self):
        """test function object if the request has a per-function scope."""
        return self._pyfuncitem.obj
    @scopeproperty("class")
    def cls(self):
        """class (can be None) where the test function was collected."""
        clscol = self._pyfuncitem.getparent(_pytest.python.Class)
        if clscol:
            return clscol.obj
    @property
    def instance(self):
        """instance (can be None) on which the test function was collected."""
        try:
            return self._pyfuncitem._testcase
        except AttributeError:
            function = getattr(self, "function", None)
            return getattr(function, "__self__", None)
    @scopeproperty()
    def module(self):
        """python module object where the test function was collected."""
        return self._pyfuncitem.getparent(_pytest.python.Module).obj
    @scopeproperty()
    def fspath(self) -> py.path.local:
        """the file system path of the test module which collected this test."""
        return self._pyfuncitem.fspath
    @property
    def keywords(self):
        """keywords/markers dictionary for the underlying node."""
        return self.node.keywords
    @property
    def session(self):
        """pytest session object."""
        return self._pyfuncitem.session
    def addfinalizer(self, finalizer):
        """add finalizer/teardown function to be called after the last test
        within the requesting test context finished execution."""
        # XXX usually this method is shadowed by fixturedef specific ones
        self._addfinalizer(finalizer, scope=self.scope)
    def _addfinalizer(self, finalizer, scope):
        colitem = self._getscopeitem(scope)
        self._pyfuncitem.session._setupstate.addfinalizer(
            finalizer=finalizer, colitem=colitem
        )
    def applymarker(self, marker):
        """Apply a marker to a single test function invocation."""
        self.node.add_marker(marker)
    def raiseerror(self, msg):
        """raise a FixtureLookupError with the given message."""
        raise self._fixturemanager.FixtureLookupError(None, self, msg)
    def _fillfixtures(self):
        """Resolve every fixture in the closure into ``item.funcargs``."""
        item = self._pyfuncitem
        fixturenames = getattr(item, "fixturenames", self.fixturenames)
        for argname in fixturenames:
            if argname not in item.funcargs:
                item.funcargs[argname] = self.getfixturevalue(argname)
    def getfixturevalue(self, argname):
        """Dynamically resolve and return the value of fixture *argname*."""
        return self._get_active_fixturedef(argname).cached_result[0]
    def _get_active_fixturedef(self, argname):
        """Return the (executed) FixtureDef for *argname*, caching it."""
        try:
            return self._fixture_defs[argname]
        except KeyError:
            try:
                fixturedef = self._getnextfixturedef(argname)
            except FixtureLookupError:
                if argname == "request":
                    # "request" is built-in: hand out a pseudo fixturedef
                    cached_result = (self, [0], None)
                    scope = "function"
                    return PseudoFixtureDef(cached_result, scope)
                raise
        self._compute_fixture_value(fixturedef)
        self._fixture_defs[argname] = fixturedef
        return fixturedef
    def _get_fixturestack(self):
        """Return the chain of FixtureDefs from outermost to this request."""
        current = self
        values = []
        while 1:
            fixturedef = getattr(current, "_fixturedef", None)
            if fixturedef is None:
                values.reverse()
                return values
            values.append(fixturedef)
            current = current._parent_request
    def _compute_fixture_value(self, fixturedef: "FixtureDef") -> None:
        """Create a SubRequest for *fixturedef* and execute it, storing the
        result on the fixturedef itself; fails with a helpful message when a
        parametrized fixture is requested without a parameter."""
        argname = fixturedef.argname
        funcitem = self._pyfuncitem
        scope = fixturedef.scope
        try:
            param = funcitem.callspec.getparam(argname)
        except (AttributeError, ValueError):
            param = NOTSET
            param_index = 0
            has_params = fixturedef.params is not None
            fixtures_not_supported = getattr(funcitem, "nofuncargs", False)
            if has_params and fixtures_not_supported:
                msg = (
                    "{name} does not support fixtures, maybe unittest.TestCase subclass?\n"
                    "Node id: {nodeid}\n"
                    "Function type: {typename}"
                ).format(
                    name=funcitem.name,
                    nodeid=funcitem.nodeid,
                    typename=type(funcitem).__name__,
                )
                fail(msg, pytrace=False)
            if has_params:
                # walk up the stack to report where the fixture was requested
                frame = inspect.stack()[3]
                frameinfo = inspect.getframeinfo(frame[0])
                source_path = py.path.local(frameinfo.filename)
                source_lineno = frameinfo.lineno
                rel_source_path = source_path.relto(funcitem.config.rootdir)
                if rel_source_path:
                    source_path_str = rel_source_path
                else:
                    source_path_str = str(source_path)
                msg = (
                    "The requested fixture has no parameter defined for test:\n"
                    "    {}\n\n"
                    "Requested fixture '{}' defined in:\n{}"
                    "\n\nRequested here:\n{}:{}".format(
                        funcitem.nodeid,
                        fixturedef.argname,
                        getlocation(fixturedef.func, funcitem.config.rootdir),
                        source_path_str,
                        source_lineno,
                    )
                )
                fail(msg, pytrace=False)
        else:
            param_index = funcitem.callspec.indices[argname]
            # if a parametrize invocation set a scope it will override
            # the static scope defined with the fixture function
            paramscopenum = funcitem.callspec._arg2scopenum.get(argname)
            if paramscopenum is not None:
                scope = scopes[paramscopenum]
        subrequest = SubRequest(self, scope, param, param_index, fixturedef)
        # check if a higher-level scoped fixture accesses a lower level one
        subrequest._check_scope(argname, self.scope, scope)
        try:
            # call the fixture function
            fixturedef.execute(request=subrequest)
        finally:
            self._schedule_finalizers(fixturedef, subrequest)
    def _schedule_finalizers(self, fixturedef, subrequest):
        # if fixture function failed it might have registered finalizers
        self.session._setupstate.addfinalizer(
            functools.partial(fixturedef.finish, request=subrequest), subrequest.node
        )
    def _check_scope(self, argname, invoking_scope, requested_scope):
        """Fail when a broader-scoped fixture requests a narrower-scoped one."""
        if argname == "request":
            return
        if scopemismatch(invoking_scope, requested_scope):
            # try to report something helpful
            lines = self._factorytraceback()
            fail(
                "ScopeMismatch: You tried to access the %r scoped "
                "fixture %r with a %r scoped request object, "
                "involved factories\n%s"
                % ((requested_scope, argname, invoking_scope, "\n".join(lines))),
                pytrace=False,
            )
    def _factorytraceback(self):
        """Return 'path:line: def name(args)' lines for the fixture stack."""
        lines = []
        for fixturedef in self._get_fixturestack():
            factory = fixturedef.func
            fs, lineno = getfslineno(factory)
            p = self._pyfuncitem.session.fspath.bestrelpath(fs)
            args = _format_args(factory)
            lines.append("%s:%d:  def %s%s" % (p, lineno + 1, factory.__name__, args))
        return lines
    def _getscopeitem(self, scope):
        """Return the collection node matching *scope* for this request."""
        if scope == "function":
            # this might also be a non-function item
            return self._pyfuncitem
        if scope == "package":
            node = get_scope_package(self._pyfuncitem, self._fixturedef)
        else:
            node = get_scope_node(self._pyfuncitem, scope)
        if node is None and scope == "class":
            # fallback to function item itself
            node = self._pyfuncitem
        assert node, 'Could not obtain a node for scope "{}" for function {!r}'.format(
            scope, self._pyfuncitem
        )
        return node
    def __repr__(self):
        return "<FixtureRequest for {!r} _phase={}>".format(self.node, self._phase)
class SubRequest(FixtureRequest):
    """ a sub request for handling getting a fixture from a
    test function/fixture. """
    def __init__(
        self,
        request: "FixtureRequest",
        scope: "_Scope",
        param,
        param_index: int,
        fixturedef: "FixtureDef",
    ) -> None:
        # deliberately no super().__init__(): state is shared with the
        # parent request rather than rebuilt from the pyfuncitem
        self._parent_request = request
        self.fixturename = fixturedef.argname
        if param is not NOTSET:
            self.param = param
        self.param_index = param_index
        self.scope = scope
        self._fixturedef = fixturedef
        self._pyfuncitem = request._pyfuncitem
        self._fixture_defs = request._fixture_defs
        self._arg2fixturedefs = request._arg2fixturedefs
        self._arg2index = request._arg2index
        self._fixturemanager = request._fixturemanager
    def __repr__(self):
        return "<SubRequest {!r} for {!r} _phase={}>".format(
            self.fixturename, self._pyfuncitem, self._phase
        )
    @property
    def _phase(self) -> "Optional[_RuntestPhase]":
        # the runtest phase is tracked on the root request only
        return self._parent_request._phase
    def addfinalizer(self, finalizer):
        """Attach *finalizer* to the fixture this sub-request resolves."""
        self._fixturedef.addfinalizer(finalizer)
    def _schedule_finalizers(self, fixturedef, subrequest):
        # if the executing fixturedef was not explicitly requested in the
        # argument list (e.g. activated via getfixturevalue) make sure this
        # fixture is finished before the dynamically requested one is torn down
        if fixturedef.argname not in self.fixturenames:
            fixturedef.addfinalizer(
                functools.partial(self._fixturedef.finish, request=self)
            )
        super()._schedule_finalizers(fixturedef, subrequest)
# Fixture scopes ordered from broadest ("session") to narrowest ("function").
scopes = ["session", "package", "module", "class", "function"]
scopenum_function = scopes.index("function")
def scopemismatch(currentscope, newscope):
    """Return True when *newscope* is narrower than *currentscope*."""
    current_idx = scopes.index(currentscope)
    new_idx = scopes.index(newscope)
    return new_idx > current_idx
def scope2index(scope, descr, where=None):
    """Look up the index of *scope* in ``scopes``, reporting a nice error
    (via fail) for unknown scope values."""
    if scope in scopes:
        return scopes.index(scope)
    fail(
        "{} {}got an unexpected scope value '{}'".format(
            descr, "from {} ".format(where) if where else "", scope
        ),
        pytrace=False,
    )
class FixtureLookupError(LookupError):
    """ could not return a requested Fixture (missing or invalid). """
    def __init__(self, argname, request, msg=None):
        self.argname = argname
        self.request = request
        self.fixturestack = request._get_fixturestack()
        self.msg = msg
    def formatrepr(self) -> "FixtureLookupErrorRepr":
        """Build a terminal representation: source excerpts of the fixture
        chain plus either the explicit message or a helpful lookup error."""
        tblines = []
        addline = tblines.append
        stack = [self.request._pyfuncitem.obj]
        stack.extend(map(lambda x: x.func, self.fixturestack))
        msg = self.msg
        if msg is not None:
            # the last fixture raised an error: present
            # it at the requesting side
            stack = stack[:-1]
        for function in stack:
            fspath, lineno = getfslineno(function)
            try:
                lines, _ = inspect.getsourcelines(get_real_func(function))
            except (OSError, IndexError, TypeError):
                error_msg = "file %s, line %s: source code not available"
                addline(error_msg % (fspath, lineno + 1))
            else:
                addline("file {}, line {}".format(fspath, lineno + 1))
                # show only up to and including the def line of each frame
                for i, line in enumerate(lines):
                    line = line.rstrip()
                    addline("  " + line)
                    if line.lstrip().startswith("def"):
                        break
        if msg is None:
            fm = self.request._fixturemanager
            available = set()
            parentid = self.request._pyfuncitem.parent.nodeid
            for name, fixturedefs in fm._arg2fixturedefs.items():
                faclist = list(fm._matchfactories(fixturedefs, parentid))
                if faclist:
                    available.add(name)
            if self.argname in available:
                msg = " recursive dependency involving fixture '{}' detected".format(
                    self.argname
                )
            else:
                msg = "fixture '{}' not found".format(self.argname)
            msg += "\n available fixtures: {}".format(", ".join(sorted(available)))
            msg += "\n use 'pytest --fixtures [testpath]' for help on them."
        return FixtureLookupErrorRepr(fspath, lineno, tblines, msg, self.argname)
class FixtureLookupErrorRepr(TerminalRepr):
    """Terminal representation of a FixtureLookupError: source excerpt lines
    followed by the error message and the failing location."""
    def __init__(self, filename, firstlineno, tblines, errorstring, argname):
        self.tblines = tblines
        self.errorstring = errorstring
        self.filename = filename
        self.firstlineno = firstlineno
        self.argname = argname
    def toterminal(self, tw: "TerminalWriter") -> None:
        # tw.line("FixtureLookupError: %s" %(self.argname), red=True)
        for tbline in self.tblines:
            tw.line(tbline.rstrip())
        lines = self.errorstring.split("\n")
        if lines:
            # first line with the fail marker, the rest with the flow marker
            tw.line(
                "{}       {}".format(FormattedExcinfo.fail_marker, lines[0].strip()),
                red=True,
            )
            for line in lines[1:]:
                tw.line(
                    "{}       {}".format(FormattedExcinfo.flow_marker, line.strip()),
                    red=True,
                )
        tw.line()
        tw.line("%s:%d" % (self.filename, self.firstlineno + 1))
def fail_fixturefunc(fixturefunc, msg):
    """Fail with *msg*, including the source and location of *fixturefunc*."""
    fs, lineno = getfslineno(fixturefunc)
    location = "{}:{}".format(fs, lineno + 1)
    source = _pytest._code.Source(fixturefunc)
    fail(msg + ":\n\n" + str(source.indent()) + "\n" + location, pytrace=False)
def call_fixture_func(fixturefunc, request, kwargs):
    """Invoke a fixture function, handling both plain and generator fixtures.

    For a generator fixture the first yielded value is returned and a
    finalizer that resumes the generator (running its teardown code) is
    registered on the request.
    """
    if not is_generator(fixturefunc):
        return fixturefunc(**kwargs)
    generator = fixturefunc(**kwargs)
    result = next(generator)
    request.addfinalizer(
        functools.partial(_teardown_yield_fixture, fixturefunc, generator)
    )
    return result
def _teardown_yield_fixture(fixturefunc, it):
try:
next(it)
except StopIteration:
pass
else:
fail_fixturefunc(
fixturefunc, "yield_fixture function has more than one 'yield'"
)
def _eval_scope_callable(scope_callable, fixture_name, config):
try:
result = scope_callable(fixture_name=fixture_name, config=config)
except Exception:
raise TypeError(
"Error evaluating {} while defining fixture '{}'.\n"
"Expected a function with the signature (*, fixture_name, config)".format(
scope_callable, fixture_name
)
)
if not isinstance(result, str):
fail(
"Expected {} to return a 'str' while defining fixture '{}', but it returned:\n"
"{!r}".format(scope_callable, fixture_name, result),
pytrace=False,
)
return result
class FixtureDef:
    """ A container for a factory definition. """
    def __init__(
        self,
        fixturemanager,
        baseid,
        argname,
        func,
        scope,
        params,
        unittest=False,
        ids=None,
    ):
        self._fixturemanager = fixturemanager
        self.baseid = baseid or ""
        # pseudo fixturedefs (baseid None) have no location and sort first
        self.has_location = baseid is not None
        self.func = func
        self.argname = argname
        if callable(scope):
            # dynamic scope: resolve the callable to a scope string now
            scope = _eval_scope_callable(scope, argname, fixturemanager.config)
        self.scope = scope
        self.scopenum = scope2index(
            scope or "function",
            descr="Fixture '{}'".format(func.__name__),
            where=baseid,
        )
        self.params = params
        self.argnames = getfuncargnames(func, name=argname, is_method=unittest)
        self.unittest = unittest
        self.ids = ids
        # (result, cache_key, exc_info) triple or None when not computed
        self.cached_result = None
        self._finalizers = []
    def addfinalizer(self, finalizer):
        self._finalizers.append(finalizer)
    def finish(self, request):
        """Run all registered finalizers (LIFO) and invalidate the cache."""
        exc = None
        try:
            while self._finalizers:
                try:
                    func = self._finalizers.pop()
                    func()
                except BaseException as e:
                    # XXX Only first exception will be seen by user,
                    #     ideally all should be reported.
                    if exc is None:
                        exc = e
            if exc:
                raise exc
        finally:
            hook = self._fixturemanager.session.gethookproxy(request.node.fspath)
            hook.pytest_fixture_post_finalizer(fixturedef=self, request=request)
            # even if finalization fails, we invalidate
            # the cached fixture value and remove
            # all finalizers because they may be bound methods which will
            # keep instances alive
            self.cached_result = None
            self._finalizers = []
    def execute(self, request):
        """Return the fixture value, recomputing it if the cached result was
        produced with a different parameter."""
        # get required arguments and register our own finish()
        # with their finalization
        for argname in self.argnames:
            fixturedef = request._get_active_fixturedef(argname)
            if argname != "request":
                fixturedef.addfinalizer(functools.partial(self.finish, request=request))
        my_cache_key = self.cache_key(request)
        if self.cached_result is not None:
            result, cache_key, err = self.cached_result
            # note: comparison with `==` can fail (or be expensive) for e.g.
            # numpy arrays (#6497)
            if my_cache_key is cache_key:
                if err is not None:
                    _, val, tb = err
                    raise val.with_traceback(tb)
                else:
                    return result
            # we have a previous but differently parametrized fixture instance
            # so we need to tear it down before creating a new one
            self.finish(request)
            assert self.cached_result is None
        hook = self._fixturemanager.session.gethookproxy(request.node.fspath)
        return hook.pytest_fixture_setup(fixturedef=self, request=request)
    def cache_key(self, request):
        # parametrized fixtures key on the param (identity), others on index
        return request.param_index if not hasattr(request, "param") else request.param
    def __repr__(self):
        return "<FixtureDef argname={!r} scope={!r} baseid={!r}>".format(
            self.argname, self.scope, self.baseid
        )
def resolve_fixture_function(fixturedef, request):
    """Return the fixture function to actually call for *fixturedef*,
    bound to ``request.instance`` when the fixture lives on a test class."""
    fixturefunc = fixturedef.func
    if fixturedef.unittest:
        if request.instance is not None:
            # bind the unbound method to the TestCase instance
            fixturefunc = fixturedef.func.__get__(request.instance)
    else:
        # the fixture function needs to be bound to the actual
        # request.instance so that code working with "fixturedef" behaves
        # as expected.
        if request.instance is not None:
            # handle the case where fixture is defined not in a test class, but some other class
            # (for example a plugin class with a fixture), see #2270
            if hasattr(fixturefunc, "__self__") and not isinstance(
                request.instance, fixturefunc.__self__.__class__
            ):
                # Already bound to a foreign instance: leave it as-is.
                return fixturefunc
            fixturefunc = getimfunc(fixturedef.func)
            if fixturefunc != fixturedef.func:
                # It was a bound/unbound method: rebind to this test instance.
                fixturefunc = fixturefunc.__get__(request.instance)
    return fixturefunc
def pytest_fixture_setup(fixturedef, request):
    """Execute fixture setup: gather values from already-set-up dependency
    fixtures, call the fixture function, and cache the outcome (result or
    exception info) on *fixturedef*."""
    kwargs = {}
    for argname in fixturedef.argnames:
        fixdef = request._get_active_fixturedef(argname)
        # Dependencies must have been set up before this fixture.
        assert fixdef.cached_result is not None
        result, arg_cache_key, exc = fixdef.cached_result
        request._check_scope(argname, request.scope, fixdef.scope)
        kwargs[argname] = result
    fixturefunc = resolve_fixture_function(fixturedef, request)
    my_cache_key = fixturedef.cache_key(request)
    try:
        result = call_fixture_func(fixturefunc, request, kwargs)
    except TEST_OUTCOME:
        # Cache the failure so repeated requests re-raise instead of re-running.
        fixturedef.cached_result = (None, my_cache_key, sys.exc_info())
        raise
    fixturedef.cached_result = (result, my_cache_key, None)
    return result
def _ensure_immutable_ids(ids):
if ids is None:
return
if callable(ids):
return ids
return tuple(ids)
def wrap_function_to_error_out_if_called_directly(function, fixture_marker):
    """Wrap *function* so that calling the fixture directly fails with a
    helpful message instead of silently executing the fixture body.

    The original function is preserved on the wrapper's
    ``__pytest_wrapped__`` attribute so introspection helpers can
    unwrap it later.
    """
    message = (
        'Fixture "{name}" called directly. Fixtures are not meant to be called directly,\n'
        "but are created automatically when test functions request them as parameters.\n"
        "See https://docs.pytest.org/en/latest/fixture.html for more information about fixtures, and\n"
        "https://docs.pytest.org/en/latest/deprecations.html#calling-fixtures-directly about how to update your code."
    ).format(name=fixture_marker.name or function.__name__)

    @functools.wraps(function)
    def result(*args, **kwargs):
        fail(message, pytrace=False)

    # keep reference to the original function in our own custom attribute so we don't unwrap
    # BUG FIX: this previously read `esult.__pytest_wrapped__`, raising
    # NameError the first time any fixture was defined.
    result.__pytest_wrapped__ = _PytestWrapper(function)

    return result
@attr.s(frozen=True)
class FixtureFunctionMarker:
    """Marker stored as ``_pytestfixturefunction`` on functions decorated
    with ``@pytest.fixture``; records the fixture's configuration."""

    scope = attr.ib()
    # params are frozen into a tuple (or None) at construction time.
    params = attr.ib(
        type=Optional[Tuple[object, ...]],
        converter=attr.converters.optional(tuple),
    )
    autouse = attr.ib(default=False)
    ids = attr.ib(default=None, converter=_ensure_immutable_ids)
    name = attr.ib(default=None)

    def __call__(self, function):
        """Apply the marker to *function*, validating that it is a plain
        function, is not already a fixture, and is not named 'request'."""
        if inspect.isclass(function):
            raise ValueError("class fixtures not supported (maybe in the future)")
        if getattr(function, "_pytestfixturefunction", False):
            raise ValueError(
                "fixture is being applied more than once to the same function"
            )
        # Replace the function with a wrapper that errors on direct calls.
        function = wrap_function_to_error_out_if_called_directly(function, self)
        name = self.name or function.__name__
        if name == "request":
            location = getlocation(function)
            fail(
                "'request' is a reserved word for fixtures, use another name:\n  {}".format(
                    location
                ),
                pytrace=False,
            )
        function._pytestfixturefunction = self
        return function
FIXTURE_ARGS_ORDER = ("scope", "params", "autouse", "ids", "name")
def _parse_fixture_args(callable_or_scope, *args, **kwargs):
arguments = {
"scope": "function",
"params": None,
"autouse": False,
"ids": None,
"name": None,
}
kwargs = {
key: value for key, value in kwargs.items() if arguments.get(key) != value
}
fixture_function = None
if isinstance(callable_or_scope, str):
args = list(args)
args.insert(0, callable_or_scope)
else:
fixture_function = callable_or_scope
positionals = set()
for positional, argument_name in zip(args, FIXTURE_ARGS_ORDER):
arguments[argument_name] = positional
positionals.add(argument_name)
duplicated_kwargs = {kwarg for kwarg in kwargs.keys() if kwarg in positionals}
if duplicated_kwargs:
raise TypeError(
"The fixture arguments are defined as positional and keyword: {}. "
"Use only keyword arguments.".format(", ".join(duplicated_kwargs))
)
if positionals:
warnings.warn(FIXTURE_POSITIONAL_ARGUMENTS, stacklevel=2)
arguments.update(kwargs)
return fixture_function, arguments
def fixture(
    callable_or_scope=None,
    *args,
    scope="function",
    params=None,
    autouse=False,
    ids=None,
    name=None
):
    """Decorator to mark a fixture factory function.

    Usable bare (``@fixture``), with arguments
    (``@fixture(scope="module", params=[...])``), or called directly with
    the function as first argument.
    """
    if params is not None:
        # Materialize params so generators/iterators are only consumed once.
        params = list(params)
    fixture_function, arguments = _parse_fixture_args(
        callable_or_scope,
        *args,
        scope=scope,
        params=params,
        autouse=autouse,
        ids=ids,
        name=name,
    )
    scope = arguments.get("scope")
    params = arguments.get("params")
    autouse = arguments.get("autouse")
    ids = arguments.get("ids")
    name = arguments.get("name")
    if fixture_function and params is None and autouse is False:
        # Bare usage: apply the marker to the function immediately.
        return FixtureFunctionMarker(scope, params, autouse, name=name)(
            fixture_function
        )
    # Parenthesized usage: return the marker to be applied later.
    return FixtureFunctionMarker(scope, params, autouse, ids=ids, name=name)
def yield_fixture(
    callable_or_scope=None,
    *args,
    scope="function",
    params=None,
    autouse=False,
    ids=None,
    name=None
):
    """Deprecated alias of :func:`fixture`, kept for backward compatibility."""
    options = dict(scope=scope, params=params, autouse=autouse, ids=ids, name=name)
    return fixture(callable_or_scope, *args, **options)
# Default marker used internally for prefix-style fixture registration.
defaultfuncargprefixmarker = fixture()


@fixture(scope="session")
def pytestconfig(request):
    """Session-scoped fixture that returns the session's config object."""
    return request.config
def pytest_addoption(parser):
    """Register the ``usefixtures`` ini option."""
    parser.addini(
        "usefixtures",
        type="args",
        default=[],
        help="list of default fixtures to be used with this project",
    )
class FixtureManager:
    """Per-session registry of fixture definitions.

    Collects fixtures from plugins, conftest files and test classes,
    computes the fixture closure for test functions, and drives
    fixture-based parametrization.
    """

    FixtureLookupError = FixtureLookupError
    FixtureLookupErrorRepr = FixtureLookupErrorRepr

    def __init__(self, session):
        self.session = session
        self.config = session.config
        # Maps fixture name -> list of FixtureDefs (most specific last).
        self._arg2fixturedefs = {}
        self._holderobjseen = set()
        # (baseid, autouse-names) pairs; baseid "" applies everywhere.
        self._nodeid_and_autousenames = [("", self.config.getini("usefixtures"))]
        session.config.pluginmanager.register(self, "funcmanage")

    def _get_direct_parametrize_args(self, node):
        """Return argnames parametrized directly (indirect=False) on *node*;
        those must not be resolved through the fixture closure."""
        parametrize_argnames = []
        for marker in node.iter_markers(name="parametrize"):
            if not marker.kwargs.get("indirect", False):
                try:
                    p_argnames, _ = ParameterSet._parse_parametrize_args(
                        *marker.args, **marker.kwargs
                    )
                except TypeError:
                    # Malformed parametrize call: let the error surface later.
                    pass
                else:
                    parametrize_argnames.extend(p_argnames)
        return parametrize_argnames

    def getfixtureinfo(self, node, func, cls, funcargs=True):
        """Compute the fixture closure (FuncFixtureInfo) for a test function."""
        if funcargs and not getattr(node, "nofuncargs", False):
            argnames = getfuncargnames(func, name=node.name, cls=cls)
        else:
            argnames = ()
        # Names requested via @pytest.mark.usefixtures come first.
        usefixtures = itertools.chain.from_iterable(
            mark.args for mark in node.iter_markers(name="usefixtures")
        )
        initialnames = tuple(usefixtures) + argnames
        fm = node.session._fixturemanager
        initialnames, names_closure, arg2fixturedefs = fm.getfixtureclosure(
            initialnames, node, ignore_args=self._get_direct_parametrize_args(node)
        )
        return FuncFixtureInfo(argnames, initialnames, names_closure, arg2fixturedefs)

    def pytest_plugin_registered(self, plugin):
        """Collect fixtures from a newly registered plugin.

        conftest plugins get a nodeid so their fixtures are visible only
        below their directory; other plugins are visible everywhere.
        """
        nodeid = None
        try:
            p = py.path.local(plugin.__file__).realpath()
        except AttributeError:
            # Plugin without a __file__ (e.g. a plain object): global scope.
            pass
        else:
            from _pytest import nodes

            if p.basename.startswith("conftest.py"):
                nodeid = p.dirpath().relto(self.config.rootdir)
                if p.sep != nodes.SEP:
                    # Normalize OS path separators to node-id separators.
                    nodeid = nodeid.replace(p.sep, nodes.SEP)
        self.parsefactories(plugin, nodeid)

    def _getautousenames(self, nodeid):
        """Return all autouse fixture names applicable to *nodeid*."""
        autousenames = []
        for baseid, basenames in self._nodeid_and_autousenames:
            if nodeid.startswith(baseid):
                if baseid:
                    i = len(baseid)
                    nextchar = nodeid[i : i + 1]
                    # Guard against prefix-only matches (e.g. "a/b" vs "a/bc").
                    if nextchar and nextchar not in ":/":
                        continue
                autousenames.extend(basenames)
        return autousenames

    def getfixtureclosure(self, fixturenames, parentnode, ignore_args=()):
        """Compute the transitive closure of fixtures needed by *parentnode*,
        starting from *fixturenames* plus applicable autouse fixtures.

        Returns ``(initialnames, closure_sorted_by_scope, arg2fixturedefs)``.
        """
        parentid = parentnode.nodeid
        fixturenames_closure = self._getautousenames(parentid)

        def merge(otherlist):
            # Append names not yet in the closure, preserving order.
            for arg in otherlist:
                if arg not in fixturenames_closure:
                    fixturenames_closure.append(arg)

        merge(fixturenames)
        initialnames = tuple(fixturenames_closure)
        arg2fixturedefs = {}
        lastlen = -1
        # Iterate until no new names are added (fixed point).
        while lastlen != len(fixturenames_closure):
            lastlen = len(fixturenames_closure)
            for argname in fixturenames_closure:
                if argname in ignore_args:
                    continue
                if argname in arg2fixturedefs:
                    continue
                fixturedefs = self.getfixturedefs(argname, parentid)
                if fixturedefs:
                    arg2fixturedefs[argname] = fixturedefs
                    merge(fixturedefs[-1].argnames)

        def sort_by_scope(arg_name):
            try:
                fixturedefs = arg2fixturedefs[arg_name]
            except KeyError:
                # Unknown name: assume narrowest (function) scope.
                return scopes.index("function")
            else:
                return fixturedefs[-1].scopenum

        fixturenames_closure.sort(key=sort_by_scope)
        return initialnames, fixturenames_closure, arg2fixturedefs

    def pytest_generate_tests(self, metafunc):
        """Parametrize tests indirectly for fixtures that declare params,
        unless a 'parametrize' marker already covers the argument."""
        for argname in metafunc.fixturenames:
            faclist = metafunc._arg2fixturedefs.get(argname)
            if faclist:
                fixturedef = faclist[-1]
                if fixturedef.params is not None:
                    markers = list(metafunc.definition.iter_markers("parametrize"))
                    for parametrize_mark in markers:
                        if "argnames" in parametrize_mark.kwargs:
                            argnames = parametrize_mark.kwargs["argnames"]
                        else:
                            argnames = parametrize_mark.args[0]
                        if not isinstance(argnames, (tuple, list)):
                            # Comma-separated string form of argnames.
                            argnames = [
                                x.strip() for x in argnames.split(",") if x.strip()
                            ]
                        if argname in argnames:
                            # Already parametrized directly: skip.
                            break
                    else:
                        metafunc.parametrize(
                            argname,
                            fixturedef.params,
                            indirect=True,
                            scope=fixturedef.scope,
                            ids=fixturedef.ids,
                        )
            else:
                continue

    def pytest_collection_modifyitems(self, items):
        # Reorder items so higher-scoped parametrized fixtures are reused.
        items[:] = reorder_items(items)

    def parsefactories(self, node_or_obj, nodeid=NOTSET, unittest=False):
        """Scan *node_or_obj* for fixture-marked functions and register a
        FixtureDef for each, tracking autouse fixtures per nodeid."""
        if nodeid is not NOTSET:
            holderobj = node_or_obj
        else:
            holderobj = node_or_obj.obj
            nodeid = node_or_obj.nodeid
        if holderobj in self._holderobjseen:
            # Each holder object is only parsed once.
            return
        self._holderobjseen.add(holderobj)
        autousenames = []
        for name in dir(holderobj):
            # Attribute access may raise (arbitrary descriptors/properties);
            # safe_getattr tolerates that and returns the default.
            obj = safe_getattr(holderobj, name, None)
            marker = getfixturemarker(obj)
            if not isinstance(marker, FixtureFunctionMarker):
                continue
            if marker.name:
                # An explicit fixture name overrides the attribute name.
                name = marker.name
            # Unwrap the direct-call-protection wrapper to the real function.
            obj = get_real_method(obj, holderobj)
            fixture_def = FixtureDef(
                self,
                nodeid,
                name,
                obj,
                marker.scope,
                marker.params,
                unittest=unittest,
                ids=marker.ids,
            )
            faclist = self._arg2fixturedefs.setdefault(name, [])
            if fixture_def.has_location:
                faclist.append(fixture_def)
            else:
                # Location-less (plugin) fixturedefs go before located ones
                # so conftest/test-file fixtures take precedence.
                i = len([f for f in faclist if not f.has_location])
                faclist.insert(i, fixture_def)
            if marker.autouse:
                autousenames.append(name)
        if autousenames:
            self._nodeid_and_autousenames.append((nodeid or "", autousenames))

    def getfixturedefs(self, argname, nodeid):
        """Return the FixtureDefs for *argname* visible from *nodeid*,
        or None when the name is unknown."""
        try:
            fixturedefs = self._arg2fixturedefs[argname]
        except KeyError:
            return None
        return tuple(self._matchfactories(fixturedefs, nodeid))

    def _matchfactories(self, fixturedefs, nodeid):
        """Yield the fixturedefs whose baseid is an ancestor of *nodeid*."""
        from _pytest import nodes

        for fixturedef in fixturedefs:
            if nodes.ischildnode(fixturedef.baseid, nodeid):
                yield fixturedef
| true
| true
|
f7189cf1777438042d5eeb717699cbd063289d08
| 10,871
|
py
|
Python
|
env/lib/python3.8/site-packages/plotly/validators/choroplethmapbox/_colorbar.py
|
acrucetta/Chicago_COVI_WebApp
|
a37c9f492a20dcd625f8647067394617988de913
|
[
"MIT",
"Unlicense"
] | 2
|
2021-07-07T20:16:23.000Z
|
2021-07-14T14:03:09.000Z
|
env/lib/python3.8/site-packages/plotly/validators/choroplethmapbox/_colorbar.py
|
acrucetta/Chicago_COVI_WebApp
|
a37c9f492a20dcd625f8647067394617988de913
|
[
"MIT",
"Unlicense"
] | 5
|
2020-06-05T20:56:21.000Z
|
2021-09-22T19:12:42.000Z
|
env/lib/python3.8/site-packages/plotly/validators/choroplethmapbox/_colorbar.py
|
acrucetta/Chicago_COVI_WebApp
|
a37c9f492a20dcd625f8647067394617988de913
|
[
"MIT",
"Unlicense"
] | 2
|
2020-07-05T12:57:14.000Z
|
2020-07-05T12:58:00.000Z
|
import _plotly_utils.basevalidators
class ColorbarValidator(_plotly_utils.basevalidators.CompoundValidator):
    """Validator for the ``colorbar`` compound property of
    ``choroplethmapbox`` traces.

    NOTE: this file is auto-generated by plotly's code generator; the
    ``data_docs`` string below is surfaced verbatim in validation errors.
    """

    def __init__(
        self, plotly_name="colorbar", parent_name="choroplethmapbox", **kwargs
    ):
        super(ColorbarValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            data_class_str=kwargs.pop("data_class_str", "ColorBar"),
            data_docs=kwargs.pop(
                "data_docs",
                """
            bgcolor
                Sets the color of padded area.
            bordercolor
                Sets the axis line color.
            borderwidth
                Sets the width (in px) or the border enclosing
                this color bar.
            dtick
                Sets the step in-between ticks on this axis.
                Use with `tick0`. Must be a positive number, or
                special strings available to "log" and "date"
                axes. If the axis `type` is "log", then ticks
                are set every 10^(n*dtick) where n is the tick
                number. For example, to set a tick mark at 1,
                10, 100, 1000, ... set dtick to 1. To set tick
                marks at 1, 100, 10000, ... set dtick to 2. To
                set tick marks at 1, 5, 25, 125, 625, 3125, ...
                set dtick to log_10(5), or 0.69897000433. "log"
                has several special values; "L<f>", where `f`
                is a positive number, gives ticks linearly
                spaced in value (but not position). For example
                `tick0` = 0.1, `dtick` = "L0.5" will put ticks
                at 0.1, 0.6, 1.1, 1.6 etc. To show powers of 10
                plus small digits between, use "D1" (all
                digits) or "D2" (only 2 and 5). `tick0` is
                ignored for "D1" and "D2". If the axis `type`
                is "date", then you must convert the time to
                milliseconds. For example, to set the interval
                between ticks to one day, set `dtick` to
                86400000.0. "date" also has special values
                "M<n>" gives ticks spaced by a number of
                months. `n` must be a positive integer. To set
                ticks on the 15th of every third month, set
                `tick0` to "2000-01-15" and `dtick` to "M3". To
                set ticks every 4 years, set `dtick` to "M48"
            exponentformat
                Determines a formatting rule for the tick
                exponents. For example, consider the number
                1,000,000,000. If "none", it appears as
                1,000,000,000. If "e", 1e+9. If "E", 1E+9. If
                "power", 1x10^9 (with 9 in a super script). If
                "SI", 1G. If "B", 1B.
            len
                Sets the length of the color bar This measure
                excludes the padding of both ends. That is, the
                color bar length is this length minus the
                padding on both ends.
            lenmode
                Determines whether this color bar's length
                (i.e. the measure in the color variation
                direction) is set in units of plot "fraction"
                or in *pixels. Use `len` to set the value.
            nticks
                Specifies the maximum number of ticks for the
                particular axis. The actual number of ticks
                will be chosen automatically to be less than or
                equal to `nticks`. Has an effect only if
                `tickmode` is set to "auto".
            outlinecolor
                Sets the axis line color.
            outlinewidth
                Sets the width (in px) of the axis line.
            separatethousands
                If "true", even 4-digit integers are separated
            showexponent
                If "all", all exponents are shown besides their
                significands. If "first", only the exponent of
                the first tick is shown. If "last", only the
                exponent of the last tick is shown. If "none",
                no exponents appear.
            showticklabels
                Determines whether or not the tick labels are
                drawn.
            showtickprefix
                If "all", all tick labels are displayed with a
                prefix. If "first", only the first tick is
                displayed with a prefix. If "last", only the
                last tick is displayed with a suffix. If
                "none", tick prefixes are hidden.
            showticksuffix
                Same as `showtickprefix` but for tick suffixes.
            thickness
                Sets the thickness of the color bar This
                measure excludes the size of the padding, ticks
                and labels.
            thicknessmode
                Determines whether this color bar's thickness
                (i.e. the measure in the constant color
                direction) is set in units of plot "fraction"
                or in "pixels". Use `thickness` to set the
                value.
            tick0
                Sets the placement of the first tick on this
                axis. Use with `dtick`. If the axis `type` is
                "log", then you must take the log of your
                starting tick (e.g. to set the starting tick to
                100, set the `tick0` to 2) except when
                `dtick`=*L<f>* (see `dtick` for more info). If
                the axis `type` is "date", it should be a date
                string, like date data. If the axis `type` is
                "category", it should be a number, using the
                scale where each category is assigned a serial
                number from zero in the order it appears.
            tickangle
                Sets the angle of the tick labels with respect
                to the horizontal. For example, a `tickangle`
                of -90 draws the tick labels vertically.
            tickcolor
                Sets the tick color.
            tickfont
                Sets the color bar's tick label font
            tickformat
                Sets the tick label formatting rule using d3
                formatting mini-languages which are very
                similar to those in Python. For numbers, see:
                https://github.com/d3/d3-3.x-api-
                reference/blob/master/Formatting.md#d3_format
                And for dates see:
                https://github.com/d3/d3-3.x-api-
                reference/blob/master/Time-Formatting.md#format
                We add one item to d3's date formatter: "%{n}f"
                for fractional seconds with n digits. For
                example, *2016-10-13 09:15:23.456* with
                tickformat "%H~%M~%S.%2f" would display
                "09~15~23.46"
            tickformatstops
                A tuple of :class:`plotly.graph_objects.choropl
                ethmapbox.colorbar.Tickformatstop` instances or
                dicts with compatible properties
            tickformatstopdefaults
                When used in a template (as layout.template.dat
                a.choroplethmapbox.colorbar.tickformatstopdefau
                lts), sets the default property values to use
                for elements of
                choroplethmapbox.colorbar.tickformatstops
            ticklen
                Sets the tick length (in px).
            tickmode
                Sets the tick mode for this axis. If "auto",
                the number of ticks is set via `nticks`. If
                "linear", the placement of the ticks is
                determined by a starting position `tick0` and a
                tick step `dtick` ("linear" is the default
                value if `tick0` and `dtick` are provided). If
                "array", the placement of the ticks is set via
                `tickvals` and the tick text is `ticktext`.
                ("array" is the default value if `tickvals` is
                provided).
            tickprefix
                Sets a tick label prefix.
            ticks
                Determines whether ticks are drawn or not. If
                "", this axis' ticks are not drawn. If
                "outside" ("inside"), this axis' are drawn
                outside (inside) the axis lines.
            ticksuffix
                Sets a tick label suffix.
            ticktext
                Sets the text displayed at the ticks position
                via `tickvals`. Only has an effect if
                `tickmode` is set to "array". Used with
                `tickvals`.
            ticktextsrc
                Sets the source reference on Chart Studio Cloud
                for ticktext .
            tickvals
                Sets the values at which ticks on this axis
                appear. Only has an effect if `tickmode` is set
                to "array". Used with `ticktext`.
            tickvalssrc
                Sets the source reference on Chart Studio Cloud
                for tickvals .
            tickwidth
                Sets the tick width (in px).
            title
                :class:`plotly.graph_objects.choroplethmapbox.c
                olorbar.Title` instance or dict with compatible
                properties
            titlefont
                Deprecated: Please use
                choroplethmapbox.colorbar.title.font instead.
                Sets this color bar's title font. Note that the
                title's font used to be set by the now
                deprecated `titlefont` attribute.
            titleside
                Deprecated: Please use
                choroplethmapbox.colorbar.title.side instead.
                Determines the location of color bar's title
                with respect to the color bar. Note that the
                title's location used to be set by the now
                deprecated `titleside` attribute.
            x
                Sets the x position of the color bar (in plot
                fraction).
            xanchor
                Sets this color bar's horizontal position
                anchor. This anchor binds the `x` position to
                the "left", "center" or "right" of the color
                bar.
            xpad
                Sets the amount of padding (in px) along the x
                direction.
            y
                Sets the y position of the color bar (in plot
                fraction).
            yanchor
                Sets this color bar's vertical position anchor
                This anchor binds the `y` position to the
                "top", "middle" or "bottom" of the color bar.
            ypad
                Sets the amount of padding (in px) along the y
                direction.
""",
            ),
            **kwargs
        )
| 47.060606
| 78
| 0.52663
|
import _plotly_utils.basevalidators
class ColorbarValidator(_plotly_utils.basevalidators.CompoundValidator):
def __init__(
self, plotly_name="colorbar", parent_name="choroplethmapbox", **kwargs
):
super(ColorbarValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
data_class_str=kwargs.pop("data_class_str", "ColorBar"),
data_docs=kwargs.pop(
"data_docs",
"""
bgcolor
Sets the color of padded area.
bordercolor
Sets the axis line color.
borderwidth
Sets the width (in px) or the border enclosing
this color bar.
dtick
Sets the step in-between ticks on this axis.
Use with `tick0`. Must be a positive number, or
special strings available to "log" and "date"
axes. If the axis `type` is "log", then ticks
are set every 10^(n*dtick) where n is the tick
number. For example, to set a tick mark at 1,
10, 100, 1000, ... set dtick to 1. To set tick
marks at 1, 100, 10000, ... set dtick to 2. To
set tick marks at 1, 5, 25, 125, 625, 3125, ...
set dtick to log_10(5), or 0.69897000433. "log"
has several special values; "L<f>", where `f`
is a positive number, gives ticks linearly
spaced in value (but not position). For example
`tick0` = 0.1, `dtick` = "L0.5" will put ticks
at 0.1, 0.6, 1.1, 1.6 etc. To show powers of 10
plus small digits between, use "D1" (all
digits) or "D2" (only 2 and 5). `tick0` is
ignored for "D1" and "D2". If the axis `type`
is "date", then you must convert the time to
milliseconds. For example, to set the interval
between ticks to one day, set `dtick` to
86400000.0. "date" also has special values
"M<n>" gives ticks spaced by a number of
months. `n` must be a positive integer. To set
ticks on the 15th of every third month, set
`tick0` to "2000-01-15" and `dtick` to "M3". To
set ticks every 4 years, set `dtick` to "M48"
exponentformat
Determines a formatting rule for the tick
exponents. For example, consider the number
1,000,000,000. If "none", it appears as
1,000,000,000. If "e", 1e+9. If "E", 1E+9. If
"power", 1x10^9 (with 9 in a super script). If
"SI", 1G. If "B", 1B.
len
Sets the length of the color bar This measure
excludes the padding of both ends. That is, the
color bar length is this length minus the
padding on both ends.
lenmode
Determines whether this color bar's length
(i.e. the measure in the color variation
direction) is set in units of plot "fraction"
or in *pixels. Use `len` to set the value.
nticks
Specifies the maximum number of ticks for the
particular axis. The actual number of ticks
will be chosen automatically to be less than or
equal to `nticks`. Has an effect only if
`tickmode` is set to "auto".
outlinecolor
Sets the axis line color.
outlinewidth
Sets the width (in px) of the axis line.
separatethousands
If "true", even 4-digit integers are separated
showexponent
If "all", all exponents are shown besides their
significands. If "first", only the exponent of
the first tick is shown. If "last", only the
exponent of the last tick is shown. If "none",
no exponents appear.
showticklabels
Determines whether or not the tick labels are
drawn.
showtickprefix
If "all", all tick labels are displayed with a
prefix. If "first", only the first tick is
displayed with a prefix. If "last", only the
last tick is displayed with a suffix. If
"none", tick prefixes are hidden.
showticksuffix
Same as `showtickprefix` but for tick suffixes.
thickness
Sets the thickness of the color bar This
measure excludes the size of the padding, ticks
and labels.
thicknessmode
Determines whether this color bar's thickness
(i.e. the measure in the constant color
direction) is set in units of plot "fraction"
or in "pixels". Use `thickness` to set the
value.
tick0
Sets the placement of the first tick on this
axis. Use with `dtick`. If the axis `type` is
"log", then you must take the log of your
starting tick (e.g. to set the starting tick to
100, set the `tick0` to 2) except when
`dtick`=*L<f>* (see `dtick` for more info). If
the axis `type` is "date", it should be a date
string, like date data. If the axis `type` is
"category", it should be a number, using the
scale where each category is assigned a serial
number from zero in the order it appears.
tickangle
Sets the angle of the tick labels with respect
to the horizontal. For example, a `tickangle`
of -90 draws the tick labels vertically.
tickcolor
Sets the tick color.
tickfont
Sets the color bar's tick label font
tickformat
Sets the tick label formatting rule using d3
formatting mini-languages which are very
similar to those in Python. For numbers, see:
https://github.com/d3/d3-3.x-api-
reference/blob/master/Formatting.md#d3_format
And for dates see:
https://github.com/d3/d3-3.x-api-
reference/blob/master/Time-Formatting.md#format
We add one item to d3's date formatter: "%{n}f"
for fractional seconds with n digits. For
example, *2016-10-13 09:15:23.456* with
tickformat "%H~%M~%S.%2f" would display
"09~15~23.46"
tickformatstops
A tuple of :class:`plotly.graph_objects.choropl
ethmapbox.colorbar.Tickformatstop` instances or
dicts with compatible properties
tickformatstopdefaults
When used in a template (as layout.template.dat
a.choroplethmapbox.colorbar.tickformatstopdefau
lts), sets the default property values to use
for elements of
choroplethmapbox.colorbar.tickformatstops
ticklen
Sets the tick length (in px).
tickmode
Sets the tick mode for this axis. If "auto",
the number of ticks is set via `nticks`. If
"linear", the placement of the ticks is
determined by a starting position `tick0` and a
tick step `dtick` ("linear" is the default
value if `tick0` and `dtick` are provided). If
"array", the placement of the ticks is set via
`tickvals` and the tick text is `ticktext`.
("array" is the default value if `tickvals` is
provided).
tickprefix
Sets a tick label prefix.
ticks
Determines whether ticks are drawn or not. If
"", this axis' ticks are not drawn. If
"outside" ("inside"), this axis' are drawn
outside (inside) the axis lines.
ticksuffix
Sets a tick label suffix.
ticktext
Sets the text displayed at the ticks position
via `tickvals`. Only has an effect if
`tickmode` is set to "array". Used with
`tickvals`.
ticktextsrc
Sets the source reference on Chart Studio Cloud
for ticktext .
tickvals
Sets the values at which ticks on this axis
appear. Only has an effect if `tickmode` is set
to "array". Used with `ticktext`.
tickvalssrc
Sets the source reference on Chart Studio Cloud
for tickvals .
tickwidth
Sets the tick width (in px).
title
:class:`plotly.graph_objects.choroplethmapbox.c
olorbar.Title` instance or dict with compatible
properties
titlefont
Deprecated: Please use
choroplethmapbox.colorbar.title.font instead.
Sets this color bar's title font. Note that the
title's font used to be set by the now
deprecated `titlefont` attribute.
titleside
Deprecated: Please use
choroplethmapbox.colorbar.title.side instead.
Determines the location of color bar's title
with respect to the color bar. Note that the
title's location used to be set by the now
deprecated `titleside` attribute.
x
Sets the x position of the color bar (in plot
fraction).
xanchor
Sets this color bar's horizontal position
anchor. This anchor binds the `x` position to
the "left", "center" or "right" of the color
bar.
xpad
Sets the amount of padding (in px) along the x
direction.
y
Sets the y position of the color bar (in plot
fraction).
yanchor
Sets this color bar's vertical position anchor
This anchor binds the `y` position to the
"top", "middle" or "bottom" of the color bar.
ypad
Sets the amount of padding (in px) along the y
direction.
""",
),
**kwargs
)
| true
| true
|
f7189d61c476bf7a949e07d688c92793680eb5d3
| 584
|
py
|
Python
|
polls/admin.py
|
Nachtalb/django-polls
|
e5f1065cdcff99c8f21ea4f211d2d6fa344b65c7
|
[
"MIT"
] | null | null | null |
polls/admin.py
|
Nachtalb/django-polls
|
e5f1065cdcff99c8f21ea4f211d2d6fa344b65c7
|
[
"MIT"
] | null | null | null |
polls/admin.py
|
Nachtalb/django-polls
|
e5f1065cdcff99c8f21ea4f211d2d6fa344b65c7
|
[
"MIT"
] | null | null | null |
from .models import Choice
from .models import Question
from django.contrib import admin
# Register your models here.
class ChoiceInline(admin.TabularInline):
    """Inline editor so Choices can be edited on the Question admin page."""
    model = Choice
    extra = 1  # number of blank extra choice forms shown
class QuestionAdmin(admin.ModelAdmin):
    """Admin configuration for Question: grouped edit fields, inline
    choices, list columns, date filtering, and text search."""
    fieldsets = [
        (None, {'fields': ['question_text']}),
        ('Date information', {'fields': ['pub_date']}),
    ]
    inlines = [ChoiceInline]  # edit choices together with their question
    list_display = ('question_text', 'pub_date', 'was_published_recently')
    list_filter = ['pub_date']
    search_fields = ['question_text']


admin.site.register(Question, QuestionAdmin)
| 22.461538
| 74
| 0.683219
|
from .models import Choice
from .models import Question
from django.contrib import admin
class ChoiceInline(admin.TabularInline):
model = Choice
extra = 1
class QuestionAdmin(admin.ModelAdmin):
fieldsets = [
(None, {'fields': ['question_text']}),
('Date information', {'fields': ['pub_date']}),
]
inlines = [ChoiceInline]
list_display = ('question_text', 'pub_date', 'was_published_recently')
list_filter = ['pub_date']
search_fields = ['question_text']
admin.site.register(Question, QuestionAdmin)
| true
| true
|
f7189f5b921b09008dadabc0c25611488fd3ea71
| 3,408
|
py
|
Python
|
lib/spack/spack/cmd/tags.py
|
player1537-forks/spack
|
822b7632222ec5a91dc7b7cda5fc0e08715bd47c
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 11
|
2015-10-04T02:17:46.000Z
|
2018-02-07T18:23:00.000Z
|
lib/spack/spack/cmd/tags.py
|
player1537-forks/spack
|
822b7632222ec5a91dc7b7cda5fc0e08715bd47c
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 22
|
2017-08-01T22:45:10.000Z
|
2022-03-10T07:46:31.000Z
|
lib/spack/spack/cmd/tags.py
|
player1537-forks/spack
|
822b7632222ec5a91dc7b7cda5fc0e08715bd47c
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 4
|
2016-06-10T17:57:39.000Z
|
2018-09-11T04:59:38.000Z
|
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import sys
import six
import llnl.util.tty as tty
import llnl.util.tty.colify as colify
import spack.repo
import spack.store
import spack.tag
# Metadata consumed by spack's command-line framework (help listing).
description = "Show package tags and associated packages"
section = "basic"
level = "long"
def report_tags(category, tags):
    """Print *tags* under a "<N> <category> package tag(s):" heading.

    The heading is only written when stdout is a tty; an empty tag list
    prints 'None'.
    """
    buffer = six.StringIO()
    isatty = sys.stdout.isatty()
    if isatty:
        num = len(tags)
        fmt = '{0} package tag'.format(category)
        # NOTE(review): spack.util.string is not imported in this module's
        # visible header; presumably pulled in transitively — verify.
        buffer.write("{0}:\n".format(spack.util.string.plural(num, fmt)))
    if tags:
        colify.colify(tags, output=buffer, tty=isatty, indent=4)
    else:
        buffer.write("    None\n")
    print(buffer.getvalue())
def setup_parser(subparser):
    """Define the command-line options for ``spack tags``."""
    subparser.epilog = (
        "Tags from known packages will be used if no tags are provided on "
        "the command\nline. If tags are provided, packages with at least one "
        "will be reported.\n\nYou are not allowed to provide tags and use "
        "'--all' at the same time."
    )
    subparser.add_argument(
        '-i', '--installed', action='store_true', default=False,
        help="show information for installed packages only"
    )
    subparser.add_argument(
        '-a', '--all', action='store_true', default=False,
        help="show packages for all available tags"
    )
    subparser.add_argument(
        'tag',
        nargs='*',
        help="show packages with the specified tag"
    )
def tags(parser, args):
    """Entry point for ``spack tags``.

    With no arguments, reports the tags themselves; with tags (or --all),
    reports the packages associated with each tag.

    Args:
        parser: the argument parser (unused here)
        args: parsed namespace with ``all``, ``installed`` and ``tag``
    """
    # NOTE(review): spack.environment and spack.util are referenced below but
    # not imported at the top of this file -- presumably pulled in
    # transitively by the other spack imports; verify.
    # Disallow combining all option with (positional) tags to avoid confusion
    if args.all and args.tag:
        tty.die("Use the '--all' option OR provide tag(s) on the command line")
    # Provide a nice, simple message if database is empty
    if args.installed and not spack.environment.installed_specs():
        tty.msg("No installed packages")
        return
    # unique list of available tags
    available_tags = sorted(spack.repo.path.tag_index.keys())
    if not available_tags:
        tty.msg("No tagged packages")
        return
    show_packages = args.tag or args.all
    # Only report relevant, available tags if no packages are to be shown
    if not show_packages:
        if not args.installed:
            report_tags("available", available_tags)
        else:
            # Restrict to tags that have at least one installed package.
            tag_pkgs = spack.tag.packages_with_tags(available_tags, True, True)
            tags = tag_pkgs.keys() if tag_pkgs else []
            report_tags("installed", tags)
        return
    # Report packages associated with tags
    buffer = six.StringIO()
    isatty = sys.stdout.isatty()
    tags = args.tag if args.tag else available_tags
    tag_pkgs = spack.tag.packages_with_tags(tags, args.installed, False)
    missing = 'No installed packages' if args.installed else 'None'
    for tag in sorted(tag_pkgs):
        # TODO: Remove the sorting once we're sure noone has an old
        # TODO: tag cache since it can accumulate duplicates.
        packages = sorted(list(set(tag_pkgs[tag])))
        if isatty:
            buffer.write("{0}:\n".format(tag))
        if packages:
            colify.colify(packages, output=buffer, tty=isatty, indent=4)
        else:
            buffer.write("    {0}\n".format(missing))
        buffer.write("\n")
    print(buffer.getvalue())
| 31.555556
| 79
| 0.649941
|
import sys
import six
import llnl.util.tty as tty
import llnl.util.tty.colify as colify
import spack.repo
import spack.store
import spack.tag
description = "Show package tags and associated packages"
section = "basic"
level = "long"
def report_tags(category, tags):
buffer = six.StringIO()
isatty = sys.stdout.isatty()
if isatty:
num = len(tags)
fmt = '{0} package tag'.format(category)
buffer.write("{0}:\n".format(spack.util.string.plural(num, fmt)))
if tags:
colify.colify(tags, output=buffer, tty=isatty, indent=4)
else:
buffer.write(" None\n")
print(buffer.getvalue())
def setup_parser(subparser):
subparser.epilog = (
"Tags from known packages will be used if no tags are provided on "
"the command\nline. If tags are provided, packages with at least one "
"will be reported.\n\nYou are not allowed to provide tags and use "
"'--all' at the same time."
)
subparser.add_argument(
'-i', '--installed', action='store_true', default=False,
help="show information for installed packages only"
)
subparser.add_argument(
'-a', '--all', action='store_true', default=False,
help="show packages for all available tags"
)
subparser.add_argument(
'tag',
nargs='*',
help="show packages with the specified tag"
)
def tags(parser, args):
if args.all and args.tag:
tty.die("Use the '--all' option OR provide tag(s) on the command line")
if args.installed and not spack.environment.installed_specs():
tty.msg("No installed packages")
return
available_tags = sorted(spack.repo.path.tag_index.keys())
if not available_tags:
tty.msg("No tagged packages")
return
show_packages = args.tag or args.all
if not show_packages:
if not args.installed:
report_tags("available", available_tags)
else:
tag_pkgs = spack.tag.packages_with_tags(available_tags, True, True)
tags = tag_pkgs.keys() if tag_pkgs else []
report_tags("installed", tags)
return
buffer = six.StringIO()
isatty = sys.stdout.isatty()
tags = args.tag if args.tag else available_tags
tag_pkgs = spack.tag.packages_with_tags(tags, args.installed, False)
missing = 'No installed packages' if args.installed else 'None'
for tag in sorted(tag_pkgs):
# TODO: tag cache since it can accumulate duplicates.
packages = sorted(list(set(tag_pkgs[tag])))
if isatty:
buffer.write("{0}:\n".format(tag))
if packages:
colify.colify(packages, output=buffer, tty=isatty, indent=4)
else:
buffer.write(" {0}\n".format(missing))
buffer.write("\n")
print(buffer.getvalue())
| true
| true
|
f7189f8dd6f26d34536254222d1b19d45d633d5e
| 602
|
py
|
Python
|
ss/leads/migrations/0003_lead_owner.py
|
nishendra3/task_managent_tool
|
e228213df2c5d22e014e5efd8c7e1011160cf3ef
|
[
"MIT"
] | null | null | null |
ss/leads/migrations/0003_lead_owner.py
|
nishendra3/task_managent_tool
|
e228213df2c5d22e014e5efd8c7e1011160cf3ef
|
[
"MIT"
] | null | null | null |
ss/leads/migrations/0003_lead_owner.py
|
nishendra3/task_managent_tool
|
e228213df2c5d22e014e5efd8c7e1011160cf3ef
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.2 on 2021-04-26 09:28
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Add a nullable ``owner`` foreign key from Lead to the user model."""
    dependencies = [
        # Depend on whatever model AUTH_USER_MODEL is swapped to.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('leads', '0002_rename_leads_lead'),
    ]
    operations = [
        migrations.AddField(
            model_name='lead',
            name='owner',
            # null=True lets existing rows migrate without a default owner;
            # CASCADE deletes a user's leads when the user is deleted.
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='leads', to=settings.AUTH_USER_MODEL),
        ),
    ]
| 27.363636
| 143
| 0.677741
|
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('leads', '0002_rename_leads_lead'),
]
operations = [
migrations.AddField(
model_name='lead',
name='owner',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='leads', to=settings.AUTH_USER_MODEL),
),
]
| true
| true
|
f718a013ca7dbd568d075d456a25634b7ca18e7d
| 615
|
py
|
Python
|
setup.py
|
tavyc/lockex
|
e6b8d5440b21b08899785f7d97803b5b4ed16ab4
|
[
"BSD-3-Clause"
] | null | null | null |
setup.py
|
tavyc/lockex
|
e6b8d5440b21b08899785f7d97803b5b4ed16ab4
|
[
"BSD-3-Clause"
] | null | null | null |
setup.py
|
tavyc/lockex
|
e6b8d5440b21b08899785f7d97803b5b4ed16ab4
|
[
"BSD-3-Clause"
] | null | null | null |
from setuptools import setup, find_packages
# Packaging metadata for lockex: a CLI that acquires a ZooKeeper lock and
# then runs a command. All dependencies are pinned to exact versions.
setup(name="lockex",
      version="0.3",
      description="Get lock from zookeeper and execute",
      packages=find_packages(exclude=["__pycache__"]),
      install_requires=['click==7.1.1', 'python_gflags==3.1.2', 'kazoo==2.8.0', 'pure-sasl==0.6.2', 'psutil==5.7.0', 'future==0.18.2'],
      setup_requires=['flake8==2.5.4'],
      tests_require=['tox==2.3.1', 'pytest==2.6.3', 'testfixtures==4.9.1', 'mock==1.0.1'],
      # Installs the `lockex` console command.
      entry_points={'console_scripts': ['lockex = lockex.execute:execute']},
      extras_require=dict(test=['testfixtures'],),
      license='BSD',)
| 51.25
| 135
| 0.634146
|
from setuptools import setup, find_packages
setup(name="lockex",
version="0.3",
description="Get lock from zookeeper and execute",
packages=find_packages(exclude=["__pycache__"]),
install_requires=['click==7.1.1', 'python_gflags==3.1.2', 'kazoo==2.8.0', 'pure-sasl==0.6.2', 'psutil==5.7.0', 'future==0.18.2'],
setup_requires=['flake8==2.5.4'],
tests_require=['tox==2.3.1', 'pytest==2.6.3', 'testfixtures==4.9.1', 'mock==1.0.1'],
entry_points={'console_scripts': ['lockex = lockex.execute:execute']},
extras_require=dict(test=['testfixtures'],),
license='BSD',)
| true
| true
|
f718a027b0346912471874053f7c8deab0ce9e6d
| 3,017
|
py
|
Python
|
test/unit_tests/providers/test_vimeo.py
|
ourresearch/total-impact-webapp
|
ab0d011dc783491bc85aadc2dc9c0f204e59429e
|
[
"MIT"
] | 4
|
2015-10-22T10:11:01.000Z
|
2017-06-04T18:08:28.000Z
|
test/unit_tests/providers/test_vimeo.py
|
Impactstory/total-impact-webapp
|
ab0d011dc783491bc85aadc2dc9c0f204e59429e
|
[
"MIT"
] | 2
|
2015-01-11T05:45:59.000Z
|
2015-02-11T20:37:05.000Z
|
test/unit_tests/providers/test_vimeo.py
|
Impactstory/total-impact-webapp
|
ab0d011dc783491bc85aadc2dc9c0f204e59429e
|
[
"MIT"
] | 3
|
2015-01-10T03:23:13.000Z
|
2015-10-11T15:49:41.000Z
|
from test.unit_tests.providers import common
from test.unit_tests.providers.common import ProviderTestCase
from totalimpact.providers.provider import Provider, ProviderContentMalformedError
from test.utils import http
import os
import collections
from nose.tools import assert_equals, assert_items_equal, raises, nottest
# Canned Vimeo provider pages used as offline fixtures by the tests below.
datadir = os.path.join(os.path.split(__file__)[0], "../../../extras/sample_provider_pages/vimeo")
SAMPLE_EXTRACT_METRICS_PAGE = os.path.join(datadir, "metrics")
SAMPLE_EXTRACT_BIBLIO_PAGE = os.path.join(datadir, "biblio")
class TestVimeo(ProviderTestCase):
    """Tests for the Vimeo provider (Python 2 -- note the print statements).

    Offline tests parse canned fixture pages; tests decorated with @http
    hit the live API and assert lower bounds rather than exact counts.
    """
    provider_name = "vimeo"
    # All three test items use the same sample video URL as the alias.
    testitem_aliases = ("url", "http://vimeo.com/48605764")
    testitem_metrics = ("url", "http://vimeo.com/48605764")
    testitem_biblio = ("url", "http://vimeo.com/48605764")
    def setUp(self):
        ProviderTestCase.setUp(self)
    def test_is_relevant_alias(self):
        # ensure that it matches an appropriate ids
        assert_equals(self.provider.is_relevant_alias(self.testitem_aliases), True)
        assert_equals(self.provider.is_relevant_alias(("url", "NOT A VIMEO ID")), False)
    def test_extract_metrics_success(self):
        """Play count is parsed from the canned metrics page."""
        f = open(SAMPLE_EXTRACT_METRICS_PAGE, "r")
        metrics_dict = self.provider._extract_metrics(f.read(), id=self.testitem_metrics[1])
        print metrics_dict
        assert_equals(metrics_dict["vimeo:plays"], 83)
    def test_extract_biblio_success(self):
        """Biblio fields are parsed from the canned biblio page."""
        f = open(SAMPLE_EXTRACT_BIBLIO_PAGE, "r")
        biblio_dict = self.provider._extract_biblio(f.read(), self.testitem_biblio[1])
        print biblio_dict
        expected = {'repository': 'Vimeo', 'title': 'Wheat Rust Inoculation Protocol Video', 'url': 'http://vimeo.com/48605764', 'year': '2012', 'authors': 'Huang Lab', 'published_date': '2012-08-31 12:20:16'}
        assert_equals(biblio_dict, expected)
    def test_provenance_url(self):
        # The provenance URL is the video page itself, regardless of metric.
        provenance_url = self.provider.provenance_url("github:forks", [self.testitem_aliases])
        assert_equals(provenance_url, 'http://vimeo.com/48605764')
    @http
    def test_metrics(self):
        """Live test: play count may only grow, so assert >= the baseline."""
        metrics_dict = self.provider.metrics([self.testitem_metrics])
        print metrics_dict
        expected = {'vimeo:plays': (83, 'http://vimeo.com/48605764')}
        for key in expected:
            assert metrics_dict[key][0] >= expected[key][0], [key, metrics_dict[key], expected[key]]
            assert metrics_dict[key][1] == expected[key][1], [key, metrics_dict[key], expected[key]]
    @http
    def test_biblio(self):
        """Live test: stable fields must match exactly; authors may vary."""
        biblio_dict = self.provider.biblio([self.testitem_biblio])
        print biblio_dict
        expected = {'repository': 'Vimeo', 'title': u'Wheat Rust Inoculation Protocol Video', 'url': u'http://vimeo.com/48605764', 'year': u'2012', 'authors': u'Huang Lab', 'published_date': u'2012-08-31 12:20:16'}
        assert_items_equal(biblio_dict.keys(), expected.keys())
        for key in ['year', 'published_date', 'title', 'url']:
            assert_equals(biblio_dict[key], expected[key])
| 46.415385
| 214
| 0.69705
|
from test.unit_tests.providers import common
from test.unit_tests.providers.common import ProviderTestCase
from totalimpact.providers.provider import Provider, ProviderContentMalformedError
from test.utils import http
import os
import collections
from nose.tools import assert_equals, assert_items_equal, raises, nottest
datadir = os.path.join(os.path.split(__file__)[0], "../../../extras/sample_provider_pages/vimeo")
SAMPLE_EXTRACT_METRICS_PAGE = os.path.join(datadir, "metrics")
SAMPLE_EXTRACT_BIBLIO_PAGE = os.path.join(datadir, "biblio")
class TestVimeo(ProviderTestCase):
provider_name = "vimeo"
testitem_aliases = ("url", "http://vimeo.com/48605764")
testitem_metrics = ("url", "http://vimeo.com/48605764")
testitem_biblio = ("url", "http://vimeo.com/48605764")
def setUp(self):
ProviderTestCase.setUp(self)
def test_is_relevant_alias(self):
assert_equals(self.provider.is_relevant_alias(self.testitem_aliases), True)
assert_equals(self.provider.is_relevant_alias(("url", "NOT A VIMEO ID")), False)
def test_extract_metrics_success(self):
f = open(SAMPLE_EXTRACT_METRICS_PAGE, "r")
metrics_dict = self.provider._extract_metrics(f.read(), id=self.testitem_metrics[1])
print metrics_dict
assert_equals(metrics_dict["vimeo:plays"], 83)
def test_extract_biblio_success(self):
f = open(SAMPLE_EXTRACT_BIBLIO_PAGE, "r")
biblio_dict = self.provider._extract_biblio(f.read(), self.testitem_biblio[1])
print biblio_dict
expected = {'repository': 'Vimeo', 'title': 'Wheat Rust Inoculation Protocol Video', 'url': 'http://vimeo.com/48605764', 'year': '2012', 'authors': 'Huang Lab', 'published_date': '2012-08-31 12:20:16'}
assert_equals(biblio_dict, expected)
def test_provenance_url(self):
provenance_url = self.provider.provenance_url("github:forks", [self.testitem_aliases])
assert_equals(provenance_url, 'http://vimeo.com/48605764')
@http
def test_metrics(self):
metrics_dict = self.provider.metrics([self.testitem_metrics])
print metrics_dict
expected = {'vimeo:plays': (83, 'http://vimeo.com/48605764')}
for key in expected:
assert metrics_dict[key][0] >= expected[key][0], [key, metrics_dict[key], expected[key]]
assert metrics_dict[key][1] == expected[key][1], [key, metrics_dict[key], expected[key]]
@http
def test_biblio(self):
biblio_dict = self.provider.biblio([self.testitem_biblio])
print biblio_dict
expected = {'repository': 'Vimeo', 'title': u'Wheat Rust Inoculation Protocol Video', 'url': u'http://vimeo.com/48605764', 'year': u'2012', 'authors': u'Huang Lab', 'published_date': u'2012-08-31 12:20:16'}
assert_items_equal(biblio_dict.keys(), expected.keys())
for key in ['year', 'published_date', 'title', 'url']:
assert_equals(biblio_dict[key], expected[key])
| false
| true
|
f718a0367b4270d581488b88bdf48bcb02c744fd
| 3,379
|
py
|
Python
|
pychron/core/ui/qt/color_map_bar_editor.py
|
ael-noblegas/pychron
|
6ebbbb1f66a614972b62b7a9be4c784ae61b5d62
|
[
"Apache-2.0"
] | 1
|
2019-02-27T21:57:44.000Z
|
2019-02-27T21:57:44.000Z
|
pychron/core/ui/qt/color_map_bar_editor.py
|
ael-noblegas/pychron
|
6ebbbb1f66a614972b62b7a9be4c784ae61b5d62
|
[
"Apache-2.0"
] | 80
|
2018-07-17T20:10:20.000Z
|
2021-08-17T15:38:24.000Z
|
pychron/core/ui/qt/color_map_bar_editor.py
|
AGESLDEO/pychron
|
1a81e05d9fba43b797f335ceff6837c016633bcf
|
[
"Apache-2.0"
] | null | null | null |
# ===============================================================================
# Copyright 2012 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
# ============= enthought library imports =======================
from __future__ import absolute_import
from chaco.data_range_1d import DataRange1D
from chaco.default_colormaps import color_map_dict, color_map_name_dict
from pyface.qt.QtGui import QPainter, QColor, QFrame
from traits.api import Float, Int, Str
from traitsui.basic_editor_factory import BasicEditorFactory
from traitsui.qt4.editor import Editor
from numpy import array
# ============= local library imports ==========================
# from matplotlib.cm import get_cmap
class Bar(QFrame):
    """Qt frame that paints a solid bar whose color encodes a value."""

    # Class-level defaults; instances are configured by the editor factory.
    value = None        # current color as an [r, g, b] list of 0-255 floats
    low = 0
    high = 1
    color_scalar = 1
    colormap = 'jet'
    bar_width = 100
    scale = 'power'

    def paintEvent(self, e):
        """Fill a bar_width x 20 rectangle with the current color."""
        painter = QPainter()
        painter.begin(self)
        painter.setBrush(QColor(*self.value))
        painter.drawRect(0, 0, self.bar_width, 20)
        painter.end()

    def set_value(self, v):
        """Map *v* onto the color scale and trigger a repaint.

        With the 'power' scale, v maps via nv = A * v**(1/color_scalar),
        which exaggerates small changes at the low end; otherwise v is
        clipped and normalized linearly into [0, 1].
        """
        if self.scale == 'power':
            exponent = 1 / float(self.color_scalar)
            amplitude = 1 / self.high ** exponent
            normalized = amplitude * v ** exponent
        else:
            span = self.high - self.low
            normalized = min(1, max(0, (v - self.low) / span))
        rgb = self.cmap.map_screen(array([normalized, ]))[0][:3]
        self.value = [channel * 255 for channel in rgb]
        self.update()
class _BarGaugeEditor(Editor):
    """TraitsUI editor that renders a trait value as a color-mapped bar."""

    def init(self, parent):
        """Create the Bar widget and configure it from the factory traits."""
        factory = self.factory
        bar = Bar()
        bar.low = factory.low
        bar.high = factory.high
        bar.color_scalar = factory.color_scalar
        bar.bar_width = factory.width
        bar.scale = factory.scale
        # Color lookup is done on the normalized [0, 1] range.
        bar.cmap = color_map_name_dict[factory.colormap](
            DataRange1D(low_setting=0, high_setting=1))
        self.control = bar

    def update_editor(self):
        """Push the trait's current value into the bar widget."""
        if self.control:
            self.control.set_value(self.value)
class BarGaugeEditor(BasicEditorFactory):
    """Editor factory for the color-mapped bar gauge."""
    # The editor class this factory instantiates.
    klass = _BarGaugeEditor
    low = Float               # lower bound of the value range
    high = Float              # upper bound of the value range
    color_scalar = Int(1)     # power-scale exponent divisor (see Bar.set_value)
    scale = Str('power')      # 'power' or linear normalization
    colormap = Str('jet')     # chaco colormap name
    width = Int(100)          # bar width in pixels
# ============= EOF =============================================
| 33.455446
| 114
| 0.587452
|
from __future__ import absolute_import
from chaco.data_range_1d import DataRange1D
from chaco.default_colormaps import color_map_dict, color_map_name_dict
from pyface.qt.QtGui import QPainter, QColor, QFrame
from traits.api import Float, Int, Str
from traitsui.basic_editor_factory import BasicEditorFactory
from traitsui.qt4.editor import Editor
from numpy import array
class Bar(QFrame):
value = None
low = 0
high = 1
color_scalar = 1
colormap = 'jet'
bar_width = 100
scale = 'power'
def paintEvent(self, e):
qp = QPainter()
qp.begin(self)
qp.setBrush(QColor(*self.value))
qp.drawRect(0, 0, self.bar_width, 20)
qp.end()
def set_value(self, v):
if self.scale == 'power':
N = 1 / float(self.color_scalar)
A = 1 / self.high ** N
nv = A * v ** N
else:
nv = min(1, max(0, (v - self.low) / (self.high - self.low)))
vs = self.cmap.map_screen(array([nv,]))[0][:3]
self.value = [x * 255 for x in vs]
self.update()
class _BarGaugeEditor(Editor):
def init(self, parent):
self.control = Bar()
self.control.low = low = self.factory.low
self.control.high = high = self.factory.high
self.control.color_scalar = self.factory.color_scalar
self.control.bar_width = self.factory.width
self.control.scale = self.factory.scale
self.control.cmap = color_map_name_dict[self.factory.colormap](DataRange1D(low_setting=0, high_setting=1))
def update_editor(self):
if self.control:
self.control.set_value(self.value)
class BarGaugeEditor(BasicEditorFactory):
klass = _BarGaugeEditor
low = Float
high = Float
color_scalar = Int(1)
scale = Str('power')
colormap = Str('jet')
width = Int(100)
| true
| true
|
f718a03a93d543af426ee374eecee7750397b95b
| 8,579
|
py
|
Python
|
public/code/simpleCropPredictSpektogram.py
|
awinawin1/prediksi
|
b3d552555f775d7b6a1b22077146443fe09bbf5d
|
[
"MIT"
] | null | null | null |
public/code/simpleCropPredictSpektogram.py
|
awinawin1/prediksi
|
b3d552555f775d7b6a1b22077146443fe09bbf5d
|
[
"MIT"
] | null | null | null |
public/code/simpleCropPredictSpektogram.py
|
awinawin1/prediksi
|
b3d552555f775d7b6a1b22077146443fe09bbf5d
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Sat May 15 00:21:05 2021
@author: marina
"""
import os
import shutil
import pyedflib
import numpy as np
import pandas as pd
import sys
import mne
from pywt import wavedec
from sklearn.preprocessing import LabelEncoder
import matplotlib.pyplot as plt
from scipy import signal
from keras.models import Sequential
#importing layers
from keras.layers import Conv2D,Flatten,Dense,MaxPooling2D
from tensorflow.keras.optimizers import SGD
# pathDataSet = "D:\\Kuliah\Tugas Akhir\chb-mit-scalp-eeg-database-1.0.0\\chb07\\"
# Input EDF uploads and output prediction-log directories (absolute paths).
pathDataSet = "/Applications/XAMPP/xamppfiles/htdocs/prediksi/storage/app/public/uploadedSpektogram/"
pathSaveData = "/Applications/XAMPP/xamppfiles/htdocs/prediksi/storage/app/public/uploadedSpektogram/spektogram/"
def data_load(FILE, selected_channels=None):
    """Read an EDF recording and return the selected channels.

    Args:
        FILE (str): file name, resolved relative to ``pathDataSet``.
        selected_channels (list): channel labels to keep (defaults to none).

    Returns:
        tuple: (DataFrame of float32 signals, one column per kept channel;
        sampling frequency of the first channel).
    """
    # Use None instead of a mutable [] default argument.
    if selected_channels is None:
        selected_channels = []
    fullNm = pathDataSet + FILE
    f = pyedflib.EdfReader(fullNm)
    try:
        n = f.signals_in_file
        signal_labels = f.getSignalLabels()
        channel_freq = f.getSampleFrequencies()
        sigbufs = np.zeros((n, f.getNSamples()[0]))
        for i in np.arange(n):
            sigbufs[i, :] = f.readSignal(i)
    finally:
        # Always release the EDF handle, even if a read fails.
        f.close()
    # and load the data into a DataFrame (one row per sample)
    df_signals = pd.DataFrame(sigbufs)
    df_signals = df_signals.transpose()
    df_signals.columns = signal_labels
    # Drop duplicate channel labels, then keep only the requested channels.
    df_signals = df_signals.loc[:, ~df_signals.columns.duplicated()]
    df_signals = df_signals[selected_channels].astype('float32')
    return df_signals, channel_freq[0]
def mne_object(data, freq, events=None):
    """Wrap a DataFrame of EEG signals in an MNE RawArray.

    Args:
        data: DataFrame with one column per channel.
        freq: sampling frequency in Hz.
        events: optional flat [start, end, start, end, ...] list; each
            start/end pair becomes an 'Ictal' annotation.
    """
    info = mne.create_info(ch_names=list(data.columns),
                           sfreq=freq,
                           ch_types=['eeg'] * data.shape[-1])
    raw = mne.io.RawArray(data.transpose(), info, verbose=False)

    if events:
        onsets = np.array(events[::2])
        offsets = np.array(events[1::2])
        durations = offsets - onsets
        labels = np.array(['Ictal'] * len(durations))
        raw.set_annotations(mne.Annotations(onsets, durations, labels))

    return raw
def loadAndFiltering(FILE, channel_keeps):
    """Load *FILE*, keep *channel_keeps*, and return a copy of the MNE Raw.

    Returns the (empty) DataFrame unchanged when the file has no samples.
    """
    signals, sampling_rate = data_load(FILE, channel_keeps)
    if not len(signals):
        print("no data ")
        return signals
    return mne_object(signals, sampling_rate).copy()
def extract_windows(array, start, max_time, sub_window_size,
                    stride_size):
    """Slice overlapping windows out of *array* along its last axis.

    Window k covers samples [start + k*stride_size,
    start + k*stride_size + sub_window_size); windows are generated while
    they fit entirely before *max_time*.

    Returns:
        array indexed as (channels, n_windows, sub_window_size).
    """
    window_offsets = np.arange(sub_window_size)[np.newaxis, :]
    window_starts = np.arange(
        max_time + 1 - sub_window_size - start,
        step=stride_size)[:, np.newaxis]
    indices = start + window_starts + window_offsets
    return array[:, indices]
def Crop(raw):
    """Split the recording into 3-second windows with a 1-second stride.

    Args:
        raw: object exposing ``get_data()`` -> array (channels, samples),
            assumed sampled at 256 Hz -- TODO confirm.

    Returns:
        ndarray with dimensions (n_windows, n_channels, window_samples).
    """
    cropS = 3  # window length, seconds
    strides = 1  # stride, seconds
    tMin=0
    tMax=raw.get_data().shape[1]#18*256*cropS
    sub_window_size,stride_size = 256*cropS,256*strides
    cropData = extract_windows(raw.get_data(), tMin, tMax , sub_window_size,stride_size)
    # NOTE(review): extract_windows returns (channels, windows, samples).
    # reshape() reorders the flat buffer rather than swapping axes, so this
    # is NOT equivalent to transpose(1, 0, 2) and interleaves channel/window
    # data whenever n_channels != n_windows. Presumably the trained model
    # expects exactly this layout -- confirm before "fixing" with swapaxes.
    cropData = cropData.reshape(cropData.shape[1],cropData.shape[0],cropData.shape[2])
    return cropData
# def create_modelCNN(input_shape, num_class,flatten=False):
# from tensorflow.keras.models import Sequential
# from tensorflow.keras.layers import Dense
# from tensorflow.keras.backend import clear_session
# from tensorflow.keras.optimizers import Adam
# from tensorflow.keras.layers import Conv1D#, Input
# from tensorflow.keras.layers import MaxPooling1D
# from tensorflow.keras.layers import GlobalAveragePooling1D#, GlobalMaxPooling1D
# from keras.layers import Activation,Flatten, Dropout
# clear_session()
# model = Sequential()
# def add_conv_block(model, num_filters, input_shape=None):
# if input_shape:
# model.add(Conv1D(num_filters, kernel_size=3, activation='relu', padding='same', input_shape=input_shape))
# else:
# model.add(Conv1D(num_filters, kernel_size=3, activation='relu', padding='same'))
# return model
# model = add_conv_block(model, 128, input_shape=input_shape[1:])
# model = add_conv_block(model, 128)
# model.add(Dropout(0.3))
# model.add(MaxPooling1D(pool_size=3, # size of the window
# strides=2, # factor to downsample
# padding='same'))
# model.add(Dropout(0.1))
# for i in range(2):
# model.add(Conv1D(filters=256,kernel_size=3,padding="same",activation='relu'))
# model.add(Dropout(0.1))
# if flatten:
# model.add(Flatten())
# else:
# model.add(GlobalAveragePooling1D())
# model.add(Dense(units=128,activation='relu'))
# model.add(Dropout(0.1))
# model.add(Dense(num_class))
# model.add(Activation('softmax'))
# model.compile(optimizer=Adam(0.0001),
# loss='categorical_crossentropy',
# metrics=['accuracy'])
# return model
def modelCNN2(input_shape, nb_classes):
    """Build and compile a small VGG-style 2D CNN classifier.

    Three conv/conv/pool stages (32, 64, 128 filters), a 128-unit dense
    layer, and a softmax head; compiled with SGD(lr=0.001, momentum=0.9)
    and categorical cross-entropy.
    """
    model = Sequential()
    # Each stage: two same-padded 3x3 ReLU convolutions + 2x2 max-pooling.
    for stage, n_filters in enumerate((32, 64, 128)):
        if stage == 0:
            model.add(Conv2D(n_filters, (3, 3), activation='relu',
                             padding='same', input_shape=input_shape))
        else:
            model.add(Conv2D(n_filters, (3, 3), activation='relu',
                             padding='same'))
        model.add(Conv2D(n_filters, (3, 3), activation='relu',
                         padding='same'))
        model.add(MaxPooling2D((2, 2)))
    # Classification head.
    model.add(Flatten())
    model.add(Dense(128, activation='relu'))
    model.add(Dense(nb_classes, activation='softmax'))
    model.compile(optimizer=SGD(lr=0.001, momentum=0.9),
                  loss='categorical_crossentropy',
                  metrics=['accuracy'])
    return model
def plotSpektogram(x,fs,nmFile=''):
    """Compute per-channel spectrograms and return their RGB pixel values.

    Args:
        x: array of shape (channels, samples) -- TODO confirm against Crop().
        fs: sampling frequency in Hz.
        nmFile: optional image output path; currently unused because the
            save code below is commented out.

    Returns:
        1-D numpy array of flattened (r, g, b) values across all channels;
        callers reshape it to (1, channels, -1, 3).
    """
    f, t, Sxx = signal.spectrogram(x, fs)
    cut=10  # keep only the 10 lowest frequency bins
    imgAll=[]
    for i,sinyal in enumerate(Sxx):
        # Render the channel's spectrogram and read back its RGBA pixels,
        # dropping the alpha component.
        img = plt.pcolormesh(t, f[:cut], sinyal[:cut], shading='gouraud')
        imgAll.append([(r, g, b) for r, g, b, a in img.to_rgba(img.get_array())])
    # print(nmFile)
    # if nmFile !='':
    #(18, 30, 3)
        # print("masuk sini")
        # plt.savefig(nmFile)
    # plt.show()
        # plt.imsave(nmFile, imgAll)
    # imgAll = np.array(imgAll)# .reshape(-1,3)
    imgAll = np.array(imgAll).ravel()
    #(18, 30, 3)
    return imgAll
if __name__ == '__main__':
    # Usage: python simpleCropPredictSpektogram.py <edf-file-name>
    FILE=sys.argv[1]
    # FILE = 'D:\\Kuliah\Tugas Akhir\chb-mit-scalp-eeg-database-1.0.0\\chb24\\chb24_22.edf'
    # FILE = 'chb07_12.edf'
    FILE = FILE.replace("'","")
    dir_path = "/Applications/XAMPP/xamppfiles/htdocs/prediksi/storage/app/public/fitur3Kelas30DetikImg/"
    # if(os.path.isdir(dir_path+FILE)):
    #     shutil.rmtree(dir_path+FILE)
    # os.mkdir("/Applications/XAMPP/xamppfiles/htdocs/prediksi/storage/app/public/fitur3Kelas30DetikImg/"+FILE,0o777)
    # Channel selection saved by the training pipeline.
    loaded = np.load("/Applications/XAMPP/xamppfiles/htdocs/prediksi/storage/app/public/spektogram/channel_keeps.npz")
    selected_channels =loaded['channel_keeps']
    segmen=[]
    # Load the EDF and cut it into overlapping 3-second windows.
    raw = loadAndFiltering(FILE,selected_channels)
    cropData = Crop(raw)
    numCH = cropData[0].shape[0]
    oneData = cropData[0]
    oneData = plotSpektogram(oneData,256)
    oneData = oneData.reshape(1,numCH,-1, 3)
    # Build the CNN with the fixed training-time input shape and load the
    # pre-trained weights; 3 output classes: Normal / Inter / Ictal.
    KELAS = 3
    bntk_input = (18, 30, 3)
    model = modelCNN2(bntk_input,KELAS)
    # model = modelCNN2(oneData.shape,KELAS)#,False)
    nmModel = '/Applications/XAMPP/xamppfiles/htdocs/prediksi/storage/app/public/modelCNNSpektrogram_3.h5'
    model.load_weights(nmModel)
    cnt=0
    # Classify each window from its spectrogram image (capped at 1000).
    for idx in range(cropData.shape[0]):
        numCH = cropData[idx].shape[0]
        oneData = cropData[idx]
        nmFile = "/Applications/XAMPP/xamppfiles/htdocs/prediksi/storage/app/public/fitur3Kelas30DetikImg/%s/%s_%d.png"%(FILE,FILE,idx)
        # nmFile = dir+"%s_%s.png"%(FILE,idx)
        oneData = plotSpektogram(oneData,256,nmFile)
        oneData = oneData.reshape(1,numCH,-1, 3)
        yPred = model.predict(oneData)
        yPred = np.argmax(yPred,axis=1)
        if yPred[0] == 0:
            hasil = "Normal"
        elif yPred[0] == 1:
            hasil = "Inter"
        else:
            hasil = "Ictal"
        # break
        segmen.append(hasil)
        # print("segment=%d prediksi=%s <br>"%(idx,hasil))
        cnt+=1
        if cnt>1000:
            break
    # Persist the per-window labels and echo them for the calling web app.
    saveHistory = open(pathSaveData+FILE+".txt","w")
    saveHistory.write(str(segmen))
    saveHistory.close()
    print(segmen)
| 36.198312
| 136
| 0.645879
|
import os
import shutil
import pyedflib
import numpy as np
import pandas as pd
import sys
import mne
from pywt import wavedec
from sklearn.preprocessing import LabelEncoder
import matplotlib.pyplot as plt
from scipy import signal
from keras.models import Sequential
from keras.layers import Conv2D,Flatten,Dense,MaxPooling2D
from tensorflow.keras.optimizers import SGD
pathDataSet = "/Applications/XAMPP/xamppfiles/htdocs/prediksi/storage/app/public/uploadedSpektogram/"
pathSaveData = "/Applications/XAMPP/xamppfiles/htdocs/prediksi/storage/app/public/uploadedSpektogram/spektogram/"
def data_load(FILE, selected_channels=[]):
fullNm = pathDataSet + FILE
f = pyedflib.EdfReader(fullNm )
n = f.signals_in_file
signal_labels = f.getSignalLabels()
channel_freq = f.getSampleFrequencies()
sigbufs = np.zeros((n, f.getNSamples()[0]))
for i in np.arange(n):
sigbufs[i, :] = f.readSignal(i)
f.close()
df_signals = pd.DataFrame(sigbufs)
df_signals = df_signals.transpose()
df_signals.columns = signal_labels
df_signals = df_signals.loc[:,~df_signals.columns.duplicated()]
df_signals = df_signals[selected_channels].astype('float32')
return df_signals,channel_freq[0]
def mne_object(data, freq, events = None):
info = mne.create_info(ch_names=list(data.columns),
sfreq=freq,
ch_types=['eeg']*data.shape[-1])
data_T = data.transpose()
raw = mne.io.RawArray(data_T, info,verbose=False)
if events:
start_times = np.array(events[::2])
end_times = np.array(events[1::2])
anno_length = end_times-start_times
event_name = np.array(['Ictal']*len(anno_length))
raw.set_annotations(mne.Annotations(start_times,
anno_length,
event_name))
return raw
def loadAndFiltering(FILE,channel_keeps):
raw_data, freq = data_load(FILE, channel_keeps)
if len(raw_data) ==0:
print("no data ")
return raw_data
mne_data = mne_object(raw_data, freq)
raw=mne_data.copy()
return raw
def extract_windows(array, start, max_time, sub_window_size,
stride_size):
sub_windows = (
start +
np.expand_dims(np.arange(sub_window_size), 0) +
np.expand_dims(np.arange(max_time + 1- sub_window_size-start, step=stride_size), 0).T
)
return array[:,sub_windows]
def Crop(raw):
cropS = 3
strides = 1
tMin=0
tMax=raw.get_data().shape[1]
sub_window_size,stride_size = 256*cropS,256*strides
cropData = extract_windows(raw.get_data(), tMin, tMax , sub_window_size,stride_size)
cropData = cropData.reshape(cropData.shape[1],cropData.shape[0],cropData.shape[2])
return cropData
asses):
model = Sequential()
model.add(Conv2D(32, (3, 3), activation='relu', padding='same', input_shape=input_shape))
model.add(Conv2D(32, (3, 3), activation='relu', padding='same'))
model.add(MaxPooling2D((2, 2)))
model.add(Conv2D(64, (3, 3), activation='relu', padding='same'))
model.add(Conv2D(64, (3, 3), activation='relu', padding='same'))
model.add(MaxPooling2D((2, 2)))
model.add(Conv2D(128, (3, 3), activation='relu', padding='same'))
model.add(Conv2D(128, (3, 3), activation='relu', padding='same'))
model.add(MaxPooling2D((2, 2)))
model.add(Flatten())
model.add(Dense(128, activation='relu'))
model.add(Dense(nb_classes, activation='softmax'))
opt = SGD(lr=0.001, momentum=0.9)
model.compile(optimizer=opt, loss='categorical_crossentropy', metrics=['accuracy'])
return model
def plotSpektogram(x,fs,nmFile=''):
f, t, Sxx = signal.spectrogram(x, fs)
cut=10
imgAll=[]
for i,sinyal in enumerate(Sxx):
img = plt.pcolormesh(t, f[:cut], sinyal[:cut], shading='gouraud')
imgAll.append([(r, g, b) for r, g, b, a in img.to_rgba(img.get_array())])
.array(imgAll).ravel()
return imgAll
if __name__ == '__main__':
FILE=sys.argv[1]
FILE = FILE.replace("'","")
dir_path = "/Applications/XAMPP/xamppfiles/htdocs/prediksi/storage/app/public/fitur3Kelas30DetikImg/"
# if(os.path.isdir(dir_path+FILE)):
# shutil.rmtree(dir_path+FILE)
# os.mkdir("/Applications/XAMPP/xamppfiles/htdocs/prediksi/storage/app/public/fitur3Kelas30DetikImg/"+FILE,0o777)
loaded = np.load("/Applications/XAMPP/xamppfiles/htdocs/prediksi/storage/app/public/spektogram/channel_keeps.npz")
selected_channels =loaded['channel_keeps']
segmen=[]
raw = loadAndFiltering(FILE,selected_channels)
cropData = Crop(raw)
numCH = cropData[0].shape[0]
oneData = cropData[0]
oneData = plotSpektogram(oneData,256)
oneData = oneData.reshape(1,numCH,-1, 3)
KELAS = 3
bntk_input = (18, 30, 3)
model = modelCNN2(bntk_input,KELAS)
# model = modelCNN2(oneData.shape,KELAS)#,False)
nmModel = '/Applications/XAMPP/xamppfiles/htdocs/prediksi/storage/app/public/modelCNNSpektrogram_3.h5'
model.load_weights(nmModel)
cnt=0
for idx in range(cropData.shape[0]):
numCH = cropData[idx].shape[0]
oneData = cropData[idx]
nmFile = "/Applications/XAMPP/xamppfiles/htdocs/prediksi/storage/app/public/fitur3Kelas30DetikImg/%s/%s_%d.png"%(FILE,FILE,idx)
# nmFile = dir+"%s_%s.png"%(FILE,idx)
oneData = plotSpektogram(oneData,256,nmFile)
oneData = oneData.reshape(1,numCH,-1, 3)
yPred = model.predict(oneData)
yPred = np.argmax(yPred,axis=1)
if yPred[0] == 0:
hasil = "Normal"
elif yPred[0] == 1:
hasil = "Inter"
else:
hasil = "Ictal"
# break
segmen.append(hasil)
# print("segment=%d prediksi=%s <br>"%(idx,hasil))
cnt+=1
if cnt>1000:
break
saveHistory = open(pathSaveData+FILE+".txt","w")
saveHistory.write(str(segmen))
saveHistory.close()
print(segmen)
| true
| true
|
f718a057f6fbb3615714ace7248ca6eec06c111c
| 713
|
py
|
Python
|
TPs/TP4/test_flower.py
|
Aympab/BigDataHadoopSparkDaskCourse
|
42f9e0475cbd7c5db240ccc6dc00c19b9006012a
|
[
"Apache-2.0"
] | null | null | null |
TPs/TP4/test_flower.py
|
Aympab/BigDataHadoopSparkDaskCourse
|
42f9e0475cbd7c5db240ccc6dc00c19b9006012a
|
[
"Apache-2.0"
] | null | null | null |
TPs/TP4/test_flower.py
|
Aympab/BigDataHadoopSparkDaskCourse
|
42f9e0475cbd7c5db240ccc6dc00c19b9006012a
|
[
"Apache-2.0"
] | 1
|
2022-01-31T17:14:27.000Z
|
2022-01-31T17:14:27.000Z
|
import pyspark
from pyspark import SparkContext
from pyspark.sql import Row
from pyspark.sql import SQLContext
from pyspark import SparkFiles
import os
import pandas as pd
# Exploratory Spark SQL session over the iris dataset: load the CSV via
# pandas, convert to a Spark DataFrame, and run a few inspection queries.
sc =SparkContext()
sqlContext = SQLContext(sc)
data_dir="/work/irlin355_1/gratienj/ParallelProgrammingCourse/BigDataHadoopSpark/data"
file = os.path.join(data_dir,"iris.csv")
panda_df = pd.read_csv(file)
# NOTE(review): SQLContext is created twice; the second assignment shadows
# the first -- presumably a leftover.
sqlContext = SQLContext(sc)
#df = sqlContext.read.csv(SparkFiles.get("iris.csv"), header=True, inferSchema= True)
df=sqlContext.createDataFrame(panda_df)
df.printSchema()
df.show(5, truncate = False)
df.select('petal_width','variety').show(5)
# Class distribution, then summary statistics for the numeric columns.
df.groupBy("variety").count().sort("count",ascending=True).show()
df.describe().show()
| 26.407407
| 86
| 0.779804
|
import pyspark
from pyspark import SparkContext
from pyspark.sql import Row
from pyspark.sql import SQLContext
from pyspark import SparkFiles
import os
import pandas as pd
sc =SparkContext()
sqlContext = SQLContext(sc)
data_dir="/work/irlin355_1/gratienj/ParallelProgrammingCourse/BigDataHadoopSpark/data"
file = os.path.join(data_dir,"iris.csv")
panda_df = pd.read_csv(file)
sqlContext = SQLContext(sc)
df=sqlContext.createDataFrame(panda_df)
df.printSchema()
df.show(5, truncate = False)
df.select('petal_width','variety').show(5)
df.groupBy("variety").count().sort("count",ascending=True).show()
df.describe().show()
| true
| true
|
f718a2b72257a97c4d973393e72a8863d380eada
| 3,752
|
py
|
Python
|
tests/gallery/test_raster_transform.py
|
krisHans3n/geoalchemy2-mysql
|
38a44d51c242d867f40d4c5503c91f52a8269ff4
|
[
"MIT"
] | null | null | null |
tests/gallery/test_raster_transform.py
|
krisHans3n/geoalchemy2-mysql
|
38a44d51c242d867f40d4c5503c91f52a8269ff4
|
[
"MIT"
] | null | null | null |
tests/gallery/test_raster_transform.py
|
krisHans3n/geoalchemy2-mysql
|
38a44d51c242d867f40d4c5503c91f52a8269ff4
|
[
"MIT"
] | null | null | null |
"""
Reproject a Raster using ST_Transform
=====================================
The `ST_Transform()` function (and a few others like `ST_SnapToGrid()`) can be used on
both `Geometry` and `Raster` types. In `GeoAlchemy2`, this function is only defined for
`Geometry` as it can not be defined for several types at the same time. Thus using this
function on `Raster` requires minor tweaking.
This example uses both SQLAlchemy core and ORM queries.
"""
from sqlalchemy import Column
from sqlalchemy import Integer
from sqlalchemy import MetaData
from sqlalchemy import Table
from sqlalchemy import func
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import Query
from geoalchemy2 import Geometry
from geoalchemy2 import Raster
# Tests imports
from tests import select
# Shared metadata for both the core Table and the ORM model below.
metadata = MetaData()
Base = declarative_base(metadata=metadata)
# Core (non-ORM) table with one Geometry and one Raster column; used by
# test_transform_core.
table = Table(
    "raster_table",
    metadata,
    Column("id", Integer, primary_key=True),
    Column("geom", Geometry("POLYGON", 4326)),
    Column("rast", Raster()),
)
class RasterTable(Base):
    """ORM counterpart of ``raster_table``; used by test_transform_ORM."""
    __tablename__ = 'raster_table_orm'
    id = Column(Integer, primary_key=True)
    # geom: SRID-4326 polygon; rast: PostGIS raster column.
    geom = Column(Geometry("POLYGON", 4326))
    rast = Column(Raster())
    def __init__(self, rast):
        self.rast = rast
def test_transform_core():
    """ST_Transform on a Raster needs an explicit type_=Raster (core API)."""
    # Naive form: SQLAlchemy treats both results as Geometry.
    naive = select([
        func.ST_Transform(table.c.geom, 2154),
        func.ST_Transform(table.c.rast, 2154),
    ])
    expected_naive = (
        "SELECT "
        "ST_AsEWKB("
        "ST_Transform(raster_table.geom, :ST_Transform_2)) AS \"ST_Transform_1\", "
        "ST_AsEWKB("  # the raster is wrongly wrapped as a Geometry here
        "ST_Transform(raster_table.rast, :ST_Transform_4)) AS \"ST_Transform_3\" \n"
        "FROM raster_table"
    )
    assert str(naive) == expected_naive
    # Correct form: declare the result type so the raster() cast is emitted.
    fixed = select([
        func.ST_Transform(table.c.geom, 2154),
        func.ST_Transform(table.c.rast, 2154, type_=Raster),
    ])
    expected_fixed = (
        "SELECT "
        "ST_AsEWKB("
        "ST_Transform(raster_table.geom, :ST_Transform_2)) AS \"ST_Transform_1\", "
        "raster("  # now processed as a Raster
        "ST_Transform(raster_table.rast, :ST_Transform_4)) AS \"ST_Transform_3\" \n"
        "FROM raster_table"
    )
    assert str(fixed) == expected_fixed
def test_transform_ORM():
    """ST_Transform on a Raster needs an explicit type_=Raster (ORM API)."""
    # Naive form: both attributes rendered through the Geometry code path.
    naive = Query([
        RasterTable.geom.ST_Transform(2154),
        RasterTable.rast.ST_Transform(2154),
    ])
    expected_naive = (
        "SELECT "
        "ST_AsEWKB("
        "ST_Transform(raster_table_orm.geom, :ST_Transform_2)) AS \"ST_Transform_1\", "
        "ST_AsEWKB("  # the raster is wrongly wrapped as a Geometry here
        "ST_Transform(raster_table_orm.rast, :ST_Transform_4)) AS \"ST_Transform_3\" \n"
        "FROM raster_table_orm"
    )
    assert str(naive) == expected_naive
    # Correct form: pass type_=Raster so the raster() cast is emitted.
    fixed = Query([
        RasterTable.geom.ST_Transform(2154),
        RasterTable.rast.ST_Transform(2154, type_=Raster),
    ])
    expected_fixed = (
        "SELECT "
        "ST_AsEWKB("
        "ST_Transform(raster_table_orm.geom, :ST_Transform_2)) AS \"ST_Transform_1\", "
        "raster("  # now processed as a Raster
        "ST_Transform(raster_table_orm.rast, :ST_Transform_4)) AS \"ST_Transform_3\" \n"
        "FROM raster_table_orm"
    )
    assert str(fixed) == expected_fixed
| 32.912281
| 88
| 0.673507
|
from sqlalchemy import Column
from sqlalchemy import Integer
from sqlalchemy import MetaData
from sqlalchemy import Table
from sqlalchemy import func
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import Query
from geoalchemy2 import Geometry
from geoalchemy2 import Raster
from tests import select
metadata = MetaData()
Base = declarative_base(metadata=metadata)
table = Table(
"raster_table",
metadata,
Column("id", Integer, primary_key=True),
Column("geom", Geometry("POLYGON", 4326)),
Column("rast", Raster()),
)
class RasterTable(Base):
__tablename__ = 'raster_table_orm'
id = Column(Integer, primary_key=True)
geom = Column(Geometry("POLYGON", 4326))
rast = Column(Raster())
def __init__(self, rast):
self.rast = rast
def test_transform_core():
wrong_query = select([
func.ST_Transform(table.c.geom, 2154),
func.ST_Transform(table.c.rast, 2154)
])
assert str(wrong_query) == (
"SELECT "
"ST_AsEWKB("
"ST_Transform(raster_table.geom, :ST_Transform_2)) AS \"ST_Transform_1\", "
"ST_AsEWKB("
"ST_Transform(raster_table.rast, :ST_Transform_4)) AS \"ST_Transform_3\" \n"
"FROM raster_table"
)
correct_query = select([
func.ST_Transform(table.c.geom, 2154),
func.ST_Transform(table.c.rast, 2154, type_=Raster)
])
assert str(correct_query) == (
"SELECT "
"ST_AsEWKB("
"ST_Transform(raster_table.geom, :ST_Transform_2)) AS \"ST_Transform_1\", "
"raster("
"ST_Transform(raster_table.rast, :ST_Transform_4)) AS \"ST_Transform_3\" \n"
"FROM raster_table"
)
def test_transform_ORM():
wrong_query = Query([
RasterTable.geom.ST_Transform(2154),
RasterTable.rast.ST_Transform(2154)
])
assert str(wrong_query) == (
"SELECT "
"ST_AsEWKB("
"ST_Transform(raster_table_orm.geom, :ST_Transform_2)) AS \"ST_Transform_1\", "
"ST_AsEWKB("
"ST_Transform(raster_table_orm.rast, :ST_Transform_4)) AS \"ST_Transform_3\" \n"
"FROM raster_table_orm"
)
correct_query = Query([
RasterTable.geom.ST_Transform(2154),
RasterTable.rast.ST_Transform(2154, type_=Raster)
])
assert str(correct_query) == (
"SELECT "
"ST_AsEWKB("
"ST_Transform(raster_table_orm.geom, :ST_Transform_2)) AS \"ST_Transform_1\", "
"raster("
"ST_Transform(raster_table_orm.rast, :ST_Transform_4)) AS \"ST_Transform_3\" \n"
"FROM raster_table_orm"
)
| true
| true
|
f718a32adc8e23215976b6874f7a9baff2ac6fb1
| 2,262
|
py
|
Python
|
acme/agents/actors_tf2_test.py
|
owenshen24/acme
|
71434dffd3449236f9b8aaf7a53ceab515e75a2a
|
[
"Apache-2.0"
] | 1
|
2020-06-03T18:33:40.000Z
|
2020-06-03T18:33:40.000Z
|
acme/agents/actors_tf2_test.py
|
owenshen24/acme
|
71434dffd3449236f9b8aaf7a53ceab515e75a2a
|
[
"Apache-2.0"
] | null | null | null |
acme/agents/actors_tf2_test.py
|
owenshen24/acme
|
71434dffd3449236f9b8aaf7a53ceab515e75a2a
|
[
"Apache-2.0"
] | 1
|
2021-11-26T22:51:55.000Z
|
2021-11-26T22:51:55.000Z
|
# python3
# Copyright 2018 DeepMind Technologies Limited. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for actors_tf2."""
from absl.testing import absltest
from acme import environment_loop
from acme import specs
from acme.agents import actors_tf2
from acme.testing import fakes
import dm_env
import numpy as np
import sonnet as snt
import tensorflow as tf
def _make_fake_env() -> dm_env.Environment:
  """Build a 10-step fake dm_env with fixed obs/action/reward specs."""
  spec = specs.EnvironmentSpec(
      observations=specs.Array(shape=(10, 5), dtype=np.float32),
      actions=specs.DiscreteArray(num_values=3),
      rewards=specs.Array(shape=(), dtype=np.float32),
      discounts=specs.BoundedArray(
          shape=(), dtype=np.float32, minimum=0., maximum=1.),
  )
  return fakes.Environment(spec, episode_length=10)
class ActorTest(absltest.TestCase):
  """Smoke tests: both actor types should step a fake env without error."""

  def test_feedforward(self):
    env = _make_fake_env()
    spec = specs.make_environment_spec(env)
    # Flatten -> linear logits -> greedy action with the env's action dtype.
    policy = snt.Sequential([
        snt.Flatten(),
        snt.Linear(spec.actions.num_values),
        lambda logits: tf.argmax(logits, axis=-1, output_type=spec.actions.dtype),
    ])
    actor = actors_tf2.FeedForwardActor(policy)
    environment_loop.EnvironmentLoop(env, actor).run(20)

  def test_recurrent(self):
    env = _make_fake_env()
    spec = specs.make_environment_spec(env)
    policy = snt.DeepRNN([
        snt.Flatten(),
        snt.Linear(spec.actions.num_values),
        lambda logits: tf.argmax(logits, axis=-1, output_type=spec.actions.dtype),
    ])
    actor = actors_tf2.RecurrentActor(policy)
    environment_loop.EnvironmentLoop(env, actor).run(20)
# Allow running this test module directly.
if __name__ == '__main__':
  absltest.main()
| 30.16
| 76
| 0.728559
|
from absl.testing import absltest
from acme import environment_loop
from acme import specs
from acme.agents import actors_tf2
from acme.testing import fakes
import dm_env
import numpy as np
import sonnet as snt
import tensorflow as tf
def _make_fake_env() -> dm_env.Environment:
env_spec = specs.EnvironmentSpec(
observations=specs.Array(shape=(10, 5), dtype=np.float32),
actions=specs.DiscreteArray(num_values=3),
rewards=specs.Array(shape=(), dtype=np.float32),
discounts=specs.BoundedArray(
shape=(), dtype=np.float32, minimum=0., maximum=1.),
)
return fakes.Environment(env_spec, episode_length=10)
class ActorTest(absltest.TestCase):
def test_feedforward(self):
environment = _make_fake_env()
env_spec = specs.make_environment_spec(environment)
network = snt.Sequential([
snt.Flatten(),
snt.Linear(env_spec.actions.num_values),
lambda x: tf.argmax(x, axis=-1, output_type=env_spec.actions.dtype),
])
actor = actors_tf2.FeedForwardActor(network)
loop = environment_loop.EnvironmentLoop(environment, actor)
loop.run(20)
def test_recurrent(self):
environment = _make_fake_env()
env_spec = specs.make_environment_spec(environment)
network = snt.DeepRNN([
snt.Flatten(),
snt.Linear(env_spec.actions.num_values),
lambda x: tf.argmax(x, axis=-1, output_type=env_spec.actions.dtype),
])
actor = actors_tf2.RecurrentActor(network)
loop = environment_loop.EnvironmentLoop(environment, actor)
loop.run(20)
if __name__ == '__main__':
absltest.main()
| true
| true
|
f718a32e9858f3ea9274a1aadac3baae13a0a7c1
| 64,546
|
py
|
Python
|
tools/blender-dff/io_scene_dff.py
|
FakeYou/mashed
|
902d8f514872cfa22e2a2904e215d360bf48cae1
|
[
"MIT"
] | 5
|
2016-12-31T13:51:31.000Z
|
2018-06-12T18:36:52.000Z
|
tools/blender-dff/io_scene_dff.py
|
andrenanninga/mashed
|
902d8f514872cfa22e2a2904e215d360bf48cae1
|
[
"MIT"
] | 1
|
2021-03-26T20:53:34.000Z
|
2021-03-26T20:53:34.000Z
|
tools/blender-dff/io_scene_dff.py
|
FakeYou/mashed
|
902d8f514872cfa22e2a2904e215d360bf48cae1
|
[
"MIT"
] | 7
|
2016-10-08T15:38:30.000Z
|
2019-01-25T11:34:31.000Z
|
# Blender add-on registration metadata (read by Blender's add-on manager).
bl_info = {
    "name": "RenderWare importer/exporter for GTA III/VC/SA (.dff)",
    "author": "Ago Allikmaa (maxorator)",
    "version": (0, 9, 2),
    "blender": (2, 6, 3),
    "location": "File > Import-Export > Renderware (.dff) ",
    "description": "RenderWare importer/exporter for GTA III/VC/SA",
    "category": "Import-Export" }
import struct
import os
import zlib
import base64
from collections import deque
import bpy
import math
import mathutils
from bpy.props import *
class RwTypes():
    """RenderWare binary-stream section type IDs and version helpers.

    The hex constants are the chunk IDs found in .dff files; the
    0x253F2xx range holds Rockstar's custom extension chunks.
    """
    ANY = -1            # wildcard accepted by readSection()
    STRUCT = 0x0001
    STRING = 0x0002
    EXTENSION = 0x0003
    TEXTURE = 0x0006
    MATERIAL = 0x0007
    MATERIALLIST = 0x0008
    FRAMELIST = 0x000E
    GEOMETRY = 0x000F
    CLUMP = 0x0010
    ATOMIC = 0x0014
    GEOMETRYLIST = 0x001A
    RENDERRIGHTS = 0x001F
    MORPHPLG = 0x0105
    SKINPLG = 0x116
    HANIMPLG = 0x11E
    MATEFFECTS = 0x0120
    BINMESHPLG = 0x050E
    FRAMENAME = 0x253F2FE
    COLLISION = 0x253F2FA
    MATSPECULAR = 0x253F2F6
    NIGHTCOLS = 0x253F2F9
    MATREFLECTION = 0x253F2FC
    MESHEXTENSION = 0x253F2FD

    @staticmethod
    def decodeVersion(version):
        """Decode an on-disk library-version stamp to 0xVJNNB form.

        Old (pre-3.1) stamps have the high 16 bits clear and are just
        shifted; newer stamps pack version/build bits that are unpacked
        into the 0x3xxxx form (e.g. 0x1803FFFF -> 0x36003 = 3.6.0.3).
        Declared @staticmethod: it never uses an instance and is called
        as RwTypes.decodeVersion(...) elsewhere in the file.
        """
        if (version & 0xFFFF0000) == 0:
            return version << 8
        else:
            p1 = ((version >> 14) & 0x3FF00) + 0x30000
            p2 = (version >> 16) & 0x3F
            return p1 | p2
class RpGeomFlag:
    # Bitmask flags from the RpGeometry struct header.
    TRISTRIP = 0x0001               # triangles stored as strips
    POSITIONS = 0x0002              # vertex positions present
    TEXTURED = 0x0004               # one UV set present
    PRELIT = 0x0008                 # per-vertex colours present
    NORMALS = 0x0010                # vertex normals present
    LIGHT = 0x0020                  # geometry is lit
    MODULATEMATERIALCOLOR = 0x0040  # modulate by material colour
    TEXTURED2 = 0x0080              # two or more UV sets present
class ImportRenderware:
class RwTriangle:
def __init__(self, verts, mat):
self.verts = verts
self.mat = mat
def desc(self):
return (self.verts[0], self.verts[1], self.verts[2])
class RwVertex:
def __init__(self, coords, normal):
self.coords = coords
self.normal = normal
self.uv = None
self.uv_env = None
def desc(self):
return (self.coords[0], self.coords[1], self.coords[2])
    class RwFrame:
        """One node of the frame hierarchy; becomes a Blender object."""
        def __init__(self, loader, index, rot, pos, parent):
            self.loader = loader
            self.index = index
            self.geometry = None
            self.atomic = None
            self.blobj = None     # Blender object created in build()
            self.bldata = None    # Blender mesh data (if a geometry is attached)
            self.hanimdata = None # base64 blob from the HAnim extension
            self.name = None
            # Build a 4x4 local transform from the 3x3 rotation rows + position.
            rmatrix = mathutils.Matrix.Identity(3)
            rmatrix[0] = rot[0], rot[1], rot[2]
            rmatrix[1] = rot[3], rot[4], rot[5]
            rmatrix[2] = rot[6], rot[7], rot[8]
            rmatrix.resize_4x4()
            rmatrix.translation = pos[0], pos[1], pos[2]
            self.matrix = rmatrix
            self.parent = parent
            # childrenOf is offset by one: slot 0 holds roots (parent == -1).
            self.loader.childrenOf[parent+1].append(self.index)
        def setAtomic(self, atomic):
            # Called from RpAtomic.__init__ to link geometry to this frame.
            self.atomic = atomic
            self.geometry = atomic.geometry
        def build(self):
            """Create the Blender object (and recursively its children)."""
            if self.name is None:
                self.name = "noname_" + str(self.index);
            if self.geometry:
                self.bldata = self.geometry.build(self.name)
            self.blobj = bpy.data.objects.new(self.name, self.bldata)
            if self.parent >= 0:
                self.blobj.parent = self.loader.frames[self.parent].blobj
            self.blobj.matrix_local = self.matrix
            bpy.context.scene.objects.link(self.blobj)
            for frame in self.loader.childrenOf[self.index+1]:
                self.loader.frames[frame].build()
            # Hide low-LOD and damage variants by naming convention.
            if "_vlo" in self.name or "_dam" in self.name:
                self.blobj.hide = True
                self.blobj.hide_render = True
            # Raw binary extensions are preserved as text datablocks so the
            # exporter can round-trip them.
            if self.loader.colhex and self.index == self.loader.childrenOf[0][0]:
                textobj = bpy.data.texts.new(name = ("zrwcoll_" + self.name))
                textobj.from_string(self.loader.colhex)
                self.blobj.collhex = textobj.name
            if self.hanimdata:
                textobj = bpy.data.texts.new(name = ("zrwhanim" + str(self.index) + "_" + self.name))
                textobj.from_string(self.hanimdata)
                self.blobj.rw_hanimdata = textobj.name
            if self.geometry and self.geometry.skindata:
                textobj = bpy.data.texts.new(name = ("zrwskin_" + self.name))
                textobj.from_string(self.geometry.skindata)
                self.blobj.rw_skindata = textobj.name
            if self.atomic and self.atomic.renderPlugin != None and self.atomic.renderExtra != None:
                self.blobj.renderright = self.atomic.renderPlugin
                self.blobj.renderextra = self.atomic.renderExtra
            if self.atomic and self.atomic.matfxpipe:
                self.blobj.matfxpipe = True
    class RpGeometry:
        """Parsed RenderWare geometry; build() converts it to a Blender mesh."""
        def __init__(self, loader, index):
            self.loader = loader
            self.index = index
            self.vertices = []     # list of RwVertex
            self.triangles = []    # list of RwTriangle
            self.materials = []    # list of RpMaterial
            self.mesh = None       # Blender mesh, set in build()
            self.atomic = None
            self.skindata = None   # base64 blob from the skin extension
            self.hasEnvUV = False  # second UV channel present
            self.vertCol = None    # per-vertex day colours (0..1 tuples)
            self.nightVertCol = None  # per-vertex night colours
            self.hasNormals = False
        def setAtomic(self, atomic):
            self.atomic = atomic
        def addMaterial(self, material):
            # Index is assigned in insertion order; triangles refer to it.
            material.setIndex(len(self.materials))
            self.materials.append(material)
        def addVertex(self, vertex):
            self.vertices.append(vertex)
        def addTriangle(self, triangle):
            self.triangles.append(triangle)
        def build(self, name):
            """Create and return a Blender mesh from the parsed data."""
            self.mesh = bpy.data.meshes.new(name)
            pyverts = []
            pypolys = []
            for vertex in self.vertices:
                pyverts.append(vertex.desc())
            for triangle in self.triangles:
                pypolys.append(triangle.desc())
            self.mesh.from_pydata(pyverts, [], pypolys)
            self.mesh.update()
            # Day / night vertex-colour layers (only the RGB part is kept).
            if self.vertCol:
                vcol = self.mesh.vertex_colors.new("Normal")
                self.mesh.vertex_colors.active = vcol
                for i in range(len(self.vertices)):
                    vcol.data[i].color = (self.vertCol[i][0], self.vertCol[i][1], self.vertCol[i][2])
            if self.nightVertCol:
                nvcol = self.mesh.vertex_colors.new("Night")
                self.mesh.vertex_colors.active = nvcol
                for i in range(len(self.vertices)):
                    nvcol.data[i].color = (self.nightVertCol[i][0], self.nightVertCol[i][1], self.nightVertCol[i][2])
            # Per-loop UVs: 3 loops per triangle, in from_pydata order.
            uvtexture = self.mesh.uv_textures.new()
            uvtexture.name = "MainUV"
            uvlayer = self.mesh.uv_layers[-1]
            for i in range(len(self.triangles)):
                for j in range(3):
                    uvlayer.data[3*i + j].uv = self.vertices[self.triangles[i].verts[j]].uv
            if self.hasEnvUV:
                euvtexture = self.mesh.uv_textures.new()
                euvtexture.name = "EnvUV"
                euvlayer = self.mesh.uv_layers[-1]
                for i in range(len(self.triangles)):
                    for j in range(3):
                        euvlayer.data[3*i + j].uv = self.vertices[self.triangles[i].verts[j]].uv_env
            for material in self.materials:
                material.build()
            for i in range(len(self.triangles)):
                self.mesh.polygons[i].material_index = self.triangles[i].mat
            return self.mesh
    class RpMaterial:
        """Parsed RenderWare material; build() creates the Blender material."""
        def __init__(self, geometry, flags=None, col=None, textured=None, ambient=None, specular=None, diffuse=None):
            self.index = None
            # Name encodes geometry/material indices, e.g. "g0m2".
            self.name = "g" + str(geometry.index) + "m"
            self.geometry = geometry
            self.flags = flags
            self.col = col            # RGBA bytes 0..255
            self.ambient = ambient
            self.specular = specular
            self.diffuse = diffuse
            self.textured = textured
            self.texture = None       # diffuse RwTexture
            self.blmat = None         # Blender material, set in build()
            self.envtex = None        # environment-map RwTexture
            self.readenvmap = False   # set by the matfx reader before TEXTURE
            self.envIntensity = 1
            self.reflectColour = None
            self.reflectIntensity = None
            self.spectex = None       # specular RwTexture
        def setIndex(self, index):
            self.index = index
            self.name = "g" + str(self.geometry.index) + "m" + str(index)
        def setTexture(self, texture):
            self.texture = texture
        def setEnvTexture(self, texture):
            self.envtex = texture
        def setSpecTexture(self, texture):
            self.spectex = texture
        def setReflection(self, colour, intensity):
            self.reflectColour = colour
            self.reflectIntensity = intensity
        def build(self):
            """Create the Blender material and attach all texture slots."""
            self.blmat = bpy.data.materials.new(self.name)
            self.blmat.diffuse_color = (self.col[0]/255, self.col[1]/255, self.col[2]/255)
            self.blmat.diffuse_intensity = self.diffuse
            self.blmat.ambient = self.ambient
            self.blmat.specular_intensity = self.specular
            if self.geometry.vertCol:
                self.blmat.use_vertex_color_light = True
            # Alpha below 255 means the material is (partially) transparent.
            if self.col[3] < 255:
                self.blmat.use_transparency = True
                self.blmat.alpha = self.col[3]/255
            if self.envtex:
                self.envtex.build()
            if self.spectex:
                self.spectex.build()
            if self.texture:
                self.texture.build()
                self.blmat.active_texture_index = 0
            if self.reflectColour and self.reflectIntensity:
                self.blmat.mirror_color = self.reflectColour
                self.blmat.raytrace_mirror.use = True
                self.blmat.raytrace_mirror.reflect_factor = self.reflectIntensity
            self.geometry.mesh.materials.append(self.blmat)
    class RwTexture:
        """A texture reference; texType: 0=diffuse, 1=envmap, 2=specular."""
        def __init__(self, loader, material, name, texType, intensity=1):
            self.material = material
            self.bltex = None      # Blender texture datablock
            self.bltexslot = None  # slot on the owning material
            self.name = name
            self.loader = loader
            self.texType = texType
            self.intensity = intensity
        def build(self):
            """Create (or reuse pooled) Blender texture and wire up its slot."""
            # Textures are pooled per loader so shared names reuse one datablock.
            if self.texType == 1 and self.name in self.loader.envtexpool:
                self.bltex = self.loader.envtexpool[self.name]
            elif self.texType != 1 and self.name in self.loader.texpool:
                self.bltex = self.loader.texpool[self.name]
            else:
                if self.texType == 1:
                    self.bltex = bpy.data.textures.new(self.name, "ENVIRONMENT_MAP")
                    self.bltex.__class__ = bpy.types.EnvironmentMapTexture
                    self.bltex.environment_map.source = "IMAGE_FILE"
                    self.loader.envtexpool[self.name] = self.bltex
                else:
                    self.bltex = bpy.data.textures.new(self.name, "IMAGE")
                    self.bltex.__class__ = bpy.types.ImageTexture
                    self.loader.texpool[self.name] = self.bltex
                # Images are looked up beside the .dff in "<file>_tex\<name>.png".
                imgfile = self.loader.filename + "_tex\\" + self.name + ".png"
                if os.path.isfile(imgfile):
                    self.bltex.image = bpy.data.images.load(imgfile)
            # Slot index equals texType so each kind lands in a fixed slot.
            self.bltexslot = self.material.blmat.texture_slots.create(self.texType)
            self.bltexslot.texture_coords = "UV"
            self.bltexslot.texture = self.bltex
            if (self.texType == 1 or self.texType == 2) and self.material.geometry.hasEnvUV:
                self.bltexslot.uv_layer = "EnvUV"
            else:
                self.bltexslot.uv_layer = "MainUV"
            if self.texType == 1:
                self.bltexslot.diffuse_factor = self.intensity
            elif self.texType == 2:
                # Specular map influences specular colour only.
                self.bltexslot.use_map_diffuse = False
                self.bltexslot.use_map_color_diffuse = False
                self.bltexslot.use_map_color_spec = True
                self.bltexslot.specular_color_factor = self.intensity
    class RpAtomic:
        """Links one frame to one geometry (a RenderWare atomic)."""
        def __init__(self, loader, frame, geometry, flags):
            self.loader = loader
            self.frame = frame
            self.geometry = geometry
            self.flags = flags
            self.renderPlugin = None
            self.renderExtra = None
            self.matfxpipe = False
            # Cross-wire both sides; frame.setAtomic reads self.geometry,
            # so the attribute assignments above must come first.
            frame.setAtomic(self)
            geometry.setAtomic(self)
        def setRenderRights(self, plugin, extra):
            # Values from the render-rights extension, stored for export.
            self.renderPlugin = plugin
            self.renderExtra = extra
    def __init__(self, filename):
        """Parse *filename* (a .dff clump) and build the Blender scene."""
        self.filename = filename
        self.texpool = {}      # name -> Blender image texture (shared)
        self.envtexpool = {}   # name -> Blender envmap texture (shared)
        self.colhex = None     # base64 of the compressed collision blob
        self.childrenOf = None # frame index lists, offset by one (slot 0 = roots)
        self.frames = []
        self.geoms = []
        self.f = open(filename, "rb")
        # The top-level section of a .dff is always a CLUMP.
        self.readSection(RwTypes.CLUMP)
        self.f.close()
        # Build Blender objects starting from the root frames.
        for frame in self.childrenOf[0]:
            self.frames[frame].build()
def writeDebug(self, text):
g = open(self.filename + ".txt", "a")
g.write(text + "\n")
g.close()
def readFormat(self, format):
return struct.unpack(format, self.f.read(struct.calcsize(format)))
def readSlice(self, format, slice):
size = struct.calcsize(format)
if(len(slice) < size):
raise Exception("Failed to read slice, buffer is too small.")
return struct.unpack(format, slice[:size]), slice[size:]
    def readSection(self, type, extra = None):
        """Read one chunk: 12-byte header, dispatch on type, skip to its end.

        *type* is the expected RwTypes ID, or RwTypes.ANY (-1) to accept
        anything; *extra* is passed through to the type-specific reader.
        Returns whatever that reader returned (usually (header, payload)).
        """
        header = self.readFormat("III")
        # header = (sectionType, payloadSize, decodedLibraryVersion)
        header = (header[0], header[1], RwTypes.decodeVersion(header[2]))
        if type >= 0 and header[0] != type:
            raise Exception("Expected type " + str(type) + ", found " + str(header[0]))
        curPos = self.f.tell()
        res = None
        if header[0] == RwTypes.STRUCT: res = self.readSectionStruct(header)
        elif header[0] == RwTypes.STRING: res = self.readSectionString(header)
        elif header[0] == RwTypes.EXTENSION: res = self.readSectionExtension(header, extra)
        elif header[0] == RwTypes.TEXTURE: res = self.readSectionTexture(header, extra)
        elif header[0] == RwTypes.MATERIAL: res = self.readSectionMaterial(header, extra)
        elif header[0] == RwTypes.MATERIALLIST: res = self.readSectionMaterialList(header, extra)
        elif header[0] == RwTypes.FRAMELIST: res = self.readSectionFrameList(header)
        elif header[0] == RwTypes.GEOMETRY: res = self.readSectionGeometry(header, extra)
        elif header[0] == RwTypes.CLUMP: res = self.readSectionClump(header)
        elif header[0] == RwTypes.ATOMIC: res = self.readSectionAtomic(header)
        elif header[0] == RwTypes.GEOMETRYLIST: res = self.readSectionGeometryList(header)
        elif header[0] == RwTypes.MORPHPLG: res = self.readSectionMorphPLG(header, extra)
        elif header[0] == RwTypes.BINMESHPLG: res = self.readSectionBinMeshPLG(header, extra)
        elif header[0] == RwTypes.FRAMENAME: res = self.readSectionFrameName(header, extra)
        elif header[0] == RwTypes.COLLISION: res = self.readSectionCollision(header, extra)
        elif header[0] == RwTypes.MATEFFECTS: res = self.readSectionMatEffects(header, extra)
        elif header[0] == RwTypes.MATSPECULAR: res = self.readSectionMatSpecular(header, extra)
        elif header[0] == RwTypes.MATREFLECTION: res = self.readSectionMatReflection(header, extra)
        elif header[0] == RwTypes.MESHEXTENSION: res = self.readSectionMeshExtension(header, extra)
        elif header[0] == RwTypes.RENDERRIGHTS: res = self.readSectionRenderRights(header, extra)
        elif header[0] == RwTypes.HANIMPLG: res = self.readSectionHAnimPLG(header, extra)
        elif header[0] == RwTypes.SKINPLG: res = self.readSectionSkinPLG(header, extra)
        elif header[0] == RwTypes.NIGHTCOLS: res = self.readSectionNightCols(header, extra)
        elif type >= 0: raise Exception("Missing read function for section type " + str(type))
        else: print("Ignoring extension data of type " + hex(header[0]))
        # Always seek past the declared payload, regardless of how much the
        # type-specific reader actually consumed.
        self.f.seek(curPos + header[1])
        return res
def readSectionStruct(self, header):
return header, self.f.read(header[1])
def readSectionString(self, header):
byteList = b""
for i in range(header[1]):
newByte = self.f.read(1)
if newByte[0] == 0:
break
byteList += newByte
return header, byteList.decode("ascii")
    def readSectionExtension(self, header, extra):
        """Read child sections until the extension's payload is exhausted."""
        endPos = self.f.tell() + header[1]
        while self.f.tell() < endPos:
            # ANY: unknown child types are skipped with a warning.
            self.readSection(RwTypes.ANY, extra)
        return header, None
    def readSectionTexture(self, header, material):
        """Read a TEXTURE section and attach it to *material*.

        If the material-effects reader set readenvmap beforehand, the
        texture is stored as the environment map instead of diffuse.
        """
        metaHeader, slice = self.readSection(RwTypes.STRUCT)
        (flags, x), slice = self.readSlice("HH", slice)
        x, texName = self.readSection(RwTypes.STRING)
        # alphaName is read to advance the stream but is otherwise unused.
        x, alphaName = self.readSection(RwTypes.STRING)
        if material.readenvmap:
            texture = self.RwTexture(self, material, texName, 1, material.envIntensity)
            material.setEnvTexture(texture)
        else:
            texture = self.RwTexture(self, material, texName, 0, 1)
            material.setTexture(texture)
        self.readSection(RwTypes.EXTENSION)
        return header, None
    def readSectionMaterial(self, header, geometry):
        """Read one MATERIAL section and register it on *geometry*."""
        metaHeader, slice = self.readSection(RwTypes.STRUCT)
        (flags,), slice = self.readSlice("I", slice)
        col, slice = self.readSlice("BBBB", slice)  # RGBA bytes
        (x, textured, ambient, specular, diffuse), slice = self.readSlice("iifff", slice)
        material = self.RpMaterial(geometry, flags, col, textured, ambient, specular, diffuse)
        geometry.addMaterial(material)
        # A TEXTURE child section follows only when the textured flag is set.
        if textured > 0:
            self.readSection(RwTypes.TEXTURE, material)
        self.readSection(RwTypes.EXTENSION, material)
        return header, None
def readSectionMaterialList(self, header, geometry):
metaHeader, slice = self.readSection(RwTypes.STRUCT)
(matCount,), slice = self.readSlice("i", slice)
for i in range(matCount):
junk, slice = self.readSlice("i", slice)
for i in range(matCount):
self.readSection(RwTypes.MATERIAL, geometry)
return header, None
    def readSectionFrameList(self, header):
        """Read the frame hierarchy: transforms, parents, then extensions."""
        metaHeader, slice = self.readSection(RwTypes.STRUCT)
        (frameCount,), slice = self.readSlice("i", slice)
        # childrenOf has one extra slot: index 0 collects roots (parent -1).
        self.childrenOf = []
        for i in range(frameCount+1):
            self.childrenOf.append([])
        for i in range(frameCount):
            rot, slice = self.readSlice("fffffffff", slice)  # 3x3 rotation
            pos, slice = self.readSlice("fff", slice)        # translation
            (parent, flags), slice = self.readSlice("ii", slice)
            self.frames.append(self.RwFrame(self, i, rot, pos, parent))
        # One EXTENSION (e.g. frame name, HAnim) per frame, in order.
        for i in range(frameCount):
            self.readSection(RwTypes.EXTENSION, self.frames[i])
        return header, None
def readSectionGeometry(self, header, index):
metaHeader, slice = self.readSection(RwTypes.STRUCT)
(flags, texCount, triCount, vertCount, morphCount), slice = self.readSlice("HHiii", slice)
geometry = self.RpGeometry(self, index)
self.geoms.append(geometry)
geometry.flags = flags
if metaHeader[2] < 0x34001:
(surfAmbient, surfSpecular, surfDiffuse), slice = self.readSlice("fff", slice)
for i in range(vertCount):
geometry.addVertex(self.RwVertex(None, None))
if flags & RpGeomFlag.PRELIT:
geometry.vertCol = []
for i in range(vertCount):
(vcr, vcg, vcb, vca), slice = self.readSlice("BBBB", slice)
geometry.vertCol.append((vcr / 255, vcg / 255, vcb / 255))
for i in range(vertCount):
uv, slice = self.readSlice("ff", slice)
geometry.vertices[i].uv = (uv[0], 1-uv[1])
if texCount > 1:
geometry.hasEnvUV = True
for i in range(vertCount):
uv_env, slice = self.readSlice("ff", slice)
geometry.vertices[i].uv_env = (uv_env[0], 1-uv_env[1])
if texCount > 2:
slice = slice[struct.calcsize("ff")*(texCount-2)*(vertCount):]
for i in range(triCount):
(c, b, mat, a), slice = self.readSlice("HHHH", slice)
if a >= vertCount or b >= vertCount or c >= vertCount:
raise Exception("Vertex indices out of range for triangle.")
geometry.addTriangle(self.RwTriangle((a, b, c), mat))
if morphCount is not 1:
raise Exception("Multiple frames not supported")
for i in range(morphCount):
(bx, by, bz, br, hasVerts, hasNormals), slice = self.readSlice("ffffii", slice)
if hasVerts > 0:
for j in range(vertCount):
coords, slice = self.readSlice("fff", slice)
geometry.vertices[j].coords = coords
if hasNormals > 0:
geometry.hasNormals = True
for j in range(vertCount):
normal, slice = self.readSlice("fff", slice)
geometry.vertices[j].normal = normal
self.readSection(RwTypes.MATERIALLIST, geometry)
self.readSection(RwTypes.EXTENSION, geometry)
return header, None
    def readSectionClump(self, header):
        """Read the top-level CLUMP: frame list, geometry list, atomics."""
        metaHeader, slice = self.readSection(RwTypes.STRUCT)
        (atomicCount,), slice = self.readSlice("i", slice)
        # Newer files also store light/camera counts in the clump struct.
        if metaHeader[2] > 0x33000:
            (lightCount, cameraCount), slice = self.readSlice("ii", slice)
        self.readSection(RwTypes.FRAMELIST)
        self.readSection(RwTypes.GEOMETRYLIST)
        for i in range(atomicCount):
            self.readSection(RwTypes.ATOMIC)
        self.readSection(RwTypes.EXTENSION)
        return header, None
    def readSectionAtomic(self, header):
        """Read an ATOMIC section linking a frame index to a geometry index."""
        metaHeader, slice = self.readSection(RwTypes.STRUCT)
        (frameIndex, geomIndex, flags, x, x, x, x), slice = self.readSlice("iiBBBBi", slice)
        # RpAtomic wires itself into both the frame and the geometry.
        atomic = self.RpAtomic(self, self.frames[frameIndex], self.geoms[geomIndex], flags)
        self.readSection(RwTypes.EXTENSION, atomic)
        return header, None
def readSectionGeometryList(self, header):
metaHeader, slice = self.readSection(RwTypes.STRUCT)
(geomCount,), slice = self.readSlice("i", slice)
for i in range(geomCount):
self.readSection(RwTypes.GEOMETRY, i)
def readSectionMorphPLG(self, header, geometry):
return header, None
    def readSectionBinMeshPLG(self, header, geometry):
        """Recover per-triangle material indices from the binary mesh split.

        type 0 = triangle lists, type 1 = triangle strips.  Triangles are
        matched back to geometry.triangles via their sorted vertex triple.
        """
        slice = self.f.read(header[1])
        (type, splits, total), slice = self.readSlice("iii", slice)
        if type != 0 and type != 1:
            print("Morph PLG section in unknown type - ignoring.")
            return header, None
        # Map sorted vertex triple -> triangle index for O(1) matching.
        lookup = {}
        for i in range(len(geometry.triangles)):
            v = geometry.triangles[i].verts
            v = list(v)
            v.sort()
            lookup[tuple(v)] = i
        totals = 0
        for i in range(splits):
            (sub, mat), slice = self.readSlice("ii", slice)
            if type == 0:
                # Triangle list: every 3 indices form one triangle.
                for j in range(sub//3):
                    vx, slice = self.readSlice("iii", slice)
                    vx = list(vx)
                    vx.sort()
                    vx = tuple(vx)
                    if vx in lookup:
                        geometry.triangles[lookup[vx]].mat = mat
            else:
                # Triangle strip: slide a 3-wide window over the indices.
                elems = deque()
                for j in range(sub):
                    if len(elems) > 2:
                        elems.popleft()
                    (item,), slice = self.readSlice("i", slice)
                    if len(elems) > 1:
                        checklist = [elems[0], elems[1], item]
                        checklist.sort()
                        check = tuple(checklist)
                        if check in lookup:
                            geometry.triangles[lookup[check]].mat = mat
                    elems.append(item)
        return header, None
def readSectionFrameName(self, header, frame):
frame.name = self.f.read(header[1]).decode("ascii")
return header, None
def readSectionCollision(self, header, geometry):
if not self.childrenOf or len(self.childrenOf[0]) is 0:
print("Collision extension - no frame to attach to.")
return header, None
binary = self.f.read(header[1])
self.colhex = base64.b64encode(zlib.compress(binary)).decode("ascii")
return header, None
def readSectionMatEffects(self, header, parent):
if parent.__class__ == self.RpMaterial:
return self.readSectionMaterialMatEffects(header, parent)
elif parent.__class__ == self.RpAtomic:
return self.readSectionAtomicMatEffects(header, parent)
return header, None
    def readSectionMaterialMatEffects(self, header, material):
        """Read the material side of MatFX: up to two effect slots.

        Only effect type 2 (environment map) is handled; it primes the
        material so the following TEXTURE section lands in the env slot.
        NOTE(review): on full success this falls off the end and returns
        None rather than (header, None) like the other readers -- confirm
        whether callers rely on the return value.
        """
        (flags,) = self.readFormat("I")
        for i in range(2):
            (effectType,) = self.readFormat("I")
            if effectType == 0:
                # Empty slot.
                continue
            elif effectType != 2:
                print("Unknown material effect type.")
                return header, None
            (coefficient, frameBufferAlpha, textured) = self.readFormat("fii")
            if textured:
                # Flag consumed by readSectionTexture for env-map routing.
                material.readenvmap = True
                material.envIntensity = coefficient
                self.readSection(RwTypes.TEXTURE, material)
def readSectionAtomicMatEffects(self, header, atomic):
(check,) = self.readFormat("i")
if check != 0:
atomic.matfxpipe = True
return header, None
def readSectionMatSpecular(self, header, material):
    # MATSPECULAR chunk: a float intensity followed by a NUL-terminated
    # ASCII texture name; becomes the material's specular texture.
    slice = self.f.read(header[1])
    (intensity,), slice = self.readSlice("f", slice)
    specName = ""
    for i in range(len(slice)):
        if int(slice[i]) == 0:
            break
        specName += slice[i:i+1].decode("ascii")
    # texType 2 marks a specular map (see RwTexture).
    texture = self.RwTexture(self, material, specName, 2, intensity)
    material.setSpecTexture(texture)
    return header, None
def readSectionMatReflection(self, header, material):
    # MATREFLECTION chunk: RGB colour triple, one unused float, then intensity.
    slice = self.f.read(header[1])
    colour, slice = self.readSlice("fff", slice)
    (x, intensity), slice = self.readSlice("ff", slice)
    material.setReflection(colour, intensity)
    return header, None
def readSectionMeshExtension(self, header, geometry):
    # Mesh-extension chunk: only the has-data flag is understood; actual
    # payload data (if any) is reported but ignored.
    slice = self.f.read(header[1])
    (hasData,), slice = self.readSlice("i", slice)
    if hasData:
        print("Mesh extension extension actually has data. Not sure what to do with it.")
    return header, None
def readSectionRenderRights(self, header, atomic):
    # RENDERRIGHTS extension: (plugin id, extra value) stored on the atomic.
    # NOTE(review): unlike its sibling readers this returns None implicitly
    # instead of (header, None) — confirm callers ignore the return value.
    if not hasattr(atomic, "__class__") or atomic.__class__ != self.RpAtomic:
        print("Render rights extension is not in the right section, should be in atomic.")
        return
    slice = self.f.read(header[1])
    (plugin, extra), slice = self.readSlice("ii", slice)
    atomic.setRenderRights(plugin, extra)
def readSectionHAnimPLG(self, header, frame):
    # HAnim plugin data is not parsed: the raw chunk bytes are compressed and
    # base64-encoded so they can be re-exported verbatim later.
    if not hasattr(frame, "__class__") or frame.__class__ != self.RwFrame:
        print("HAnim extension is not in the right section, should be in frame.")
        return
    binary = self.f.read(header[1])
    frame.hanimdata = base64.b64encode(zlib.compress(binary)).decode("ascii")
    return header, None
def readSectionSkinPLG(self, header, geometry):
    # Skin plugin data is not parsed: raw chunk bytes are compressed and
    # base64-encoded on the geometry for verbatim re-export.
    if not hasattr(geometry, "__class__") or geometry.__class__ != self.RpGeometry:
        print("Skin extension is not in the right section, should be in geometry.")
        return
    binary = self.f.read(header[1])
    geometry.skindata = base64.b64encode(zlib.compress(binary)).decode("ascii")
    return header, None
def readSectionNightCols(self, header, geometry):
    # Night vertex colours: a leading uint, then one RGBA byte quad per
    # geometry vertex; alpha is dropped and channels normalized to 0..1.
    if not hasattr(geometry, "__class__") or geometry.__class__ != self.RpGeometry:
        print("Night vertex colours extension is not in the right section, should be in geometry.")
        return
    slice = self.f.read(header[1])
    (x,), slice = self.readSlice("I", slice)
    geometry.nightVertCol = []
    for i in range(len(geometry.vertices)):
        (vcr, vcg, vcb, vca), slice = self.readSlice("BBBB", slice)
        geometry.nightVertCol.append((vcr / 255, vcg / 255, vcb / 255))
    return header, None
class ExportRenderware:
class RwChunkHeader:
    """A 12-byte RenderWare chunk header: type id, payload size, version stamp."""

    def __init__(self, type, size):
        self.type = type
        self.size = size

    def bin(self):
        # The version stamp comes from the exporter's currently selected target.
        fields = (self.type, self.size, ExportRenderware.targetVer)
        return struct.pack("III", *fields)
class RwVector3:
    """A 3-component vector serialized as three consecutive floats."""

    def __init__(self, x, y, z):
        self.x = x
        self.y = y
        self.z = z

    def bin(self):
        components = (self.x, self.y, self.z)
        return struct.pack("fff", *components)
class RwRotMatrix:
    """Row-major 3x3 rotation matrix; starts out as the identity."""

    def __init__(self):
        identity = [
            1, 0, 0,
            0, 1, 0,
            0, 0, 1,
        ]
        self.m = identity

    def bin(self):
        """Pack the nine entries as consecutive floats."""
        return struct.pack("9f", *self.m)
class RwFrameList:
    # Serializes the FRAMELIST chunk: a struct with the per-frame transform
    # records, followed by one extension chunk per frame.
    def __init__(self):
        self.R = ExportRenderware
        self.frames = []
    def bin(self):
        payload = struct.pack("i", len(self.frames))
        for frame in self.frames:
            payload += frame.binraw()
        payload = self.R.RwChunkHeader(RwTypes.STRUCT, len(payload)).bin() + payload
        # Frame extensions (name, HAnim) follow the struct, in frame order.
        for frame in self.frames:
            payload += frame.binext()
        header = self.R.RwChunkHeader(RwTypes.FRAMELIST, len(payload)).bin()
        return header + payload
class RwFrame:
    """One node of the exported frame hierarchy.

    Wraps a Blender object (MESH or EMPTY), registers itself in the clump's
    frame list, recursively creates frames for supported children, and
    serializes its transform (binraw) and extensions (binext).
    """
    def __init__(self, clump, object, parentFrame):
        self.R = ExportRenderware
        self.clump = clump
        self.object = object
        # Frame index == position inside the clump's frame list.
        self.index = len(clump.frameList.frames)
        clump.frameList.frames.append(self)
        self.name = self.object.name
        self.parent = parentFrame
        self.rotation = self.R.RwRotMatrix()
        self.position = self.R.RwVector3(0, 0, 0)
        if parentFrame is not None:
            # Child frames carry their local transform; roots stay at identity.
            ux = object.matrix_local.to_3x3()
            self.rotation.m = [ux[0][0], ux[0][1], ux[0][2], ux[1][0], ux[1][1], ux[1][2], ux[2][0], ux[2][1], ux[2][2]]
            self.position.x = object.matrix_local.translation[0]
            self.position.y = object.matrix_local.translation[1]
            self.position.z = object.matrix_local.translation[2]
        if str(object.type) == "MESH":
            self.atomic = self.R.RpAtomic(self)
        elif str(object.type) == "EMPTY":
            self.atomic = None
        else:
            raise Exception("Unsupported object type selected: " + str(object.type))
        for child in self.object.children:
            # BUGFIX: the guard previously tested the parent's type (which is
            # always MESH or EMPTY at this point), so the skip never fired and
            # unsupported children raised instead of being ignored. Check the
            # child, consistent with the analogous loop in RpClump.__init__.
            if str(child.type) != "MESH" and str(child.type) != "EMPTY":
                print("Ignoring object " + child.name + ", type " + child.type)
                continue
            self.R.RwFrame(self.clump, child, self)
        if not clump.colbin:
            # Best effort: pull collision binary from the text block named by
            # the object's collhex property; any failure leaves it unset.
            try:
                if len(object.collhex) > 0:
                    textf = bpy.data.texts[object.collhex].as_string()
                    clump.colbin = zlib.decompress(base64.b64decode(bytes(textf, "ascii")))
            except:
                clump.colbin = None
    def binraw(self):
        """Serialize rotation, position, parent index and a zero flags word."""
        payload = self.rotation.bin()
        payload += self.position.bin()
        payload += struct.pack("ii", -1 if self.parent is None else self.parent.index, 0)
        return payload
    def binext_name(self):
        # Auto-generated "noname_" frames are exported without a name chunk.
        noname = "noname_"
        if self.name[:len(noname)] == noname:
            return b""
        writename = self.R.unmangleName(self.name)
        if len(writename) > 23:
            writename = writename[:23]
            print("Warning, frame name '", writename , "' truncated to 23 characters.")
        payload = struct.pack(str(len(writename)) + "s", bytearray(writename, "ascii"))
        header = self.R.RwChunkHeader(RwTypes.FRAMENAME, len(payload)).bin()
        return header + payload
    def binext_hanim(self):
        """Re-emit stored HAnim data (zlib+base64 text block), if any."""
        object = self.object
        try:
            if len(object.rw_hanimdata) > 0:
                textf = bpy.data.texts[object.rw_hanimdata].as_string()
                rawdata = zlib.decompress(base64.b64decode(bytes(textf, "ascii")))
            else:
                return b""
        except:
            return b""
        payload = rawdata
        header = self.R.RwChunkHeader(RwTypes.HANIMPLG, len(payload)).bin()
        return header + payload
    def binext(self):
        """Frame extension chunk: optional name plus optional HAnim data."""
        payload = self.binext_name() + self.binext_hanim()
        header = self.R.RwChunkHeader(RwTypes.EXTENSION, len(payload)).bin()
        return header + payload
class RpAtomicChunkInfo:
    # ATOMIC struct payload: frame index, geometry index, flags, and a pad int.
    def __init__(self, frameIndex, geometryIndex, flags):
        self.R = ExportRenderware
        self.frameIndex = frameIndex
        self.geometryIndex = geometryIndex
        self.flags = flags
    def bin(self):
        payload = struct.pack("iiii", self.frameIndex, self.geometryIndex, self.flags, 0)
        header = self.R.RwChunkHeader(RwTypes.STRUCT, len(payload)).bin()
        return header + payload
class RpAtomic:
    # Links a frame to its geometry and serializes the ATOMIC chunk.
    def __init__(self, frame):
        self.R = ExportRenderware
        self.clump = frame.clump
        self.frame = frame
        # Evaluated mesh copy (modifiers not applied, PREVIEW settings).
        self.mesh = frame.object.to_mesh(self.clump.context.scene, False, "PREVIEW")
        self.geometry = self.R.RpGeometry(self)
        self.flags = 5
    def binext_rights(self):
        # Only emitted when a render-rights plugin id was set on the object.
        if self.frame.object.renderright == 0:
            return b""
        payload = struct.pack("ii", self.frame.object.renderright, self.frame.object.renderextra)
        header = self.R.RwChunkHeader(RwTypes.RENDERRIGHTS, len(payload)).bin()
        return header + payload
    def binext_matfx(self):
        # NOTE(review): for targets <= 0x34003 this chunk is written even when
        # matfxpipe is False — confirm that is intended for III/VC exports.
        if self.frame.object.matfxpipe != True and self.R.decodedVer > 0x34003:
            return b""
        payload = struct.pack("i", 1)
        header = self.R.RwChunkHeader(RwTypes.MATEFFECTS, len(payload)).bin()
        return header + payload
    def bin(self):
        payload = self.R.RpAtomicChunkInfo(self.frame.index, self.geometry.index, self.flags).bin()
        extensions = self.binext_rights() + self.binext_matfx()
        extensions = self.R.RwChunkHeader(RwTypes.EXTENSION, len(extensions)).bin() + extensions
        payload += extensions
        header = self.R.RwChunkHeader(RwTypes.ATOMIC, len(payload)).bin()
        return header + payload
class RpVertex:
    """Plain record for one deduplicated export vertex."""

    def __init__(self, pos, uv, uve, normal):
        self.pos = pos          # vertex coordinates
        self.uv = uv            # diffuse UV pair
        self.uve = uve          # environment-map UV pair
        self.normal = normal    # vertex normal
class RpTriangle:
    """Export triangle: three vertex indices plus a material index."""

    def __init__(self, a, b, c, mat):
        self.a = a
        self.b = b
        self.c = c
        self.mat = mat

    def bin(self):
        # Field order in the stream is a, b, material, c (four uint16s).
        fields = (self.a, self.b, self.mat, self.c)
        return struct.pack("HHHH", *fields)
class RwUVCoord:
    """A UV pair; V is written as (1 - v) to flip the vertical axis."""

    def __init__(self, u, v):
        self.u = u
        self.v = v

    def bin(self):
        flipped_v = 1 - self.v
        return struct.pack("ff", self.u, flipped_v)
class RwTexture:
    # Serializes a TEXTURE chunk that references a Blender texture by name.
    def __init__(self, material, bltexslot):
        self.R = ExportRenderware
        self.material = material
        self.bltexslot = bltexslot
        self.bltex = bltexslot.texture
    def bin(self):
        # Fixed filter/addressing flags (0x1106) used for every exported texture.
        payload = struct.pack("HH", 0x1106, 0)
        payload = self.R.RwChunkHeader(RwTypes.STRUCT, len(payload)).bin() + payload
        strdata = struct.pack(str(len(self.bltex.name)) + "s", bytearray(self.bltex.name, "ascii"))
        # Pad the name with 1..4 NULs up to a 4-byte boundary (covers terminator).
        for i in range(4 - (len(self.bltex.name)&3)):
            strdata += struct.pack("B", 0)
        payload += self.R.RwChunkHeader(RwTypes.STRING, len(strdata)).bin() + strdata
        # Empty alpha/mask name string.
        strdata = struct.pack("i", 0)
        payload += self.R.RwChunkHeader(RwTypes.STRING, len(strdata)).bin() + strdata
        extensions = b""
        extensions = self.R.RwChunkHeader(RwTypes.EXTENSION, len(extensions)).bin() + extensions
        payload += extensions
        header = self.R.RwChunkHeader(RwTypes.TEXTURE, len(payload)).bin()
        return header + payload
class RpMaterial:
    """Exportable RenderWare material derived from a Blender material.

    Captures colour/lighting scalars, locates the diffuse/specular/envmap
    texture slots, and serializes the MATERIAL chunk with its extensions.
    """
    def __init__(self, materialList, blMaterial):
        self.R = ExportRenderware
        self.materialList = materialList
        self.index = len(materialList.mats)
        self.mesh = materialList.mesh
        self.blmaterial = blMaterial
        # Colour components are scaled to 0..255 byte values.
        self.red = min(255, max(0, blMaterial.diffuse_color[0] * 256))
        self.green = min(255, max(0, blMaterial.diffuse_color[1] * 256))
        self.blue = min(255, max(0, blMaterial.diffuse_color[2] * 256))
        self.alpha = min(255, max(0, blMaterial.alpha * 256))
        self.ambient = blMaterial.ambient
        self.specular = blMaterial.specular_intensity
        self.diffuse = blMaterial.diffuse_intensity
        self.bltex_diffuse = self.findTexSlot("DIFFUSE")
        self.bltex_specular = self.findTexSlot("SPECULAR")
        self.bltex_envmap = self.findTexSlot("ENVMAP")
        self.tex_diffuse = None
        self.tex_envmap = None
        if self.bltex_diffuse:
            self.tex_diffuse = self.R.RwTexture(self, self.bltex_diffuse)
            # First UV-mapped slot decides the geometry's diffuse UV layer name.
            if self.bltex_diffuse.texture_coords == "UV" and len(self.bltex_diffuse.uv_layer) > 0 and not self.materialList.geometry.uvname_diff:
                self.materialList.geometry.uvname_diff = self.bltex_diffuse.uv_layer
        if self.bltex_envmap:
            self.tex_envmap = self.R.RwTexture(self, self.bltex_envmap)
            if self.bltex_envmap.texture_coords == "UV" and len(self.bltex_envmap.uv_layer) > 0 and not self.materialList.geometry.uvname_env:
                self.materialList.geometry.uvname_env = self.bltex_envmap.uv_layer
    def findTexSlot(self, type):
        """Return the first texture slot classified as *type* (DIFFUSE/SPECULAR/ENVMAP)."""
        for i in range(len(self.blmaterial.texture_slots)):
            textype = ""
            slot = self.blmaterial.texture_slots[i]
            if slot and slot.texture:
                if slot.texture.type == "ENVIRONMENT_MAP":
                    textype = "ENVMAP"
                elif slot.use_map_color_spec and not slot.use_map_color_diffuse:
                    textype = "SPECULAR"
                elif slot.use_map_color_diffuse and not slot.use_map_color_spec:
                    textype = "DIFFUSE"
            if textype == type:
                return slot
        return None
    def binext_matfx(self):
        """Environment-map material-effect chunk; empty without an envmap slot."""
        if not self.tex_envmap:
            return b""
        payload = struct.pack("iifii", 2, 2, self.bltex_envmap.specular_color_factor, 0, 1)
        payload += self.tex_envmap.bin()
        payload += struct.pack("i", 0)
        header = self.R.RwChunkHeader(RwTypes.MATEFFECTS, len(payload)).bin()
        return header + payload
    def binext_reflect(self):
        # Targets <= 0x34003 only write the chunk when mirroring is enabled;
        # newer targets always write it, with a zero factor when unused.
        if not self.blmaterial.raytrace_mirror.use and ExportRenderware.decodedVer <= 0x34003:
            return b""
        factor = self.blmaterial.raytrace_mirror.reflect_factor if self.blmaterial.raytrace_mirror.use else 0
        colour = self.blmaterial.mirror_color
        # BUGFIX: previously packed raytrace_mirror.reflect_factor directly,
        # leaving the computed `factor` unused — a disabled mirror still
        # exported its raw reflection intensity instead of 0.
        payload = struct.pack("fffffi", colour[0], colour[1], colour[2], 1, factor, 0)
        header = self.R.RwChunkHeader(RwTypes.MATREFLECTION, len(payload)).bin()
        return header + payload
    def binext_specular(self):
        """Specular-map chunk: intensity + texture name, NUL-padded to 24 bytes."""
        if not self.bltex_specular:
            return b""
        payload = struct.pack("f", self.bltex_specular.specular_color_factor)
        texname = bytes(self.bltex_specular.texture.name, "ascii")
        payload += texname[:23]
        nullbyte = struct.pack("B", 0)
        for i in range(24 - min(23, len(texname))):
            payload += nullbyte
        header = self.R.RwChunkHeader(RwTypes.MATSPECULAR, len(payload)).bin()
        return header + payload
    def bin(self):
        """Serialize the full MATERIAL chunk (struct + diffuse texture + extensions)."""
        payload = struct.pack("iBBBBiIfff", 0, int(self.red), int(self.green), int(self.blue), int(self.alpha), 0, 1 if self.tex_diffuse else 0, self.ambient, self.specular, self.diffuse)
        payload = self.R.RwChunkHeader(RwTypes.STRUCT, len(payload)).bin() + payload
        if self.tex_diffuse:
            payload += self.tex_diffuse.bin()
        extensions = self.binext_matfx() + self.binext_reflect() + self.binext_specular()
        extensions = self.R.RwChunkHeader(RwTypes.EXTENSION, len(extensions)).bin() + extensions
        payload += extensions
        header = self.R.RwChunkHeader(RwTypes.MATERIAL, len(payload)).bin()
        return header + payload
class RpMaterialList:
    # Wraps all materials of a mesh and serializes the MATERIALLIST chunk.
    def __init__(self, geometry):
        self.R = ExportRenderware
        self.geometry = geometry
        self.clump = geometry.clump
        self.mesh = geometry.mesh
        self.mats = []
        for mat in self.mesh.materials:
            self.mats.append(self.R.RpMaterial(self, mat))
    def bin(self):
        payload = struct.pack("i", len(self.mesh.materials))
        # -1 per material marks each as unique (no instance sharing).
        for mat in self.mats:
            payload += struct.pack("i", -1)
        payload = self.R.RwChunkHeader(RwTypes.STRUCT, len(payload)).bin() + payload
        for mat in self.mats:
            payload += mat.bin()
        header = self.R.RwChunkHeader(RwTypes.MATERIALLIST, len(payload)).bin()
        return header + payload
class RpGeometryList:
    # Collects all exported geometries and serializes the GEOMETRYLIST chunk.
    def __init__(self):
        self.R = ExportRenderware
        self.geoms = []
    def bin(self):
        payload = struct.pack("i", len(self.geoms))
        payload = self.R.RwChunkHeader(RwTypes.STRUCT, len(payload)).bin() + payload
        for geom in self.geoms:
            payload += geom.bin()
        header = self.R.RwChunkHeader(RwTypes.GEOMETRYLIST, len(payload)).bin()
        return header + payload
class RpGeometryChunkInfo:
    # Header fields of the GEOMETRY struct payload; the triangle/vertex counts
    # are filled in by RpGeometry after the mesh has been converted.
    def __init__(self):
        self.flags = RpGeomFlag.TEXTURED | RpGeomFlag.NORMALS | RpGeomFlag.LIGHT | RpGeomFlag.MODULATEMATERIALCOLOR
        self.texCount = 1
        self.triangleCount = 0
        self.vertexCount = 0
        self.frameCount = 1
    def binraw(self):
        return struct.pack("HHiii", self.flags, self.texCount, self.triangleCount, self.vertexCount, self.frameCount)
class RpGeometry:
    # Builds an exportable RenderWare geometry from a Blender mesh:
    # deduplicates vertices per (vertex, uv, env-uv), triangulates quads,
    # gathers vertex colours and fills in the chunk header counts.
    def __init__(self, atomic):
        self.R = ExportRenderware
        self.clump = atomic.clump
        self.atomic = atomic
        self.mesh = atomic.mesh
        self.index = len(self.clump.geometryList.geoms)
        self.clump.geometryList.geoms.append(self)
        self.chunkInfo = self.R.RpGeometryChunkInfo()
        self.uvname_diff = None
        self.uvname_env = None
        # Building the material list may set uvname_diff / uvname_env.
        self.materialList = self.R.RpMaterialList(self)
        self.matTris = []
        for i in range(len(self.materialList.mats)):
            self.matTris.append([])
        mesh = self.mesh
        # vdict: per Blender vertex, maps (uv + uve) -> export vertex index.
        self.vdict = []
        for i in range(len(mesh.vertices)):
            self.vdict.append({})
        self.uvc = self.getUVData(self.uvname_diff)
        self.uvce = None
        if self.uvname_env and self.uvname_env != self.uvname_diff:
            self.uvce = self.getUVData(self.uvname_env)
        self.vertices = []
        self.triangles = []
        self.vertCol = None
        self.nightVertCol = None
        self.vertColData = None
        self.nightVertColData = None
        # A layer named "night" becomes night colours; the first other layer
        # becomes the regular prelit colours.
        for vcol in self.mesh.vertex_colors:
            if vcol.name.lower() == "night" and self.nightVertCol is None:
                self.nightVertCol = []
                self.nightVertColData = vcol.data
            elif self.vertCol is None:
                self.vertCol = []
                self.vertColData = vcol.data
        for poly in mesh.polygons:
            self.addBlenderPoly(poly)
        # Triangle indices are uint16, so the vertex count is capped.
        if len(self.vertices) > 65535:
            raise Exception("Aborting export: vertex count exceeds 65535")
        self.maxDist = 0
        for v in self.mesh.vertices:
            self.maxDist = max(self.maxDist, math.sqrt(v.co[0]*v.co[0] + v.co[1]*v.co[1] + v.co[2]*v.co[2]))
        self.chunkInfo.triangleCount = len(self.triangles)
        self.chunkInfo.vertexCount = len(self.vertices)
        if self.uvce:
            # Two UV sets: swap the TEXTURED flag for TEXTURED2.
            self.chunkInfo.texCount = 2
            self.chunkInfo.flags = self.chunkInfo.flags & (~RpGeomFlag.TEXTURED)
            self.chunkInfo.flags |= RpGeomFlag.TEXTURED2
        if self.R.decodedVer > 0x34003:
            self.chunkInfo.flags |= RpGeomFlag.POSITIONS
        if self.vertColData:
            self.chunkInfo.flags |= RpGeomFlag.PRELIT
    def findVertex(self, type):
        # NOTE(review): dead code — this is a copy-paste of
        # RpMaterial.findTexSlot; it references self.blmaterial, which
        # RpGeometry does not have, and appears to never be called.
        # Left untouched; candidate for removal.
        for i in range(len(self.blmaterial.texture_slots)):
            textype = ""
            slot = self.blmaterial.texture_slots[i]
            if slot and slot.texture:
                if slot.texture.type == "ENVIRONMENT_MAP":
                    textype = "ENVMAP"
                elif slot.use_map_color_spec and not slot.use_map_color_diffuse:
                    textype = "SPECULAR"
                elif slot.use_map_color_diffuse and not slot.use_map_color_spec:
                    textype = "DIFFUSE"
            if textype == type:
                return slot
        return None
    def getUVData(self, name):
        # Return the UV-layer data matching the given texture-layer name.
        for i in range(len(self.mesh.uv_textures)):
            if name and self.mesh.uv_textures[i] and self.mesh.uv_textures[i].name == name:
                return self.mesh.uv_layers[i].data
        return None
    def newVertId(self, id, uv, uve):
        # Deduplicate: one export vertex per (blender vertex, uv, env-uv) combo.
        if (uv + uve) not in self.vdict[id]:
            self.vdict[id][uv + uve] = len(self.vertices)
            self.vertices.append(self.R.RpVertex(self.mesh.vertices[id].co, uv, uve, self.mesh.vertices[id].normal))
            if self.vertColData:
                self.vertCol.append((int(self.vertColData[id].color[0]*255), int(self.vertColData[id].color[1]*255), int(self.vertColData[id].color[2]*255)))
            if self.nightVertColData:
                self.nightVertCol.append((int(self.nightVertColData[id].color[0]*255), int(self.nightVertColData[id].color[1]*255), int(self.nightVertColData[id].color[2]*255)))
        return self.vdict[id][(uv + uve)]
    def addRawPoly(self, verts, uvs, mat):
        # Add one triangle given Blender vertex indices and loop indices.
        newIds = []
        for i in range(3):
            uv = tuple(self.uvc[uvs[i]].uv) if self.uvc else (0, 0)
            uve = tuple(self.uvce[uvs[i]].uv) if self.uvce else (0, 0)
            newIds.append(self.newVertId(verts[i], uv, uve))
        self.triangles.append(self.R.RpTriangle(newIds[0], newIds[1], newIds[2], mat))
        if mat >= 0:
            # Also record the indices per material for the binmesh splits.
            self.matTris[mat].append(newIds[0])
            self.matTris[mat].append(newIds[1])
            self.matTris[mat].append(newIds[2])
    def addBlenderPoly(self, p):
        # Triangles pass through; quads are split into two triangles.
        if len(p.vertices) < 3 or len(p.vertices) > 4:
            raise Exception("Aborting export: Invalid number of vertices on an edge.")
        self.addRawPoly([p.vertices[0], p.vertices[1], p.vertices[2]], [p.loop_indices[0], p.loop_indices[1], p.loop_indices[2]], p.material_index)
        if len(p.vertices) == 4:
            self.addRawPoly([p.vertices[0], p.vertices[3], p.vertices[2]], [p.loop_indices[0], p.loop_indices[3], p.loop_indices[2]], p.material_index)
    def binext_binmesh(self):
        # Binary-mesh plugin: one triangle-list split per used material.
        payload = b""
        splits = 0
        total = 0
        for i in range(len(self.matTris)):
            if len(self.matTris[i]) == 0:
                continue
            splits += 1
            total += len(self.matTris[i])
            payload += struct.pack("ii", len(self.matTris[i]), i)
            for id in self.matTris[i]:
                payload += struct.pack("i", id)
        payload = struct.pack("iii", 0, splits, total) + payload
        header = self.R.RwChunkHeader(RwTypes.BINMESHPLG, len(payload)).bin()
        return header + payload
    def binext_morph(self):
        # Morph plugin stub, only for 0x33000..0x34003 targets.
        if self.R.decodedVer > 0x34003 or self.R.decodedVer < 0x33000:
            return b""
        payload = struct.pack("i", 0)
        header = self.R.RwChunkHeader(RwTypes.MORPHPLG, len(payload)).bin()
        return header + payload
    def binext_meshext(self):
        # Empty mesh-extension stub, only for targets newer than 0x34003.
        if self.R.decodedVer <= 0x34003:
            return b""
        payload = struct.pack("i", 0)
        header = self.R.RwChunkHeader(RwTypes.MESHEXTENSION, len(payload)).bin()
        return header + payload
    def binext_skin(self):
        # Re-emit stored skin data (zlib+base64 text block), if the object has one.
        object = self.atomic.frame.object
        try:
            if len(object.rw_skindata) > 0:
                textf = bpy.data.texts[object.rw_skindata].as_string()
                rawdata = zlib.decompress(base64.b64decode(bytes(textf, "ascii")))
            else:
                return b""
        except:
            return b""
        payload = rawdata
        header = self.R.RwChunkHeader(RwTypes.SKINPLG, len(payload)).bin()
        return header + payload
    def binext_nightcol(self):
        # Night vertex colours: per-vertex RGBA with opaque alpha.
        if not self.nightVertCol:
            return b""
        payload = struct.pack("I", 1)
        for col in self.nightVertCol:
            payload += struct.pack("BBBB", col[0], col[1], col[2], 255)
        header = self.R.RwChunkHeader(RwTypes.NIGHTCOLS, len(payload)).bin()
        return header + payload
    def bin(self):
        payload = self.chunkInfo.binraw()
        # Older targets (< 0x34001) write an extra float triple here.
        if self.R.decodedVer < 0x34001:
            payload += struct.pack("fff", 0, 0, 1)
        if self.vertCol:
            for col in self.vertCol:
                payload += struct.pack("BBBB", col[0], col[1], col[2], 255)
        for vertex in self.vertices:
            payload += self.R.RwUVCoord(vertex.uv[0], vertex.uv[1]).bin()
        if self.uvce:
            for vertex in self.vertices:
                payload += self.R.RwUVCoord(vertex.uve[0], vertex.uve[1]).bin()
        for triangle in self.triangles:
            payload += triangle.bin()
        # Bounding sphere at the origin with radius maxDist, plus two flag ints.
        payload += struct.pack("ffffii", 0, 0, 0, self.maxDist, 1, 1)
        for vertex in self.vertices:
            payload += self.R.RwVector3(vertex.pos[0], vertex.pos[1], vertex.pos[2]).bin()
        for vertex in self.vertices:
            payload += self.R.RwVector3(vertex.normal[0], vertex.normal[1], vertex.normal[2]).bin()
        payload = self.R.RwChunkHeader(RwTypes.STRUCT, len(payload)).bin() + payload
        payload += self.materialList.bin()
        extensions = self.binext_binmesh() + self.binext_skin() + self.binext_morph() + self.binext_meshext() + self.binext_nightcol()
        extensions = self.R.RwChunkHeader(RwTypes.EXTENSION, len(extensions)).bin() + extensions
        payload += extensions
        header = self.R.RwChunkHeader(RwTypes.GEOMETRY, len(payload)).bin()
        return header + payload
class RpClumpChunkInfo:
    # CLUMP struct payload: atomic count, and (for targets > 0x33000) the
    # light and camera counts.
    def __init__(self, atomicCount, lightCount, cameraCount):
        self.R = ExportRenderware
        self.atomicCount = atomicCount
        self.lightCount = lightCount
        self.cameraCount = cameraCount
    def bin(self):
        payload = struct.pack("i", self.atomicCount)
        if self.R.decodedVer > 0x33000:
            payload += struct.pack("ii", self.lightCount, self.cameraCount)
        header = self.R.RwChunkHeader(RwTypes.STRUCT, len(payload)).bin()
        return header + payload
class RpClump:
    # Root of the export: builds the frame and geometry lists from the current
    # selection and serializes the complete CLUMP chunk.
    def __init__(self, context, exportVer):
        self.R = ExportRenderware
        self.R.targetVer = exportVer
        self.R.decodedVer = RwTypes.decodeVersion(self.R.targetVer)
        self.context = context
        self.frameList = self.R.RwFrameList()
        self.geometryList = self.R.RpGeometryList()
        self.colbin = None
        # Keep only the topmost selected objects; objects whose ancestor is
        # also selected are reached recursively via RwFrame.
        exportables = []
        for object in context.selected_objects:
            parent = object.parent
            add = True
            while parent:
                if parent in context.selected_objects:
                    add = False
                    break
                parent = parent.parent
            if add:
                exportables.append(object)
        for object in exportables:
            if str(object.type) != "MESH" and str(object.type) != "EMPTY":
                print("Ignoring object " + object.name + ", type " + object.type)
                continue
            self.R.RwFrame(self, object, None)
        if len(self.frameList.frames) == 0:
            raise Exception("Aborting export: no frames selected.")
    def binext_coll(self):
        # Collision data collected from a text block is re-emitted verbatim.
        if not self.colbin:
            return b""
        payload = self.colbin
        header = self.R.RwChunkHeader(RwTypes.COLLISION, len(self.colbin)).bin()
        return header + payload
    def bin(self):
        payload = self.R.RpClumpChunkInfo(len(self.geometryList.geoms), 0, 0).bin()
        payload += self.frameList.bin()
        payload += self.geometryList.bin()
        for geometry in self.geometryList.geoms:
            payload += geometry.atomic.bin()
        extensions = self.binext_coll()
        extensions = self.R.RwChunkHeader(RwTypes.EXTENSION, len(extensions)).bin() + extensions
        payload += extensions
        header = self.R.RwChunkHeader(RwTypes.CLUMP, len(payload)).bin()
        return header + payload
def __init__(self, context, exportVerIndex, filepath):
    """Entry point: map the chosen game to a stream version, build the clump
    from the selection, and write its serialized form to *filepath*."""
    if exportVerIndex == "1":
        exportVer = 0x0800FFFF  # GTA III
    elif exportVerIndex == "2":
        exportVer = 0x1003FFFF  # Vice City
    else:
        exportVer = 0x1803FFFF  # San Andreas
    # `with` guarantees the handle is closed even if the export raises
    # (the old open/write/close leaked the handle on error).
    with open(filepath, "wb") as outf:
        outf.write(self.RpClump(context, exportVer).bin())
def unmangleName(name):
    """Strip a Blender duplicate suffix (".001", ".002", ...) from a name."""
    # Note: deliberately defined without `self`; it is called through the
    # class object as a plain function.
    has_suffix = (
        len(name) > 4
        and name[-4] == "."
        and name[-3:].isnumeric()
    )
    return name[:-4] if has_suffix else name
class ExportRenderwareMenu(bpy.types.Operator):
    """File > Export operator: pick a path and target game, then export."""
    expVersionValues = (("1", "GTA III", ""), ("2", "Vice City", ""), ("3", "San Andreas", ""))
    bl_idname = "export_rw.dff"
    bl_label = "Export Renderware (.dff)"
    filename_ext = ".dff"
    filepath = StringProperty(subtype = "FILE_PATH")
    expVersion = EnumProperty(name = "Export version", items = expVersionValues, default="2")
    def invoke(self, context, event):
        # Open the file-select dialog; execute() runs after confirmation.
        wm = context.window_manager
        wm.fileselect_add(self)
        return {"RUNNING_MODAL"}
    def execute(self, context):
        # Ensure custom per-object properties exist before the export reads them.
        setupProps()
        ExportRenderware(context, self.expVersion, self.filepath)
        return {"FINISHED"}
class ImportRenderwareMenu(bpy.types.Operator):
    """File > Import operator: pick a .dff file and load it into the scene."""
    bl_idname = "import_rw.dff"
    bl_label = "Import Renderware (.dff)"
    filename_ext = ".dff"
    filepath = StringProperty(subtype = "FILE_PATH")
    def invoke(self, context, event):
        # Open the file-select dialog; execute() runs after confirmation.
        wm = context.window_manager
        wm.fileselect_add(self)
        return {"RUNNING_MODAL"}
    def execute(self, context):
        # Ensure custom per-object properties exist before the import sets them.
        setupProps()
        ImportRenderware(self.filepath)
        return {"FINISHED"}
def export_func(self, context):
    # Menu entry appended to File > Export.
    self.layout.operator(ExportRenderwareMenu.bl_idname, text="Renderware (.dff)")
def import_func(self, context):
    # Menu entry appended to File > Import.
    self.layout.operator(ImportRenderwareMenu.bl_idname, text="Renderware (.dff)")
def register():
    """Register the add-on's classes and hook the import/export menu entries."""
    bpy.utils.register_module(__name__)
    bpy.types.INFO_MT_file_export.append(export_func)
    bpy.types.INFO_MT_file_import.append(import_func)
def unregister():
    """Undo register(): remove classes and the menu entries."""
    bpy.utils.unregister_module(__name__)
    bpy.types.INFO_MT_file_export.remove(export_func)
    bpy.types.INFO_MT_file_import.remove(import_func)
def setupProps():
    # Lazily (once per session) registers the custom per-object properties and
    # the 3D-view side panel that exposes them.
    class renderwarePanel(bpy.types.Panel):
        bl_space_type = "VIEW_3D"
        bl_region_type = "UI"
        bl_label = "Renderware"
        def draw(self, context):
            self.layout.prop(bpy.context.active_object, "renderright")
            self.layout.prop(bpy.context.active_object, "renderextra")
            self.layout.prop(bpy.context.active_object, "matfxpipe")
            self.layout.prop(bpy.context.active_object, "collhex")
            self.layout.prop(bpy.context.active_object, "rw_hanimdata")
            self.layout.prop(bpy.context.active_object, "rw_skindata")
    # Already initialized: the properties exist, so the panel was registered too.
    if hasattr(bpy.types.Object, "collhex"):
        return
    bpy.types.Object.collhex = bpy.props.StringProperty(name = "Collision", description = "Name of the text object that contains collision binary data.", maxlen = 100)
    bpy.types.Object.renderright = bpy.props.IntProperty(name = "RenderRight", description = "Index of the plugin whose pipeline is used for rendering.")
    bpy.types.Object.renderextra = bpy.props.IntProperty(name = "RenderExtra", description = "Extra arguments to the render pipeline.")
    bpy.types.Object.matfxpipe = bpy.props.BoolProperty(name = "MatFX pipeline", description = "Whether rendering is handled by MatFX pipeline.")
    bpy.types.Object.rw_hanimdata = bpy.props.StringProperty(name = "HAnimData", description = "Info for this skin bone.", maxlen = 100)
    bpy.types.Object.rw_skindata = bpy.props.StringProperty(name = "SkinData", description = "Skin data (bone vertices etc) for this mesh.", maxlen = 100)
    bpy.utils.register_class(renderwarePanel)
if __name__ == "__main__":
    # Run from Blender's text editor: re-register the add-on in place.
    # NOTE(review): unregister() before the first register() of a session may
    # raise — confirm this is only used after an initial registration.
    unregister()
    register()
    setupProps()
| 37.724138
| 191
| 0.526043
|
# Blender add-on metadata shown in the add-on manager.
bl_info = {
    "name": "RenderWare importer/exporter for GTA III/VC/SA (.dff)",
    "author": "Ago Allikmaa (maxorator)",
    "version": (0, 9, 2),
    "blender": (2, 6, 3),
    "location": "File > Import-Export > Renderware (.dff) ",
    "description": "RenderWare importer/exporter for GTA III/VC/SA",
    "category": "Import-Export" }
import struct
import os
import zlib
import base64
from collections import deque
import bpy
import math
import mathutils
from bpy.props import *
class RwTypes():
    # RenderWare binary-stream chunk type identifiers.
    ANY = -1  # wildcard accepted by the reader
    STRUCT = 0x0001
    STRING = 0x0002
    EXTENSION = 0x0003
    TEXTURE = 0x0006
    MATERIAL = 0x0007
    MATERIALLIST = 0x0008
    FRAMELIST = 0x000E
    GEOMETRY = 0x000F
    CLUMP = 0x0010
    ATOMIC = 0x0014
    GEOMETRYLIST = 0x001A
    RENDERRIGHTS = 0x001F
    MORPHPLG = 0x0105
    SKINPLG = 0x116
    HANIMPLG = 0x11E
    MATEFFECTS = 0x0120
    BINMESHPLG = 0x050E
    # 0x253F2xx ids are game-specific extension chunks.
    FRAMENAME = 0x253F2FE
    COLLISION = 0x253F2FA
    MATSPECULAR = 0x253F2F6
    NIGHTCOLS = 0x253F2F9
    MATREFLECTION = 0x253F2FC
    MESHEXTENSION = 0x253F2FD
    def decodeVersion(version):
        # Unpack a library-version stamp into a single comparable integer
        # (e.g. 0x1003FFFF -> 0x34003). Note: defined without `self` on
        # purpose; it is called through the class as a plain function.
        if (version & 0xFFFF0000) == 0:
            # Stamps with an empty high word hold the version directly.
            return version << 8
        else:
            p1 = ((version >> 14) & 0x3FF00) + 0x30000
            p2 = (version >> 16) & 0x3F
            return p1 | p2
class RpGeomFlag:
    # Bit flags carried in the GEOMETRY chunk header.
    TRISTRIP = 0x0001
    POSITIONS = 0x0002
    TEXTURED = 0x0004   # one UV set
    PRELIT = 0x0008     # per-vertex colours present
    NORMALS = 0x0010
    LIGHT = 0x0020
    MODULATEMATERIALCOLOR = 0x0040
    TEXTURED2 = 0x0080  # two UV sets
class ImportRenderware:
class RwTriangle:
    """A triangle read from a geometry section: vertex indices + material id."""

    def __init__(self, verts, mat):
        self.verts = verts  # indexable triple of vertex indices
        self.mat = mat      # material index

    def desc(self):
        """Return the vertex indices as a 3-tuple (the form from_pydata expects)."""
        a, b, c = self.verts[0], self.verts[1], self.verts[2]
        return (a, b, c)
class RwVertex:
    """A vertex read from a geometry section; UVs are filled in later."""

    def __init__(self, coords, normal):
        self.coords = coords
        self.normal = normal
        # UV pairs are assigned by the UV readers after construction.
        self.uv = None
        self.uv_env = None

    def desc(self):
        """Return the coordinates as a 3-tuple (the form from_pydata expects)."""
        x, y, z = self.coords[0], self.coords[1], self.coords[2]
        return (x, y, z)
class RwFrame:
    # One node of the imported frame hierarchy; build() materializes it as a
    # Blender object and recurses into its children.
    def __init__(self, loader, index, rot, pos, parent):
        self.loader = loader
        self.index = index
        self.geometry = None
        self.atomic = None
        self.blobj = None
        self.bldata = None
        self.hanimdata = None
        self.name = None
        # Compose the 3x3 rotation rows and the translation into a 4x4 matrix.
        rmatrix = mathutils.Matrix.Identity(3)
        rmatrix[0] = rot[0], rot[1], rot[2]
        rmatrix[1] = rot[3], rot[4], rot[5]
        rmatrix[2] = rot[6], rot[7], rot[8]
        rmatrix.resize_4x4()
        rmatrix.translation = pos[0], pos[1], pos[2]
        self.matrix = rmatrix
        self.parent = parent
        # childrenOf is offset by one so slot 0 collects the root frames.
        self.loader.childrenOf[parent+1].append(self.index)
    def setAtomic(self, atomic):
        self.atomic = atomic
        self.geometry = atomic.geometry
    def build(self):
        if self.name is None:
            self.name = "noname_" + str(self.index);
        if self.geometry:
            self.bldata = self.geometry.build(self.name)
        self.blobj = bpy.data.objects.new(self.name, self.bldata)
        if self.parent >= 0:
            self.blobj.parent = self.loader.frames[self.parent].blobj
        self.blobj.matrix_local = self.matrix
        bpy.context.scene.objects.link(self.blobj)
        for frame in self.loader.childrenOf[self.index+1]:
            self.loader.frames[frame].build()
        # Hide low-detail / damage variants by naming convention.
        if "_vlo" in self.name or "_dam" in self.name:
            self.blobj.hide = True
            self.blobj.hide_render = True
        # Collision blob is attached to the first root frame only.
        if self.loader.colhex and self.index == self.loader.childrenOf[0][0]:
            textobj = bpy.data.texts.new(name = ("zrwcoll_" + self.name))
            textobj.from_string(self.loader.colhex)
            self.blobj.collhex = textobj.name
        if self.hanimdata:
            textobj = bpy.data.texts.new(name = ("zrwhanim" + str(self.index) + "_" + self.name))
            textobj.from_string(self.hanimdata)
            self.blobj.rw_hanimdata = textobj.name
        if self.geometry and self.geometry.skindata:
            textobj = bpy.data.texts.new(name = ("zrwskin_" + self.name))
            textobj.from_string(self.geometry.skindata)
            self.blobj.rw_skindata = textobj.name
        if self.atomic and self.atomic.renderPlugin != None and self.atomic.renderExtra != None:
            self.blobj.renderright = self.atomic.renderPlugin
            self.blobj.renderextra = self.atomic.renderExtra
        if self.atomic and self.atomic.matfxpipe:
            self.blobj.matfxpipe = True
class RpGeometry:
    # Imported geometry: accumulates raw vertices/triangles/materials from the
    # stream readers; build() turns them into a Blender mesh.
    def __init__(self, loader, index):
        self.loader = loader
        self.index = index
        self.vertices = []
        self.triangles = []
        self.materials = []
        self.mesh = None
        self.atomic = None
        self.skindata = None
        self.hasEnvUV = False
        self.vertCol = None
        self.nightVertCol = None
        self.hasNormals = False
    def setAtomic(self, atomic):
        self.atomic = atomic
    def addMaterial(self, material):
        material.setIndex(len(self.materials))
        self.materials.append(material)
    def addVertex(self, vertex):
        self.vertices.append(vertex)
    def addTriangle(self, triangle):
        self.triangles.append(triangle)
    def build(self, name):
        """Create and return a Blender mesh: faces, colours, UVs and materials."""
        self.mesh = bpy.data.meshes.new(name)
        pyverts = []
        pypolys = []
        for vertex in self.vertices:
            pyverts.append(vertex.desc())
        for triangle in self.triangles:
            pypolys.append(triangle.desc())
        self.mesh.from_pydata(pyverts, [], pypolys)
        self.mesh.update()
        if self.vertCol:
            vcol = self.mesh.vertex_colors.new("Normal")
            self.mesh.vertex_colors.active = vcol
            # NOTE(review): colour data is indexed per vertex here, while
            # Blender vertex-colour layers store one entry per loop — confirm
            # this mapping is intended.
            for i in range(len(self.vertices)):
                vcol.data[i].color = (self.vertCol[i][0], self.vertCol[i][1], self.vertCol[i][2])
        if self.nightVertCol:
            nvcol = self.mesh.vertex_colors.new("Night")
            self.mesh.vertex_colors.active = nvcol
            for i in range(len(self.vertices)):
                nvcol.data[i].color = (self.nightVertCol[i][0], self.nightVertCol[i][1], self.nightVertCol[i][2])
        # Diffuse UVs: one loop entry per triangle corner.
        uvtexture = self.mesh.uv_textures.new()
        uvtexture.name = "MainUV"
        uvlayer = self.mesh.uv_layers[-1]
        for i in range(len(self.triangles)):
            for j in range(3):
                uvlayer.data[3*i + j].uv = self.vertices[self.triangles[i].verts[j]].uv
        if self.hasEnvUV:
            euvtexture = self.mesh.uv_textures.new()
            euvtexture.name = "EnvUV"
            euvlayer = self.mesh.uv_layers[-1]
            for i in range(len(self.triangles)):
                for j in range(3):
                    euvlayer.data[3*i + j].uv = self.vertices[self.triangles[i].verts[j]].uv_env
        for material in self.materials:
            material.build()
        for i in range(len(self.triangles)):
            self.mesh.polygons[i].material_index = self.triangles[i].mat
        return self.mesh
class RpMaterial:
    # Imported material: collects colour/texture/effect settings from the
    # stream; build() creates the corresponding Blender material.
    def __init__(self, geometry, flags=None, col=None, textured=None, ambient=None, specular=None, diffuse=None):
        self.index = None
        self.name = "g" + str(geometry.index) + "m"
        self.geometry = geometry
        self.flags = flags
        self.col = col
        self.ambient = ambient
        self.specular = specular
        self.diffuse = diffuse
        self.textured = textured
        self.texture = None
        self.blmat = None
        self.envtex = None
        self.readenvmap = False
        self.envIntensity = 1
        self.reflectColour = None
        self.reflectIntensity = None
        self.spectex = None
    def setIndex(self, index):
        # Rename to include the final material-slot index once known.
        self.index = index
        self.name = "g" + str(self.geometry.index) + "m" + str(index)
    def setTexture(self, texture):
        self.texture = texture
    def setEnvTexture(self, texture):
        self.envtex = texture
    def setSpecTexture(self, texture):
        self.spectex = texture
    def setReflection(self, colour, intensity):
        self.reflectColour = colour
        self.reflectIntensity = intensity
    def build(self):
        """Create the Blender material and attach env/spec/diffuse textures."""
        self.blmat = bpy.data.materials.new(self.name)
        # Stored colour bytes are scaled back to Blender's 0..1 floats.
        self.blmat.diffuse_color = (self.col[0]/255, self.col[1]/255, self.col[2]/255)
        self.blmat.diffuse_intensity = self.diffuse
        self.blmat.ambient = self.ambient
        self.blmat.specular_intensity = self.specular
        if self.geometry.vertCol:
            self.blmat.use_vertex_color_light = True
        if self.col[3] < 255:
            self.blmat.use_transparency = True
            self.blmat.alpha = self.col[3]/255
        if self.envtex:
            self.envtex.build()
        if self.spectex:
            self.spectex.build()
        if self.texture:
            self.texture.build()
            self.blmat.active_texture_index = 0
        if self.reflectColour and self.reflectIntensity:
            self.blmat.mirror_color = self.reflectColour
            self.blmat.raytrace_mirror.use = True
            self.blmat.raytrace_mirror.reflect_factor = self.reflectIntensity
        self.geometry.mesh.materials.append(self.blmat)
class RwTexture:
    """An imported texture reference; build() creates (or reuses) the Blender
    texture datablock and wires it into the material's texture slot."""
    def __init__(self, loader, material, name, texType, intensity=1):
        self.material = material
        self.bltex = None  # Blender texture datablock
        self.bltexslot = None  # slot on the owning material
        self.name = name
        self.loader = loader
        # texType: 0 = diffuse, 1 = environment map, 2 = specular map.
        self.texType = texType
        self.intensity = intensity
    def build(self):
        """Create/reuse the Blender texture, then configure the slot."""
        # Pooled lookup so identically named textures share one datablock.
        if self.texType == 1 and self.name in self.loader.envtexpool:
            self.bltex = self.loader.envtexpool[self.name]
        elif self.texType != 1 and self.name in self.loader.texpool:
            self.bltex = self.loader.texpool[self.name]
        else:
            if self.texType == 1:
                self.bltex = bpy.data.textures.new(self.name, "ENVIRONMENT_MAP")
                self.bltex.__class__ = bpy.types.EnvironmentMapTexture
                self.bltex.environment_map.source = "IMAGE_FILE"
                self.loader.envtexpool[self.name] = self.bltex
            else:
                self.bltex = bpy.data.textures.new(self.name, "IMAGE")
                self.bltex.__class__ = bpy.types.ImageTexture
                self.loader.texpool[self.name] = self.bltex
            # Images are looked for next to the .dff in a "<file>_tex" folder.
            imgfile = self.loader.filename + "_tex\\" + self.name + ".png"
            if os.path.isfile(imgfile):
                self.bltex.image = bpy.data.images.load(imgfile)
        self.bltexslot = self.material.blmat.texture_slots.create(self.texType)
        self.bltexslot.texture_coords = "UV"
        self.bltexslot.texture = self.bltex
        # Env/spec maps use the secondary UV layer when the geometry has one.
        if (self.texType == 1 or self.texType == 2) and self.material.geometry.hasEnvUV:
            self.bltexslot.uv_layer = "EnvUV"
        else:
            self.bltexslot.uv_layer = "MainUV"
        if self.texType == 1:
            self.bltexslot.diffuse_factor = self.intensity
        elif self.texType == 2:
            # Specular maps should only influence the specular colour.
            self.bltexslot.use_map_diffuse = False
            self.bltexslot.use_map_color_diffuse = False
            self.bltexslot.use_map_color_spec = True
            self.bltexslot.specular_color_factor = self.intensity
class RpAtomic:
    """An imported RpAtomic: ties one frame to one geometry."""

    def __init__(self, loader, frame, geometry, flags):
        """Create the atomic and register it on its frame and geometry."""
        self.loader = loader
        self.frame = frame
        self.geometry = geometry
        self.flags = flags
        # No render rights / matfx pipeline until extensions are parsed.
        self.renderPlugin = None
        self.renderExtra = None
        self.matfxpipe = False
        # Back-link so the frame and geometry can reach their atomic later.
        frame.setAtomic(self)
        geometry.setAtomic(self)

    def setRenderRights(self, plugin, extra):
        """Record the render-rights plugin id and its extra data word."""
        self.renderPlugin = plugin
        self.renderExtra = extra
def __init__(self, filename):
    """Parse the .dff at *filename* and build the root frames in Blender."""
    self.filename = filename
    self.texpool = {}  # texture name -> shared Blender image texture
    self.envtexpool = {}  # texture name -> shared Blender env-map texture
    self.colhex = None  # zlib+base64 collision blob, if the file has one
    self.childrenOf = None  # filled while parsing the FRAMELIST section
    self.frames = []
    self.geoms = []
    self.f = open(filename, "rb")
    self.readSection(RwTypes.CLUMP)
    self.f.close()
    # childrenOf[0] is populated during FRAMELIST parsing - presumably the
    # root-level frames; each build() recurses into its children.
    for frame in self.childrenOf[0]:
        self.frames[frame].build()
def writeDebug(self, text):
    """Append one line of debug text to '<dff path>.txt'."""
    with open(self.filename + ".txt", "a") as log:
        log.write(text + "\n")
def readFormat(self, format):
    """Read struct.calcsize(format) bytes from the stream and unpack them."""
    size = struct.calcsize(format)
    return struct.unpack(format, self.f.read(size))
def readSlice(self, format, slice):
    """Unpack *format* from the head of *slice*; return (values, remainder).

    Raises if the buffer is shorter than the format requires.
    """
    needed = struct.calcsize(format)
    if len(slice) < needed:
        raise Exception("Failed to read slice, buffer is too small.")
    head, tail = slice[:needed], slice[needed:]
    return struct.unpack(format, head), tail
def readSection(self, type, extra = None):
    """Read one chunk: a 12-byte header, then dispatch on the chunk type.

    *type* is the expected RwTypes id (negative = accept anything); *extra*
    is passed through to the type-specific reader.  Afterwards the stream is
    always seeked to the end of the chunk, so unconsumed or unknown payloads
    are skipped safely.  Returns whatever the handler returned (or None).
    """
    header = self.readFormat("III")
    # header = (chunk type, payload size, decoded library version).
    header = (header[0], header[1], RwTypes.decodeVersion(header[2]))
    if type >= 0 and header[0] != type:
        raise Exception("Expected type " + str(type) + ", found " + str(header[0]))
    curPos = self.f.tell()
    res = None
    if header[0] == RwTypes.STRUCT: res = self.readSectionStruct(header)
    elif header[0] == RwTypes.STRING: res = self.readSectionString(header)
    elif header[0] == RwTypes.EXTENSION: res = self.readSectionExtension(header, extra)
    elif header[0] == RwTypes.TEXTURE: res = self.readSectionTexture(header, extra)
    elif header[0] == RwTypes.MATERIAL: res = self.readSectionMaterial(header, extra)
    elif header[0] == RwTypes.MATERIALLIST: res = self.readSectionMaterialList(header, extra)
    elif header[0] == RwTypes.FRAMELIST: res = self.readSectionFrameList(header)
    elif header[0] == RwTypes.GEOMETRY: res = self.readSectionGeometry(header, extra)
    elif header[0] == RwTypes.CLUMP: res = self.readSectionClump(header)
    elif header[0] == RwTypes.ATOMIC: res = self.readSectionAtomic(header)
    elif header[0] == RwTypes.GEOMETRYLIST: res = self.readSectionGeometryList(header)
    elif header[0] == RwTypes.MORPHPLG: res = self.readSectionMorphPLG(header, extra)
    elif header[0] == RwTypes.BINMESHPLG: res = self.readSectionBinMeshPLG(header, extra)
    elif header[0] == RwTypes.FRAMENAME: res = self.readSectionFrameName(header, extra)
    elif header[0] == RwTypes.COLLISION: res = self.readSectionCollision(header, extra)
    elif header[0] == RwTypes.MATEFFECTS: res = self.readSectionMatEffects(header, extra)
    elif header[0] == RwTypes.MATSPECULAR: res = self.readSectionMatSpecular(header, extra)
    elif header[0] == RwTypes.MATREFLECTION: res = self.readSectionMatReflection(header, extra)
    elif header[0] == RwTypes.MESHEXTENSION: res = self.readSectionMeshExtension(header, extra)
    elif header[0] == RwTypes.RENDERRIGHTS: res = self.readSectionRenderRights(header, extra)
    elif header[0] == RwTypes.HANIMPLG: res = self.readSectionHAnimPLG(header, extra)
    elif header[0] == RwTypes.SKINPLG: res = self.readSectionSkinPLG(header, extra)
    elif header[0] == RwTypes.NIGHTCOLS: res = self.readSectionNightCols(header, extra)
    elif type >= 0: raise Exception("Missing read function for section type " + str(type))
    else: print("Ignoring extension data of type " + hex(header[0]))
    # Skip whatever the handler did not consume.
    self.f.seek(curPos + header[1])
    return res
def readSectionStruct(self, header):
    """Return the header plus the section's raw payload bytes."""
    payload = self.f.read(header[1])
    return header, payload
def readSectionString(self, header):
    """Read a NUL-terminated ASCII string of at most header[1] bytes.

    Reads byte-by-byte and stops after consuming the terminating NUL, so the
    stream position is left just past it (readSection seeks past the rest).
    """
    chars = bytearray()
    remaining = header[1]
    while remaining > 0:
        b = self.f.read(1)
        if b[0] == 0:
            break
        chars += b
        remaining -= 1
    return header, chars.decode("ascii")
def readSectionExtension(self, header, extra):
    """Read child sections until the extension's payload is exhausted.

    *extra* (the owning material/atomic/frame/geometry) is forwarded to each
    child so plugin readers know what to attach their data to.
    """
    endPos = self.f.tell() + header[1]
    while self.f.tell() < endPos:
        self.readSection(RwTypes.ANY, extra)
    return header, None
def readSectionTexture(self, header, material):
    """Parse a TEXTURE section and attach it to *material*.

    If the material's matfx reader flagged readenvmap, this texture is the
    environment map (with the matfx coefficient as intensity); otherwise it
    is the plain diffuse texture.
    """
    metaHeader, slice = self.readSection(RwTypes.STRUCT)
    (flags, x), slice = self.readSlice("HH", slice)  # filter/addressing flags
    x, texName = self.readSection(RwTypes.STRING)
    x, alphaName = self.readSection(RwTypes.STRING)  # alpha name is unused
    if material.readenvmap:
        texture = self.RwTexture(self, material, texName, 1, material.envIntensity)
        material.setEnvTexture(texture)
    else:
        texture = self.RwTexture(self, material, texName, 0, 1)
        material.setTexture(texture)
    self.readSection(RwTypes.EXTENSION)
    return header, None
def readSectionMaterial(self, header, geometry):
    """Parse one MATERIAL section: colour and surface coefficients, the
    optional diffuse texture, then its extensions (matfx, specular, ...)."""
    metaHeader, slice = self.readSection(RwTypes.STRUCT)
    (flags,), slice = self.readSlice("I", slice)
    col, slice = self.readSlice("BBBB", slice)  # RGBA bytes
    (x, textured, ambient, specular, diffuse), slice = self.readSlice("iifff", slice)
    material = self.RpMaterial(geometry, flags, col, textured, ambient, specular, diffuse)
    geometry.addMaterial(material)
    # A positive 'textured' count means a TEXTURE section follows.
    if textured > 0:
        self.readSection(RwTypes.TEXTURE, material)
    self.readSection(RwTypes.EXTENSION, material)
    return header, None
def readSectionMaterialList(self, header, geometry):
    """Parse the material list: a count, an index table (skipped), and then
    that many MATERIAL sections attached to *geometry*."""
    metaHeader, slice = self.readSection(RwTypes.STRUCT)
    (matCount,), slice = self.readSlice("i", slice)
    # Skip the per-material index table; each entry is read and discarded.
    for i in range(matCount):
        junk, slice = self.readSlice("i", slice)
    for i in range(matCount):
        self.readSection(RwTypes.MATERIAL, geometry)
    return header, None
def readSectionFrameList(self, header):
    """Parse the frame list: per-frame transform and parent index, then one
    EXTENSION section per frame (names, HAnim, ...)."""
    metaHeader, slice = self.readSection(RwTypes.STRUCT)
    (frameCount,), slice = self.readSlice("i", slice)
    # One child-list per frame plus one extra slot - presumably offset by
    # one so the root parent index -1 lands in slot 0 (populated inside
    # RwFrame; confirm there).
    self.childrenOf = []
    for i in range(frameCount+1):
        self.childrenOf.append([])
    for i in range(frameCount):
        rot, slice = self.readSlice("fffffffff", slice)  # 3x3 rotation, row data
        pos, slice = self.readSlice("fff", slice)
        (parent, flags), slice = self.readSlice("ii", slice)
        self.frames.append(self.RwFrame(self, i, rot, pos, parent))
    for i in range(frameCount):
        self.readSection(RwTypes.EXTENSION, self.frames[i])
    return header, None
def readSectionGeometry(self, header, index):
    """Parse a GEOMETRY section: vertex colours, UV sets, triangles and the
    (single) morph target, followed by its material list and extensions.

    *index* is the geometry's position within the geometry list.
    """
    metaHeader, slice = self.readSection(RwTypes.STRUCT)
    (flags, texCount, triCount, vertCount, morphCount), slice = self.readSlice("HHiii", slice)
    geometry = self.RpGeometry(self, index)
    self.geoms.append(geometry)
    geometry.flags = flags
    # Pre-3.4 files carry inline surface lighting coefficients.
    if metaHeader[2] < 0x34001:
        (surfAmbient, surfSpecular, surfDiffuse), slice = self.readSlice("fff", slice)
    for i in range(vertCount):
        geometry.addVertex(self.RwVertex(None, None))
    if flags & RpGeomFlag.PRELIT:
        geometry.vertCol = []
        for i in range(vertCount):
            (vcr, vcg, vcb, vca), slice = self.readSlice("BBBB", slice)
            geometry.vertCol.append((vcr / 255, vcg / 255, vcb / 255))
    for i in range(vertCount):
        uv, slice = self.readSlice("ff", slice)
        geometry.vertices[i].uv = (uv[0], 1-uv[1])  # flip V for Blender
    if texCount > 1:
        geometry.hasEnvUV = True
        for i in range(vertCount):
            uv_env, slice = self.readSlice("ff", slice)
            geometry.vertices[i].uv_env = (uv_env[0], 1-uv_env[1])
    if texCount > 2:
        # Skip any UV sets beyond the two that are supported.
        slice = slice[struct.calcsize("ff")*(texCount-2)*(vertCount):]
    for i in range(triCount):
        (c, b, mat, a), slice = self.readSlice("HHHH", slice)
        if a >= vertCount or b >= vertCount or c >= vertCount:
            raise Exception("Vertex indices out of range for triangle.")
        geometry.addTriangle(self.RwTriangle((a, b, c), mat))
    # BUGFIX: was "morphCount is not 1" - an identity comparison against an
    # int literal, which is implementation-defined for unpacked values.
    if morphCount != 1:
        raise Exception("Multiple frames not supported")
    for i in range(morphCount):
        (bx, by, bz, br, hasVerts, hasNormals), slice = self.readSlice("ffffii", slice)
        if hasVerts > 0:
            for j in range(vertCount):
                coords, slice = self.readSlice("fff", slice)
                geometry.vertices[j].coords = coords
        if hasNormals > 0:
            geometry.hasNormals = True
            for j in range(vertCount):
                normal, slice = self.readSlice("fff", slice)
                geometry.vertices[j].normal = normal
    self.readSection(RwTypes.MATERIALLIST, geometry)
    self.readSection(RwTypes.EXTENSION, geometry)
    return header, None
def readSectionClump(self, header):
    """Parse the top-level CLUMP: frame list, geometry list, atomics."""
    metaHeader, slice = self.readSection(RwTypes.STRUCT)
    (atomicCount,), slice = self.readSlice("i", slice)
    # Newer file versions add light and camera counts to the struct.
    if metaHeader[2] > 0x33000:
        (lightCount, cameraCount), slice = self.readSlice("ii", slice)
    self.readSection(RwTypes.FRAMELIST)
    self.readSection(RwTypes.GEOMETRYLIST)
    for i in range(atomicCount):
        self.readSection(RwTypes.ATOMIC)
    self.readSection(RwTypes.EXTENSION)
    return header, None
def readSectionAtomic(self, header):
    """Parse an ATOMIC: frame/geometry indices and flags, then extensions."""
    metaHeader, slice = self.readSection(RwTypes.STRUCT)
    (frameIndex, geomIndex, flags, x, x, x, x), slice = self.readSlice("iiBBBBi", slice)
    atomic = self.RpAtomic(self, self.frames[frameIndex], self.geoms[geomIndex], flags)
    self.readSection(RwTypes.EXTENSION, atomic)
    return header, None
def readSectionGeometryList(self, header):
    """Parse the geometry list: a count, then that many GEOMETRY sections."""
    metaHeader, slice = self.readSection(RwTypes.STRUCT)
    (geomCount,), slice = self.readSlice("i", slice)
    for i in range(geomCount):
        self.readSection(RwTypes.GEOMETRY, i)
    # BUGFIX: every other section reader returns (header, None); this one
    # previously fell off the end and returned bare None.
    return header, None
def readSectionMorphPLG(self, header, geometry):
    """Morph data is not supported; acknowledge the section and skip it."""
    return (header, None)
def readSectionBinMeshPLG(self, header, geometry):
    """Parse the bin-mesh extension and recover per-triangle materials.

    The payload stores material splits of either triangle lists (type 0)
    or triangle strips (type 1); each decoded face is matched back to the
    geometry's triangles via a sorted-vertex-triple lookup.
    """
    slice = self.f.read(header[1])
    (type, splits, total), slice = self.readSlice("iii", slice)
    if type != 0 and type != 1:
        # BUGFIX: the warning previously said "Morph PLG" (copy-paste).
        print("Bin Mesh PLG section in unknown type - ignoring.")
        return header, None
    # Map each triangle's sorted vertex triple to its index.
    lookup = {}
    for i in range(len(geometry.triangles)):
        v = geometry.triangles[i].verts
        v = list(v)
        v.sort()
        lookup[tuple(v)] = i
    totals = 0
    for i in range(splits):
        (sub, mat), slice = self.readSlice("ii", slice)
        if type == 0:
            # Triangle list: every 3 indices form one face.
            for j in range(sub//3):
                vx, slice = self.readSlice("iii", slice)
                vx = list(vx)
                vx.sort()
                vx = tuple(vx)
                if vx in lookup:
                    geometry.triangles[lookup[vx]].mat = mat
        else:
            # Triangle strip: slide a window of the two previous indices.
            elems = deque()
            for j in range(sub):
                if len(elems) > 2:
                    elems.popleft()
                (item,), slice = self.readSlice("i", slice)
                if len(elems) > 1:
                    checklist = [elems[0], elems[1], item]
                    checklist.sort()
                    check = tuple(checklist)
                    if check in lookup:
                        geometry.triangles[lookup[check]].mat = mat
                elems.append(item)
    return header, None
def readSectionFrameName(self, header, frame):
    """Read the frame-name extension and store the name on the frame."""
    raw = self.f.read(header[1])
    frame.name = raw.decode("ascii")
    return header, None
def readSectionCollision(self, header, geometry):
    """Stash the raw collision blob (zlib-compressed, base64-encoded) for
    later re-export; skipped when no root frame exists to attach it to."""
    # BUGFIX: was "len(...) is 0" - identity comparison against an int.
    if not self.childrenOf or len(self.childrenOf[0]) == 0:
        print("Collision extension - no frame to attach to.")
        return header, None
    binary = self.f.read(header[1])
    self.colhex = base64.b64encode(zlib.compress(binary)).decode("ascii")
    return header, None
def readSectionMatEffects(self, header, parent):
    """Dispatch a MATEFFECTS section on its context: materials carry
    env-map data, atomics carry only a pipeline-enable flag."""
    if parent.__class__ == self.RpMaterial:
        return self.readSectionMaterialMatEffects(header, parent)
    elif parent.__class__ == self.RpAtomic:
        return self.readSectionAtomicMatEffects(header, parent)
    # Unknown context: consume nothing; readSection seeks past the payload.
    return header, None
def readSectionMaterialMatEffects(self, header, material):
    """Parse a material's matfx data; only env-mapping (effect type 2) is
    handled.  When a textured env map is present, the material is flagged
    so the following TEXTURE section becomes its environment texture.
    """
    (flags,) = self.readFormat("I")
    # The section carries two effect slots.
    for i in range(2):
        (effectType,) = self.readFormat("I")
        if effectType == 0:
            continue
        elif effectType != 2:
            print("Unknown material effect type.")
            return header, None
        (coefficient, frameBufferAlpha, textured) = self.readFormat("fii")
        if textured:
            material.readenvmap = True
            material.envIntensity = coefficient
            self.readSection(RwTypes.TEXTURE, material)
    # BUGFIX: previously fell off the end and returned None instead of the
    # (header, None) pair every other section reader produces.
    return header, None
def readSectionAtomicMatEffects(self, header, atomic):
    """A non-zero flag marks the atomic as using the matfx pipeline."""
    (enabled,) = self.readFormat("i")
    if enabled:
        atomic.matfxpipe = True
    return header, None
def readSectionMatSpecular(self, header, material):
    """Parse the specular-map extension: an intensity float followed by a
    NUL-terminated ASCII texture name."""
    slice = self.f.read(header[1])
    (intensity,), slice = self.readSlice("f", slice)
    # Everything up to the first NUL (or the whole buffer) is the name.
    specName = slice.split(b"\x00", 1)[0].decode("ascii")
    texture = self.RwTexture(self, material, specName, 2, intensity)
    material.setSpecTexture(texture)
    return header, None
def readSectionMatReflection(self, header, material):
    """Parse the reflection extension: RGB colour, one unused float, then
    the reflection intensity."""
    data = self.f.read(header[1])
    colour, data = self.readSlice("fff", data)
    (_unused, intensity), data = self.readSlice("ff", data)
    material.setReflection(colour, intensity)
    return header, None
def readSectionMeshExtension(self, header, geometry):
    """Consume the mesh-extension section; only the empty form is understood."""
    data = self.f.read(header[1])
    (hasData,), data = self.readSlice("i", data)
    if not hasData:
        return header, None
    print("Mesh extension extension actually has data. Not sure what to do with it.")
    return header, None
def readSectionRenderRights(self, header, atomic):
    """Parse the render-rights extension (plugin id + extra data word) and
    attach it to the enclosing atomic; warns if found outside an atomic."""
    if not hasattr(atomic, "__class__") or atomic.__class__ != self.RpAtomic:
        print("Render rights extension is not in the right section, should be in atomic.")
        return
    slice = self.f.read(header[1])
    (plugin, extra), slice = self.readSlice("ii", slice)
    atomic.setRenderRights(plugin, extra)
    # BUGFIX: return (header, None) like every other section reader; the
    # success path previously fell off the end and returned None.
    return header, None
def readSectionHAnimPLG(self, header, frame):
    """Stash the raw HAnim blob on the frame as zlib+base64 text so it can
    be re-emitted on export; warns if found outside a frame."""
    if not hasattr(frame, "__class__") or frame.__class__ != self.RwFrame:
        print("HAnim extension is not in the right section, should be in frame.")
        return
    raw = self.f.read(header[1])
    packed = zlib.compress(raw)
    frame.hanimdata = base64.b64encode(packed).decode("ascii")
    return header, None
def readSectionSkinPLG(self, header, geometry):
    """Stash the raw skinning blob on the geometry as zlib+base64 text so it
    can be re-emitted on export; warns if found outside a geometry."""
    if not hasattr(geometry, "__class__") or geometry.__class__ != self.RpGeometry:
        print("Skin extension is not in the right section, should be in geometry.")
        return
    raw = self.f.read(header[1])
    packed = zlib.compress(raw)
    geometry.skindata = base64.b64encode(packed).decode("ascii")
    return header, None
def readSectionNightCols(self, header, geometry):
    """Read one RGBA night colour per vertex, stored as normalised RGB."""
    if not hasattr(geometry, "__class__") or geometry.__class__ != self.RpGeometry:
        print("Night vertex colours extension is not in the right section, should be in geometry.")
        return
    data = self.f.read(header[1])
    (flag,), data = self.readSlice("I", data)
    geometry.nightVertCol = []
    for _ in range(len(geometry.vertices)):
        (r, g, b, a), data = self.readSlice("BBBB", data)
        geometry.nightVertCol.append((r / 255, g / 255, b / 255))
    return header, None
class ExportRenderware:
class RwChunkHeader:
    """A 12-byte RenderWare chunk header: type id, payload size, version."""

    def __init__(self, type, size):
        self.type = type
        self.size = size

    def bin(self):
        # The version stamp comes from the export target configured on
        # the enclosing ExportRenderware class.
        fields = (self.type, self.size, ExportRenderware.targetVer)
        return struct.pack("III", *fields)
class RwVector3:
    """A 3-component float vector serialised as 12 bytes."""

    def __init__(self, x, y, z):
        self.x, self.y, self.z = x, y, z

    def bin(self):
        return struct.pack("3f", self.x, self.y, self.z)
class RwRotMatrix:
    """A 3x3 rotation matrix stored flat, initialised to identity."""

    def __init__(self):
        self.m = [1, 0, 0,
                  0, 1, 0,
                  0, 0, 1]

    def bin(self):
        return struct.pack("fffffffff", *self.m)
class RwFrameList:
    """Collects every exported frame and serialises the FRAMELIST chunk."""

    def __init__(self):
        self.R = ExportRenderware
        self.frames = []

    def bin(self):
        # Struct part: frame count, then each frame's raw transform data.
        body = struct.pack("i", len(self.frames))
        for frame in self.frames:
            body += frame.binraw()
        out = self.R.RwChunkHeader(RwTypes.STRUCT, len(body)).bin() + body
        # Per-frame extension chunks (names, HAnim) follow the struct part.
        for frame in self.frames:
            out += frame.binext()
        return self.R.RwChunkHeader(RwTypes.FRAMELIST, len(out)).bin() + out
class RwFrame:
    """One node of the exported frame hierarchy.

    Wraps a Blender object (MESH or EMPTY), records its local transform
    relative to the parent frame, creates an RpAtomic for meshes, and
    recurses into the object's children.
    """

    def __init__(self, clump, object, parentFrame):
        self.R = ExportRenderware
        self.clump = clump
        self.object = object
        self.index = len(clump.frameList.frames)
        clump.frameList.frames.append(self)
        self.name = self.object.name
        self.parent = parentFrame
        self.rotation = self.R.RwRotMatrix()
        self.position = self.R.RwVector3(0, 0, 0)
        # The root frame keeps an identity transform; every other frame
        # stores its local matrix relative to the parent.
        if parentFrame is not None:
            ux = object.matrix_local.to_3x3()
            self.rotation.m = [ux[0][0], ux[0][1], ux[0][2], ux[1][0], ux[1][1], ux[1][2], ux[2][0], ux[2][1], ux[2][2]]
            self.position.x = object.matrix_local.translation[0]
            self.position.y = object.matrix_local.translation[1]
            self.position.z = object.matrix_local.translation[2]
        if str(object.type) == "MESH":
            self.atomic = self.R.RpAtomic(self)
        elif str(object.type) == "EMPTY":
            self.atomic = None
        else:
            raise Exception("Unsupported object type selected: " + str(object.type))
        for child in self.object.children:
            # BUGFIX: this filter used to test object.type (the parent,
            # already validated above, so the branch never fired) instead of
            # child.type - unsupported children were recursed into and raised.
            if str(child.type) != "MESH" and str(child.type) != "EMPTY":
                print("Ignoring object " + child.name + ", type " + child.type)
                continue
            self.R.RwFrame(self.clump, child, self)
        # Pick up collision data from the first object that carries any.
        if not clump.colbin:
            try:
                if len(object.collhex) > 0:
                    textf = bpy.data.texts[object.collhex].as_string()
                    clump.colbin = zlib.decompress(base64.b64decode(bytes(textf, "ascii")))
            except:
                clump.colbin = None

    def binraw(self):
        """Serialise rotation, position and parent index (struct payload)."""
        payload = self.rotation.bin()
        payload += self.position.bin()
        payload += struct.pack("ii", -1 if self.parent is None else self.parent.index, 0)
        return payload

    def binext_name(self):
        """FRAMENAME extension; skipped for auto-generated 'noname_' frames."""
        noname = "noname_"
        if self.name[:len(noname)] == noname:
            return b""
        writename = self.R.unmangleName(self.name)
        # RenderWare frame names are limited to 23 characters.
        if len(writename) > 23:
            writename = writename[:23]
            print("Warning, frame name '", writename , "' truncated to 23 characters.")
        payload = struct.pack(str(len(writename)) + "s", bytearray(writename, "ascii"))
        header = self.R.RwChunkHeader(RwTypes.FRAMENAME, len(payload)).bin()
        return header + payload

    def binext_hanim(self):
        """Re-emit previously imported HAnim data stored in a Blender text
        block (zlib+base64); empty when the object carries none."""
        object = self.object
        try:
            if len(object.rw_hanimdata) > 0:
                textf = bpy.data.texts[object.rw_hanimdata].as_string()
                rawdata = zlib.decompress(base64.b64decode(bytes(textf, "ascii")))
            else:
                return b""
        except:
            return b""
        payload = rawdata
        header = self.R.RwChunkHeader(RwTypes.HANIMPLG, len(payload)).bin()
        return header + payload

    def binext(self):
        """Wrap all of the frame's extensions in one EXTENSION chunk."""
        payload = self.binext_name() + self.binext_hanim()
        header = self.R.RwChunkHeader(RwTypes.EXTENSION, len(payload)).bin()
        return header + payload
class RpAtomicChunkInfo:
    """The struct payload of an ATOMIC chunk: frame/geometry indices + flags."""

    def __init__(self, frameIndex, geometryIndex, flags):
        self.R = ExportRenderware
        self.frameIndex = frameIndex
        self.geometryIndex = geometryIndex
        self.flags = flags

    def bin(self):
        body = struct.pack("iiii", self.frameIndex, self.geometryIndex, self.flags, 0)
        return self.R.RwChunkHeader(RwTypes.STRUCT, len(body)).bin() + body
class RpAtomic:
    """One exported atomic: links a frame to its geometry in the clump."""
    def __init__(self, frame):
        self.R = ExportRenderware
        self.clump = frame.clump
        self.frame = frame
        # Evaluate the Blender object to a mesh with "PREVIEW" settings.
        # NOTE(review): the second positional arg (False) is presumably
        # apply_modifiers per the Blender 2.7x to_mesh API - confirm.
        self.mesh = frame.object.to_mesh(self.clump.context.scene, False, "PREVIEW")
        self.geometry = self.R.RpGeometry(self)
        self.flags = 5  # hard-coded atomic flags; meaning not established here
    def binext_rights(self):
        """RENDERRIGHTS extension; only written when the object carries a
        non-zero render-right plugin id."""
        if self.frame.object.renderright == 0:
            return b""
        payload = struct.pack("ii", self.frame.object.renderright, self.frame.object.renderextra)
        header = self.R.RwChunkHeader(RwTypes.RENDERRIGHTS, len(payload)).bin()
        return header + payload
    def binext_matfx(self):
        """MATEFFECTS flag; written when the object enables the matfx
        pipeline or when the target version is <= 0x34003."""
        if self.frame.object.matfxpipe != True and self.R.decodedVer > 0x34003:
            return b""
        payload = struct.pack("i", 1)
        header = self.R.RwChunkHeader(RwTypes.MATEFFECTS, len(payload)).bin()
        return header + payload
    def bin(self):
        """Serialise the full ATOMIC chunk (struct part + extensions)."""
        payload = self.R.RpAtomicChunkInfo(self.frame.index, self.geometry.index, self.flags).bin()
        extensions = self.binext_rights() + self.binext_matfx()
        extensions = self.R.RwChunkHeader(RwTypes.EXTENSION, len(extensions)).bin() + extensions
        payload += extensions
        header = self.R.RwChunkHeader(RwTypes.ATOMIC, len(payload)).bin()
        return header + payload
class RpVertex:
    """A deduplicated export vertex: position, two UV sets and a normal."""

    def __init__(self, pos, uv, uve, normal):
        self.pos, self.normal = pos, normal
        self.uv, self.uve = uv, uve
class RpTriangle:
    """An export triangle: three vertex indices plus a material index."""

    def __init__(self, a, b, c, mat):
        self.a = a
        self.b = b
        self.c = c
        self.mat = mat

    def bin(self):
        # On disk: two vertex indices, the material id, then the third index.
        return struct.pack("4H", self.a, self.b, self.mat, self.c)
class RwUVCoord:
    """A UV pair; V is flipped on write (Blender and RenderWare disagree
    on the V axis direction)."""

    def __init__(self, u, v):
        self.u, self.v = u, v

    def bin(self):
        return struct.pack("ff", self.u, 1 - self.v)
class RwTexture:
    """Serialises one texture reference (flags + name) as a TEXTURE chunk."""
    def __init__(self, material, bltexslot):
        self.R = ExportRenderware
        self.material = material
        self.bltexslot = bltexslot
        self.bltex = bltexslot.texture
    def bin(self):
        """Build the TEXTURE chunk: struct flags, name string, empty alpha
        name, and an empty extension."""
        # 0x1106 mirrors the flag word the importer skips - exact meaning
        # of the filter/addressing bits not established here.
        payload = struct.pack("HH", 0x1106, 0)
        payload = self.R.RwChunkHeader(RwTypes.STRUCT, len(payload)).bin() + payload
        strdata = struct.pack(str(len(self.bltex.name)) + "s", bytearray(self.bltex.name, "ascii"))
        # Pad with 1-4 NULs so the name is terminated and 4-byte aligned.
        for i in range(4 - (len(self.bltex.name)&3)):
            strdata += struct.pack("B", 0)
        payload += self.R.RwChunkHeader(RwTypes.STRING, len(strdata)).bin() + strdata
        # Empty (all-NUL) alpha texture name.
        strdata = struct.pack("i", 0)
        payload += self.R.RwChunkHeader(RwTypes.STRING, len(strdata)).bin() + strdata
        extensions = b""
        extensions = self.R.RwChunkHeader(RwTypes.EXTENSION, len(extensions)).bin() + extensions
        payload += extensions
        header = self.R.RwChunkHeader(RwTypes.TEXTURE, len(payload)).bin()
        return header + payload
class RpMaterial:
    """Serialises one Blender material as a RenderWare MATERIAL chunk,
    including its diffuse/env/specular textures and reflection data."""

    def __init__(self, materialList, blMaterial):
        self.R = ExportRenderware
        self.materialList = materialList
        self.index = len(materialList.mats)
        self.mesh = materialList.mesh
        self.blmaterial = blMaterial
        # Colour channels scaled to byte range and clamped.
        self.red = min(255, max(0, blMaterial.diffuse_color[0] * 256))
        self.green = min(255, max(0, blMaterial.diffuse_color[1] * 256))
        self.blue = min(255, max(0, blMaterial.diffuse_color[2] * 256))
        self.alpha = min(255, max(0, blMaterial.alpha * 256))
        self.ambient = blMaterial.ambient
        self.specular = blMaterial.specular_intensity
        self.diffuse = blMaterial.diffuse_intensity
        self.bltex_diffuse = self.findTexSlot("DIFFUSE")
        self.bltex_specular = self.findTexSlot("SPECULAR")
        self.bltex_envmap = self.findTexSlot("ENVMAP")
        self.tex_diffuse = None
        self.tex_envmap = None
        if self.bltex_diffuse:
            self.tex_diffuse = self.R.RwTexture(self, self.bltex_diffuse)
            # Remember which UV layer the diffuse texture uses (first one wins).
            if self.bltex_diffuse.texture_coords == "UV" and len(self.bltex_diffuse.uv_layer) > 0 and not self.materialList.geometry.uvname_diff:
                self.materialList.geometry.uvname_diff = self.bltex_diffuse.uv_layer
        if self.bltex_envmap:
            self.tex_envmap = self.R.RwTexture(self, self.bltex_envmap)
            if self.bltex_envmap.texture_coords == "UV" and len(self.bltex_envmap.uv_layer) > 0 and not self.materialList.geometry.uvname_env:
                self.materialList.geometry.uvname_env = self.bltex_envmap.uv_layer

    def findTexSlot(self, type):
        """Return the first texture slot filling the requested role
        (DIFFUSE / SPECULAR / ENVMAP), or None."""
        for i in range(len(self.blmaterial.texture_slots)):
            textype = ""
            slot = self.blmaterial.texture_slots[i]
            if slot and slot.texture:
                if slot.texture.type == "ENVIRONMENT_MAP":
                    textype = "ENVMAP"
                elif slot.use_map_color_spec and not slot.use_map_color_diffuse:
                    textype = "SPECULAR"
                elif slot.use_map_color_diffuse and not slot.use_map_color_spec:
                    textype = "DIFFUSE"
            if textype == type:
                return slot
        return None

    def binext_matfx(self):
        """MATEFFECTS extension carrying the environment map, if any."""
        if not self.tex_envmap:
            return b""
        payload = struct.pack("iifii", 2, 2, self.bltex_envmap.specular_color_factor, 0, 1)
        payload += self.tex_envmap.bin()
        payload += struct.pack("i", 0)
        header = self.R.RwChunkHeader(RwTypes.MATEFFECTS, len(payload)).bin()
        return header + payload

    def binext_reflect(self):
        """MATREFLECTION extension (always emitted for newer target versions)."""
        if not self.blmaterial.raytrace_mirror.use and ExportRenderware.decodedVer <= 0x34003:
            return b""
        factor = self.blmaterial.raytrace_mirror.reflect_factor if self.blmaterial.raytrace_mirror.use else 0
        colour = self.blmaterial.mirror_color
        # BUGFIX: 'factor' was computed but the raw reflect_factor was packed,
        # so materials with mirror disabled exported a non-zero intensity.
        payload = struct.pack("fffffi", colour[0], colour[1], colour[2], 1, factor, 0)
        header = self.R.RwChunkHeader(RwTypes.MATREFLECTION, len(payload)).bin()
        return header + payload

    def binext_specular(self):
        """MATSPECULAR extension: intensity plus a 24-byte texture-name field."""
        if not self.bltex_specular:
            return b""
        payload = struct.pack("f", self.bltex_specular.specular_color_factor)
        texname = bytes(self.bltex_specular.texture.name, "ascii")
        payload += texname[:23]
        nullbyte = struct.pack("B", 0)
        # Pad the name out to a NUL-terminated 24-byte field.
        for i in range(24 - min(23, len(texname))):
            payload += nullbyte
        header = self.R.RwChunkHeader(RwTypes.MATSPECULAR, len(payload)).bin()
        return header + payload

    def bin(self):
        """Serialise the full MATERIAL chunk (struct, texture, extensions)."""
        payload = struct.pack("iBBBBiIfff", 0, int(self.red), int(self.green), int(self.blue), int(self.alpha), 0, 1 if self.tex_diffuse else 0, self.ambient, self.specular, self.diffuse)
        payload = self.R.RwChunkHeader(RwTypes.STRUCT, len(payload)).bin() + payload
        if self.tex_diffuse:
            payload += self.tex_diffuse.bin()
        extensions = self.binext_matfx() + self.binext_reflect() + self.binext_specular()
        extensions = self.R.RwChunkHeader(RwTypes.EXTENSION, len(extensions)).bin() + extensions
        payload += extensions
        header = self.R.RwChunkHeader(RwTypes.MATERIAL, len(payload)).bin()
        return header + payload
class RpMaterialList:
    """Wraps a mesh's materials and serialises the MATERIALLIST chunk."""

    def __init__(self, geometry):
        self.R = ExportRenderware
        self.geometry = geometry
        self.clump = geometry.clump
        self.mesh = geometry.mesh
        # Wrap every Blender material, preserving order.
        self.mats = [self.R.RpMaterial(self, m) for m in self.mesh.materials]

    def bin(self):
        # Struct part: material count plus one -1 placeholder index
        # per material.
        body = struct.pack("i", len(self.mesh.materials))
        body += struct.pack("i", -1) * len(self.mats)
        out = self.R.RwChunkHeader(RwTypes.STRUCT, len(body)).bin() + body
        for mat in self.mats:
            out += mat.bin()
        return self.R.RwChunkHeader(RwTypes.MATERIALLIST, len(out)).bin() + out
class RpGeometryList:
    """Collects exported geometries and serialises the GEOMETRYLIST chunk."""

    def __init__(self):
        self.R = ExportRenderware
        self.geoms = []

    def bin(self):
        body = struct.pack("i", len(self.geoms))
        out = self.R.RwChunkHeader(RwTypes.STRUCT, len(body)).bin() + body
        for geom in self.geoms:
            out += geom.bin()
        return self.R.RwChunkHeader(RwTypes.GEOMETRYLIST, len(out)).bin() + out
class RpGeometryChunkInfo:
    """Header fields of a GEOMETRY chunk's struct payload."""

    def __init__(self):
        # Default flag set; the counts are filled in later by RpGeometry.
        self.flags = (RpGeomFlag.TEXTURED | RpGeomFlag.NORMALS
                      | RpGeomFlag.LIGHT | RpGeomFlag.MODULATEMATERIALCOLOR)
        self.texCount = 1
        self.triangleCount = 0
        self.vertexCount = 0
        self.frameCount = 1

    def binraw(self):
        counts = (self.triangleCount, self.vertexCount, self.frameCount)
        return struct.pack("HHiii", self.flags, self.texCount, *counts)
class RpGeometry:
def __init__(self, atomic):
self.R = ExportRenderware
self.clump = atomic.clump
self.atomic = atomic
self.mesh = atomic.mesh
self.index = len(self.clump.geometryList.geoms)
self.clump.geometryList.geoms.append(self)
self.chunkInfo = self.R.RpGeometryChunkInfo()
self.uvname_diff = None
self.uvname_env = None
self.materialList = self.R.RpMaterialList(self)
self.matTris = []
for i in range(len(self.materialList.mats)):
self.matTris.append([])
mesh = self.mesh
self.vdict = []
for i in range(len(mesh.vertices)):
self.vdict.append({})
self.uvc = self.getUVData(self.uvname_diff)
self.uvce = None
if self.uvname_env and self.uvname_env != self.uvname_diff:
self.uvce = self.getUVData(self.uvname_env)
self.vertices = []
self.triangles = []
self.vertCol = None
self.nightVertCol = None
self.vertColData = None
self.nightVertColData = None
for vcol in self.mesh.vertex_colors:
if vcol.name.lower() == "night" and self.nightVertCol is None:
self.nightVertCol = []
self.nightVertColData = vcol.data
elif self.vertCol is None:
self.vertCol = []
self.vertColData = vcol.data
for poly in mesh.polygons:
self.addBlenderPoly(poly)
if len(self.vertices) > 65535:
raise Exception("Aborting export: vertex count exceeds 65535")
self.maxDist = 0
for v in self.mesh.vertices:
self.maxDist = max(self.maxDist, math.sqrt(v.co[0]*v.co[0] + v.co[1]*v.co[1] + v.co[2]*v.co[2]))
self.chunkInfo.triangleCount = len(self.triangles)
self.chunkInfo.vertexCount = len(self.vertices)
if self.uvce:
self.chunkInfo.texCount = 2
self.chunkInfo.flags = self.chunkInfo.flags & (~RpGeomFlag.TEXTURED)
self.chunkInfo.flags |= RpGeomFlag.TEXTURED2
if self.R.decodedVer > 0x34003:
self.chunkInfo.flags |= RpGeomFlag.POSITIONS
if self.vertColData:
self.chunkInfo.flags |= RpGeomFlag.PRELIT
def findVertex(self, type):
for i in range(len(self.blmaterial.texture_slots)):
textype = ""
slot = self.blmaterial.texture_slots[i]
if slot and slot.texture:
if slot.texture.type == "ENVIRONMENT_MAP":
textype = "ENVMAP"
elif slot.use_map_color_spec and not slot.use_map_color_diffuse:
textype = "SPECULAR"
elif slot.use_map_color_diffuse and not slot.use_map_color_spec:
textype = "DIFFUSE"
if textype == type:
return slot
return None
def getUVData(self, name):
for i in range(len(self.mesh.uv_textures)):
if name and self.mesh.uv_textures[i] and self.mesh.uv_textures[i].name == name:
return self.mesh.uv_layers[i].data
return None
def newVertId(self, id, uv, uve):
if (uv + uve) not in self.vdict[id]:
self.vdict[id][uv + uve] = len(self.vertices)
self.vertices.append(self.R.RpVertex(self.mesh.vertices[id].co, uv, uve, self.mesh.vertices[id].normal))
if self.vertColData:
self.vertCol.append((int(self.vertColData[id].color[0]*255), int(self.vertColData[id].color[1]*255), int(self.vertColData[id].color[2]*255)))
if self.nightVertColData:
self.nightVertCol.append((int(self.nightVertColData[id].color[0]*255), int(self.nightVertColData[id].color[1]*255), int(self.nightVertColData[id].color[2]*255)))
return self.vdict[id][(uv + uve)]
def addRawPoly(self, verts, uvs, mat):
newIds = []
for i in range(3):
uv = tuple(self.uvc[uvs[i]].uv) if self.uvc else (0, 0)
uve = tuple(self.uvce[uvs[i]].uv) if self.uvce else (0, 0)
newIds.append(self.newVertId(verts[i], uv, uve))
self.triangles.append(self.R.RpTriangle(newIds[0], newIds[1], newIds[2], mat))
if mat >= 0:
self.matTris[mat].append(newIds[0])
self.matTris[mat].append(newIds[1])
self.matTris[mat].append(newIds[2])
def addBlenderPoly(self, p):
if len(p.vertices) < 3 or len(p.vertices) > 4:
raise Exception("Aborting export: Invalid number of vertices on an edge.")
self.addRawPoly([p.vertices[0], p.vertices[1], p.vertices[2]], [p.loop_indices[0], p.loop_indices[1], p.loop_indices[2]], p.material_index)
if len(p.vertices) == 4:
self.addRawPoly([p.vertices[0], p.vertices[3], p.vertices[2]], [p.loop_indices[0], p.loop_indices[3], p.loop_indices[2]], p.material_index)
def binext_binmesh(self):
payload = b""
splits = 0
total = 0
for i in range(len(self.matTris)):
if len(self.matTris[i]) == 0:
continue
splits += 1
total += len(self.matTris[i])
payload += struct.pack("ii", len(self.matTris[i]), i)
for id in self.matTris[i]:
payload += struct.pack("i", id)
payload = struct.pack("iii", 0, splits, total) + payload
header = self.R.RwChunkHeader(RwTypes.BINMESHPLG, len(payload)).bin()
return header + payload
def binext_morph(self):
if self.R.decodedVer > 0x34003 or self.R.decodedVer < 0x33000:
return b""
payload = struct.pack("i", 0)
header = self.R.RwChunkHeader(RwTypes.MORPHPLG, len(payload)).bin()
return header + payload
def binext_meshext(self):
if self.R.decodedVer <= 0x34003:
return b""
payload = struct.pack("i", 0)
header = self.R.RwChunkHeader(RwTypes.MESHEXTENSION, len(payload)).bin()
return header + payload
def binext_skin(self):
object = self.atomic.frame.object
try:
if len(object.rw_skindata) > 0:
textf = bpy.data.texts[object.rw_skindata].as_string()
rawdata = zlib.decompress(base64.b64decode(bytes(textf, "ascii")))
else:
return b""
except:
return b""
payload = rawdata
header = self.R.RwChunkHeader(RwTypes.SKINPLG, len(payload)).bin()
return header + payload
def binext_nightcol(self):
if not self.nightVertCol:
return b""
payload = struct.pack("I", 1)
for col in self.nightVertCol:
payload += struct.pack("BBBB", col[0], col[1], col[2], 255)
header = self.R.RwChunkHeader(RwTypes.NIGHTCOLS, len(payload)).bin()
return header + payload
def bin(self):
    """Serialize the whole GEOMETRY chunk: struct payload, material list and
    extension chunks, each wrapped in a Renderware chunk header.

    The byte order of every write below is part of the file format — do not
    reorder statements.
    """
    payload = self.chunkInfo.binraw()
    # pre-3.4.1 targets carry an extra 3-float field here (purpose not
    # evident from this file — TODO confirm against the RW geometry spec)
    if self.R.decodedVer < 0x34001:
        payload += struct.pack("fff", 0, 0, 1)
    # day vertex colours, alpha forced to 255
    if self.vertCol:
        for col in self.vertCol:
            payload += struct.pack("BBBB", col[0], col[1], col[2], 255)
    # first UV set, then the optional second ("extra") UV set
    for vertex in self.vertices:
        payload += self.R.RwUVCoord(vertex.uv[0], vertex.uv[1]).bin()
    if self.uvce:
        for vertex in self.vertices:
            payload += self.R.RwUVCoord(vertex.uve[0], vertex.uve[1]).bin()
    for triangle in self.triangles:
        payload += triangle.bin()
    # presumably bounding sphere (centre 0,0,0 + radius maxDist) and two
    # flags — confirm field meaning before changing
    payload += struct.pack("ffffii", 0, 0, 0, self.maxDist, 1, 1)
    for vertex in self.vertices:
        payload += self.R.RwVector3(vertex.pos[0], vertex.pos[1], vertex.pos[2]).bin()
    for vertex in self.vertices:
        payload += self.R.RwVector3(vertex.normal[0], vertex.normal[1], vertex.normal[2]).bin()
    payload = self.R.RwChunkHeader(RwTypes.STRUCT, len(payload)).bin() + payload
    payload += self.materialList.bin()
    # extension order preserved from original
    extensions = self.binext_binmesh() + self.binext_skin() + self.binext_morph() + self.binext_meshext() + self.binext_nightcol()
    extensions = self.R.RwChunkHeader(RwTypes.EXTENSION, len(extensions)).bin() + extensions
    payload += extensions
    header = self.R.RwChunkHeader(RwTypes.GEOMETRY, len(payload)).bin()
    return header + payload
class RpClumpChunkInfo:
    """STRUCT payload for an RpClump: atomic count, plus the light and camera
    counts on RW versions newer than 3.3.0.0."""

    def __init__(self, atomicCount, lightCount, cameraCount):
        self.R = ExportRenderware
        self.atomicCount = atomicCount
        self.lightCount = lightCount
        self.cameraCount = cameraCount

    def bin(self):
        """Serialize the counts wrapped in a STRUCT chunk header."""
        counts = [self.atomicCount]
        if self.R.decodedVer > 0x33000:
            counts += [self.lightCount, self.cameraCount]
        payload = struct.pack("i" * len(counts), *counts)
        header = self.R.RwChunkHeader(RwTypes.STRUCT, len(payload)).bin()
        return header + payload
class RpClump:
    """Top-level DFF clump: gathers the selected Blender objects into frames
    and geometry, then serializes the CLUMP chunk."""
    def __init__(self, context, exportVer):
        self.R = ExportRenderware
        self.R.targetVer = exportVer
        self.R.decodedVer = RwTypes.decodeVersion(self.R.targetVer)
        self.context = context
        self.frameList = self.R.RwFrameList()
        self.geometryList = self.R.RpGeometryList()
        self.colbin = None
        # keep only objects with no selected ancestor — children of a
        # selected parent are presumably exported via the parent's frame
        # hierarchy (RwFrame) rather than as roots; confirm before changing
        exportables = []
        for object in context.selected_objects:
            parent = object.parent
            add = True
            while parent:
                if parent in context.selected_objects:
                    add = False
                    break
                parent = parent.parent
            if add:
                exportables.append(object)
        for object in exportables:
            # only meshes and empties map onto Renderware frames
            if str(object.type) != "MESH" and str(object.type) != "EMPTY":
                print("Ignoring object " + object.name + ", type " + object.type)
                continue
            self.R.RwFrame(self, object, None)
        if len(self.frameList.frames) == 0:
            raise Exception("Aborting export: no frames selected.")
    def binext_coll(self):
        # optional COLLISION extension; self.colbin holds raw bytes prepared
        # elsewhere (None/empty means "no collision data")
        if not self.colbin:
            return b""
        payload = self.colbin
        header = self.R.RwChunkHeader(RwTypes.COLLISION, len(self.colbin)).bin()
        return header + payload
    def bin(self):
        """Serialize the CLUMP: info struct, frame list, geometry list, one
        atomic per geometry, then the extension block."""
        payload = self.R.RpClumpChunkInfo(len(self.geometryList.geoms), 0, 0).bin()
        payload += self.frameList.bin()
        payload += self.geometryList.bin()
        for geometry in self.geometryList.geoms:
            payload += geometry.atomic.bin()
        extensions = self.binext_coll()
        extensions = self.R.RwChunkHeader(RwTypes.EXTENSION, len(extensions)).bin() + extensions
        payload += extensions
        header = self.R.RwChunkHeader(RwTypes.CLUMP, len(payload)).bin()
        return header + payload
def __init__(self, context, exportVerIndex, filepath):
    """Build the clump from the current selection and write it to *filepath*.

    exportVerIndex: "1" = GTA III, "2" = Vice City, anything else =
    San Andreas (values match ExportRenderwareMenu.expVersionValues).
    """
    if exportVerIndex == "1":
        exportVer = 0x0800FFFF
    elif exportVerIndex == "2":
        exportVer = 0x1003FFFF
    else:
        exportVer = 0x1803FFFF
    # 'with' guarantees the handle is closed even if serialization raises
    # (the original leaked the descriptor on any export error)
    with open(filepath, "wb") as outf:
        outf.write(self.RpClump(context, exportVer).bin())
def unmangleName(name):
    """Strip Blender's duplicate-name suffix (".001", ".002", ...) if present."""
    base, dot, suffix = name[:-4], name[-4:-3], name[-3:]
    if len(name) > 4 and dot == "." and suffix.isnumeric():
        return base
    return name
class ExportRenderwareMenu(bpy.types.Operator):
    """Blender operator: shows a file-select dialog and exports the current
    selection to a Renderware .dff file."""
    # (value, label, description) triples for the EnumProperty below
    expVersionValues = (("1", "GTA III", ""), ("2", "Vice City", ""), ("3", "San Andreas", ""))
    bl_idname = "export_rw.dff"
    bl_label = "Export Renderware (.dff)"
    filename_ext = ".dff"
    # legacy (pre-2.8) Blender API: properties assigned as plain class attrs
    filepath = StringProperty(subtype = "FILE_PATH")
    expVersion = EnumProperty(name = "Export version", items = expVersionValues, default="2")
    def invoke(self, context, event):
        # open the file browser; execute() runs after the user confirms
        wm = context.window_manager
        wm.fileselect_add(self)
        return {"RUNNING_MODAL"}
    def execute(self, context):
        # ensure the custom RW properties exist before the exporter reads them
        setupProps()
        ExportRenderware(context, self.expVersion, self.filepath)
        return {"FINISHED"}
class ImportRenderwareMenu(bpy.types.Operator):
    """Blender operator: shows a file-select dialog and imports a .dff file."""
    bl_idname = "import_rw.dff"
    bl_label = "Import Renderware (.dff)"
    filename_ext = ".dff"
    # legacy (pre-2.8) Blender API: property assigned as a plain class attr
    filepath = StringProperty(subtype = "FILE_PATH")
    def invoke(self, context, event):
        # open the file browser; execute() runs after the user confirms
        wm = context.window_manager
        wm.fileselect_add(self)
        return {"RUNNING_MODAL"}
    def execute(self, context):
        # ensure the custom RW properties exist before the importer writes them
        setupProps()
        ImportRenderware(self.filepath)
        return {"FINISHED"}
def export_func(self, context):
    # menu-entry callback appended to File > Export by register()
    self.layout.operator(ExportRenderwareMenu.bl_idname, text="Renderware (.dff)")
def import_func(self, context):
    # menu-entry callback appended to File > Import by register()
    self.layout.operator(ImportRenderwareMenu.bl_idname, text="Renderware (.dff)")
def register():
    # standard Blender add-on hook: register every class in this module and
    # hook the import/export entries into the File menu
    bpy.utils.register_module(__name__)
    bpy.types.INFO_MT_file_export.append(export_func)
    bpy.types.INFO_MT_file_import.append(import_func)
def unregister():
    # inverse of register(): drop this module's classes and menu entries
    bpy.utils.unregister_module(__name__)
    bpy.types.INFO_MT_file_export.remove(export_func)
    bpy.types.INFO_MT_file_import.remove(import_func)
def setupProps():
    """Register the per-object Renderware custom properties and their UI panel.

    Safe to call repeatedly: returns early once the properties already exist.
    """
    class renderwarePanel(bpy.types.Panel):
        # 3D-view sidebar panel exposing the custom RW properties below
        bl_space_type = "VIEW_3D"
        bl_region_type = "UI"
        bl_label = "Renderware"
        def draw(self, context):
            self.layout.prop(bpy.context.active_object, "renderright")
            self.layout.prop(bpy.context.active_object, "renderextra")
            self.layout.prop(bpy.context.active_object, "matfxpipe")
            self.layout.prop(bpy.context.active_object, "collhex")
            self.layout.prop(bpy.context.active_object, "rw_hanimdata")
            self.layout.prop(bpy.context.active_object, "rw_skindata")
    # already registered once — avoid duplicate property/panel registration
    if hasattr(bpy.types.Object, "collhex"):
        return
    bpy.types.Object.collhex = bpy.props.StringProperty(name = "Collision", description = "Name of the text object that contains collision binary data.", maxlen = 100)
    bpy.types.Object.renderright = bpy.props.IntProperty(name = "RenderRight", description = "Index of the plugin whose pipeline is used for rendering.")
    bpy.types.Object.renderextra = bpy.props.IntProperty(name = "RenderExtra", description = "Extra arguments to the render pipeline.")
    bpy.types.Object.matfxpipe = bpy.props.BoolProperty(name = "MatFX pipeline", description = "Whether rendering is handled by MatFX pipeline.")
    bpy.types.Object.rw_hanimdata = bpy.props.StringProperty(name = "HAnimData", description = "Info for this skin bone.", maxlen = 100)
    bpy.types.Object.rw_skindata = bpy.props.StringProperty(name = "SkinData", description = "Skin data (bone vertices etc) for this mesh.", maxlen = 100)
    bpy.utils.register_class(renderwarePanel)
if __name__ == "__main__":
    # running from Blender's text editor: unregister any previous copy first
    # so repeated runs re-register cleanly
    unregister()
    register()
    setupProps()
| true
| true
|
f718a33508e80df74065a9e4fa1542458bff559b
| 2,125
|
py
|
Python
|
formatdata/bin/extract_ref_bimf.py
|
lvclark/h3agwas
|
5e42e60123b819d3c331a91b25ee50846e55af3b
|
[
"MIT"
] | 62
|
2016-08-29T11:27:35.000Z
|
2022-03-10T17:16:14.000Z
|
formatdata/bin/extract_ref_bimf.py
|
lvclark/h3agwas
|
5e42e60123b819d3c331a91b25ee50846e55af3b
|
[
"MIT"
] | 33
|
2016-12-26T13:48:19.000Z
|
2021-12-05T13:34:06.000Z
|
formatdata/bin/extract_ref_bimf.py
|
lvclark/h3agwas
|
5e42e60123b819d3c331a91b25ee50846e55af3b
|
[
"MIT"
] | 50
|
2017-04-15T04:17:43.000Z
|
2022-03-30T07:26:01.000Z
|
#!/usr/bin/env python3
import sys
import os
import argparse
import gzip
def readfastagz(File):
    """Read a gzipped FASTA file into a {chromosome: sequence} dict.

    The key is the first token of each '>' header line (split on space, '|'
    and tab, with the '>' removed). A duplicate chromosome name aborts the
    program with exit code 2.
    """
    Dic = {}
    Key = None
    with gzip.open(File, "r") as ReadL:
        for ligne in ReadL:
            ligne = ligne.decode('utf-8').replace('\n', '')
            # skip blank lines: ligne[0] would raise IndexError on them
            if not ligne:
                continue
            if ligne[0] == '>':
                Key = ligne.split(' ')[0].split('|')[0].split('\t')[0].replace('>', '')
                if Key in Dic:
                    print(Dic.keys())
                    print('fasta file ' + File + ' more than one time same chro ' + Key)
                    sys.exit(2)
                Dic[Key] = ""
            else:
                # sequence lines only — the header line itself must never be
                # appended, or 1-based position lookups downstream shift
                Dic[Key] += ligne
    return Dic
def readfastastdin():
    """Read FASTA from standard input into a {chromosome: sequence} dict.

    Same parsing rules as readfastagz(). A duplicate chromosome name aborts
    the program with exit code 2. The original error message referenced the
    undefined name 'File' here, which raised NameError instead of exiting.
    """
    Dic = {}
    Key = None
    for ligne in sys.stdin:
        ligne = ligne.replace('\n', '')
        # skip blank lines: ligne[0] would raise IndexError on them
        if not ligne:
            continue
        if ligne[0] == '>':
            Key = ligne.split(' ')[0].split('|')[0].split('\t')[0].replace('>', '')
            if Key in Dic:
                print(Dic.keys())
                print('fasta from stdin more than one time same chro ' + Key)
                sys.exit(2)
            Dic[Key] = ""
        else:
            # sequence lines only — never append the header line itself
            Dic[Key] += ligne
    return Dic
def parseArguments():
    """Parse command-line options for the bim/fasta reference extraction."""
    parser = argparse.ArgumentParser(description='transform file and header')
    # three string path options; --fasta is optional (stdin is used instead)
    for flag, required in (('--bim', True), ('--fasta', False), ('--out', True)):
        parser.add_argument(flag, type=str, required=required, help="input file association")
    # --run acts as a string boolean: only a leading 'T' enables processing
    parser.add_argument('--run', type=str, required=False, default='False')
    return parser.parse_args()
args = parseArguments()
# only a --run value starting with 'T' triggers any work
if args.run[0] == 'T':
    # reference genome: gzipped file if given, otherwise read from stdin
    if args.fasta:
        fasta = readfastagz(args.fasta)
    else:
        fasta = readfastastdin()
    # .del lists every processed variant with its inferred ref allele (or NA);
    # .allref lists only variants whose ref allele matches a bim allele.
    # The originals were opened without 'w' (read mode), so every .write()
    # raised io.UnsupportedOperation; 'with' also guarantees they are closed.
    with open(args.bim) as readbim, \
         open(args.out + '.del', 'w') as writetodel, \
         open(args.out + '.allref', 'w') as writeref:
        for line in readbim:
            spll = line.split()
            chro = spll[0]
            if chro in fasta:
                # assumes bim positions are 1-based — TODO confirm
                pos = int(spll[3])
                all1 = spll[4]
                all2 = spll[5]
                allref = fasta[chro][pos - 1]
                if allref == all1 or allref == all2:
                    writeref.write(spll[1] + '\t' + allref + '\n')
                    writetodel.write("\t".join([chro, str(pos), spll[1], spll[4], spll[5], allref]) + '\n')
                else:
                    writetodel.write("\t".join([chro, str(pos), spll[1], spll[4], spll[5], "NA"]) + '\n')
| 29.513889
| 91
| 0.602353
|
import sys
import os
import argparse
import gzip
def readfastagz(File) :
Dic={}
with gzip.open(File, "r") as ReadL :
for ligne in ReadL:
ligne=ligne.decode('utf-8').replace('\n','')
if ligne[0]=='>' :
Key=ligne.split(' ')[0].split('|')[0].split('\t')[0].replace('>','')
if Key in Dic :
print(Dic.keys())
print('fasta file '+ File+' more than one time same chro '+ Key)
sys.exit(2)
Dic[Key]=""
Dic[Key]+=ligne
return Dic
def readfastastdin() :
Dic={}
for ligne in sys.stdin:
ligne=ligne.replace('\n','')
if ligne[0]=='>' :
Key=ligne.split(' ')[0].split('|')[0].split('\t')[0].replace('>','')
if Key in Dic :
print(Dic.keys())
print('fasta file '+ File+' more than one time same chro '+ Key)
sys.exit(2)
Dic[Key]=""
Dic[Key]+=ligne
return Dic
def parseArguments():
parser = argparse.ArgumentParser(description='transform file and header')
parser.add_argument('--bim',type=str,required=True, help="input file association")
parser.add_argument('--fasta',type=str,required=False, help="input file association")
parser.add_argument('--out',type=str,required=True, help="input file association")
parser.add_argument('--run', type=str, required=False, default='False')
args = parser.parse_args()
return args
args=parseArguments()
if args.run[0]=='T':
if args.fasta :
fasta=readfastagz(args.fasta)
else :
fasta=readfastastdin()
readbim=open(args.bim)
writetodel=open(args.out+'.del')
writeref=open(args.out+'.allref')
for line in readbim:
spll=line.split()
chro=spll[0]
if chro in fasta :
pos=int(spll[3])
all1=spll[4]
all2=spll[5]
allref=fasta[chro][pos-1]
if allref == all1 or allref == all2:
writeref.write(spll[1]+'\t'+allref+'\n')
writetodel.write("\t".join([chro,str(pos),spll[1],spll[4], spll[5], allref])+'\n')
else :
writetodel.write("\t".join([chro,str(pos),spll[1],spll[4], spll[5], "NA"])+'\n')
| true
| true
|
f718a3bfab465e69af8eecdfc4731de81a8437f6
| 3,893
|
py
|
Python
|
deletionwatcher.py
|
Floern/SmokeDetector
|
2818bbd23af15440836c61c4023d063264433c66
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
deletionwatcher.py
|
Floern/SmokeDetector
|
2818bbd23af15440836c61c4023d063264433c66
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
deletionwatcher.py
|
Floern/SmokeDetector
|
2818bbd23af15440836c61c4023d063264433c66
|
[
"Apache-2.0",
"MIT"
] | 1
|
2018-10-11T13:41:49.000Z
|
2018-10-11T13:41:49.000Z
|
# coding=utf-8
import json
import requests
import time
# noinspection PyPackageRequirements
import websocket
# noinspection PyPackageRequirements
from bs4 import BeautifulSoup
from threading import Thread
from urllib.parse import urlparse
import metasmoke
from globalvars import GlobalVars
import datahandling
# noinspection PyClassHasNoInit,PyBroadException,PyMethodParameters
class DeletionWatcher:
    """Watches Stack Exchange question websockets to detect deletion of
    reported posts and reports the outcome to metasmoke.

    NOTE(review): every method is a @classmethod but names its first
    parameter 'self'; the receiver is actually the class object.
    """
    @classmethod
    def update_site_id_list(self):
        # Scrape the SE site-switcher page to map site hostnames -> numeric
        # site ids, which the qa.sockets websocket protocol requires.
        soup = BeautifulSoup(requests.get("https://meta.stackexchange.com/topbar/site-switcher/site-list").text,
                             "html.parser")
        site_id_dict = {}
        for site in soup.findAll("a", attrs={"data-id": True}):
            site_name = urlparse(site["href"]).netloc
            site_id = site["data-id"]
            site_id_dict[site_name] = site_id
        GlobalVars.site_id_dict = site_id_dict
    @classmethod
    def check_websocket_for_deletion(self, post_site_id, post_url, timeout):
        """Listen on the question's websocket until the post is deleted or
        *timeout* seconds elapse.

        post_site_id: (post id, site hostname, "question" | "answer").
        Returns True if the post was deleted, False on timeout, or None when
        the question id / site id cannot be resolved. Deletion stats are
        pushed to metasmoke on both True and False outcomes.
        """
        time_to_check = time.time() + timeout
        post_id = post_site_id[0]
        post_type = post_site_id[2]
        if post_type == "answer":
            question_id = str(datahandling.get_post_site_id_link(post_site_id))
            if question_id is None:
                # NOTE(review): str(None) == "None", so this check can never
                # trigger as written — the str() probably belongs after it
                return
        else:
            question_id = post_id
        post_site = post_site_id[1]
        if post_site not in GlobalVars.site_id_dict:
            return
        site_id = GlobalVars.site_id_dict[post_site]
        # subscribe to the "<site id>-question-<question id>" event channel
        ws = websocket.create_connection("wss://qa.sockets.stackexchange.com/")
        ws.send(site_id + "-question-" + question_id)
        while time.time() < time_to_check:
            # shrink the socket timeout so recv() never outlives the deadline
            ws.settimeout(time_to_check - time.time())
            try:
                a = ws.recv()
            except websocket.WebSocketTimeoutException:
                # deadline reached without a deletion event
                t_metasmoke = Thread(name="metasmoke send deletion stats",
                                     target=metasmoke.Metasmoke.send_deletion_stats_for_post, args=(post_url, False))
                t_metasmoke.start()
                return False
            if a is not None and a != "":
                try:
                    action = json.loads(a)["action"]
                    if action == "hb":
                        # heartbeat frame: echo it back to keep the socket alive
                        ws.send("hb")
                        continue
                    else:
                        # payload is JSON nested inside the outer JSON frame
                        d = json.loads(json.loads(a)["data"])
                except:
                    # malformed frame: deliberately ignore and keep listening
                    continue
                if d["a"] == "post-deleted" and str(d["qId"]) == question_id \
                        and ((post_type == "answer" and "aId" in d and str(d["aId"]) == post_id) or
                             post_type == "question"):
                    t_metasmoke = Thread(name="metasmoke send deletion stats",
                                         target=metasmoke.Metasmoke.send_deletion_stats_for_post, args=(post_url, True))
                    t_metasmoke.start()
                    return True
        t_metasmoke = Thread(name="metasmoke send deletion stats",
                             target=metasmoke.Metasmoke.send_deletion_stats_for_post, args=(post_url, False))
        t_metasmoke.start()
        return False
    @classmethod
    def check_if_report_was_deleted(self, post_site_id, post_url, message):
        # Watch for up to 20 minutes; if the post was deleted, delete our own
        # chat report message (best effort — deletion may no longer be allowed).
        was_report_deleted = self.check_websocket_for_deletion(post_site_id, post_url, 1200)
        if was_report_deleted:
            try:
                message.delete()
            except:
                pass
    @classmethod
    def post_message_if_not_deleted(self, post_site_id, post_url, message_text, room):
        # Wait up to 5 minutes; only post to chat if the post survived and was
        # neither marked false-positive nor ignored in the meantime.
        was_report_deleted = self.check_websocket_for_deletion(post_site_id, post_url, 300)
        if not was_report_deleted and not datahandling.is_false_positive(post_site_id[0:2]) and not \
                datahandling.is_ignored_post(post_site_id[0:2]):
            room.send_message(message_text)
| 40.552083
| 120
| 0.599281
|
import json
import requests
import time
import websocket
from bs4 import BeautifulSoup
from threading import Thread
from urllib.parse import urlparse
import metasmoke
from globalvars import GlobalVars
import datahandling
class DeletionWatcher:
@classmethod
def update_site_id_list(self):
soup = BeautifulSoup(requests.get("https://meta.stackexchange.com/topbar/site-switcher/site-list").text,
"html.parser")
site_id_dict = {}
for site in soup.findAll("a", attrs={"data-id": True}):
site_name = urlparse(site["href"]).netloc
site_id = site["data-id"]
site_id_dict[site_name] = site_id
GlobalVars.site_id_dict = site_id_dict
@classmethod
def check_websocket_for_deletion(self, post_site_id, post_url, timeout):
time_to_check = time.time() + timeout
post_id = post_site_id[0]
post_type = post_site_id[2]
if post_type == "answer":
question_id = str(datahandling.get_post_site_id_link(post_site_id))
if question_id is None:
return
else:
question_id = post_id
post_site = post_site_id[1]
if post_site not in GlobalVars.site_id_dict:
return
site_id = GlobalVars.site_id_dict[post_site]
ws = websocket.create_connection("wss://qa.sockets.stackexchange.com/")
ws.send(site_id + "-question-" + question_id)
while time.time() < time_to_check:
ws.settimeout(time_to_check - time.time())
try:
a = ws.recv()
except websocket.WebSocketTimeoutException:
t_metasmoke = Thread(name="metasmoke send deletion stats",
target=metasmoke.Metasmoke.send_deletion_stats_for_post, args=(post_url, False))
t_metasmoke.start()
return False
if a is not None and a != "":
try:
action = json.loads(a)["action"]
if action == "hb":
ws.send("hb")
continue
else:
d = json.loads(json.loads(a)["data"])
except:
continue
if d["a"] == "post-deleted" and str(d["qId"]) == question_id \
and ((post_type == "answer" and "aId" in d and str(d["aId"]) == post_id) or
post_type == "question"):
t_metasmoke = Thread(name="metasmoke send deletion stats",
target=metasmoke.Metasmoke.send_deletion_stats_for_post, args=(post_url, True))
t_metasmoke.start()
return True
t_metasmoke = Thread(name="metasmoke send deletion stats",
target=metasmoke.Metasmoke.send_deletion_stats_for_post, args=(post_url, False))
t_metasmoke.start()
return False
@classmethod
def check_if_report_was_deleted(self, post_site_id, post_url, message):
was_report_deleted = self.check_websocket_for_deletion(post_site_id, post_url, 1200)
if was_report_deleted:
try:
message.delete()
except:
pass
@classmethod
def post_message_if_not_deleted(self, post_site_id, post_url, message_text, room):
was_report_deleted = self.check_websocket_for_deletion(post_site_id, post_url, 300)
if not was_report_deleted and not datahandling.is_false_positive(post_site_id[0:2]) and not \
datahandling.is_ignored_post(post_site_id[0:2]):
room.send_message(message_text)
| true
| true
|
f718a4274f3aa4ad39db83b7e97ebcddc1e14a69
| 3,159
|
py
|
Python
|
NLP/code.py
|
prasadph/ga-learner-dsmp-repo
|
ac1cc9d96250718f2842592e643c885d54ab2903
|
[
"MIT"
] | 1
|
2021-01-18T15:24:07.000Z
|
2021-01-18T15:24:07.000Z
|
NLP/code.py
|
prasadph/ga-learner-dsmp-repo
|
ac1cc9d96250718f2842592e643c885d54ab2903
|
[
"MIT"
] | null | null | null |
NLP/code.py
|
prasadph/ga-learner-dsmp-repo
|
ac1cc9d96250718f2842592e643c885d54ab2903
|
[
"MIT"
] | 1
|
2019-05-01T04:24:19.000Z
|
2019-05-01T04:24:19.000Z
|
# --------------
# News-headline classification: Naive Bayes vs logistic regression on
# count / tf-idf features. Runs top-to-bottom at import time.
# import packages
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
import re
from nltk.corpus import stopwords
from sklearn.model_selection import train_test_split
from sklearn.feature_extraction.text import CountVectorizer, TfidfVectorizer
from sklearn.naive_bayes import MultinomialNB
from sklearn.multiclass import OneVsRestClassifier
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import accuracy_score ,confusion_matrix
# Code starts here
# load data — 'path' is not defined in this file; presumably injected by the
# grading platform before execution (TODO confirm)
news = pd.read_csv(path)
# subset data: keep only headline text and its category label
news = news[["TITLE","CATEGORY"]]
# distribution of classes
dist = news.CATEGORY.value_counts()
# display class distribution
print(dist)
# display data
print(news.head())
# Code ends here
# --------------
# Code starts here
# stopwords (English) — used to drop non-informative tokens
stop = set(stopwords.words('english'))
# retain only alphabets: every non-letter becomes a space
news.TITLE = news.TITLE.apply(lambda x:re.sub("[^a-zA-Z]", " ",x))
# convert to lowercase and tokenize on whitespace
news.TITLE = news.TITLE.apply(lambda row:row.lower().split())
# remove stopwords
news.TITLE = news.TITLE.apply(lambda row:[i for i in row if i not in stop] )
# join list elements back into a single cleaned string per headline
news.TITLE = news.TITLE.apply(lambda x: ' '.join(x))
# split into training and test sets (80/20, fixed seed for reproducibility)
X_train, X_test, y_train, y_test = train_test_split(news['TITLE'], news['CATEGORY'], test_size=0.2, random_state=3)
# Code ends here
# --------------
# Code starts here
# initialize count vectorizer (bag of words)
count_vectorizer = CountVectorizer()
# initialize tfidf vectorizer with uni-, bi- and tri-grams
tfidf_vectorizer = TfidfVectorizer(ngram_range=(1,3))
# fit and transform with count vectorizer (fit on train only, transform test)
X_train_count = count_vectorizer.fit_transform(X_train)
X_test_count = count_vectorizer.transform(X_test)
# fit and transform with tfidf vectorizer
X_train_tfidf = tfidf_vectorizer.fit_transform(X_train)
X_test_tfidf = tfidf_vectorizer.transform(X_test)
# Code ends here
# --------------
# Code starts here
# initialize multinomial naive bayes — one model per feature representation
nb_1 = MultinomialNB()
nb_2 = MultinomialNB()
# fit on count vectorizer training data
nb_1.fit(X_train_count, y_train)
# fit on tfidf vectorizer training data
nb_2.fit(X_train_tfidf, y_train)
# accuracy with count vectorizer
acc_count_nb = accuracy_score(nb_1.predict(X_test_count), y_test)
# accuracy with tfidf vectorizer
acc_tfidf_nb = accuracy_score(nb_2.predict(X_test_tfidf), y_test)
# display accuracies
print(acc_count_nb)
print(acc_tfidf_nb)
# Code ends here
# --------------
import warnings
warnings.filterwarnings('ignore')
# initialize logistic regression (one-vs-rest for the multi-class labels)
logreg_1 = OneVsRestClassifier(LogisticRegression(random_state=10))
logreg_2 = OneVsRestClassifier(LogisticRegression(random_state=10))
# fit on count vectorizer training data
logreg_1.fit(X_train_count, y_train)
# fit on tfidf vectorizer training data
logreg_2.fit(X_train_tfidf, y_train)
# accuracy with count vectorizer
acc_count_logreg = accuracy_score(logreg_1.predict(X_test_count), y_test)
# accuracy with tfidf vectorizer
acc_tfidf_logreg = accuracy_score(logreg_2.predict(X_test_tfidf), y_test)
# display accuracies
print(acc_count_logreg)
print(acc_tfidf_logreg)
# Code ends here
| 24.874016
| 115
| 0.773663
|
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
import re
from nltk.corpus import stopwords
from sklearn.model_selection import train_test_split
from sklearn.feature_extraction.text import CountVectorizer, TfidfVectorizer
from sklearn.naive_bayes import MultinomialNB
from sklearn.multiclass import OneVsRestClassifier
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import accuracy_score ,confusion_matrix
news = pd.read_csv(path)
news = news[["TITLE","CATEGORY"]]
dist = news.CATEGORY.value_counts()
print(dist)
print(news.head())
stop = set(stopwords.words('english'))
news.TITLE = news.TITLE.apply(lambda x:re.sub("[^a-zA-Z]", " ",x))
news.TITLE = news.TITLE.apply(lambda row:row.lower().split())
news.TITLE = news.TITLE.apply(lambda row:[i for i in row if i not in stop] )
news.TITLE = news.TITLE.apply(lambda x: ' '.join(x))
X_train, X_test, y_train, y_test = train_test_split(news['TITLE'], news['CATEGORY'], test_size=0.2, random_state=3)
count_vectorizer = CountVectorizer()
tfidf_vectorizer = TfidfVectorizer(ngram_range=(1,3))
X_train_count = count_vectorizer.fit_transform(X_train)
X_test_count = count_vectorizer.transform(X_test)
X_train_tfidf = tfidf_vectorizer.fit_transform(X_train)
X_test_tfidf = tfidf_vectorizer.transform(X_test)
nb_1 = MultinomialNB()
nb_2 = MultinomialNB()
nb_1.fit(X_train_count, y_train)
nb_2.fit(X_train_tfidf, y_train)
acc_count_nb = accuracy_score(nb_1.predict(X_test_count), y_test)
acc_tfidf_nb = accuracy_score(nb_2.predict(X_test_tfidf), y_test)
print(acc_count_nb)
print(acc_tfidf_nb)
import warnings
warnings.filterwarnings('ignore')
logreg_1 = OneVsRestClassifier(LogisticRegression(random_state=10))
logreg_2 = OneVsRestClassifier(LogisticRegression(random_state=10))
logreg_1.fit(X_train_count, y_train)
logreg_2.fit(X_train_tfidf, y_train)
acc_count_logreg = accuracy_score(logreg_1.predict(X_test_count), y_test)
acc_tfidf_logreg = accuracy_score(logreg_2.predict(X_test_tfidf), y_test)
print(acc_count_logreg)
print(acc_tfidf_logreg)
| true
| true
|
f718a4ee414e973fde7694a37fb7480545ae3804
| 3,814
|
py
|
Python
|
onnx/backend/test/case/node/xor.py
|
okdshin/onnx
|
31ca96ca3331d05884a71c38975d34870eb9c81d
|
[
"MIT"
] | 2
|
2021-07-31T20:42:42.000Z
|
2021-11-17T11:01:14.000Z
|
onnx/backend/test/case/node/xor.py
|
lokitoth/onnx
|
27b40225ea98f6412ae2879ed67211d49564af2a
|
[
"MIT"
] | null | null | null |
onnx/backend/test/case/node/xor.py
|
lokitoth/onnx
|
27b40225ea98f6412ae2879ed67211d49564af2a
|
[
"MIT"
] | null | null | null |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
import onnx
from ..base import Base
from . import expect
class Xor(Base):
    """ONNX backend test cases for the Xor operator.

    np.bool (a deprecated alias of the builtin bool, removed in NumPy 1.24)
    is replaced with bool throughout; dtype semantics are identical.
    """
    @staticmethod
    def export():
        """Element-wise Xor on equal-shaped 2-D/3-D/4-D boolean inputs."""
        node = onnx.helper.make_node(
            'Xor',
            inputs=['x', 'y'],
            outputs=['xor'],
        )

        # 2d
        x = (np.random.randn(3, 4) > 0).astype(bool)
        y = (np.random.randn(3, 4) > 0).astype(bool)
        z = np.logical_xor(x, y)
        expect(node, inputs=[x, y], outputs=[z],
               name='test_xor2d')

        # 3d
        x = (np.random.randn(3, 4, 5) > 0).astype(bool)
        y = (np.random.randn(3, 4, 5) > 0).astype(bool)
        z = np.logical_xor(x, y)
        expect(node, inputs=[x, y], outputs=[z],
               name='test_xor3d')

        # 4d
        x = (np.random.randn(3, 4, 5, 6) > 0).astype(bool)
        y = (np.random.randn(3, 4, 5, 6) > 0).astype(bool)
        z = np.logical_xor(x, y)
        expect(node, inputs=[x, y], outputs=[z],
               name='test_xor4d')

    @staticmethod
    def export_xor_broadcast():
        """Xor with trailing-dimension broadcasting of the second input."""
        node = onnx.helper.make_node(
            'Xor',
            inputs=['x', 'y'],
            outputs=['xor'],
            broadcast=1,
        )

        # 3d vs 1d
        x = (np.random.randn(3, 4, 5) > 0).astype(bool)
        y = (np.random.randn(5) > 0).astype(bool)
        z = np.logical_xor(x, y)
        expect(node, inputs=[x, y], outputs=[z],
               name='test_xor_bcast3v1d')

        # 3d vs 2d
        x = (np.random.randn(3, 4, 5) > 0).astype(bool)
        y = (np.random.randn(4, 5) > 0).astype(bool)
        z = np.logical_xor(x, y)
        expect(node, inputs=[x, y], outputs=[z],
               name='test_xor_bcast3v2d')

        # 4d vs 2d
        x = (np.random.randn(3, 4, 5, 6) > 0).astype(bool)
        y = (np.random.randn(5, 6) > 0).astype(bool)
        z = np.logical_xor(x, y)
        expect(node, inputs=[x, y], outputs=[z],
               name='test_xor_bcast4v2d')

        # 4d vs 3d
        x = (np.random.randn(3, 4, 5, 6) > 0).astype(bool)
        y = (np.random.randn(4, 5, 6) > 0).astype(bool)
        z = np.logical_xor(x, y)
        expect(node, inputs=[x, y], outputs=[z],
               name='test_xor_bcast4v3d')

    @staticmethod
    def export_xor_axis():
        """Xor broadcasting a 1-D input along each axis of a 4-D input."""
        x = (np.random.randn(5, 5, 5, 5) > 0).astype(bool)
        y = (np.random.randn(5) > 0).astype(bool)

        node = onnx.helper.make_node(
            'Xor',
            inputs=['x', 'y'],
            outputs=['xor'],
            broadcast=1,
            axis=0,
        )
        # axis=0: y aligns with the first dimension of x
        z = np.logical_xor(x, y[:, np.newaxis, np.newaxis, np.newaxis])
        expect(node, inputs=[x, y], outputs=[z],
               name='test_xor_axis0')

        node = onnx.helper.make_node(
            'Xor',
            inputs=['x', 'y'],
            outputs=['xor'],
            broadcast=1,
            axis=1,
        )
        z = np.logical_xor(x, y[:, np.newaxis, np.newaxis,])
        expect(node, inputs=[x, y], outputs=[z],
               name='test_xor_axis1')

        node = onnx.helper.make_node(
            'Xor',
            inputs=['x', 'y'],
            outputs=['xor'],
            broadcast=1,
            axis=2,
        )
        z = np.logical_xor(x, y[:, np.newaxis,])
        expect(node, inputs=[x, y], outputs=[z],
               name='test_xor_axis2')

        node = onnx.helper.make_node(
            'Xor',
            inputs=['x', 'y'],
            outputs=['xor'],
            broadcast=1,
            axis=3,
        )
        # axis=3: y already aligns with the last dimension
        z = np.logical_xor(x, y)
        expect(node, inputs=[x, y], outputs=[z],
               name='test_xor_axis3')
| 28.893939
| 71
| 0.484793
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
import onnx
from ..base import Base
from . import expect
class Xor(Base):
@staticmethod
def export():
node = onnx.helper.make_node(
'Xor',
inputs=['x', 'y'],
outputs=['xor'],
)
x = (np.random.randn(3, 4) > 0).astype(np.bool)
y = (np.random.randn(3, 4) > 0).astype(np.bool)
z = np.logical_xor(x, y)
expect(node, inputs=[x, y], outputs=[z],
name='test_xor2d')
x = (np.random.randn(3, 4, 5) > 0).astype(np.bool)
y = (np.random.randn(3, 4, 5) > 0).astype(np.bool)
z = np.logical_xor(x, y)
expect(node, inputs=[x, y], outputs=[z],
name='test_xor3d')
x = (np.random.randn(3, 4, 5, 6) > 0).astype(np.bool)
y = (np.random.randn(3, 4, 5, 6) > 0).astype(np.bool)
z = np.logical_xor(x, y)
expect(node, inputs=[x, y], outputs=[z],
name='test_xor4d')
@staticmethod
def export_xor_broadcast():
node = onnx.helper.make_node(
'Xor',
inputs=['x', 'y'],
outputs=['xor'],
broadcast=1,
)
x = (np.random.randn(3, 4, 5) > 0).astype(np.bool)
y = (np.random.randn(5) > 0).astype(np.bool)
z = np.logical_xor(x, y)
expect(node, inputs=[x, y], outputs=[z],
name='test_xor_bcast3v1d')
x = (np.random.randn(3, 4, 5) > 0).astype(np.bool)
y = (np.random.randn(4, 5) > 0).astype(np.bool)
z = np.logical_xor(x, y)
expect(node, inputs=[x, y], outputs=[z],
name='test_xor_bcast3v2d')
x = (np.random.randn(3, 4, 5, 6) > 0).astype(np.bool)
y = (np.random.randn(5, 6) > 0).astype(np.bool)
z = np.logical_xor(x, y)
expect(node, inputs=[x, y], outputs=[z],
name='test_xor_bcast4v2d')
x = (np.random.randn(3, 4, 5, 6) > 0).astype(np.bool)
y = (np.random.randn(4, 5, 6) > 0).astype(np.bool)
z = np.logical_xor(x, y)
expect(node, inputs=[x, y], outputs=[z],
name='test_xor_bcast4v3d')
@staticmethod
def export_xor_axis():
x = (np.random.randn(5, 5, 5, 5) > 0).astype(np.bool)
y = (np.random.randn(5) > 0).astype(np.bool)
node = onnx.helper.make_node(
'Xor',
inputs=['x', 'y'],
outputs=['xor'],
broadcast=1,
axis=0,
)
z = np.logical_xor(x, y[:, np.newaxis, np.newaxis, np.newaxis])
expect(node, inputs=[x, y], outputs=[z],
name='test_xor_axis0')
node = onnx.helper.make_node(
'Xor',
inputs=['x', 'y'],
outputs=['xor'],
broadcast=1,
axis=1,
)
z = np.logical_xor(x, y[:, np.newaxis, np.newaxis,])
expect(node, inputs=[x, y], outputs=[z],
name='test_xor_axis1')
node = onnx.helper.make_node(
'Xor',
inputs=['x', 'y'],
outputs=['xor'],
broadcast=1,
axis=2,
)
z = np.logical_xor(x, y[:, np.newaxis,])
expect(node, inputs=[x, y], outputs=[z],
name='test_xor_axis2')
node = onnx.helper.make_node(
'Xor',
inputs=['x', 'y'],
outputs=['xor'],
broadcast=1,
axis=3,
)
z = np.logical_xor(x, y)
expect(node, inputs=[x, y], outputs=[z],
name='test_xor_axis3')
| true
| true
|
f718a52a8a96bb85eb0cdd0745fb1c73e627e679
| 21,417
|
py
|
Python
|
pandas/tests/io/test_common.py
|
kuantan/pandas
|
e18921eb0cc86f71c84a4aa0bd6d0c1b7de89def
|
[
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | 1
|
2021-02-20T13:36:45.000Z
|
2021-02-20T13:36:45.000Z
|
pandas/tests/io/test_common.py
|
fanoway/pandas
|
71312683b41b5177faf7ecd63555059504853cbd
|
[
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"MIT",
"MIT-0",
"ECL-2.0",
"BSD-3-Clause"
] | 4
|
2019-12-14T16:32:46.000Z
|
2022-02-12T00:32:28.000Z
|
pandas/tests/io/test_common.py
|
lithomas1/pandas
|
e18921eb0cc86f71c84a4aa0bd6d0c1b7de89def
|
[
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null |
"""
Tests for the pandas.io.common functionalities
"""
import codecs
import errno
from functools import partial
from io import (
BytesIO,
StringIO,
UnsupportedOperation,
)
import mmap
import os
from pathlib import Path
import tempfile
import pytest
from pandas.compat import is_platform_windows
import pandas.util._test_decorators as td
import pandas as pd
import pandas._testing as tm
import pandas.io.common as icom
class CustomFSPath:
    """Minimal os.PathLike implementation for testing fspath handling."""

    def __init__(self, path):
        self.path = path

    def __fspath__(self):
        # os.fspath()/open() call this to obtain the raw path string
        return self.path
# Functions that consume a string path and return a string or path-like object
# (used as parametrize fixtures by the tests below)
path_types = [str, CustomFSPath, Path]

# py.path is an optional dependency: include LocalPath in the matrix only
# when it is importable
try:
    from py.path import local as LocalPath

    path_types.append(LocalPath)
except ImportError:
    pass

# directory containing this test file
HERE = os.path.abspath(os.path.dirname(__file__))
# https://github.com/cython/cython/issues/1720
@pytest.mark.filterwarnings("ignore:can't resolve package:ImportWarning")
class TestCommonIOCapabilities:
data1 = """index,A,B,C,D
foo,2,3,4,5
bar,7,8,9,10
baz,12,13,14,15
qux,12,13,14,15
foo2,12,13,14,15
bar2,12,13,14,15
"""
def test_expand_user(self):
    # a "~/" path must be expanded to an absolute home-relative path
    filename = "~/sometest"
    expanded = icom._expand_user(filename)
    assert expanded != filename
    assert os.path.isabs(expanded)
    assert expanded == os.path.expanduser(filename)
def test_expand_user_normal_path(self):
    # a path without "~" passes through _expand_user untouched
    filename = "/somefolder/sometest"
    expanded = icom._expand_user(filename)
    assert expanded == filename
    assert expanded == os.path.expanduser(filename)
def test_stringify_path_pathlib(self):
    # pathlib.Path objects are reduced to their OS-native string form
    assert icom.stringify_path(Path(".")) == "."
    assert icom.stringify_path(Path("foo//bar")) == os.path.join("foo", "bar")
@td.skip_if_no("py.path")
def test_stringify_path_localpath(self):
    # py.path.local objects stringify to their absolute path
    rel = os.path.join("foo", "bar")
    assert icom.stringify_path(LocalPath(rel)) == os.path.abspath(rel)
def test_stringify_path_fspath(self):
    # any os.PathLike (via __fspath__) is reduced to its path string
    assert icom.stringify_path(CustomFSPath("foo/bar.csv")) == "foo/bar.csv"
def test_stringify_file_and_path_like(self):
    # GH 38125: do not stringify file objects that are also path-like —
    # an open fsspec handle exposes __fspath__ but must be passed through
    # unchanged so the already-open handle is used
    fsspec = pytest.importorskip("fsspec")
    with tm.ensure_clean() as path:
        with fsspec.open(f"file://{path}", mode="wb") as fsspec_obj:
            assert fsspec_obj == icom.stringify_path(fsspec_obj)
@pytest.mark.parametrize("path_type", path_types)
def test_infer_compression_from_path(self, compression_format, path_type):
extension, expected = compression_format
path = path_type("foo/bar.csv" + extension)
compression = icom.infer_compression(path, compression="infer")
assert compression == expected
@pytest.mark.parametrize("path_type", [str, CustomFSPath, Path])
def test_get_handle_with_path(self, path_type):
# ignore LocalPath: it creates strange paths: /absolute/~/sometest
with tempfile.TemporaryDirectory(dir=Path.home()) as tmp:
filename = path_type("~/" + Path(tmp).name + "/sometest")
with icom.get_handle(filename, "w") as handles:
assert Path(handles.handle.name).is_absolute()
assert os.path.expanduser(filename) == handles.handle.name
def test_get_handle_with_buffer(self):
input_buffer = StringIO()
with icom.get_handle(input_buffer, "r") as handles:
assert handles.handle == input_buffer
assert not input_buffer.closed
input_buffer.close()
# Test that BytesIOWrapper(get_handle) returns correct amount of bytes every time
def test_bytesiowrapper_returns_correct_bytes(self):
# Test latin1, ucs-2, and ucs-4 chars
data = """a,b,c
1,2,3
©,®,®
Look,a snake,🐍"""
with icom.get_handle(StringIO(data), "rb", is_text=False) as handles:
result = b""
chunksize = 5
while True:
chunk = handles.handle.read(chunksize)
# Make sure each chunk is correct amount of bytes
assert len(chunk) <= chunksize
if len(chunk) < chunksize:
# Can be less amount of bytes, but only at EOF
# which happens when read returns empty
assert len(handles.handle.read()) == 0
result += chunk
break
result += chunk
assert result == data.encode("utf-8")
# Test that pyarrow can handle a file opened with get_handle
@td.skip_if_no("pyarrow", min_version="0.15.0")
def test_get_handle_pyarrow_compat(self):
from pyarrow import csv
# Test latin1, ucs-2, and ucs-4 chars
data = """a,b,c
1,2,3
©,®,®
Look,a snake,🐍"""
expected = pd.DataFrame(
{"a": ["1", "©", "Look"], "b": ["2", "®", "a snake"], "c": ["3", "®", "🐍"]}
)
s = StringIO(data)
with icom.get_handle(s, "rb", is_text=False) as handles:
df = csv.read_csv(handles.handle).to_pandas()
tm.assert_frame_equal(df, expected)
assert not s.closed
def test_iterator(self):
with pd.read_csv(StringIO(self.data1), chunksize=1) as reader:
result = pd.concat(reader, ignore_index=True)
expected = pd.read_csv(StringIO(self.data1))
tm.assert_frame_equal(result, expected)
# GH12153
with pd.read_csv(StringIO(self.data1), chunksize=1) as it:
first = next(it)
tm.assert_frame_equal(first, expected.iloc[[0]])
tm.assert_frame_equal(pd.concat(it), expected.iloc[1:])
@pytest.mark.parametrize(
"reader, module, error_class, fn_ext",
[
(pd.read_csv, "os", FileNotFoundError, "csv"),
(pd.read_fwf, "os", FileNotFoundError, "txt"),
(pd.read_excel, "xlrd", FileNotFoundError, "xlsx"),
(pd.read_feather, "pyarrow", OSError, "feather"),
(pd.read_hdf, "tables", FileNotFoundError, "h5"),
(pd.read_stata, "os", FileNotFoundError, "dta"),
(pd.read_sas, "os", FileNotFoundError, "sas7bdat"),
(pd.read_json, "os", ValueError, "json"),
(pd.read_pickle, "os", FileNotFoundError, "pickle"),
],
)
def test_read_non_existent(self, reader, module, error_class, fn_ext):
pytest.importorskip(module)
path = os.path.join(HERE, "data", "does_not_exist." + fn_ext)
msg1 = fr"File (b')?.+does_not_exist\.{fn_ext}'? does not exist"
msg2 = fr"\[Errno 2\] No such file or directory: '.+does_not_exist\.{fn_ext}'"
msg3 = "Expected object or value"
msg4 = "path_or_buf needs to be a string file path or file-like"
msg5 = (
fr"\[Errno 2\] File .+does_not_exist\.{fn_ext} does not exist: "
fr"'.+does_not_exist\.{fn_ext}'"
)
msg6 = fr"\[Errno 2\] 没有那个文件或目录: '.+does_not_exist\.{fn_ext}'"
msg7 = (
fr"\[Errno 2\] File o directory non esistente: '.+does_not_exist\.{fn_ext}'"
)
msg8 = fr"Failed to open local file.+does_not_exist\.{fn_ext}"
with pytest.raises(
error_class,
match=fr"({msg1}|{msg2}|{msg3}|{msg4}|{msg5}|{msg6}|{msg7}|{msg8})",
):
reader(path)
@pytest.mark.parametrize(
"method, module, error_class, fn_ext",
[
(pd.DataFrame.to_csv, "os", OSError, "csv"),
(pd.DataFrame.to_html, "os", OSError, "html"),
(pd.DataFrame.to_excel, "xlrd", OSError, "xlsx"),
(pd.DataFrame.to_feather, "pyarrow", OSError, "feather"),
(pd.DataFrame.to_parquet, "pyarrow", OSError, "parquet"),
(pd.DataFrame.to_stata, "os", OSError, "dta"),
(pd.DataFrame.to_json, "os", OSError, "json"),
(pd.DataFrame.to_pickle, "os", OSError, "pickle"),
],
)
# NOTE: Missing parent directory for pd.DataFrame.to_hdf is handled by PyTables
def test_write_missing_parent_directory(self, method, module, error_class, fn_ext):
pytest.importorskip(module)
dummy_frame = pd.DataFrame({"a": [1, 2, 3], "b": [2, 3, 4], "c": [3, 4, 5]})
path = os.path.join(HERE, "data", "missing_folder", "does_not_exist." + fn_ext)
with pytest.raises(
error_class,
match=r"Cannot save file into a non-existent directory: .*missing_folder",
):
method(dummy_frame, path)
@pytest.mark.parametrize(
"reader, module, error_class, fn_ext",
[
(pd.read_csv, "os", FileNotFoundError, "csv"),
(pd.read_table, "os", FileNotFoundError, "csv"),
(pd.read_fwf, "os", FileNotFoundError, "txt"),
(pd.read_excel, "xlrd", FileNotFoundError, "xlsx"),
(pd.read_feather, "pyarrow", OSError, "feather"),
(pd.read_hdf, "tables", FileNotFoundError, "h5"),
(pd.read_stata, "os", FileNotFoundError, "dta"),
(pd.read_sas, "os", FileNotFoundError, "sas7bdat"),
(pd.read_json, "os", ValueError, "json"),
(pd.read_pickle, "os", FileNotFoundError, "pickle"),
],
)
def test_read_expands_user_home_dir(
self, reader, module, error_class, fn_ext, monkeypatch
):
pytest.importorskip(module)
path = os.path.join("~", "does_not_exist." + fn_ext)
monkeypatch.setattr(icom, "_expand_user", lambda x: os.path.join("foo", x))
msg1 = fr"File (b')?.+does_not_exist\.{fn_ext}'? does not exist"
msg2 = fr"\[Errno 2\] No such file or directory: '.+does_not_exist\.{fn_ext}'"
msg3 = "Unexpected character found when decoding 'false'"
msg4 = "path_or_buf needs to be a string file path or file-like"
msg5 = (
fr"\[Errno 2\] File .+does_not_exist\.{fn_ext} does not exist: "
fr"'.+does_not_exist\.{fn_ext}'"
)
msg6 = fr"\[Errno 2\] 没有那个文件或目录: '.+does_not_exist\.{fn_ext}'"
msg7 = (
fr"\[Errno 2\] File o directory non esistente: '.+does_not_exist\.{fn_ext}'"
)
msg8 = fr"Failed to open local file.+does_not_exist\.{fn_ext}"
with pytest.raises(
error_class,
match=fr"({msg1}|{msg2}|{msg3}|{msg4}|{msg5}|{msg6}|{msg7}|{msg8})",
):
reader(path)
@pytest.mark.parametrize(
"reader, module, path",
[
(pd.read_csv, "os", ("io", "data", "csv", "iris.csv")),
(pd.read_table, "os", ("io", "data", "csv", "iris.csv")),
(
pd.read_fwf,
"os",
("io", "data", "fixed_width", "fixed_width_format.txt"),
),
(pd.read_excel, "xlrd", ("io", "data", "excel", "test1.xlsx")),
(
pd.read_feather,
"pyarrow",
("io", "data", "feather", "feather-0_3_1.feather"),
),
(
pd.read_hdf,
"tables",
("io", "data", "legacy_hdf", "datetimetz_object.h5"),
),
(pd.read_stata, "os", ("io", "data", "stata", "stata10_115.dta")),
(pd.read_sas, "os", ("io", "sas", "data", "test1.sas7bdat")),
(pd.read_json, "os", ("io", "json", "data", "tsframe_v012.json")),
(
pd.read_pickle,
"os",
("io", "data", "pickle", "categorical.0.25.0.pickle"),
),
],
)
@pytest.mark.filterwarnings(
"ignore:CategoricalBlock is deprecated:DeprecationWarning"
)
@pytest.mark.filterwarnings( # pytables np.object usage
"ignore:`np.object` is a deprecated alias:DeprecationWarning"
)
def test_read_fspath_all(self, reader, module, path, datapath):
pytest.importorskip(module)
path = datapath(*path)
mypath = CustomFSPath(path)
result = reader(mypath)
expected = reader(path)
if path.endswith(".pickle"):
# categorical
tm.assert_categorical_equal(result, expected)
else:
tm.assert_frame_equal(result, expected)
@pytest.mark.filterwarnings("ignore:In future versions `DataFrame.to_latex`")
@pytest.mark.parametrize(
"writer_name, writer_kwargs, module",
[
("to_csv", {}, "os"),
("to_excel", {"engine": "xlwt"}, "xlwt"),
("to_feather", {}, "pyarrow"),
("to_html", {}, "os"),
("to_json", {}, "os"),
("to_latex", {}, "os"),
("to_pickle", {}, "os"),
("to_stata", {"time_stamp": pd.to_datetime("2019-01-01 00:00")}, "os"),
],
)
def test_write_fspath_all(self, writer_name, writer_kwargs, module):
p1 = tm.ensure_clean("string")
p2 = tm.ensure_clean("fspath")
df = pd.DataFrame({"A": [1, 2]})
with p1 as string, p2 as fspath:
pytest.importorskip(module)
mypath = CustomFSPath(fspath)
writer = getattr(df, writer_name)
writer(string, **writer_kwargs)
with open(string, "rb") as f:
expected = f.read()
writer(mypath, **writer_kwargs)
with open(fspath, "rb") as f:
result = f.read()
assert result == expected
@pytest.mark.filterwarnings( # pytables np.object usage
"ignore:`np.object` is a deprecated alias:DeprecationWarning"
)
def test_write_fspath_hdf5(self):
# Same test as write_fspath_all, except HDF5 files aren't
# necessarily byte-for-byte identical for a given dataframe, so we'll
# have to read and compare equality
pytest.importorskip("tables")
df = pd.DataFrame({"A": [1, 2]})
p1 = tm.ensure_clean("string")
p2 = tm.ensure_clean("fspath")
with p1 as string, p2 as fspath:
mypath = CustomFSPath(fspath)
df.to_hdf(mypath, key="bar")
df.to_hdf(string, key="bar")
result = pd.read_hdf(fspath, key="bar")
expected = pd.read_hdf(string, key="bar")
tm.assert_frame_equal(result, expected)
@pytest.fixture
def mmap_file(datapath):
return datapath("io", "data", "csv", "test_mmap.csv")
class TestMMapWrapper:
def test_constructor_bad_file(self, mmap_file):
non_file = StringIO("I am not a file")
non_file.fileno = lambda: -1
# the error raised is different on Windows
if is_platform_windows():
msg = "The parameter is incorrect"
err = OSError
else:
msg = "[Errno 22]"
err = mmap.error
with pytest.raises(err, match=msg):
icom._MMapWrapper(non_file)
target = open(mmap_file)
target.close()
msg = "I/O operation on closed file"
with pytest.raises(ValueError, match=msg):
icom._MMapWrapper(target)
def test_get_attr(self, mmap_file):
with open(mmap_file) as target:
wrapper = icom._MMapWrapper(target)
attrs = dir(wrapper.mmap)
attrs = [attr for attr in attrs if not attr.startswith("__")]
attrs.append("__next__")
for attr in attrs:
assert hasattr(wrapper, attr)
assert not hasattr(wrapper, "foo")
def test_next(self, mmap_file):
with open(mmap_file) as target:
wrapper = icom._MMapWrapper(target)
lines = target.readlines()
for line in lines:
next_line = next(wrapper)
assert next_line.strip() == line.strip()
with pytest.raises(StopIteration, match=r"^$"):
next(wrapper)
def test_unknown_engine(self):
with tm.ensure_clean() as path:
df = tm.makeDataFrame()
df.to_csv(path)
with pytest.raises(ValueError, match="Unknown engine"):
pd.read_csv(path, engine="pyt")
def test_binary_mode(self):
"""
'encoding' shouldn't be passed to 'open' in binary mode.
GH 35058
"""
with tm.ensure_clean() as path:
df = tm.makeDataFrame()
df.to_csv(path, mode="w+b")
tm.assert_frame_equal(df, pd.read_csv(path, index_col=0))
@pytest.mark.parametrize("encoding", ["utf-16", "utf-32"])
@pytest.mark.parametrize("compression_", ["bz2", "xz"])
def test_warning_missing_utf_bom(self, encoding, compression_):
"""
bz2 and xz do not write the byte order mark (BOM) for utf-16/32.
https://stackoverflow.com/questions/55171439
GH 35681
"""
df = tm.makeDataFrame()
with tm.ensure_clean() as path:
with tm.assert_produces_warning(UnicodeWarning):
df.to_csv(path, compression=compression_, encoding=encoding)
# reading should fail (otherwise we wouldn't need the warning)
msg = r"UTF-\d+ stream does not start with BOM"
with pytest.raises(UnicodeError, match=msg):
pd.read_csv(path, compression=compression_, encoding=encoding)
def test_is_fsspec_url():
assert icom.is_fsspec_url("gcs://pandas/somethingelse.com")
assert icom.is_fsspec_url("gs://pandas/somethingelse.com")
# the following is the only remote URL that is handled without fsspec
assert not icom.is_fsspec_url("http://pandas/somethingelse.com")
assert not icom.is_fsspec_url("random:pandas/somethingelse.com")
assert not icom.is_fsspec_url("/local/path")
assert not icom.is_fsspec_url("relative/local/path")
@pytest.mark.parametrize("encoding", [None, "utf-8"])
@pytest.mark.parametrize("format", ["csv", "json"])
def test_codecs_encoding(encoding, format):
# GH39247
expected = tm.makeDataFrame()
with tm.ensure_clean() as path:
with codecs.open(path, mode="w", encoding=encoding) as handle:
getattr(expected, f"to_{format}")(handle)
with codecs.open(path, mode="r", encoding=encoding) as handle:
if format == "csv":
df = pd.read_csv(handle, index_col=0)
else:
df = pd.read_json(handle)
tm.assert_frame_equal(expected, df)
def test_codecs_get_writer_reader():
# GH39247
expected = tm.makeDataFrame()
with tm.ensure_clean() as path:
with open(path, "wb") as handle:
with codecs.getwriter("utf-8")(handle) as encoded:
expected.to_csv(encoded)
with open(path, "rb") as handle:
with codecs.getreader("utf-8")(handle) as encoded:
df = pd.read_csv(encoded, index_col=0)
tm.assert_frame_equal(expected, df)
@pytest.mark.parametrize(
"io_class,mode,msg",
[
(BytesIO, "t", "a bytes-like object is required, not 'str'"),
(StringIO, "b", "string argument expected, got 'bytes'"),
],
)
def test_explicit_encoding(io_class, mode, msg):
# GH39247; this test makes sure that if a user provides mode="*t" or "*b",
# it is used. In the case of this test it leads to an error as intentionally the
# wrong mode is requested
expected = tm.makeDataFrame()
with io_class() as buffer:
with pytest.raises(TypeError, match=msg):
expected.to_csv(buffer, mode=f"w{mode}")
@pytest.mark.parametrize("encoding_errors", [None, "strict", "replace"])
@pytest.mark.parametrize("format", ["csv", "json"])
def test_encoding_errors(encoding_errors, format):
# GH39450
msg = "'utf-8' codec can't decode byte"
bad_encoding = b"\xe4"
if format == "csv":
return
content = bad_encoding + b"\n" + bad_encoding
reader = pd.read_csv
else:
content = (
b'{"'
+ bad_encoding * 2
+ b'": {"'
+ bad_encoding
+ b'":"'
+ bad_encoding
+ b'"}}'
)
reader = partial(pd.read_json, orient="index")
with tm.ensure_clean() as path:
file = Path(path)
file.write_bytes(content)
if encoding_errors != "replace":
with pytest.raises(UnicodeDecodeError, match=msg):
reader(path, encoding_errors=encoding_errors)
else:
df = reader(path, encoding_errors=encoding_errors)
decoded = bad_encoding.decode(errors=encoding_errors)
expected = pd.DataFrame({decoded: [decoded]}, index=[decoded * 2])
tm.assert_frame_equal(df, expected)
def test_bad_encdoing_errors():
# GH 39777
with tm.ensure_clean() as path:
with pytest.raises(LookupError, match="unknown error handler name"):
icom.get_handle(path, "w", errors="bad")
def test_errno_attribute():
# GH 13872
with pytest.raises(FileNotFoundError, match="\\[Errno 2\\]") as err:
pd.read_csv("doesnt_exist")
assert err.errno == errno.ENOENT
def test_fail_mmap():
with pytest.raises(UnsupportedOperation, match="fileno"):
with BytesIO() as buffer:
icom.get_handle(buffer, "rb", memory_map=True)
| 35.754591
| 88
| 0.591773
|
import codecs
import errno
from functools import partial
from io import (
BytesIO,
StringIO,
UnsupportedOperation,
)
import mmap
import os
from pathlib import Path
import tempfile
import pytest
from pandas.compat import is_platform_windows
import pandas.util._test_decorators as td
import pandas as pd
import pandas._testing as tm
import pandas.io.common as icom
class CustomFSPath:
def __init__(self, path):
self.path = path
def __fspath__(self):
return self.path
path_types = [str, CustomFSPath, Path]
try:
from py.path import local as LocalPath
path_types.append(LocalPath)
except ImportError:
pass
HERE = os.path.abspath(os.path.dirname(__file__))
@pytest.mark.filterwarnings("ignore:can't resolve package:ImportWarning")
class TestCommonIOCapabilities:
data1 = """index,A,B,C,D
foo,2,3,4,5
bar,7,8,9,10
baz,12,13,14,15
qux,12,13,14,15
foo2,12,13,14,15
bar2,12,13,14,15
"""
def test_expand_user(self):
filename = "~/sometest"
expanded_name = icom._expand_user(filename)
assert expanded_name != filename
assert os.path.isabs(expanded_name)
assert os.path.expanduser(filename) == expanded_name
def test_expand_user_normal_path(self):
filename = "/somefolder/sometest"
expanded_name = icom._expand_user(filename)
assert expanded_name == filename
assert os.path.expanduser(filename) == expanded_name
def test_stringify_path_pathlib(self):
rel_path = icom.stringify_path(Path("."))
assert rel_path == "."
redundant_path = icom.stringify_path(Path("foo//bar"))
assert redundant_path == os.path.join("foo", "bar")
@td.skip_if_no("py.path")
def test_stringify_path_localpath(self):
path = os.path.join("foo", "bar")
abs_path = os.path.abspath(path)
lpath = LocalPath(path)
assert icom.stringify_path(lpath) == abs_path
def test_stringify_path_fspath(self):
p = CustomFSPath("foo/bar.csv")
result = icom.stringify_path(p)
assert result == "foo/bar.csv"
def test_stringify_file_and_path_like(self):
# GH 38125: do not stringify file objects that are also path-like
fsspec = pytest.importorskip("fsspec")
with tm.ensure_clean() as path:
with fsspec.open(f"file://{path}", mode="wb") as fsspec_obj:
assert fsspec_obj == icom.stringify_path(fsspec_obj)
@pytest.mark.parametrize("path_type", path_types)
def test_infer_compression_from_path(self, compression_format, path_type):
extension, expected = compression_format
path = path_type("foo/bar.csv" + extension)
compression = icom.infer_compression(path, compression="infer")
assert compression == expected
@pytest.mark.parametrize("path_type", [str, CustomFSPath, Path])
def test_get_handle_with_path(self, path_type):
# ignore LocalPath: it creates strange paths: /absolute/~/sometest
with tempfile.TemporaryDirectory(dir=Path.home()) as tmp:
filename = path_type("~/" + Path(tmp).name + "/sometest")
with icom.get_handle(filename, "w") as handles:
assert Path(handles.handle.name).is_absolute()
assert os.path.expanduser(filename) == handles.handle.name
def test_get_handle_with_buffer(self):
input_buffer = StringIO()
with icom.get_handle(input_buffer, "r") as handles:
assert handles.handle == input_buffer
assert not input_buffer.closed
input_buffer.close()
# Test that BytesIOWrapper(get_handle) returns correct amount of bytes every time
def test_bytesiowrapper_returns_correct_bytes(self):
# Test latin1, ucs-2, and ucs-4 chars
data = """a,b,c
1,2,3
©,®,®
Look,a snake,🐍"""
with icom.get_handle(StringIO(data), "rb", is_text=False) as handles:
result = b""
chunksize = 5
while True:
chunk = handles.handle.read(chunksize)
# Make sure each chunk is correct amount of bytes
assert len(chunk) <= chunksize
if len(chunk) < chunksize:
# Can be less amount of bytes, but only at EOF
# which happens when read returns empty
assert len(handles.handle.read()) == 0
result += chunk
break
result += chunk
assert result == data.encode("utf-8")
# Test that pyarrow can handle a file opened with get_handle
@td.skip_if_no("pyarrow", min_version="0.15.0")
def test_get_handle_pyarrow_compat(self):
from pyarrow import csv
# Test latin1, ucs-2, and ucs-4 chars
data = """a,b,c
1,2,3
©,®,®
Look,a snake,🐍"""
expected = pd.DataFrame(
{"a": ["1", "©", "Look"], "b": ["2", "®", "a snake"], "c": ["3", "®", "🐍"]}
)
s = StringIO(data)
with icom.get_handle(s, "rb", is_text=False) as handles:
df = csv.read_csv(handles.handle).to_pandas()
tm.assert_frame_equal(df, expected)
assert not s.closed
def test_iterator(self):
with pd.read_csv(StringIO(self.data1), chunksize=1) as reader:
result = pd.concat(reader, ignore_index=True)
expected = pd.read_csv(StringIO(self.data1))
tm.assert_frame_equal(result, expected)
# GH12153
with pd.read_csv(StringIO(self.data1), chunksize=1) as it:
first = next(it)
tm.assert_frame_equal(first, expected.iloc[[0]])
tm.assert_frame_equal(pd.concat(it), expected.iloc[1:])
@pytest.mark.parametrize(
"reader, module, error_class, fn_ext",
[
(pd.read_csv, "os", FileNotFoundError, "csv"),
(pd.read_fwf, "os", FileNotFoundError, "txt"),
(pd.read_excel, "xlrd", FileNotFoundError, "xlsx"),
(pd.read_feather, "pyarrow", OSError, "feather"),
(pd.read_hdf, "tables", FileNotFoundError, "h5"),
(pd.read_stata, "os", FileNotFoundError, "dta"),
(pd.read_sas, "os", FileNotFoundError, "sas7bdat"),
(pd.read_json, "os", ValueError, "json"),
(pd.read_pickle, "os", FileNotFoundError, "pickle"),
],
)
def test_read_non_existent(self, reader, module, error_class, fn_ext):
pytest.importorskip(module)
path = os.path.join(HERE, "data", "does_not_exist." + fn_ext)
msg1 = fr"File (b')?.+does_not_exist\.{fn_ext}'? does not exist"
msg2 = fr"\[Errno 2\] No such file or directory: '.+does_not_exist\.{fn_ext}'"
msg3 = "Expected object or value"
msg4 = "path_or_buf needs to be a string file path or file-like"
msg5 = (
fr"\[Errno 2\] File .+does_not_exist\.{fn_ext} does not exist: "
fr"'.+does_not_exist\.{fn_ext}'"
)
msg6 = fr"\[Errno 2\] 没有那个文件或目录: '.+does_not_exist\.{fn_ext}'"
msg7 = (
fr"\[Errno 2\] File o directory non esistente: '.+does_not_exist\.{fn_ext}'"
)
msg8 = fr"Failed to open local file.+does_not_exist\.{fn_ext}"
with pytest.raises(
error_class,
match=fr"({msg1}|{msg2}|{msg3}|{msg4}|{msg5}|{msg6}|{msg7}|{msg8})",
):
reader(path)
@pytest.mark.parametrize(
"method, module, error_class, fn_ext",
[
(pd.DataFrame.to_csv, "os", OSError, "csv"),
(pd.DataFrame.to_html, "os", OSError, "html"),
(pd.DataFrame.to_excel, "xlrd", OSError, "xlsx"),
(pd.DataFrame.to_feather, "pyarrow", OSError, "feather"),
(pd.DataFrame.to_parquet, "pyarrow", OSError, "parquet"),
(pd.DataFrame.to_stata, "os", OSError, "dta"),
(pd.DataFrame.to_json, "os", OSError, "json"),
(pd.DataFrame.to_pickle, "os", OSError, "pickle"),
],
)
# NOTE: Missing parent directory for pd.DataFrame.to_hdf is handled by PyTables
def test_write_missing_parent_directory(self, method, module, error_class, fn_ext):
pytest.importorskip(module)
dummy_frame = pd.DataFrame({"a": [1, 2, 3], "b": [2, 3, 4], "c": [3, 4, 5]})
path = os.path.join(HERE, "data", "missing_folder", "does_not_exist." + fn_ext)
with pytest.raises(
error_class,
match=r"Cannot save file into a non-existent directory: .*missing_folder",
):
method(dummy_frame, path)
@pytest.mark.parametrize(
"reader, module, error_class, fn_ext",
[
(pd.read_csv, "os", FileNotFoundError, "csv"),
(pd.read_table, "os", FileNotFoundError, "csv"),
(pd.read_fwf, "os", FileNotFoundError, "txt"),
(pd.read_excel, "xlrd", FileNotFoundError, "xlsx"),
(pd.read_feather, "pyarrow", OSError, "feather"),
(pd.read_hdf, "tables", FileNotFoundError, "h5"),
(pd.read_stata, "os", FileNotFoundError, "dta"),
(pd.read_sas, "os", FileNotFoundError, "sas7bdat"),
(pd.read_json, "os", ValueError, "json"),
(pd.read_pickle, "os", FileNotFoundError, "pickle"),
],
)
def test_read_expands_user_home_dir(
self, reader, module, error_class, fn_ext, monkeypatch
):
pytest.importorskip(module)
path = os.path.join("~", "does_not_exist." + fn_ext)
monkeypatch.setattr(icom, "_expand_user", lambda x: os.path.join("foo", x))
msg1 = fr"File (b')?.+does_not_exist\.{fn_ext}'? does not exist"
msg2 = fr"\[Errno 2\] No such file or directory: '.+does_not_exist\.{fn_ext}'"
msg3 = "Unexpected character found when decoding 'false'"
msg4 = "path_or_buf needs to be a string file path or file-like"
msg5 = (
fr"\[Errno 2\] File .+does_not_exist\.{fn_ext} does not exist: "
fr"'.+does_not_exist\.{fn_ext}'"
)
msg6 = fr"\[Errno 2\] 没有那个文件或目录: '.+does_not_exist\.{fn_ext}'"
msg7 = (
fr"\[Errno 2\] File o directory non esistente: '.+does_not_exist\.{fn_ext}'"
)
msg8 = fr"Failed to open local file.+does_not_exist\.{fn_ext}"
with pytest.raises(
error_class,
match=fr"({msg1}|{msg2}|{msg3}|{msg4}|{msg5}|{msg6}|{msg7}|{msg8})",
):
reader(path)
@pytest.mark.parametrize(
"reader, module, path",
[
(pd.read_csv, "os", ("io", "data", "csv", "iris.csv")),
(pd.read_table, "os", ("io", "data", "csv", "iris.csv")),
(
pd.read_fwf,
"os",
("io", "data", "fixed_width", "fixed_width_format.txt"),
),
(pd.read_excel, "xlrd", ("io", "data", "excel", "test1.xlsx")),
(
pd.read_feather,
"pyarrow",
("io", "data", "feather", "feather-0_3_1.feather"),
),
(
pd.read_hdf,
"tables",
("io", "data", "legacy_hdf", "datetimetz_object.h5"),
),
(pd.read_stata, "os", ("io", "data", "stata", "stata10_115.dta")),
(pd.read_sas, "os", ("io", "sas", "data", "test1.sas7bdat")),
(pd.read_json, "os", ("io", "json", "data", "tsframe_v012.json")),
(
pd.read_pickle,
"os",
("io", "data", "pickle", "categorical.0.25.0.pickle"),
),
],
)
@pytest.mark.filterwarnings(
"ignore:CategoricalBlock is deprecated:DeprecationWarning"
)
@pytest.mark.filterwarnings( # pytables np.object usage
"ignore:`np.object` is a deprecated alias:DeprecationWarning"
)
def test_read_fspath_all(self, reader, module, path, datapath):
pytest.importorskip(module)
path = datapath(*path)
mypath = CustomFSPath(path)
result = reader(mypath)
expected = reader(path)
if path.endswith(".pickle"):
# categorical
tm.assert_categorical_equal(result, expected)
else:
tm.assert_frame_equal(result, expected)
@pytest.mark.filterwarnings("ignore:In future versions `DataFrame.to_latex`")
@pytest.mark.parametrize(
"writer_name, writer_kwargs, module",
[
("to_csv", {}, "os"),
("to_excel", {"engine": "xlwt"}, "xlwt"),
("to_feather", {}, "pyarrow"),
("to_html", {}, "os"),
("to_json", {}, "os"),
("to_latex", {}, "os"),
("to_pickle", {}, "os"),
("to_stata", {"time_stamp": pd.to_datetime("2019-01-01 00:00")}, "os"),
],
)
def test_write_fspath_all(self, writer_name, writer_kwargs, module):
p1 = tm.ensure_clean("string")
p2 = tm.ensure_clean("fspath")
df = pd.DataFrame({"A": [1, 2]})
with p1 as string, p2 as fspath:
pytest.importorskip(module)
mypath = CustomFSPath(fspath)
writer = getattr(df, writer_name)
writer(string, **writer_kwargs)
with open(string, "rb") as f:
expected = f.read()
writer(mypath, **writer_kwargs)
with open(fspath, "rb") as f:
result = f.read()
assert result == expected
@pytest.mark.filterwarnings( # pytables np.object usage
"ignore:`np.object` is a deprecated alias:DeprecationWarning"
)
def test_write_fspath_hdf5(self):
# Same test as write_fspath_all, except HDF5 files aren't
# have to read and compare equality
pytest.importorskip("tables")
df = pd.DataFrame({"A": [1, 2]})
p1 = tm.ensure_clean("string")
p2 = tm.ensure_clean("fspath")
with p1 as string, p2 as fspath:
mypath = CustomFSPath(fspath)
df.to_hdf(mypath, key="bar")
df.to_hdf(string, key="bar")
result = pd.read_hdf(fspath, key="bar")
expected = pd.read_hdf(string, key="bar")
tm.assert_frame_equal(result, expected)
@pytest.fixture
def mmap_file(datapath):
return datapath("io", "data", "csv", "test_mmap.csv")
class TestMMapWrapper:
def test_constructor_bad_file(self, mmap_file):
non_file = StringIO("I am not a file")
non_file.fileno = lambda: -1
# the error raised is different on Windows
if is_platform_windows():
msg = "The parameter is incorrect"
err = OSError
else:
msg = "[Errno 22]"
err = mmap.error
with pytest.raises(err, match=msg):
icom._MMapWrapper(non_file)
target = open(mmap_file)
target.close()
msg = "I/O operation on closed file"
with pytest.raises(ValueError, match=msg):
icom._MMapWrapper(target)
def test_get_attr(self, mmap_file):
with open(mmap_file) as target:
wrapper = icom._MMapWrapper(target)
attrs = dir(wrapper.mmap)
attrs = [attr for attr in attrs if not attr.startswith("__")]
attrs.append("__next__")
for attr in attrs:
assert hasattr(wrapper, attr)
assert not hasattr(wrapper, "foo")
def test_next(self, mmap_file):
with open(mmap_file) as target:
wrapper = icom._MMapWrapper(target)
lines = target.readlines()
for line in lines:
next_line = next(wrapper)
assert next_line.strip() == line.strip()
with pytest.raises(StopIteration, match=r"^$"):
next(wrapper)
def test_unknown_engine(self):
with tm.ensure_clean() as path:
df = tm.makeDataFrame()
df.to_csv(path)
with pytest.raises(ValueError, match="Unknown engine"):
pd.read_csv(path, engine="pyt")
def test_binary_mode(self):
with tm.ensure_clean() as path:
df = tm.makeDataFrame()
df.to_csv(path, mode="w+b")
tm.assert_frame_equal(df, pd.read_csv(path, index_col=0))
@pytest.mark.parametrize("encoding", ["utf-16", "utf-32"])
@pytest.mark.parametrize("compression_", ["bz2", "xz"])
def test_warning_missing_utf_bom(self, encoding, compression_):
df = tm.makeDataFrame()
with tm.ensure_clean() as path:
with tm.assert_produces_warning(UnicodeWarning):
df.to_csv(path, compression=compression_, encoding=encoding)
# reading should fail (otherwise we wouldn't need the warning)
msg = r"UTF-\d+ stream does not start with BOM"
with pytest.raises(UnicodeError, match=msg):
pd.read_csv(path, compression=compression_, encoding=encoding)
def test_is_fsspec_url():
assert icom.is_fsspec_url("gcs://pandas/somethingelse.com")
assert icom.is_fsspec_url("gs://pandas/somethingelse.com")
assert not icom.is_fsspec_url("http://pandas/somethingelse.com")
assert not icom.is_fsspec_url("random:pandas/somethingelse.com")
assert not icom.is_fsspec_url("/local/path")
assert not icom.is_fsspec_url("relative/local/path")
@pytest.mark.parametrize("encoding", [None, "utf-8"])
@pytest.mark.parametrize("format", ["csv", "json"])
def test_codecs_encoding(encoding, format):
expected = tm.makeDataFrame()
with tm.ensure_clean() as path:
with codecs.open(path, mode="w", encoding=encoding) as handle:
getattr(expected, f"to_{format}")(handle)
with codecs.open(path, mode="r", encoding=encoding) as handle:
if format == "csv":
df = pd.read_csv(handle, index_col=0)
else:
df = pd.read_json(handle)
tm.assert_frame_equal(expected, df)
def test_codecs_get_writer_reader():
expected = tm.makeDataFrame()
with tm.ensure_clean() as path:
with open(path, "wb") as handle:
with codecs.getwriter("utf-8")(handle) as encoded:
expected.to_csv(encoded)
with open(path, "rb") as handle:
with codecs.getreader("utf-8")(handle) as encoded:
df = pd.read_csv(encoded, index_col=0)
tm.assert_frame_equal(expected, df)
@pytest.mark.parametrize(
"io_class,mode,msg",
[
(BytesIO, "t", "a bytes-like object is required, not 'str'"),
(StringIO, "b", "string argument expected, got 'bytes'"),
],
)
def test_explicit_encoding(io_class, mode, msg):
expected = tm.makeDataFrame()
with io_class() as buffer:
with pytest.raises(TypeError, match=msg):
expected.to_csv(buffer, mode=f"w{mode}")
@pytest.mark.parametrize("encoding_errors", [None, "strict", "replace"])
@pytest.mark.parametrize("format", ["csv", "json"])
def test_encoding_errors(encoding_errors, format):
msg = "'utf-8' codec can't decode byte"
bad_encoding = b"\xe4"
if format == "csv":
return
content = bad_encoding + b"\n" + bad_encoding
reader = pd.read_csv
else:
content = (
b'{"'
+ bad_encoding * 2
+ b'": {"'
+ bad_encoding
+ b'":"'
+ bad_encoding
+ b'"}}'
)
reader = partial(pd.read_json, orient="index")
with tm.ensure_clean() as path:
file = Path(path)
file.write_bytes(content)
if encoding_errors != "replace":
with pytest.raises(UnicodeDecodeError, match=msg):
reader(path, encoding_errors=encoding_errors)
else:
df = reader(path, encoding_errors=encoding_errors)
decoded = bad_encoding.decode(errors=encoding_errors)
expected = pd.DataFrame({decoded: [decoded]}, index=[decoded * 2])
tm.assert_frame_equal(df, expected)
def test_bad_encdoing_errors():
# GH 39777
with tm.ensure_clean() as path:
with pytest.raises(LookupError, match="unknown error handler name"):
icom.get_handle(path, "w", errors="bad")
def test_errno_attribute():
# GH 13872
with pytest.raises(FileNotFoundError, match="\\[Errno 2\\]") as err:
pd.read_csv("doesnt_exist")
assert err.errno == errno.ENOENT
def test_fail_mmap():
with pytest.raises(UnsupportedOperation, match="fileno"):
with BytesIO() as buffer:
icom.get_handle(buffer, "rb", memory_map=True)
| true
| true
|
f718a539f818b3cbab4eb694387294e8a9cc035e
| 54,358
|
py
|
Python
|
python/ccxt/async_support/hitbtc2.py
|
OliverNChalk/ccxt
|
fcf55e88f3523d2969f905cbed3b4deec1433a5e
|
[
"MIT"
] | null | null | null |
python/ccxt/async_support/hitbtc2.py
|
OliverNChalk/ccxt
|
fcf55e88f3523d2969f905cbed3b4deec1433a5e
|
[
"MIT"
] | null | null | null |
python/ccxt/async_support/hitbtc2.py
|
OliverNChalk/ccxt
|
fcf55e88f3523d2969f905cbed3b4deec1433a5e
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN:
# https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code
from ccxt.async_support.hitbtc import hitbtc
import base64
import math
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import PermissionDenied
from ccxt.base.errors import InsufficientFunds
from ccxt.base.errors import InvalidOrder
from ccxt.base.errors import OrderNotFound
from ccxt.base.errors import ExchangeNotAvailable
from ccxt.base.decimal_to_precision import TRUNCATE
from ccxt.base.decimal_to_precision import DECIMAL_PLACES
class hitbtc2 (hitbtc):
    def describe(self):
        """Return the static exchange description (capabilities, endpoints,
        timeframes, fee schedule and error-code mappings), deep-merged over
        the hitbtc v1 base class description."""
        return self.deep_extend(super(hitbtc2, self).describe(), {
            'id': 'hitbtc2',
            'name': 'HitBTC',
            'countries': ['HK'],
            'rateLimit': 1500,
            'version': '2',
            # unified-API capability flags
            'has': {
                'createDepositAddress': True,
                'fetchDepositAddress': True,
                'CORS': True,
                'editOrder': True,
                'fetchCurrencies': True,
                'fetchOHLCV': True,
                'fetchTickers': True,
                'fetchOrder': True,
                'fetchOrders': False,
                'fetchOpenOrders': True,
                'fetchClosedOrders': True,
                'fetchMyTrades': True,
                'withdraw': True,
                'fetchOrderTrades': False,  # not implemented yet
                'fetchDeposits': False,
                'fetchWithdrawals': False,
                'fetchTransactions': True,
                'fetchTradingFee': True,
            },
            'timeframes': {
                '1m': 'M1',
                '3m': 'M3',
                '5m': 'M5',
                '15m': 'M15',
                '30m': 'M30',  # default
                '1h': 'H1',
                '4h': 'H4',
                '1d': 'D1',
                '1w': 'D7',
                '1M': '1M',
            },
            'urls': {
                'logo': 'https://user-images.githubusercontent.com/1294454/27766555-8eaec20e-5edc-11e7-9c5b-6dc69fc42f5e.jpg',
                'api': 'https://api.hitbtc.com',
                'www': 'https://hitbtc.com',
                'referral': 'https://hitbtc.com/?ref_id=5a5d39a65d466',
                'doc': [
                    'https://api.hitbtc.com',
                    'https://github.com/hitbtc-com/hitbtc-api/blob/master/APIv2.md',
                ],
                'fees': [
                    'https://hitbtc.com/fees-and-limits',
                    'https://support.hitbtc.com/hc/en-us/articles/115005148605-Fees-and-limits',
                ],
            },
            # REST endpoints, grouped by access level and HTTP verb
            'api': {
                'public': {
                    'get': [
                        'symbol',  # Available Currency Symbols
                        'symbol/{symbol}',  # Get symbol info
                        'currency',  # Available Currencies
                        'currency/{currency}',  # Get currency info
                        'ticker',  # Ticker list for all symbols
                        'ticker/{symbol}',  # Ticker for symbol
                        'trades/{symbol}',  # Trades
                        'orderbook/{symbol}',  # Orderbook
                        'candles/{symbol}',  # Candles
                    ],
                },
                'private': {
                    'get': [
                        'order',  # List your current open orders
                        'order/{clientOrderId}',  # Get a single order by clientOrderId
                        'trading/balance',  # Get trading balance
                        'trading/fee/all',  # Get trading fee rate
                        'trading/fee/{symbol}',  # Get trading fee rate
                        'history/trades',  # Get historical trades
                        'history/order',  # Get historical orders
                        'history/order/{id}/trades',  # Get historical trades by specified order
                        'account/balance',  # Get main acccount balance
                        'account/transactions',  # Get account transactions
                        'account/transactions/{id}',  # Get account transaction by id
                        'account/crypto/address/{currency}',  # Get deposit crypro address
                    ],
                    'post': [
                        'order',  # Create new order
                        'account/crypto/withdraw',  # Withdraw crypro
                        'account/crypto/address/{currency}',  # Create new deposit crypro address
                        'account/transfer',  # Transfer amount to trading
                    ],
                    'put': [
                        'order/{clientOrderId}',  # Create new order
                        'account/crypto/withdraw/{id}',  # Commit withdraw crypro
                    ],
                    'delete': [
                        'order',  # Cancel all open orders
                        'order/{clientOrderId}',  # Cancel order
                        'account/crypto/withdraw/{id}',  # Rollback withdraw crypro
                    ],
                    'patch': [
                        'order/{clientOrderId}',  # Cancel Replace order
                    ],
                },
            },
            'fees': {
                'trading': {
                    'tierBased': False,
                    'percentage': True,
                    'maker': 0.1 / 100,
                    'taker': 0.2 / 100,
                },
                'funding': {
                    'tierBased': False,
                    'percentage': False,
                    # flat per-currency withdrawal fees
                    'withdraw': {
                        'BTC': 0.001,
                        'BCC': 0.0018,
                        'ETH': 0.00958,
                        'BCH': 0.0018,
                        'USDT': 100,
                        'DASH': 0.03,
                        'BTG': 0.0005,
                        'XRP': 0.509,
                        'LTC': 0.003,
                        'ZEC': 0.0001,
                        'XMR': 0.09,
                        '1ST': 0.84,
                        'ADX': 5.7,
                        'AE': 6.7,
                        'AEON': 0.01006,
                        'AIR': 565,
                        'AMM': 14,
                        'AMP': 342,
                        'ANT': 6.7,
                        'ARDR': 1,
                        'ARN': 18.5,
                        'ART': 26,
                        'ATB': 0.0004,
                        'ATL': 27,
                        'ATM': 504,
                        'ATS': 860,
                        'AVT': 1.9,
                        'BAS': 113,
                        'BCN': 0.1,
                        'BET': 124,
                        'BKB': 46,
                        'BMC': 32,
                        'BMT': 100,
                        'BNT': 2.57,
                        'BQX': 4.7,
                        'BTCA': 351.21,
                        'BTM': 40,
                        'BTX': 0.04,
                        'BUS': 0.004,
                        'CAPP': 97,
                        'CCT': 6,
                        'CDT': 100,
                        'CDX': 30,
                        'CFI': 61,
                        'CL': 13.85,
                        'CLD': 0.88,
                        'CND': 574,
                        'CNX': 0.04,
                        'COSS': 65,
                        'CPAY': 5.487,
                        'CSNO': 16,
                        'CTR': 15,
                        'CTX': 146,
                        'CVC': 8.46,
                        'DATA': 12.949,
                        'DBIX': 0.0168,
                        'DCN': 1280,
                        'DCT': 0.02,
                        'DDF': 342,
                        'DENT': 1000,
                        'DGB': 0.4,
                        'DGD': 0.01,
                        'DICE': 0.32,
                        'DLT': 0.26,
                        'DNT': 0.21,
                        'DOGE': 2,
                        'DOV': 34,
                        'DRPU': 24,
                        'DRT': 240,
                        'DSH': 0.017,
                        'EBET': 84,
                        'EBTC': 20,
                        'EBTCOLD': 6.6,
                        'ECAT': 14,
                        'EDG': 2,
                        'EDO': 2.9,
                        'EKO': 1136.36,
                        'ELE': 0.00172,
                        'ELM': 0.004,
                        'EMC': 0.03,
                        'MGO': 14,
                        'ENJ': 163,
                        'EOS': 1.5,
                        'ERO': 34,
                        'ETBS': 15,
                        'ETC': 0.002,
                        'ETP': 0.004,
                        'EVX': 5.4,
                        'EXN': 456,
                        'FCN': 0.000005,
                        'FRD': 65,
                        'FUEL': 123.00105,
                        'FUN': 202.9598309,
                        'FYN': 1.849,
                        'FYP': 66.13,
                        'GAME': 0.004,
                        'GNO': 0.0034,
                        'GUP': 4,
                        'GVT': 1.2,
                        'HSR': 0.04,
                        'HAC': 144,
                        'HDG': 7,
                        'HGT': 1082,
                        'HPC': 0.4,
                        'HVN': 120,
                        'ICN': 0.55,
                        'ICO': 34,
                        'ICOS': 0.35,
                        'IND': 76,
                        'INDI': 790,
                        'ITS': 15.0012,
                        'IXT': 11,
                        'KBR': 143,
                        'KICK': 112,
                        'KMD': 4,
                        'LA': 41,
                        'LEND': 388,
                        'LAT': 1.44,
                        'LIFE': 13000,
                        'LRC': 27,
                        'LSK': 0.3,
                        'LOC': 11.076,
                        'LUN': 0.34,
                        'MAID': 5,
                        'MANA': 143,
                        'MCAP': 5.44,
                        'MIPS': 43,
                        'MNE': 1.33,
                        'MSP': 121,
                        'MCO': 0.357,
                        'MTH': 92,
                        'MYB': 3.9,
                        'NDC': 165,
                        'NEBL': 0.04,
                        'NET': 3.96,
                        'NTO': 998,
                        'NGC': 2.368,
                        'NXC': 13.39,
                        'NXT': 3,
                        'OAX': 15,
                        'ODN': 0.004,
                        'OMG': 2,
                        'OPT': 335,
                        'ORME': 2.8,
                        'OTN': 0.57,
                        'PAY': 3.1,
                        'PIX': 96,
                        'PLBT': 0.33,
                        'PLR': 114,
                        'PLU': 0.87,
                        'POE': 784,
                        'POLL': 3.5,
                        'PPT': 2,
                        'PRE': 32,
                        'PRG': 39,
                        'PRO': 41,
                        'PRS': 60,
                        'PTOY': 0.5,
                        'QAU': 63,
                        'QCN': 0.03,
                        'QTUM': 0.04,
                        'QVT': 64,
                        'REP': 0.02,
                        'RKC': 15,
                        'RLC': 1.21,
                        'RVT': 14,
                        'SC': 30,
                        'SAN': 2.24,
                        'SBD': 0.03,
                        'SCL': 2.6,
                        'SISA': 1640,
                        'SKIN': 407,
                        'SWFTC': 352.94,
                        'SMART': 0.4,
                        'SMS': 0.0375,
                        'SNC': 36,
                        'SNGLS': 4,
                        'SNM': 48,
                        'SNT': 233,
                        'STAR': 0.144,
                        'STORM': 153.19,
                        'STEEM': 0.01,
                        'STRAT': 0.01,
                        'SPF': 14.4,
                        'STU': 14,
                        'STX': 11,
                        'SUB': 17,
                        'SUR': 3,
                        'SWT': 0.51,
                        'TAAS': 0.91,
                        'TBT': 2.37,
                        'TFL': 15,
                        'TIME': 0.03,
                        'TIX': 7.1,
                        'TKN': 1,
                        'TGT': 173,
                        'TKR': 84,
                        'TNT': 90,
                        'TRST': 1.6,
                        'TRX': 270,
                        'UET': 480,
                        'UGT': 15,
                        'UTT': 3,
                        'VEN': 14,
                        'VERI': 0.037,
                        'VIB': 50,
                        'VIBE': 145,
                        'VOISE': 618,
                        'WEALTH': 0.0168,
                        'WINGS': 2.4,
                        'WTC': 0.75,
                        'WRC': 48,
                        'XAUR': 3.23,
                        'XDN': 0.01,
                        'XEM': 15,
                        'XUC': 0.9,
                        'YOYOW': 140,
                        'ZAP': 24,
                        'ZRX': 23,
                        'ZSC': 191,
                    },
                    # deposits are free for all listed currencies
                    'deposit': {
                        'BTC': 0,
                        'ETH': 0,
                        'BCH': 0,
                        'USDT': 0,
                        'BTG': 0,
                        'LTC': 0,
                        'ZEC': 0,
                        'XMR': 0,
                        '1ST': 0,
                        'ADX': 0,
                        'AE': 0,
                        'AEON': 0,
                        'AIR': 0,
                        'AMP': 0,
                        'ANT': 0,
                        'ARDR': 0,
                        'ARN': 0,
                        'ART': 0,
                        'ATB': 0,
                        'ATL': 0,
                        'ATM': 0,
                        'ATS': 0,
                        'AVT': 0,
                        'BAS': 0,
                        'BCN': 0,
                        'BET': 0,
                        'BKB': 0,
                        'BMC': 0,
                        'BMT': 0,
                        'BNT': 0,
                        'BQX': 0,
                        'BTM': 0,
                        'BTX': 0,
                        'BUS': 0,
                        'CCT': 0,
                        'CDT': 0,
                        'CDX': 0,
                        'CFI': 0,
                        'CLD': 0,
                        'CND': 0,
                        'CNX': 0,
                        'COSS': 0,
                        'CSNO': 0,
                        'CTR': 0,
                        'CTX': 0,
                        'CVC': 0,
                        'DBIX': 0,
                        'DCN': 0,
                        'DCT': 0,
                        'DDF': 0,
                        'DENT': 0,
                        'DGB': 0,
                        'DGD': 0,
                        'DICE': 0,
                        'DLT': 0,
                        'DNT': 0,
                        'DOGE': 0,
                        'DOV': 0,
                        'DRPU': 0,
                        'DRT': 0,
                        'DSH': 0,
                        'EBET': 0,
                        'EBTC': 0,
                        'EBTCOLD': 0,
                        'ECAT': 0,
                        'EDG': 0,
                        'EDO': 0,
                        'ELE': 0,
                        'ELM': 0,
                        'EMC': 0,
                        'EMGO': 0,
                        'ENJ': 0,
                        'EOS': 0,
                        'ERO': 0,
                        'ETBS': 0,
                        'ETC': 0,
                        'ETP': 0,
                        'EVX': 0,
                        'EXN': 0,
                        'FRD': 0,
                        'FUEL': 0,
                        'FUN': 0,
                        'FYN': 0,
                        'FYP': 0,
                        'GNO': 0,
                        'GUP': 0,
                        'GVT': 0,
                        'HAC': 0,
                        'HDG': 0,
                        'HGT': 0,
                        'HPC': 0,
                        'HVN': 0,
                        'ICN': 0,
                        'ICO': 0,
                        'ICOS': 0,
                        'IND': 0,
                        'INDI': 0,
                        'ITS': 0,
                        'IXT': 0,
                        'KBR': 0,
                        'KICK': 0,
                        'LA': 0,
                        'LAT': 0,
                        'LIFE': 0,
                        'LRC': 0,
                        'LSK': 0,
                        'LUN': 0,
                        'MAID': 0,
                        'MANA': 0,
                        'MCAP': 0,
                        'MIPS': 0,
                        'MNE': 0,
                        'MSP': 0,
                        'MTH': 0,
                        'MYB': 0,
                        'NDC': 0,
                        'NEBL': 0,
                        'NET': 0,
                        'NTO': 0,
                        'NXC': 0,
                        'NXT': 0,
                        'OAX': 0,
                        'ODN': 0,
                        'OMG': 0,
                        'OPT': 0,
                        'ORME': 0,
                        'OTN': 0,
                        'PAY': 0,
                        'PIX': 0,
                        'PLBT': 0,
                        'PLR': 0,
                        'PLU': 0,
                        'POE': 0,
                        'POLL': 0,
                        'PPT': 0,
                        'PRE': 0,
                        'PRG': 0,
                        'PRO': 0,
                        'PRS': 0,
                        'PTOY': 0,
                        'QAU': 0,
                        'QCN': 0,
                        'QTUM': 0,
                        'QVT': 0,
                        'REP': 0,
                        'RKC': 0,
                        'RVT': 0,
                        'SAN': 0,
                        'SBD': 0,
                        'SCL': 0,
                        'SISA': 0,
                        'SKIN': 0,
                        'SMART': 0,
                        'SMS': 0,
                        'SNC': 0,
                        'SNGLS': 0,
                        'SNM': 0,
                        'SNT': 0,
                        'STEEM': 0,
                        'STRAT': 0,
                        'STU': 0,
                        'STX': 0,
                        'SUB': 0,
                        'SUR': 0,
                        'SWT': 0,
                        'TAAS': 0,
                        'TBT': 0,
                        'TFL': 0,
                        'TIME': 0,
                        'TIX': 0,
                        'TKN': 0,
                        'TKR': 0,
                        'TNT': 0,
                        'TRST': 0,
                        'TRX': 0,
                        'UET': 0,
                        'UGT': 0,
                        'VEN': 0,
                        'VERI': 0,
                        'VIB': 0,
                        'VIBE': 0,
                        'VOISE': 0,
                        'WEALTH': 0,
                        'WINGS': 0,
                        'WTC': 0,
                        'XAUR': 0,
                        'XDN': 0,
                        'XEM': 0,
                        'XUC': 0,
                        'YOYOW': 0,
                        'ZAP': 0,
                        'ZRX': 0,
                        'ZSC': 0,
                    },
                },
            },
            'options': {
                'defaultTimeInForce': 'FOK',
            },
            # exchange error codes -> ccxt exception classes
            'exceptions': {
                '1003': PermissionDenied,  # "Action is forbidden for self API key"
                '2010': InvalidOrder,  # "Quantity not a valid number"
                '2011': InvalidOrder,  # "Quantity too low"
                '2020': InvalidOrder,  # "Price not a valid number"
                '20002': OrderNotFound,  # canceling non-existent order
                '20001': InsufficientFunds,
            },
        })
    def fee_to_precision(self, symbol, fee):
        # Fees are always truncated to 8 decimal places, regardless of the
        # market's own precision settings (the `symbol` argument is unused).
        return self.decimal_to_precision(fee, TRUNCATE, 8, DECIMAL_PLACES)
async def fetch_markets(self, params={}):
response = await self.publicGetSymbol(params)
result = []
for i in range(0, len(response)):
market = response[i]
id = self.safe_string(market, 'id')
baseId = self.safe_string(market, 'baseCurrency')
quoteId = self.safe_string(market, 'quoteCurrency')
base = self.safe_currency_code(baseId)
quote = self.safe_currency_code(quoteId)
symbol = base + '/' + quote
lot = self.safe_float(market, 'quantityIncrement')
step = self.safe_float(market, 'tickSize')
precision = {
'price': self.precision_from_string(market['tickSize']),
# FIXME: for lots > 1 the following line returns 0
# 'amount': self.precision_from_string(market['quantityIncrement']),
'amount': -1 * int(math.log10(lot)),
}
taker = self.safe_float(market, 'takeLiquidityRate')
maker = self.safe_float(market, 'provideLiquidityRate')
result.append(self.extend(self.fees['trading'], {
'info': market,
'id': id,
'symbol': symbol,
'base': base,
'quote': quote,
'baseId': baseId,
'quoteId': quoteId,
'active': True,
'taker': taker,
'maker': maker,
'precision': precision,
'limits': {
'amount': {
'min': lot,
'max': None,
},
'price': {
'min': step,
'max': None,
},
'cost': {
'min': lot * step,
'max': None,
},
},
}))
return result
async def fetch_currencies(self, params={}):
response = await self.publicGetCurrency(params)
result = {}
for i in range(0, len(response)):
currency = response[i]
id = self.safe_string(currency, 'id')
# todo: will need to rethink the fees
# to add support for multiple withdrawal/deposit methods and
# differentiated fees for each particular method
precision = 8 # default precision, todo: fix "magic constants"
code = self.safe_currency_code(id)
payin = self.safe_value(currency, 'payinEnabled')
payout = self.safe_value(currency, 'payoutEnabled')
transfer = self.safe_value(currency, 'transferEnabled')
active = payin and payout and transfer
if 'disabled' in currency:
if currency['disabled']:
active = False
type = 'fiat'
if ('crypto' in list(currency.keys())) and currency['crypto']:
type = 'crypto'
name = self.safe_string(currency, 'fullName')
result[code] = {
'id': id,
'code': code,
'type': type,
'payin': payin,
'payout': payout,
'transfer': transfer,
'info': currency,
'name': name,
'active': active,
'fee': self.safe_float(currency, 'payoutFee'), # todo: redesign
'precision': precision,
'limits': {
'amount': {
'min': math.pow(10, -precision),
'max': math.pow(10, precision),
},
'price': {
'min': math.pow(10, -precision),
'max': math.pow(10, precision),
},
'cost': {
'min': None,
'max': None,
},
'withdraw': {
'min': None,
'max': math.pow(10, precision),
},
},
}
return result
async def fetch_trading_fee(self, symbol, params={}):
await self.load_markets()
market = self.market(symbol)
request = self.extend({
'symbol': market['id'],
}, self.omit(params, 'symbol'))
response = await self.privateGetTradingFeeSymbol(request)
#
# {
# takeLiquidityRate: '0.001',
# provideLiquidityRate: '-0.0001'
# }
#
return {
'info': response,
'maker': self.safe_float(response, 'provideLiquidityRate'),
'taker': self.safe_float(response, 'takeLiquidityRate'),
}
async def fetch_balance(self, params={}):
await self.load_markets()
type = self.safe_string(params, 'type', 'trading')
method = 'privateGet' + self.capitalize(type) + 'Balance'
query = self.omit(params, 'type')
response = await getattr(self, method)(query)
result = {'info': response}
for i in range(0, len(response)):
balance = response[i]
currencyId = self.safe_string(balance, 'currency')
code = self.safe_currency_code(currencyId)
account = self.account()
account['free'] = self.safe_float(balance, 'available')
account['used'] = self.safe_float(balance, 'reserved')
result[code] = account
return self.parse_balance(result)
def parse_ohlcv(self, ohlcv, market=None, timeframe='1d', since=None, limit=None):
timestamp = self.parse8601(ohlcv['timestamp'])
return [
timestamp,
float(ohlcv['open']),
float(ohlcv['max']),
float(ohlcv['min']),
float(ohlcv['close']),
float(ohlcv['volume']),
]
async def fetch_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}):
await self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
'period': self.timeframes[timeframe],
}
if since is not None:
request['from'] = self.iso8601(since)
if limit is not None:
request['limit'] = limit
response = await self.publicGetCandlesSymbol(self.extend(request, params))
return self.parse_ohlcvs(response, market, timeframe, since, limit)
async def fetch_order_book(self, symbol, limit=None, params={}):
await self.load_markets()
request = {
'symbol': self.market_id(symbol),
}
if limit is not None:
request['limit'] = limit # default = 100, 0 = unlimited
response = await self.publicGetOrderbookSymbol(self.extend(request, params))
return self.parse_order_book(response, None, 'bid', 'ask', 'price', 'size')
def parse_ticker(self, ticker, market=None):
timestamp = self.parse8601(ticker['timestamp'])
symbol = None
if market is not None:
symbol = market['symbol']
baseVolume = self.safe_float(ticker, 'volume')
quoteVolume = self.safe_float(ticker, 'volumeQuote')
open = self.safe_float(ticker, 'open')
last = self.safe_float(ticker, 'last')
change = None
percentage = None
average = None
if last is not None and open is not None:
change = last - open
average = self.sum(last, open) / 2
if open > 0:
percentage = change / open * 100
vwap = None
if quoteVolume is not None:
if baseVolume is not None:
if baseVolume > 0:
vwap = quoteVolume / baseVolume
return {
'symbol': symbol,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'high': self.safe_float(ticker, 'high'),
'low': self.safe_float(ticker, 'low'),
'bid': self.safe_float(ticker, 'bid'),
'bidVolume': None,
'ask': self.safe_float(ticker, 'ask'),
'askVolume': None,
'vwap': vwap,
'open': open,
'close': last,
'last': last,
'previousClose': None,
'change': change,
'percentage': percentage,
'average': average,
'baseVolume': baseVolume,
'quoteVolume': quoteVolume,
'info': ticker,
}
async def fetch_tickers(self, symbols=None, params={}):
await self.load_markets()
response = await self.publicGetTicker(params)
result = {}
for i in range(0, len(response)):
ticker = response[i]
marketId = self.safe_string(ticker, 'symbol')
if marketId is not None:
if marketId in self.markets_by_id:
market = self.markets_by_id[marketId]
symbol = market['symbol']
result[symbol] = self.parse_ticker(ticker, market)
else:
result[marketId] = self.parse_ticker(ticker)
return result
async def fetch_ticker(self, symbol, params={}):
await self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
}
response = await self.publicGetTickerSymbol(self.extend(request, params))
if 'message' in response:
raise ExchangeError(self.id + ' ' + response['message'])
return self.parse_ticker(response, market)
def parse_trade(self, trade, market=None):
#
# createMarketOrder
#
# { fee: "0.0004644",
# id: 386394956,
# price: "0.4644",
# quantity: "1",
# timestamp: "2018-10-25T16:41:44.780Z"}
#
# fetchTrades ...
#
# fetchMyTrades ...
#
timestamp = self.parse8601(trade['timestamp'])
symbol = None
marketId = self.safe_string(trade, 'symbol')
if marketId is not None:
if marketId in self.markets_by_id:
market = self.markets_by_id[marketId]
symbol = market['symbol']
else:
symbol = marketId
if symbol is None:
if market is not None:
symbol = market['symbol']
fee = None
feeCost = self.safe_float(trade, 'fee')
if feeCost is not None:
feeCurrency = market['quote'] if market else None
fee = {
'cost': feeCost,
'currency': feeCurrency,
}
# we use clientOrderId as the order id with HitBTC intentionally
# because most of their endpoints will require clientOrderId
# explained here: https://github.com/ccxt/ccxt/issues/5674
orderId = self.safe_string(trade, 'clientOrderId')
price = self.safe_float(trade, 'price')
amount = self.safe_float(trade, 'quantity')
cost = price * amount
side = self.safe_string(trade, 'side')
id = self.safe_string(trade, 'id')
return {
'info': trade,
'id': id,
'order': orderId,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'symbol': symbol,
'type': None,
'side': side,
'takerOrMaker': None,
'price': price,
'amount': amount,
'cost': cost,
'fee': fee,
}
async def fetch_transactions(self, code=None, since=None, limit=None, params={}):
await self.load_markets()
currency = None
request = {}
if code is not None:
currency = self.currency(code)
request['asset'] = currency['id']
if since is not None:
request['startTime'] = since
response = await self.privateGetAccountTransactions(self.extend(request, params))
return self.parseTransactions(response, currency, since, limit)
def parse_transaction(self, transaction, currency=None):
#
# {
# id: 'd53ee9df-89bf-4d09-886e-849f8be64647',
# index: 1044718371,
# type: 'payout',
# status: 'success',
# currency: 'ETH',
# amount: '4.522683200000000000000000',
# createdAt: '2018-06-07T00:43:32.426Z',
# updatedAt: '2018-06-07T00:45:36.447Z',
# hash: '0x973e5683dfdf80a1fb1e0b96e19085b6489221d2ddf864daa46903c5ec283a0f',
# address: '0xC5a59b21948C1d230c8C54f05590000Eb3e1252c',
# fee: '0.00958',
# },
# {
# id: 'e6c63331-467e-4922-9edc-019e75d20ba3',
# index: 1044714672,
# type: 'exchangeToBank',
# status: 'success',
# currency: 'ETH',
# amount: '4.532263200000000000',
# createdAt: '2018-06-07T00:42:39.543Z',
# updatedAt: '2018-06-07T00:42:39.683Z',
# },
# {
# id: '3b052faa-bf97-4636-a95c-3b5260015a10',
# index: 1009280164,
# type: 'bankToExchange',
# status: 'success',
# currency: 'CAS',
# amount: '104797.875800000000000000',
# createdAt: '2018-05-19T02:34:36.750Z',
# updatedAt: '2018-05-19T02:34:36.857Z',
# },
# {
# id: 'd525249f-7498-4c81-ba7b-b6ae2037dc08',
# index: 1009279948,
# type: 'payin',
# status: 'success',
# currency: 'CAS',
# amount: '104797.875800000000000000',
# createdAt: '2018-05-19T02:30:16.698Z',
# updatedAt: '2018-05-19T02:34:28.159Z',
# hash: '0xa6530e1231de409cf1f282196ed66533b103eac1df2aa4a7739d56b02c5f0388',
# address: '0xd53ed559a6d963af7cb3f3fcd0e7ca499054db8b',
# }
#
# {
# "id": "4f351f4f-a8ee-4984-a468-189ed590ddbd",
# "index": 3112719565,
# "type": "withdraw",
# "status": "success",
# "currency": "BCHOLD",
# "amount": "0.02423133",
# "createdAt": "2019-07-16T16:52:04.494Z",
# "updatedAt": "2019-07-16T16:54:07.753Z"
# }
id = self.safe_string(transaction, 'id')
timestamp = self.parse8601(self.safe_string(transaction, 'createdAt'))
updated = self.parse8601(self.safe_string(transaction, 'updatedAt'))
currencyId = self.safe_string(transaction, 'currency')
code = self.safe_currency_code(currencyId, currency)
status = self.parse_transaction_status(self.safe_string(transaction, 'status'))
amount = self.safe_float(transaction, 'amount')
address = self.safe_string(transaction, 'address')
txid = self.safe_string(transaction, 'hash')
fee = None
feeCost = self.safe_float(transaction, 'fee')
if feeCost is not None:
fee = {
'cost': feeCost,
'currency': code,
}
type = self.parse_transaction_type(self.safe_string(transaction, 'type'))
return {
'info': transaction,
'id': id,
'txid': txid,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'address': address,
'tag': None,
'type': type,
'amount': amount,
'currency': code,
'status': status,
'updated': updated,
'fee': fee,
}
def parse_transaction_status(self, status):
statuses = {
'pending': 'pending',
'failed': 'failed',
'success': 'ok',
}
return self.safe_string(statuses, status, status)
def parse_transaction_type(self, type):
types = {
'payin': 'deposit',
'payout': 'withdrawal',
'withdraw': 'withdrawal',
}
return self.safe_string(types, type, type)
async def fetch_trades(self, symbol, since=None, limit=None, params={}):
await self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
}
if limit is not None:
request['limit'] = limit
if since is not None:
request['sort'] = 'ASC'
request['from'] = self.iso8601(since)
response = await self.publicGetTradesSymbol(self.extend(request, params))
return self.parse_trades(response, market, since, limit)
async def create_order(self, symbol, type, side, amount, price=None, params={}):
await self.load_markets()
market = self.market(symbol)
# we use clientOrderId as the order id with HitBTC intentionally
# because most of their endpoints will require clientOrderId
# explained here: https://github.com/ccxt/ccxt/issues/5674
# their max accepted length is 32 characters
uuid = self.uuid()
parts = uuid.split('-')
clientOrderId = ''.join(parts)
clientOrderId = clientOrderId[0:32]
amount = float(amount)
request = {
'clientOrderId': clientOrderId,
'symbol': market['id'],
'side': side,
'quantity': self.amount_to_precision(symbol, amount),
'type': type,
}
if type == 'limit':
request['price'] = self.price_to_precision(symbol, price)
else:
request['timeInForce'] = self.options['defaultTimeInForce']
response = await self.privatePostOrder(self.extend(request, params))
order = self.parse_order(response)
if order['status'] == 'rejected':
raise InvalidOrder(self.id + ' order was rejected by the exchange ' + self.json(order))
id = order['id']
self.orders[id] = order
return order
async def edit_order(self, id, symbol, type, side, amount=None, price=None, params={}):
await self.load_markets()
# we use clientOrderId as the order id with HitBTC intentionally
# because most of their endpoints will require clientOrderId
# explained here: https://github.com/ccxt/ccxt/issues/5674
# their max accepted length is 32 characters
uuid = self.uuid()
parts = uuid.split('-')
requestClientId = ''.join(parts)
requestClientId = requestClientId[0:32]
request = {
'clientOrderId': id,
'requestClientId': requestClientId,
}
if amount is not None:
request['quantity'] = self.amount_to_precision(symbol, amount)
if price is not None:
request['price'] = self.price_to_precision(symbol, price)
response = await self.privatePatchOrderClientOrderId(self.extend(request, params))
order = self.parse_order(response)
self.orders[order['id']] = order
return order
async def cancel_order(self, id, symbol=None, params={}):
await self.load_markets()
# we use clientOrderId as the order id with HitBTC intentionally
# because most of their endpoints will require clientOrderId
# explained here: https://github.com/ccxt/ccxt/issues/5674
request = {
'clientOrderId': id,
}
response = await self.privateDeleteOrderClientOrderId(self.extend(request, params))
return self.parse_order(response)
def parse_order_status(self, status):
statuses = {
'new': 'open',
'suspended': 'open',
'partiallyFilled': 'open',
'filled': 'closed',
'canceled': 'canceled',
'expired': 'failed',
}
return self.safe_string(statuses, status, status)
def parse_order(self, order, market=None):
#
# createMarketOrder
#
# {clientOrderId: "fe36aa5e190149bf9985fb673bbb2ea0",
# createdAt: "2018-10-25T16:41:44.780Z",
# cumQuantity: "1",
# id: "66799540063",
# quantity: "1",
# side: "sell",
# status: "filled",
# symbol: "XRPUSDT",
# timeInForce: "FOK",
# tradesReport: [{ fee: "0.0004644",
# id: 386394956,
# price: "0.4644",
# quantity: "1",
# timestamp: "2018-10-25T16:41:44.780Z"}],
# type: "market",
# updatedAt: "2018-10-25T16:41:44.780Z" }
#
created = self.parse8601(self.safe_string(order, 'createdAt'))
updated = self.parse8601(self.safe_string(order, 'updatedAt'))
marketId = self.safe_string(order, 'symbol')
symbol = None
if marketId is not None:
if marketId in self.markets_by_id:
market = self.markets_by_id[marketId]
symbol = market['symbol']
else:
symbol = marketId
if symbol is None:
if market is not None:
symbol = market['id']
amount = self.safe_float(order, 'quantity')
filled = self.safe_float(order, 'cumQuantity')
status = self.parse_order_status(self.safe_string(order, 'status'))
# we use clientOrderId as the order id with HitBTC intentionally
# because most of their endpoints will require clientOrderId
# explained here: https://github.com/ccxt/ccxt/issues/5674
id = self.safe_string(order, 'clientOrderId')
price = self.safe_float(order, 'price')
if price is None:
if id in self.orders:
price = self.orders[id]['price']
remaining = None
cost = None
if amount is not None:
if filled is not None:
remaining = amount - filled
if price is not None:
cost = filled * price
type = self.safe_string(order, 'type')
side = self.safe_string(order, 'side')
trades = self.safe_value(order, 'tradesReport')
fee = None
average = None
if trades is not None:
trades = self.parse_trades(trades, market)
feeCost = None
numTrades = len(trades)
tradesCost = 0
for i in range(0, numTrades):
if feeCost is None:
feeCost = 0
tradesCost = self.sum(tradesCost, trades[i]['cost'])
feeCost = self.sum(feeCost, trades[i]['fee']['cost'])
cost = tradesCost
if (filled is not None) and (filled > 0):
average = cost / filled
if type == 'market':
if price is None:
price = average
if feeCost is not None:
fee = {
'cost': feeCost,
'currency': market['quote'],
}
return {
'id': id,
'timestamp': created,
'datetime': self.iso8601(created),
'lastTradeTimestamp': updated,
'status': status,
'symbol': symbol,
'type': type,
'side': side,
'price': price,
'average': average,
'amount': amount,
'cost': cost,
'filled': filled,
'remaining': remaining,
'fee': fee,
'trades': trades,
'info': order,
}
async def fetch_order(self, id, symbol=None, params={}):
await self.load_markets()
# we use clientOrderId as the order id with HitBTC intentionally
# because most of their endpoints will require clientOrderId
# explained here: https://github.com/ccxt/ccxt/issues/5674
request = {
'clientOrderId': id,
}
response = await self.privateGetHistoryOrder(self.extend(request, params))
numOrders = len(response)
if numOrders > 0:
return self.parse_order(response[0])
raise OrderNotFound(self.id + ' order ' + id + ' not found')
async def fetch_open_order(self, id, symbol=None, params={}):
await self.load_markets()
# we use clientOrderId as the order id with HitBTC intentionally
# because most of their endpoints will require clientOrderId
# explained here: https://github.com/ccxt/ccxt/issues/5674
request = {
'clientOrderId': id,
}
response = await self.privateGetOrderClientOrderId(self.extend(request, params))
return self.parse_order(response)
async def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}):
await self.load_markets()
market = None
request = {}
if symbol is not None:
market = self.market(symbol)
request['symbol'] = market['id']
response = await self.privateGetOrder(self.extend(request, params))
return self.parse_orders(response, market, since, limit)
async def fetch_closed_orders(self, symbol=None, since=None, limit=None, params={}):
await self.load_markets()
market = None
request = {}
if symbol is not None:
market = self.market(symbol)
request['symbol'] = market['id']
if limit is not None:
request['limit'] = limit
if since is not None:
request['from'] = self.iso8601(since)
response = await self.privateGetHistoryOrder(self.extend(request, params))
parsedOrders = self.parse_orders(response, market)
orders = []
for i in range(0, len(parsedOrders)):
order = parsedOrders[i]
status = order['status']
if (status == 'closed') or (status == 'canceled'):
orders.append(order)
return self.filter_by_since_limit(orders, since, limit)
async def fetch_my_trades(self, symbol=None, since=None, limit=None, params={}):
await self.load_markets()
request = {
# 'symbol': 'BTC/USD', # optional
# 'sort': 'DESC', # or 'ASC'
# 'by': 'timestamp', # or 'id' String timestamp by default, or id
# 'from': 'Datetime or Number', # ISO 8601
# 'till': 'Datetime or Number',
# 'limit': 100,
# 'offset': 0,
}
market = None
if symbol is not None:
market = self.market(symbol)
request['symbol'] = market['id']
if since is not None:
request['from'] = self.iso8601(since)
if limit is not None:
request['limit'] = limit
response = await self.privateGetHistoryTrades(self.extend(request, params))
#
# [
# {
# "id": 9535486,
# "clientOrderId": "f8dbaab336d44d5ba3ff578098a68454",
# "orderId": 816088377,
# "symbol": "ETHBTC",
# "side": "sell",
# "quantity": "0.061",
# "price": "0.045487",
# "fee": "0.000002775",
# "timestamp": "2017-05-17T12:32:57.848Z"
# },
# {
# "id": 9535437,
# "clientOrderId": "27b9bfc068b44194b1f453c7af511ed6",
# "orderId": 816088021,
# "symbol": "ETHBTC",
# "side": "buy",
# "quantity": "0.038",
# "price": "0.046000",
# "fee": "-0.000000174",
# "timestamp": "2017-05-17T12:30:57.848Z"
# }
# ]
#
return self.parse_trades(response, market, since, limit)
async def fetch_order_trades(self, id, symbol=None, since=None, limit=None, params={}):
# The id needed here is the exchange's id, and not the clientOrderID,
# which is the id that is stored in the unified order id
# To get the exchange's id you need to grab it from order['info']['id']
await self.load_markets()
market = None
if symbol is not None:
market = self.market(symbol)
request = {
'id': id,
}
response = await self.privateGetHistoryOrderIdTrades(self.extend(request, params))
numOrders = len(response)
if numOrders > 0:
return self.parse_trades(response, market, since, limit)
raise OrderNotFound(self.id + ' order ' + id + ' not found, ' + self.id + '.fetchOrderTrades() requires an exchange-specific order id, you need to grab it from order["info"]["id"]')
async def create_deposit_address(self, code, params={}):
await self.load_markets()
currency = self.currency(code)
request = {
'currency': currency['id'],
}
response = await self.privatePostAccountCryptoAddressCurrency(self.extend(request, params))
address = self.safe_string(response, 'address')
self.check_address(address)
tag = self.safe_string(response, 'paymentId')
return {
'currency': currency,
'address': address,
'tag': tag,
'info': response,
}
async def fetch_deposit_address(self, code, params={}):
await self.load_markets()
currency = self.currency(code)
request = {
'currency': currency['id'],
}
response = await self.privateGetAccountCryptoAddressCurrency(self.extend(request, params))
address = self.safe_string(response, 'address')
self.check_address(address)
tag = self.safe_string(response, 'paymentId')
return {
'currency': currency['code'],
'address': address,
'tag': tag,
'info': response,
}
async def withdraw(self, code, amount, address, tag=None, params={}):
await self.load_markets()
self.check_address(address)
currency = self.currency(code)
request = {
'currency': currency['id'],
'amount': float(amount),
'address': address,
}
if tag:
request['paymentId'] = tag
response = await self.privatePostAccountCryptoWithdraw(self.extend(request, params))
return {
'info': response,
'id': response['id'],
}
    def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
        # Build the request for a public or private endpoint.  Public calls
        # go under /api/2/public/... with query-string params; private calls
        # use HTTP Basic auth(apiKey:secret) and a JSON body for non-GET
        # requests.
        url = '/api/' + self.version + '/'
        query = self.omit(params, self.extract_params(path))
        if api == 'public':
            url += api + '/' + self.implode_params(path, params)
            if query:
                url += '?' + self.urlencode(query)
        else:
            self.check_required_credentials()
            url += self.implode_params(path, params)
            if method == 'GET':
                if query:
                    url += '?' + self.urlencode(query)
            elif query:
                # non-GET private requests carry the params as a JSON body
                body = self.json(query)
            payload = self.encode(self.apiKey + ':' + self.secret)
            auth = base64.b64encode(payload)
            headers = {
                'Authorization': 'Basic ' + self.decode(auth),
                'Content-Type': 'application/json',
            }
        url = self.urls['api'] + url
        return {'url': url, 'method': method, 'body': body, 'headers': headers}
def handle_errors(self, code, reason, url, method, headers, body, response, requestHeaders, requestBody):
if response is None:
return
if code >= 400:
feedback = self.id + ' ' + body
# {"code":504,"message":"Gateway Timeout","description":""}
if (code == 503) or (code == 504):
raise ExchangeNotAvailable(feedback)
# {"error":{"code":20002,"message":"Order not found","description":""}}
if body[0] == '{':
if 'error' in response:
code = self.safe_string(response['error'], 'code')
exceptions = self.exceptions
if code in exceptions:
raise exceptions[code](feedback)
message = self.safe_string(response['error'], 'message')
if message == 'Duplicate clientOrderId':
raise InvalidOrder(feedback)
raise ExchangeError(feedback)
| 38.910523
| 189
| 0.411237
|
rt.hitbtc import hitbtc
import base64
import math
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import PermissionDenied
from ccxt.base.errors import InsufficientFunds
from ccxt.base.errors import InvalidOrder
from ccxt.base.errors import OrderNotFound
from ccxt.base.errors import ExchangeNotAvailable
from ccxt.base.decimal_to_precision import TRUNCATE
from ccxt.base.decimal_to_precision import DECIMAL_PLACES
class hitbtc2 (hitbtc):
def describe(self):
return self.deep_extend(super(hitbtc2, self).describe(), {
'id': 'hitbtc2',
'name': 'HitBTC',
'countries': ['HK'],
'rateLimit': 1500,
'version': '2',
'has': {
'createDepositAddress': True,
'fetchDepositAddress': True,
'CORS': True,
'editOrder': True,
'fetchCurrencies': True,
'fetchOHLCV': True,
'fetchTickers': True,
'fetchOrder': True,
'fetchOrders': False,
'fetchOpenOrders': True,
'fetchClosedOrders': True,
'fetchMyTrades': True,
'withdraw': True,
'fetchOrderTrades': False,
'fetchDeposits': False,
'fetchWithdrawals': False,
'fetchTransactions': True,
'fetchTradingFee': True,
},
'timeframes': {
'1m': 'M1',
'3m': 'M3',
'5m': 'M5',
'15m': 'M15',
'30m': 'M30',
'1h': 'H1',
'4h': 'H4',
'1d': 'D1',
'1w': 'D7',
'1M': '1M',
},
'urls': {
'logo': 'https://user-images.githubusercontent.com/1294454/27766555-8eaec20e-5edc-11e7-9c5b-6dc69fc42f5e.jpg',
'api': 'https://api.hitbtc.com',
'www': 'https://hitbtc.com',
'referral': 'https://hitbtc.com/?ref_id=5a5d39a65d466',
'doc': [
'https://api.hitbtc.com',
'https://github.com/hitbtc-com/hitbtc-api/blob/master/APIv2.md',
],
'fees': [
'https://hitbtc.com/fees-and-limits',
'https://support.hitbtc.com/hc/en-us/articles/115005148605-Fees-and-limits',
],
},
'api': {
'public': {
'get': [
'symbol',
'symbol/{symbol}',
'currency',
'currency/{currency}',
'ticker',
'ticker/{symbol}',
'trades/{symbol}',
'orderbook/{symbol}',
'candles/{symbol}',
],
},
'private': {
'get': [
'order',
'order/{clientOrderId}',
'trading/balance',
'trading/fee/all',
'trading/fee/{symbol}',
'history/trades',
'history/order',
'history/order/{id}/trades',
'account/balance',
'account/transactions',
'account/transactions/{id}',
'account/crypto/address/{currency}',
],
'post': [
'order',
'account/crypto/withdraw',
'account/crypto/address/{currency}',
'account/transfer',
],
'put': [
'order/{clientOrderId}',
'account/crypto/withdraw/{id}',
],
'delete': [
'order',
'order/{clientOrderId}',
'account/crypto/withdraw/{id}',
],
'patch': [
'order/{clientOrderId}',
],
},
},
'fees': {
'trading': {
'tierBased': False,
'percentage': True,
'maker': 0.1 / 100,
'taker': 0.2 / 100,
},
'funding': {
'tierBased': False,
'percentage': False,
'withdraw': {
'BTC': 0.001,
'BCC': 0.0018,
'ETH': 0.00958,
'BCH': 0.0018,
'USDT': 100,
'DASH': 0.03,
'BTG': 0.0005,
'XRP': 0.509,
'LTC': 0.003,
'ZEC': 0.0001,
'XMR': 0.09,
'1ST': 0.84,
'ADX': 5.7,
'AE': 6.7,
'AEON': 0.01006,
'AIR': 565,
'AMM': 14,
'AMP': 342,
'ANT': 6.7,
'ARDR': 1,
'ARN': 18.5,
'ART': 26,
'ATB': 0.0004,
'ATL': 27,
'ATM': 504,
'ATS': 860,
'AVT': 1.9,
'BAS': 113,
'BCN': 0.1,
'BET': 124,
'BKB': 46,
'BMC': 32,
'BMT': 100,
'BNT': 2.57,
'BQX': 4.7,
'BTCA': 351.21,
'BTM': 40,
'BTX': 0.04,
'BUS': 0.004,
'CAPP': 97,
'CCT': 6,
'CDT': 100,
'CDX': 30,
'CFI': 61,
'CL': 13.85,
'CLD': 0.88,
'CND': 574,
'CNX': 0.04,
'COSS': 65,
'CPAY': 5.487,
'CSNO': 16,
'CTR': 15,
'CTX': 146,
'CVC': 8.46,
'DATA': 12.949,
'DBIX': 0.0168,
'DCN': 1280,
'DCT': 0.02,
'DDF': 342,
'DENT': 1000,
'DGB': 0.4,
'DGD': 0.01,
'DICE': 0.32,
'DLT': 0.26,
'DNT': 0.21,
'DOGE': 2,
'DOV': 34,
'DRPU': 24,
'DRT': 240,
'DSH': 0.017,
'EBET': 84,
'EBTC': 20,
'EBTCOLD': 6.6,
'ECAT': 14,
'EDG': 2,
'EDO': 2.9,
'EKO': 1136.36,
'ELE': 0.00172,
'ELM': 0.004,
'EMC': 0.03,
'MGO': 14,
'ENJ': 163,
'EOS': 1.5,
'ERO': 34,
'ETBS': 15,
'ETC': 0.002,
'ETP': 0.004,
'EVX': 5.4,
'EXN': 456,
'FCN': 0.000005,
'FRD': 65,
'FUEL': 123.00105,
'FUN': 202.9598309,
'FYN': 1.849,
'FYP': 66.13,
'GAME': 0.004,
'GNO': 0.0034,
'GUP': 4,
'GVT': 1.2,
'HSR': 0.04,
'HAC': 144,
'HDG': 7,
'HGT': 1082,
'HPC': 0.4,
'HVN': 120,
'ICN': 0.55,
'ICO': 34,
'ICOS': 0.35,
'IND': 76,
'INDI': 790,
'ITS': 15.0012,
'IXT': 11,
'KBR': 143,
'KICK': 112,
'KMD': 4,
'LA': 41,
'LEND': 388,
'LAT': 1.44,
'LIFE': 13000,
'LRC': 27,
'LSK': 0.3,
'LOC': 11.076,
'LUN': 0.34,
'MAID': 5,
'MANA': 143,
'MCAP': 5.44,
'MIPS': 43,
'MNE': 1.33,
'MSP': 121,
'MCO': 0.357,
'MTH': 92,
'MYB': 3.9,
'NDC': 165,
'NEBL': 0.04,
'NET': 3.96,
'NTO': 998,
'NGC': 2.368,
'NXC': 13.39,
'NXT': 3,
'OAX': 15,
'ODN': 0.004,
'OMG': 2,
'OPT': 335,
'ORME': 2.8,
'OTN': 0.57,
'PAY': 3.1,
'PIX': 96,
'PLBT': 0.33,
'PLR': 114,
'PLU': 0.87,
'POE': 784,
'POLL': 3.5,
'PPT': 2,
'PRE': 32,
'PRG': 39,
'PRO': 41,
'PRS': 60,
'PTOY': 0.5,
'QAU': 63,
'QCN': 0.03,
'QTUM': 0.04,
'QVT': 64,
'REP': 0.02,
'RKC': 15,
'RLC': 1.21,
'RVT': 14,
'SC': 30,
'SAN': 2.24,
'SBD': 0.03,
'SCL': 2.6,
'SISA': 1640,
'SKIN': 407,
'SWFTC': 352.94,
'SMART': 0.4,
'SMS': 0.0375,
'SNC': 36,
'SNGLS': 4,
'SNM': 48,
'SNT': 233,
'STAR': 0.144,
'STORM': 153.19,
'STEEM': 0.01,
'STRAT': 0.01,
'SPF': 14.4,
'STU': 14,
'STX': 11,
'SUB': 17,
'SUR': 3,
'SWT': 0.51,
'TAAS': 0.91,
'TBT': 2.37,
'TFL': 15,
'TIME': 0.03,
'TIX': 7.1,
'TKN': 1,
'TGT': 173,
'TKR': 84,
'TNT': 90,
'TRST': 1.6,
'TRX': 270,
'UET': 480,
'UGT': 15,
'UTT': 3,
'VEN': 14,
'VERI': 0.037,
'VIB': 50,
'VIBE': 145,
'VOISE': 618,
'WEALTH': 0.0168,
'WINGS': 2.4,
'WTC': 0.75,
'WRC': 48,
'XAUR': 3.23,
'XDN': 0.01,
'XEM': 15,
'XUC': 0.9,
'YOYOW': 140,
'ZAP': 24,
'ZRX': 23,
'ZSC': 191,
},
'deposit': {
'BTC': 0,
'ETH': 0,
'BCH': 0,
'USDT': 0,
'BTG': 0,
'LTC': 0,
'ZEC': 0,
'XMR': 0,
'1ST': 0,
'ADX': 0,
'AE': 0,
'AEON': 0,
'AIR': 0,
'AMP': 0,
'ANT': 0,
'ARDR': 0,
'ARN': 0,
'ART': 0,
'ATB': 0,
'ATL': 0,
'ATM': 0,
'ATS': 0,
'AVT': 0,
'BAS': 0,
'BCN': 0,
'BET': 0,
'BKB': 0,
'BMC': 0,
'BMT': 0,
'BNT': 0,
'BQX': 0,
'BTM': 0,
'BTX': 0,
'BUS': 0,
'CCT': 0,
'CDT': 0,
'CDX': 0,
'CFI': 0,
'CLD': 0,
'CND': 0,
'CNX': 0,
'COSS': 0,
'CSNO': 0,
'CTR': 0,
'CTX': 0,
'CVC': 0,
'DBIX': 0,
'DCN': 0,
'DCT': 0,
'DDF': 0,
'DENT': 0,
'DGB': 0,
'DGD': 0,
'DICE': 0,
'DLT': 0,
'DNT': 0,
'DOGE': 0,
'DOV': 0,
'DRPU': 0,
'DRT': 0,
'DSH': 0,
'EBET': 0,
'EBTC': 0,
'EBTCOLD': 0,
'ECAT': 0,
'EDG': 0,
'EDO': 0,
'ELE': 0,
'ELM': 0,
'EMC': 0,
'EMGO': 0,
'ENJ': 0,
'EOS': 0,
'ERO': 0,
'ETBS': 0,
'ETC': 0,
'ETP': 0,
'EVX': 0,
'EXN': 0,
'FRD': 0,
'FUEL': 0,
'FUN': 0,
'FYN': 0,
'FYP': 0,
'GNO': 0,
'GUP': 0,
'GVT': 0,
'HAC': 0,
'HDG': 0,
'HGT': 0,
'HPC': 0,
'HVN': 0,
'ICN': 0,
'ICO': 0,
'ICOS': 0,
'IND': 0,
'INDI': 0,
'ITS': 0,
'IXT': 0,
'KBR': 0,
'KICK': 0,
'LA': 0,
'LAT': 0,
'LIFE': 0,
'LRC': 0,
'LSK': 0,
'LUN': 0,
'MAID': 0,
'MANA': 0,
'MCAP': 0,
'MIPS': 0,
'MNE': 0,
'MSP': 0,
'MTH': 0,
'MYB': 0,
'NDC': 0,
'NEBL': 0,
'NET': 0,
'NTO': 0,
'NXC': 0,
'NXT': 0,
'OAX': 0,
'ODN': 0,
'OMG': 0,
'OPT': 0,
'ORME': 0,
'OTN': 0,
'PAY': 0,
'PIX': 0,
'PLBT': 0,
'PLR': 0,
'PLU': 0,
'POE': 0,
'POLL': 0,
'PPT': 0,
'PRE': 0,
'PRG': 0,
'PRO': 0,
'PRS': 0,
'PTOY': 0,
'QAU': 0,
'QCN': 0,
'QTUM': 0,
'QVT': 0,
'REP': 0,
'RKC': 0,
'RVT': 0,
'SAN': 0,
'SBD': 0,
'SCL': 0,
'SISA': 0,
'SKIN': 0,
'SMART': 0,
'SMS': 0,
'SNC': 0,
'SNGLS': 0,
'SNM': 0,
'SNT': 0,
'STEEM': 0,
'STRAT': 0,
'STU': 0,
'STX': 0,
'SUB': 0,
'SUR': 0,
'SWT': 0,
'TAAS': 0,
'TBT': 0,
'TFL': 0,
'TIME': 0,
'TIX': 0,
'TKN': 0,
'TKR': 0,
'TNT': 0,
'TRST': 0,
'TRX': 0,
'UET': 0,
'UGT': 0,
'VEN': 0,
'VERI': 0,
'VIB': 0,
'VIBE': 0,
'VOISE': 0,
'WEALTH': 0,
'WINGS': 0,
'WTC': 0,
'XAUR': 0,
'XDN': 0,
'XEM': 0,
'XUC': 0,
'YOYOW': 0,
'ZAP': 0,
'ZRX': 0,
'ZSC': 0,
},
},
},
'options': {
'defaultTimeInForce': 'FOK',
},
'exceptions': {
'1003': PermissionDenied,
'2010': InvalidOrder,
'2011': InvalidOrder,
'2020': InvalidOrder,
'20002': OrderNotFound,
'20001': InsufficientFunds,
},
})
    def fee_to_precision(self, symbol, fee):
        """Truncate a fee value to 8 decimal places (exchange-wide fee precision)."""
        return self.decimal_to_precision(fee, TRUNCATE, 8, DECIMAL_PLACES)
async def fetch_markets(self, params={}):
response = await self.publicGetSymbol(params)
result = []
for i in range(0, len(response)):
market = response[i]
id = self.safe_string(market, 'id')
baseId = self.safe_string(market, 'baseCurrency')
quoteId = self.safe_string(market, 'quoteCurrency')
base = self.safe_currency_code(baseId)
quote = self.safe_currency_code(quoteId)
symbol = base + '/' + quote
lot = self.safe_float(market, 'quantityIncrement')
step = self.safe_float(market, 'tickSize')
precision = {
'price': self.precision_from_string(market['tickSize']),
'amount': -1 * int(math.log10(lot)),
}
taker = self.safe_float(market, 'takeLiquidityRate')
maker = self.safe_float(market, 'provideLiquidityRate')
result.append(self.extend(self.fees['trading'], {
'info': market,
'id': id,
'symbol': symbol,
'base': base,
'quote': quote,
'baseId': baseId,
'quoteId': quoteId,
'active': True,
'taker': taker,
'maker': maker,
'precision': precision,
'limits': {
'amount': {
'min': lot,
'max': None,
},
'price': {
'min': step,
'max': None,
},
'cost': {
'min': lot * step,
'max': None,
},
},
}))
return result
async def fetch_currencies(self, params={}):
response = await self.publicGetCurrency(params)
result = {}
for i in range(0, len(response)):
currency = response[i]
id = self.safe_string(currency, 'id')
precision = 8
code = self.safe_currency_code(id)
payin = self.safe_value(currency, 'payinEnabled')
payout = self.safe_value(currency, 'payoutEnabled')
transfer = self.safe_value(currency, 'transferEnabled')
active = payin and payout and transfer
if 'disabled' in currency:
if currency['disabled']:
active = False
type = 'fiat'
if ('crypto' in list(currency.keys())) and currency['crypto']:
type = 'crypto'
name = self.safe_string(currency, 'fullName')
result[code] = {
'id': id,
'code': code,
'type': type,
'payin': payin,
'payout': payout,
'transfer': transfer,
'info': currency,
'name': name,
'active': active,
'fee': self.safe_float(currency, 'payoutFee'),
'precision': precision,
'limits': {
'amount': {
'min': math.pow(10, -precision),
'max': math.pow(10, precision),
},
'price': {
'min': math.pow(10, -precision),
'max': math.pow(10, precision),
},
'cost': {
'min': None,
'max': None,
},
'withdraw': {
'min': None,
'max': math.pow(10, precision),
},
},
}
return result
async def fetch_trading_fee(self, symbol, params={}):
await self.load_markets()
market = self.market(symbol)
request = self.extend({
'symbol': market['id'],
}, self.omit(params, 'symbol'))
response = await self.privateGetTradingFeeSymbol(request)
return {
'info': response,
'maker': self.safe_float(response, 'provideLiquidityRate'),
'taker': self.safe_float(response, 'takeLiquidityRate'),
}
async def fetch_balance(self, params={}):
await self.load_markets()
type = self.safe_string(params, 'type', 'trading')
method = 'privateGet' + self.capitalize(type) + 'Balance'
query = self.omit(params, 'type')
response = await getattr(self, method)(query)
result = {'info': response}
for i in range(0, len(response)):
balance = response[i]
currencyId = self.safe_string(balance, 'currency')
code = self.safe_currency_code(currencyId)
account = self.account()
account['free'] = self.safe_float(balance, 'available')
account['used'] = self.safe_float(balance, 'reserved')
result[code] = account
return self.parse_balance(result)
def parse_ohlcv(self, ohlcv, market=None, timeframe='1d', since=None, limit=None):
timestamp = self.parse8601(ohlcv['timestamp'])
return [
timestamp,
float(ohlcv['open']),
float(ohlcv['max']),
float(ohlcv['min']),
float(ohlcv['close']),
float(ohlcv['volume']),
]
async def fetch_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}):
await self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
'period': self.timeframes[timeframe],
}
if since is not None:
request['from'] = self.iso8601(since)
if limit is not None:
request['limit'] = limit
response = await self.publicGetCandlesSymbol(self.extend(request, params))
return self.parse_ohlcvs(response, market, timeframe, since, limit)
async def fetch_order_book(self, symbol, limit=None, params={}):
await self.load_markets()
request = {
'symbol': self.market_id(symbol),
}
if limit is not None:
request['limit'] = limit
response = await self.publicGetOrderbookSymbol(self.extend(request, params))
return self.parse_order_book(response, None, 'bid', 'ask', 'price', 'size')
def parse_ticker(self, ticker, market=None):
timestamp = self.parse8601(ticker['timestamp'])
symbol = None
if market is not None:
symbol = market['symbol']
baseVolume = self.safe_float(ticker, 'volume')
quoteVolume = self.safe_float(ticker, 'volumeQuote')
open = self.safe_float(ticker, 'open')
last = self.safe_float(ticker, 'last')
change = None
percentage = None
average = None
if last is not None and open is not None:
change = last - open
average = self.sum(last, open) / 2
if open > 0:
percentage = change / open * 100
vwap = None
if quoteVolume is not None:
if baseVolume is not None:
if baseVolume > 0:
vwap = quoteVolume / baseVolume
return {
'symbol': symbol,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'high': self.safe_float(ticker, 'high'),
'low': self.safe_float(ticker, 'low'),
'bid': self.safe_float(ticker, 'bid'),
'bidVolume': None,
'ask': self.safe_float(ticker, 'ask'),
'askVolume': None,
'vwap': vwap,
'open': open,
'close': last,
'last': last,
'previousClose': None,
'change': change,
'percentage': percentage,
'average': average,
'baseVolume': baseVolume,
'quoteVolume': quoteVolume,
'info': ticker,
}
async def fetch_tickers(self, symbols=None, params={}):
await self.load_markets()
response = await self.publicGetTicker(params)
result = {}
for i in range(0, len(response)):
ticker = response[i]
marketId = self.safe_string(ticker, 'symbol')
if marketId is not None:
if marketId in self.markets_by_id:
market = self.markets_by_id[marketId]
symbol = market['symbol']
result[symbol] = self.parse_ticker(ticker, market)
else:
result[marketId] = self.parse_ticker(ticker)
return result
async def fetch_ticker(self, symbol, params={}):
await self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
}
response = await self.publicGetTickerSymbol(self.extend(request, params))
if 'message' in response:
raise ExchangeError(self.id + ' ' + response['message'])
return self.parse_ticker(response, market)
def parse_trade(self, trade, market=None):
timestamp = self.parse8601(trade['timestamp'])
symbol = None
marketId = self.safe_string(trade, 'symbol')
if marketId is not None:
if marketId in self.markets_by_id:
market = self.markets_by_id[marketId]
symbol = market['symbol']
else:
symbol = marketId
if symbol is None:
if market is not None:
symbol = market['symbol']
fee = None
feeCost = self.safe_float(trade, 'fee')
if feeCost is not None:
feeCurrency = market['quote'] if market else None
fee = {
'cost': feeCost,
'currency': feeCurrency,
}
orderId = self.safe_string(trade, 'clientOrderId')
price = self.safe_float(trade, 'price')
amount = self.safe_float(trade, 'quantity')
cost = price * amount
side = self.safe_string(trade, 'side')
id = self.safe_string(trade, 'id')
return {
'info': trade,
'id': id,
'order': orderId,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'symbol': symbol,
'type': None,
'side': side,
'takerOrMaker': None,
'price': price,
'amount': amount,
'cost': cost,
'fee': fee,
}
async def fetch_transactions(self, code=None, since=None, limit=None, params={}):
await self.load_markets()
currency = None
request = {}
if code is not None:
currency = self.currency(code)
request['asset'] = currency['id']
if since is not None:
request['startTime'] = since
response = await self.privateGetAccountTransactions(self.extend(request, params))
return self.parseTransactions(response, currency, since, limit)
def parse_transaction(self, transaction, currency=None):
id = self.safe_string(transaction, 'id')
timestamp = self.parse8601(self.safe_string(transaction, 'createdAt'))
updated = self.parse8601(self.safe_string(transaction, 'updatedAt'))
currencyId = self.safe_string(transaction, 'currency')
code = self.safe_currency_code(currencyId, currency)
status = self.parse_transaction_status(self.safe_string(transaction, 'status'))
amount = self.safe_float(transaction, 'amount')
address = self.safe_string(transaction, 'address')
txid = self.safe_string(transaction, 'hash')
fee = None
feeCost = self.safe_float(transaction, 'fee')
if feeCost is not None:
fee = {
'cost': feeCost,
'currency': code,
}
type = self.parse_transaction_type(self.safe_string(transaction, 'type'))
return {
'info': transaction,
'id': id,
'txid': txid,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'address': address,
'tag': None,
'type': type,
'amount': amount,
'currency': code,
'status': status,
'updated': updated,
'fee': fee,
}
def parse_transaction_status(self, status):
statuses = {
'pending': 'pending',
'failed': 'failed',
'success': 'ok',
}
return self.safe_string(statuses, status, status)
def parse_transaction_type(self, type):
types = {
'payin': 'deposit',
'payout': 'withdrawal',
'withdraw': 'withdrawal',
}
return self.safe_string(types, type, type)
async def fetch_trades(self, symbol, since=None, limit=None, params={}):
await self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
}
if limit is not None:
request['limit'] = limit
if since is not None:
request['sort'] = 'ASC'
request['from'] = self.iso8601(since)
response = await self.publicGetTradesSymbol(self.extend(request, params))
return self.parse_trades(response, market, since, limit)
async def create_order(self, symbol, type, side, amount, price=None, params={}):
await self.load_markets()
market = self.market(symbol)
uuid = self.uuid()
parts = uuid.split('-')
clientOrderId = ''.join(parts)
clientOrderId = clientOrderId[0:32]
amount = float(amount)
request = {
'clientOrderId': clientOrderId,
'symbol': market['id'],
'side': side,
'quantity': self.amount_to_precision(symbol, amount),
'type': type,
}
if type == 'limit':
request['price'] = self.price_to_precision(symbol, price)
else:
request['timeInForce'] = self.options['defaultTimeInForce']
response = await self.privatePostOrder(self.extend(request, params))
order = self.parse_order(response)
if order['status'] == 'rejected':
raise InvalidOrder(self.id + ' order was rejected by the exchange ' + self.json(order))
id = order['id']
self.orders[id] = order
return order
async def edit_order(self, id, symbol, type, side, amount=None, price=None, params={}):
await self.load_markets()
uuid = self.uuid()
parts = uuid.split('-')
requestClientId = ''.join(parts)
requestClientId = requestClientId[0:32]
request = {
'clientOrderId': id,
'requestClientId': requestClientId,
}
if amount is not None:
request['quantity'] = self.amount_to_precision(symbol, amount)
if price is not None:
request['price'] = self.price_to_precision(symbol, price)
response = await self.privatePatchOrderClientOrderId(self.extend(request, params))
order = self.parse_order(response)
self.orders[order['id']] = order
return order
async def cancel_order(self, id, symbol=None, params={}):
await self.load_markets()
request = {
'clientOrderId': id,
}
response = await self.privateDeleteOrderClientOrderId(self.extend(request, params))
return self.parse_order(response)
def parse_order_status(self, status):
statuses = {
'new': 'open',
'suspended': 'open',
'partiallyFilled': 'open',
'filled': 'closed',
'canceled': 'canceled',
'expired': 'failed',
}
return self.safe_string(statuses, status, status)
def parse_order(self, order, market=None):
created = self.parse8601(self.safe_string(order, 'createdAt'))
updated = self.parse8601(self.safe_string(order, 'updatedAt'))
marketId = self.safe_string(order, 'symbol')
symbol = None
if marketId is not None:
if marketId in self.markets_by_id:
market = self.markets_by_id[marketId]
symbol = market['symbol']
else:
symbol = marketId
if symbol is None:
if market is not None:
symbol = market['id']
amount = self.safe_float(order, 'quantity')
filled = self.safe_float(order, 'cumQuantity')
status = self.parse_order_status(self.safe_string(order, 'status'))
id = self.safe_string(order, 'clientOrderId')
price = self.safe_float(order, 'price')
if price is None:
if id in self.orders:
price = self.orders[id]['price']
remaining = None
cost = None
if amount is not None:
if filled is not None:
remaining = amount - filled
if price is not None:
cost = filled * price
type = self.safe_string(order, 'type')
side = self.safe_string(order, 'side')
trades = self.safe_value(order, 'tradesReport')
fee = None
average = None
if trades is not None:
trades = self.parse_trades(trades, market)
feeCost = None
numTrades = len(trades)
tradesCost = 0
for i in range(0, numTrades):
if feeCost is None:
feeCost = 0
tradesCost = self.sum(tradesCost, trades[i]['cost'])
feeCost = self.sum(feeCost, trades[i]['fee']['cost'])
cost = tradesCost
if (filled is not None) and (filled > 0):
average = cost / filled
if type == 'market':
if price is None:
price = average
if feeCost is not None:
fee = {
'cost': feeCost,
'currency': market['quote'],
}
return {
'id': id,
'timestamp': created,
'datetime': self.iso8601(created),
'lastTradeTimestamp': updated,
'status': status,
'symbol': symbol,
'type': type,
'side': side,
'price': price,
'average': average,
'amount': amount,
'cost': cost,
'filled': filled,
'remaining': remaining,
'fee': fee,
'trades': trades,
'info': order,
}
async def fetch_order(self, id, symbol=None, params={}):
await self.load_markets()
request = {
'clientOrderId': id,
}
response = await self.privateGetHistoryOrder(self.extend(request, params))
numOrders = len(response)
if numOrders > 0:
return self.parse_order(response[0])
raise OrderNotFound(self.id + ' order ' + id + ' not found')
async def fetch_open_order(self, id, symbol=None, params={}):
await self.load_markets()
request = {
'clientOrderId': id,
}
response = await self.privateGetOrderClientOrderId(self.extend(request, params))
return self.parse_order(response)
async def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}):
await self.load_markets()
market = None
request = {}
if symbol is not None:
market = self.market(symbol)
request['symbol'] = market['id']
response = await self.privateGetOrder(self.extend(request, params))
return self.parse_orders(response, market, since, limit)
async def fetch_closed_orders(self, symbol=None, since=None, limit=None, params={}):
await self.load_markets()
market = None
request = {}
if symbol is not None:
market = self.market(symbol)
request['symbol'] = market['id']
if limit is not None:
request['limit'] = limit
if since is not None:
request['from'] = self.iso8601(since)
response = await self.privateGetHistoryOrder(self.extend(request, params))
parsedOrders = self.parse_orders(response, market)
orders = []
for i in range(0, len(parsedOrders)):
order = parsedOrders[i]
status = order['status']
if (status == 'closed') or (status == 'canceled'):
orders.append(order)
return self.filter_by_since_limit(orders, since, limit)
async def fetch_my_trades(self, symbol=None, since=None, limit=None, params={}):
await self.load_markets()
request = {
}
market = None
if symbol is not None:
market = self.market(symbol)
request['symbol'] = market['id']
if since is not None:
request['from'] = self.iso8601(since)
if limit is not None:
request['limit'] = limit
response = await self.privateGetHistoryTrades(self.extend(request, params))
return self.parse_trades(response, market, since, limit)
async def fetch_order_trades(self, id, symbol=None, since=None, limit=None, params={}):
# which is the id that is stored in the unified order id
# To get the exchange's id you need to grab it from order['info']['id']
await self.load_markets()
market = None
if symbol is not None:
market = self.market(symbol)
request = {
'id': id,
}
response = await self.privateGetHistoryOrderIdTrades(self.extend(request, params))
numOrders = len(response)
if numOrders > 0:
return self.parse_trades(response, market, since, limit)
raise OrderNotFound(self.id + ' order ' + id + ' not found, ' + self.id + '.fetchOrderTrades() requires an exchange-specific order id, you need to grab it from order["info"]["id"]')
async def create_deposit_address(self, code, params={}):
await self.load_markets()
currency = self.currency(code)
request = {
'currency': currency['id'],
}
response = await self.privatePostAccountCryptoAddressCurrency(self.extend(request, params))
address = self.safe_string(response, 'address')
self.check_address(address)
tag = self.safe_string(response, 'paymentId')
return {
'currency': currency,
'address': address,
'tag': tag,
'info': response,
}
    async def fetch_deposit_address(self, code, params={}):
        """Fetch the current deposit address for the given unified currency code.

        Returns the unified structure {'currency', 'address', 'tag', 'info'}.
        """
        await self.load_markets()
        currency = self.currency(code)
        request = {
            'currency': currency['id'],
        }
        response = await self.privateGetAccountCryptoAddressCurrency(self.extend(request, params))
        address = self.safe_string(response, 'address')
        self.check_address(address)
        # the exchange reports the address tag/memo under 'paymentId'
        tag = self.safe_string(response, 'paymentId')
        return {
            'currency': currency['code'],
            'address': address,
            'tag': tag,
            'info': response,
        }
    async def withdraw(self, code, amount, address, tag=None, params={}):
        """Request a withdrawal of `amount` units of `code` to `address`.

        An optional `tag` (memo/payment id) is forwarded as the
        exchange-specific 'paymentId'.  Returns {'info', 'id'} where 'id'
        is the withdrawal transaction id from the raw response.
        """
        await self.load_markets()
        self.check_address(address)
        currency = self.currency(code)
        request = {
            'currency': currency['id'],
            'amount': float(amount),
            'address': address,
        }
        if tag:
            request['paymentId'] = tag
        response = await self.privatePostAccountCryptoWithdraw(self.extend(request, params))
        return {
            'info': response,
            'id': response['id'],
        }
def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
url = '/api/' + self.version + '/'
query = self.omit(params, self.extract_params(path))
if api == 'public':
url += api + '/' + self.implode_params(path, params)
if query:
url += '?' + self.urlencode(query)
else:
self.check_required_credentials()
url += self.implode_params(path, params)
if method == 'GET':
if query:
url += '?' + self.urlencode(query)
elif query:
body = self.json(query)
payload = self.encode(self.apiKey + ':' + self.secret)
auth = base64.b64encode(payload)
headers = {
'Authorization': 'Basic ' + self.decode(auth),
'Content-Type': 'application/json',
}
url = self.urls['api'] + url
return {'url': url, 'method': method, 'body': body, 'headers': headers}
def handle_errors(self, code, reason, url, method, headers, body, response, requestHeaders, requestBody):
if response is None:
return
if code >= 400:
feedback = self.id + ' ' + body
if (code == 503) or (code == 504):
raise ExchangeNotAvailable(feedback)
if body[0] == '{':
if 'error' in response:
code = self.safe_string(response['error'], 'code')
exceptions = self.exceptions
if code in exceptions:
raise exceptions[code](feedback)
message = self.safe_string(response['error'], 'message')
if message == 'Duplicate clientOrderId':
raise InvalidOrder(feedback)
raise ExchangeError(feedback)
| true
| true
|
f718a55c5117ea9a28e1c1de9ac32377c6e29ca9
| 20,501
|
py
|
Python
|
skfda/_utils/_utils.py
|
GAA-UAM/scikit-fda
|
a9953a3104195ce9796397d094b17b1b90fd090f
|
[
"BSD-3-Clause"
] | 147
|
2019-05-10T20:46:42.000Z
|
2022-03-25T17:23:19.000Z
|
skfda/_utils/_utils.py
|
GAA-UAM/scikit-fda
|
a9953a3104195ce9796397d094b17b1b90fd090f
|
[
"BSD-3-Clause"
] | 306
|
2019-04-26T08:56:05.000Z
|
2022-03-30T11:12:48.000Z
|
skfda/_utils/_utils.py
|
GAA-UAM/scikit-fda
|
a9953a3104195ce9796397d094b17b1b90fd090f
|
[
"BSD-3-Clause"
] | 38
|
2019-09-03T17:24:04.000Z
|
2022-01-06T05:09:18.000Z
|
"""Module with generic methods."""
from __future__ import annotations
import functools
import numbers
from typing import (
TYPE_CHECKING,
Any,
Callable,
Iterable,
List,
Optional,
Sequence,
Tuple,
TypeVar,
Union,
cast,
overload,
)
import numpy as np
import scipy.integrate
from numpy import ndarray
from pandas.api.indexers import check_array_indexer
from sklearn.base import clone
from sklearn.preprocessing import LabelEncoder
from sklearn.utils.multiclass import check_classification_targets
from typing_extensions import Literal, Protocol
from ..representation._typing import (
ArrayLike,
DomainRange,
DomainRangeLike,
GridPoints,
GridPointsLike,
)
from ..representation.extrapolation import ExtrapolationLike
RandomStateLike = Optional[Union[int, np.random.RandomState]]
if TYPE_CHECKING:
from ..exploratory.depth import Depth
from ..representation import FData, FDataGrid
from ..representation.basis import Basis
T = TypeVar("T", bound=FData)
def check_is_univariate(fd: FData) -> None:
    """Raise an error if the functional data object is not univariate.

    Args:
        fd: Functional object whose dimensions are checked.

    Raises:
        ValueError: If it is not univariate, i.e., `fd.dim_domain != 1` or
            `fd.dim_codomain != 1`.

    """
    if fd.dim_domain == 1 and fd.dim_codomain == 1:
        return
    domain_str = (
        f"(currently is {fd.dim_domain}) " if fd.dim_domain != 1 else ""
    )
    codomain_str = (
        f"(currently is {fd.dim_codomain})" if fd.dim_codomain != 1 else ""
    )
    raise ValueError(
        f"The functional data must be univariate, i.e., "
        f"with dim_domain=1 {domain_str}"
        f"and dim_codomain=1 {codomain_str}",
    )
def _check_compatible_fdata(fdata1: FData, fdata2: FData) -> None:
"""Check that fdata is compatible."""
if (fdata1.dim_domain != fdata2.dim_domain):
raise ValueError(
f"Functional data has incompatible domain dimensions: "
f"{fdata1.dim_domain} != {fdata2.dim_domain}",
)
if (fdata1.dim_codomain != fdata2.dim_codomain):
raise ValueError(
f"Functional data has incompatible codomain dimensions: "
f"{fdata1.dim_codomain} != {fdata2.dim_codomain}",
)
def _to_grid(
    X: FData,
    y: FData,
    eval_points: Optional[np.ndarray] = None,
) -> Tuple[FDataGrid, FDataGrid]:
    """Convert a pair of functional data objects to a common grid.

    When ``eval_points`` is given, both objects are discretized there;
    otherwise, whichever object is already a grid lends its grid points
    to the other, and if neither is a grid both use their defaults.
    """
    from .. import FDataGrid

    x_is_grid = isinstance(X, FDataGrid)
    y_is_grid = isinstance(y, FDataGrid)

    if eval_points is not None:
        # explicit evaluation points take precedence
        X = X.to_grid(eval_points)
        y = y.to_grid(eval_points)
    elif x_is_grid and not y_is_grid:
        y = y.to_grid(X.grid_points[0])
    elif y_is_grid and not x_is_grid:
        X = X.to_grid(y.grid_points[0])
    elif not (x_is_grid or y_is_grid):
        X = X.to_grid()
        y = y.to_grid()

    return X, y
def _to_grid_points(grid_points_like: GridPointsLike) -> GridPoints:
    """Normalize grid points to a tuple of one-dimensional arrays.

    A one-dimensional input such as ``[1, 2, 3]`` becomes
    ``(array([1., 2., 3.]),)``; a two-dimensional input such as
    ``[[1, 2, 3], [4, 5]]`` becomes one array per axis.  Any other input
    gives unspecified behaviour.
    """
    if not isinstance(grid_points_like, Iterable):
        grid_points_like = [grid_points_like]

    # a non-iterable first element means the whole input is one axis
    if not isinstance(grid_points_like[0], Iterable):
        return (_int_to_real(np.asarray(grid_points_like)),)

    return tuple(
        _int_to_real(np.asarray(axis)) for axis in grid_points_like
    )
def _to_domain_range(sequence: DomainRangeLike) -> DomainRange:
    """Convert a sequence (or a single interval) to a canonical domain range."""
    if isinstance(sequence[0], numbers.Real):
        # a single (lower, upper) interval was passed; wrap it
        sequence = (sequence,)
    seq_aux = cast(Sequence[Sequence[float]], sequence)

    tuple_aux = tuple(tuple(s) for s in seq_aux)

    for interval in tuple_aux:
        if len(interval) != 2 or interval[0] > interval[1]:
            raise ValueError(
                "Domain intervals should have 2 bounds for "
                "dimension: (lower, upper).",
            )

    return cast(DomainRange, tuple_aux)
def _to_array_maybe_ragged(
array: Iterable[ArrayLike],
*,
row_shape: Optional[Sequence[int]] = None,
) -> np.ndarray:
"""
Convert to an array where each element may or may not be of equal length.
If each element is of equal length the array is multidimensional.
Otherwise it is a ragged array.
"""
def convert_row(row: ArrayLike) -> np.ndarray:
r = np.array(row)
if row_shape is not None:
r = r.reshape(row_shape)
return r
array_list = [convert_row(a) for a in array]
shapes = [a.shape for a in array_list]
if all(s == shapes[0] for s in shapes):
return np.array(array_list)
res = np.empty(len(array_list), dtype=np.object_)
for i, a in enumerate(array_list):
res[i] = a
return res
# Typing overloads: with return_shape=False (the default) only the product
# array is returned; with return_shape=True the pre-flatten shape is also
# returned.
@overload
def _cartesian_product(
    axes: Sequence[np.ndarray],
    *,
    flatten: bool = True,
    return_shape: Literal[False] = False,
) -> np.ndarray:
    pass


@overload
def _cartesian_product(
    axes: Sequence[np.ndarray],
    *,
    flatten: bool = True,
    return_shape: Literal[True],
) -> Tuple[np.ndarray, Tuple[int, ...]]:
    pass
def _cartesian_product( # noqa: WPS234
axes: Sequence[np.ndarray],
*,
flatten: bool = True,
return_shape: bool = False,
) -> Union[np.ndarray, Tuple[np.ndarray, Tuple[int, ...]]]:
"""
Compute the cartesian product of the axes.
Computes the cartesian product of the axes and returns a numpy array of
1 dimension with all the possible combinations, for an arbitrary number of
dimensions.
Args:
axes: List with axes.
flatten: Whether to return the flatten array or keep one dimension per
axis.
return_shape: If ``True`` return the shape of the array before
flattening.
Returns:
Numpy 2-D array with all the possible combinations.
The entry (i,j) represent the j-th coordinate of the i-th point.
If ``return_shape`` is ``True`` returns also the shape of the array
before flattening.
Examples:
>>> from skfda._utils import _cartesian_product
>>> axes = [[0,1],[2,3]]
>>> _cartesian_product(axes)
array([[0, 2],
[0, 3],
[1, 2],
[1, 3]])
>>> axes = [[0,1],[2,3],[4]]
>>> _cartesian_product(axes)
array([[0, 2, 4],
[0, 3, 4],
[1, 2, 4],
[1, 3, 4]])
>>> axes = [[0,1]]
>>> _cartesian_product(axes)
array([[0],
[1]])
"""
cartesian = np.stack(np.meshgrid(*axes, indexing='ij'), -1)
shape = cartesian.shape
if flatten:
cartesian = cartesian.reshape(-1, len(axes))
if return_shape:
return cartesian, shape
return cartesian
def _same_domain(fd: Union[Basis, FData], fd2: Union[Basis, FData]) -> bool:
"""Check if the domain range of two objects is the same."""
return np.array_equal(fd.domain_range, fd2.domain_range)
# Typing overloads: the accepted eval_points container depends on whether
# evaluation is aligned; the implementation always returns an ndarray.
@overload
def _reshape_eval_points(
    eval_points: ArrayLike,
    *,
    aligned: Literal[True],
    n_samples: int,
    dim_domain: int,
) -> np.ndarray:
    pass


@overload
def _reshape_eval_points(
    eval_points: Sequence[ArrayLike],
    *,
    aligned: Literal[True],
    n_samples: int,
    dim_domain: int,
) -> np.ndarray:
    pass


@overload
def _reshape_eval_points(
    eval_points: Union[ArrayLike, Sequence[ArrayLike]],
    *,
    aligned: bool,
    n_samples: int,
    dim_domain: int,
) -> np.ndarray:
    pass
def _reshape_eval_points(
eval_points: Union[ArrayLike, Iterable[ArrayLike]],
*,
aligned: bool,
n_samples: int,
dim_domain: int,
) -> np.ndarray:
"""Convert and reshape the eval_points to ndarray.
Args:
eval_points: Evaluation points to be reshaped.
aligned: Boolean flag. True if all the samples
will be evaluated at the same evaluation_points.
n_samples: Number of observations.
dim_domain: Dimension of the domain.
Returns:
Numpy array with the eval_points, if
evaluation_aligned is True with shape `number of evaluation points`
x `dim_domain`. If the points are not aligned the shape of the
points will be `n_samples` x `number of evaluation points`
x `dim_domain`.
"""
if aligned:
eval_points = np.asarray(eval_points)
else:
eval_points = cast(Iterable[ArrayLike], eval_points)
eval_points = _to_array_maybe_ragged(
eval_points,
row_shape=(-1, dim_domain),
)
# Case evaluation of a single value, i.e., f(0)
# Only allowed for aligned evaluation
if aligned and (
eval_points.shape == (dim_domain,)
or (eval_points.ndim == 0 and dim_domain == 1)
):
eval_points = np.array([eval_points])
if aligned: # Samples evaluated at same eval points
eval_points = eval_points.reshape(
(eval_points.shape[0], dim_domain),
)
else: # Different eval_points for each sample
if eval_points.shape[0] != n_samples:
raise ValueError(
f"eval_points should be a list "
f"of length {n_samples} with the "
f"evaluation points for each sample.",
)
return eval_points
def _one_grid_to_points(
    axes: GridPointsLike,
    *,
    dim_domain: int,
) -> Tuple[np.ndarray, Tuple[int, ...]]:
    """
    Expand one grid (a list of per-dimension axes) into evaluation points.

    Returns the flattened cartesian product together with the grid shape
    needed to fold the evaluation output back afterwards.
    """
    grid_axes = _to_grid_points(axes)
    if len(grid_axes) != dim_domain:
        raise ValueError(
            f"Length of axes should be {dim_domain}",
        )
    points, shape = _cartesian_product(grid_axes, return_shape=True)
    # The trailing axis of the shape is the domain dimension; it is not
    # needed to reshape the evaluation output, so drop it.
    return points, shape[:-1]
class EvaluateMethod(Protocol):
    """Structural (duck) type for callables that evaluate a functional object."""
    def __call__(
        self,
        __eval_points: np.ndarray,  # noqa: WPS112
        extrapolation: Optional[ExtrapolationLike],
        aligned: bool,
    ) -> np.ndarray:
        """Evaluate a function at ``__eval_points``.

        ``extrapolation`` controls behaviour outside the domain range and
        ``aligned`` states whether every sample shares the same points.
        """
        pass
# Typing overloads: aligned=True (default) takes a single grid shared by
# all samples; aligned=False takes one grid per sample.
@overload
def _evaluate_grid(
    axes: GridPointsLike,
    *,
    evaluate_method: EvaluateMethod,
    n_samples: int,
    dim_domain: int,
    dim_codomain: int,
    extrapolation: Optional[ExtrapolationLike] = None,
    aligned: Literal[True] = True,
) -> np.ndarray:
    pass


@overload
def _evaluate_grid(
    axes: Iterable[GridPointsLike],
    *,
    evaluate_method: EvaluateMethod,
    n_samples: int,
    dim_domain: int,
    dim_codomain: int,
    extrapolation: Optional[ExtrapolationLike] = None,
    aligned: Literal[False],
) -> np.ndarray:
    pass
def _evaluate_grid(  # noqa: WPS234
    axes: Union[GridPointsLike, Iterable[GridPointsLike]],
    *,
    evaluate_method: EvaluateMethod,
    n_samples: int,
    dim_domain: int,
    dim_codomain: int,
    extrapolation: Optional[ExtrapolationLike] = None,
    aligned: bool = True,
) -> np.ndarray:
    """
    Evaluate the functional object in the cartesian grid.

    This method is called internally by :meth:`evaluate` when the argument
    `grid` is True.

    Evaluates the functional object in the grid generated by the cartesian
    product of the axes. The length of the list of axes should be equal
    than the domain dimension of the object.

    If the list of axes has lengths :math:`n_1, n_2, ..., n_m`, where
    :math:`m` is equal than the dimension of the domain, the result of the
    evaluation in the grid will be a matrix with :math:`m+1` dimensions and
    shape :math:`n_{samples} x n_1 x n_2 x ... x n_m`.

    If `aligned` is false each sample is evaluated in a different grid,
    and the list of axes should contain a list of axes for each sample.

    Args:
        axes: List of axes to generate the grid where the object will be
            evaluated. One list of axes per sample when ``aligned`` is
            False.
        evaluate_method: Function used to evaluate the functional object.
        n_samples: Number of samples.
        dim_domain: Domain dimension.
        dim_codomain: Codomain dimension.
        extrapolation: Controls the extrapolation mode for elements
            outside the domain range. By default it is used the mode
            defined during the instance of the object.
        aligned: If False evaluates each sample in a different grid.

    Returns:
        Numpy array with dim_domain + 1 dimensions with
        the result of the evaluation.

    Raises:
        ValueError: If there are a different number of axes than the domain
            dimension.
    """
    # Compute intersection points and resulting shapes
    if aligned:
        axes = cast(GridPointsLike, axes)
        eval_points, shape = _one_grid_to_points(axes, dim_domain=dim_domain)
    else:
        axes_per_sample = cast(Iterable[GridPointsLike], axes)
        axes_per_sample = list(axes_per_sample)
        eval_points_tuple, shape_tuple = zip(
            *[
                _one_grid_to_points(a, dim_domain=dim_domain)
                for a in axes_per_sample
            ],
        )
        if len(eval_points_tuple) != n_samples:
            raise ValueError(
                "Should be provided a list of axis per sample",
            )
        # Per-sample grids may have different sizes, so the result can be
        # a ragged (object) array.
        eval_points = _to_array_maybe_ragged(eval_points_tuple)
    # Evaluate the points
    evaluated = evaluate_method(
        eval_points,
        extrapolation=extrapolation,
        aligned=aligned,
    )
    # Reshape the result to one grid axis per domain dimension plus the
    # codomain axis.
    if aligned:
        res = evaluated.reshape(
            [n_samples] + list(shape) + [dim_codomain],
        )
    else:
        res = _to_array_maybe_ragged([
            r.reshape(list(s) + [dim_codomain])
            for r, s in zip(evaluated, shape_tuple)
        ])
    return res
def nquad_vec(
    func: Callable[[np.ndarray], np.ndarray],
    ranges: Sequence[Tuple[float, float]],
) -> np.ndarray:
    """Perform multiple integration of vector valued functions.

    Applies nested one-dimensional :func:`scipy.integrate.quad_vec`
    passes, one per entry of ``ranges``, accumulating already-fixed
    coordinates as positional arguments of the integrand.
    """
    last_level = len(ranges) - 1

    def _integrate_level(*fixed: Any, depth: int) -> np.ndarray:  # noqa: WPS430
        # At depth 0 the innermost integrand is the user function itself;
        # otherwise recurse with one more coordinate fixed.
        if depth == 0:
            integrand = functools.partial(func, *fixed)
        else:
            integrand = functools.partial(
                _integrate_level, *fixed, depth=depth - 1,
            )
        return scipy.integrate.quad_vec(
            integrand, *ranges[last_level - depth],
        )[0]

    return _integrate_level(depth=last_level)
def _map_in_batches(
function: Callable[..., np.ndarray],
arguments: Tuple[Union[FData, np.ndarray], ...],
indexes: Tuple[np.ndarray, ...],
memory_per_batch: Optional[int] = None,
**kwargs: Any,
) -> np.ndarray:
"""
Map a function over samples of FData or ndarray tuples efficiently.
This function prevents a large set of indexes to use all available
memory and hang the PC.
"""
if memory_per_batch is None:
# 256MB is not too big
memory_per_batch = 256 * 1024 * 1024 # noqa: WPS432
memory_per_element = sum(a.nbytes // len(a) for a in arguments)
n_elements_per_batch_allowed = memory_per_batch // memory_per_element
if n_elements_per_batch_allowed < 1:
raise ValueError("Too few memory allowed for the operation")
n_indexes = len(indexes[0])
assert all(n_indexes == len(i) for i in indexes)
batches: List[np.ndarray] = []
for pos in range(0, n_indexes, n_elements_per_batch_allowed):
batch_args = tuple(
a[i[pos:pos + n_elements_per_batch_allowed]]
for a, i in zip(arguments, indexes)
)
batches.append(function(*batch_args, **kwargs))
return np.concatenate(batches, axis=0)
def _pairwise_symmetric(
    function: Callable[..., np.ndarray],
    arg1: Union[FData, np.ndarray],
    arg2: Optional[Union[FData, np.ndarray]] = None,
    memory_per_batch: Optional[int] = None,
    **kwargs: Any,
) -> np.ndarray:
    """Compute pairwise a commutative function.

    When ``arg2`` is omitted (or is literally ``arg1``), only the upper
    triangle is computed and then mirrored, halving the work.
    """
    n_rows = len(arg1)
    if arg2 is None or arg2 is arg1:
        triu = np.triu_indices(n_rows)
        upper_values = _map_in_batches(
            function,
            (arg1, arg1),
            triu,
            memory_per_batch=memory_per_batch,
            **kwargs,
        )
        matrix = np.empty((n_rows, n_rows))
        # Fill the upper triangle, then mirror it onto the lower one.
        matrix[triu] = upper_values
        matrix[(triu[1], triu[0])] = upper_values
        return matrix
    n_cols = len(arg2)
    grid = np.indices((n_rows, n_cols))
    flat_values = _map_in_batches(
        function,
        (arg1, arg2),
        (grid[0].ravel(), grid[1].ravel()),
        memory_per_batch=memory_per_batch,
        **kwargs,
    )
    return flat_values.reshape((n_rows, n_cols))
def _int_to_real(array: np.ndarray) -> np.ndarray:
    """Convert integer arrays to floating point.

    Adding ``0.0`` lets NumPy's promotion rules choose the floating
    dtype instead of forcing a particular one.
    """
    return array + 0.0
def _check_array_key(array: np.ndarray, key: Any) -> Any:
"""Check a getitem key."""
key = check_array_indexer(array, key)
if isinstance(key, tuple):
non_ellipsis = [i for i in key if i is not Ellipsis]
if len(non_ellipsis) > 1:
raise KeyError(key)
key = non_ellipsis[0]
if isinstance(key, numbers.Integral): # To accept also numpy ints
key = int(key)
key = range(len(array))[key]
return slice(key, key + 1)
return key
def _check_estimator(estimator):
    """Run basic scikit-learn compliance checks on an estimator class.

    Instantiates ``estimator`` with default arguments and verifies that
    ``get_params``/``set_params`` behave as scikit-learn expects.
    """
    from sklearn.utils.estimator_checks import (
        check_get_params_invariance,
        check_set_params,
    )
    name = estimator.__name__
    instance = estimator()
    check_get_params_invariance(name, instance)
    check_set_params(name, instance)
def _classifier_get_classes(y: ndarray) -> Tuple[ndarray, ndarray]:
    """Encode classification targets and return the classes found.

    Args:
        y: Target labels, one per sample.

    Returns:
        Tuple of the array of distinct classes and the label-encoded
        targets.

    Raises:
        ValueError: If fewer than two distinct classes are present.
    """
    check_classification_targets(y)
    le = LabelEncoder()
    y_ind = le.fit_transform(y)
    classes = le.classes_
    if classes.size < 2:
        # Fix: the original adjacent f-strings were missing a separating
        # space, producing the message "greater thanone".
        raise ValueError(
            f'The number of classes has to be greater than '
            f'one; got {classes.size} class',
        )
    return classes, y_ind
def _classifier_get_depth_methods(
    classes: ndarray,
    X: T,
    y_ind: ndarray,
    depth_methods: Sequence[Depth[T]],
) -> Sequence[Depth[T]]:
    """Fit a clone of every depth method to each class' samples.

    The result is ordered by class first, then by depth method within
    each class.
    """
    return [
        clone(depth_method).fit(X[y_ind == cur_class])
        for cur_class in range(classes.size)
        for depth_method in depth_methods
    ]
def _classifier_fit_depth_methods(
    X: T,
    y: ndarray,
    depth_methods: Sequence[Depth[T]],
) -> Tuple[ndarray, Sequence[Depth[T]]]:
    """Encode the targets and fit per-class clones of the depth methods.

    Returns the distinct classes together with the fitted depth methods
    (one clone per class and depth method).
    """
    classes, y_ind = _classifier_get_classes(y)
    class_depth_methods_ = _classifier_get_depth_methods(
        classes, X, y_ind, depth_methods,
    )
    return classes, class_depth_methods_
_DependenceMeasure = Callable[[np.ndarray, np.ndarray], np.ndarray]
def _compute_dependence(
    X: np.ndarray,
    y: np.ndarray,
    *,
    dependence_measure: _DependenceMeasure,
) -> np.ndarray:
    """
    Compute dependence between points and target.

    Computes the dependence of each point in each trajectory in X with the
    corresponding class label in Y.

    Args:
        X: Input trajectories, samples along the first axis.
        y: Targets, one per sample.
        dependence_measure: Measure applied rowwise via :mod:`dcor`.

    Returns:
        Array of dependences with the sample axis reduced away (one value
        per input point position).
    """
    from dcor import rowwise
    # Move n_samples to the end
    # The shape is now input_shape + n_samples + n_output
    X = np.moveaxis(X, 0, -2)
    input_shape = X.shape[:-2]
    # Join input in a list for rowwise
    X = X.reshape(-1, X.shape[-2], X.shape[-1])
    if y.ndim == 1:
        y = np.atleast_2d(y).T
    # Repeat the target so every row is paired with the same labels.
    Y = np.array([y] * len(X))
    dependence_results = rowwise(dependence_measure, X, Y)
    return dependence_results.reshape(input_shape)
| 26.728814
| 78
| 0.632457
|
from __future__ import annotations
import functools
import numbers
from typing import (
TYPE_CHECKING,
Any,
Callable,
Iterable,
List,
Optional,
Sequence,
Tuple,
TypeVar,
Union,
cast,
overload,
)
import numpy as np
import scipy.integrate
from numpy import ndarray
from pandas.api.indexers import check_array_indexer
from sklearn.base import clone
from sklearn.preprocessing import LabelEncoder
from sklearn.utils.multiclass import check_classification_targets
from typing_extensions import Literal, Protocol
from ..representation._typing import (
ArrayLike,
DomainRange,
DomainRangeLike,
GridPoints,
GridPointsLike,
)
from ..representation.extrapolation import ExtrapolationLike
RandomStateLike = Optional[Union[int, np.random.RandomState]]
if TYPE_CHECKING:
from ..exploratory.depth import Depth
from ..representation import FData, FDataGrid
from ..representation.basis import Basis
T = TypeVar("T", bound=FData)
def check_is_univariate(fd: FData) -> None:
    """Check that a functional object is univariate.

    Raises:
        ValueError: If ``fd`` does not have a one-dimensional domain and
            a one-dimensional codomain, with a message that reports the
            offending dimension(s).
    """
    if fd.dim_domain != 1 or fd.dim_codomain != 1:
        domain_str = (
            "" if fd.dim_domain == 1
            else f"(currently is {fd.dim_domain}) "
        )
        codomain_str = (
            "" if fd.dim_codomain == 1
            else f"(currently is {fd.dim_codomain})"
        )
        raise ValueError(
            f"The functional data must be univariate, i.e., "
            f"with dim_domain=1 {domain_str}"
            f"and dim_codomain=1 {codomain_str}",
        )
def _check_compatible_fdata(fdata1: FData, fdata2: FData) -> None:
    """Check that two functional objects have matching dimensions.

    Raises:
        ValueError: If the domain or the codomain dimensions differ.
    """
    if (fdata1.dim_domain != fdata2.dim_domain):
        raise ValueError(
            f"Functional data has incompatible domain dimensions: "
            f"{fdata1.dim_domain} != {fdata2.dim_domain}",
        )
    if (fdata1.dim_codomain != fdata2.dim_codomain):
        raise ValueError(
            f"Functional data has incompatible codomain dimensions: "
            f"{fdata1.dim_codomain} != {fdata2.dim_codomain}",
        )
def _to_grid(
    X: FData,
    y: FData,
    eval_points: Optional[np.ndarray] = None,
) -> Tuple[FDataGrid, FDataGrid]:
    """Transform a pair of FDatas in grids to perform calculations.

    When ``eval_points`` is given, both objects are discretized there;
    otherwise the grid points of whichever argument already is an
    FDataGrid are reused, falling back to each object's default grid
    when neither is.
    """
    from .. import FDataGrid
    x_is_grid = isinstance(X, FDataGrid)
    y_is_grid = isinstance(y, FDataGrid)
    if eval_points is not None:
        X = X.to_grid(eval_points)
        y = y.to_grid(eval_points)
    elif x_is_grid and not y_is_grid:
        y = y.to_grid(X.grid_points[0])
    elif not x_is_grid and y_is_grid:
        X = X.to_grid(y.grid_points[0])
    elif not x_is_grid and not y_is_grid:
        X = X.to_grid()
        y = y.to_grid()
    return X, y
def _to_grid_points(grid_points_like: GridPointsLike) -> GridPoints:
unidimensional = False
if not isinstance(grid_points_like, Iterable):
grid_points_like = [grid_points_like]
if not isinstance(grid_points_like[0], Iterable):
unidimensional = True
if unidimensional:
return (_int_to_real(np.asarray(grid_points_like)),)
return tuple(_int_to_real(np.asarray(i)) for i in grid_points_like)
def _to_domain_range(sequence: DomainRangeLike) -> DomainRange:
seq_aux = cast(
Sequence[Sequence[float]],
(sequence,) if isinstance(sequence[0], numbers.Real) else sequence,
)
tuple_aux = tuple(tuple(s) for s in seq_aux)
if not all(len(s) == 2 and s[0] <= s[1] for s in tuple_aux):
raise ValueError(
"Domain intervals should have 2 bounds for "
"dimension: (lower, upper).",
)
return cast(DomainRange, tuple_aux)
def _to_array_maybe_ragged(
array: Iterable[ArrayLike],
*,
row_shape: Optional[Sequence[int]] = None,
) -> np.ndarray:
def convert_row(row: ArrayLike) -> np.ndarray:
r = np.array(row)
if row_shape is not None:
r = r.reshape(row_shape)
return r
array_list = [convert_row(a) for a in array]
shapes = [a.shape for a in array_list]
if all(s == shapes[0] for s in shapes):
return np.array(array_list)
res = np.empty(len(array_list), dtype=np.object_)
for i, a in enumerate(array_list):
res[i] = a
return res
@overload
def _cartesian_product(
axes: Sequence[np.ndarray],
*,
flatten: bool = True,
return_shape: Literal[False] = False,
) -> np.ndarray:
pass
@overload
def _cartesian_product(
axes: Sequence[np.ndarray],
*,
flatten: bool = True,
return_shape: Literal[True],
) -> Tuple[np.ndarray, Tuple[int, ...]]:
pass
def _cartesian_product(
axes: Sequence[np.ndarray],
*,
flatten: bool = True,
return_shape: bool = False,
) -> Union[np.ndarray, Tuple[np.ndarray, Tuple[int, ...]]]:
cartesian = np.stack(np.meshgrid(*axes, indexing='ij'), -1)
shape = cartesian.shape
if flatten:
cartesian = cartesian.reshape(-1, len(axes))
if return_shape:
return cartesian, shape
return cartesian
def _same_domain(fd: Union[Basis, FData], fd2: Union[Basis, FData]) -> bool:
return np.array_equal(fd.domain_range, fd2.domain_range)
@overload
def _reshape_eval_points(
eval_points: ArrayLike,
*,
aligned: Literal[True],
n_samples: int,
dim_domain: int,
) -> np.ndarray:
pass
@overload
def _reshape_eval_points(
eval_points: Sequence[ArrayLike],
*,
aligned: Literal[True],
n_samples: int,
dim_domain: int,
) -> np.ndarray:
pass
@overload
def _reshape_eval_points(
eval_points: Union[ArrayLike, Sequence[ArrayLike]],
*,
aligned: bool,
n_samples: int,
dim_domain: int,
) -> np.ndarray:
pass
def _reshape_eval_points(
eval_points: Union[ArrayLike, Iterable[ArrayLike]],
*,
aligned: bool,
n_samples: int,
dim_domain: int,
) -> np.ndarray:
if aligned:
eval_points = np.asarray(eval_points)
else:
eval_points = cast(Iterable[ArrayLike], eval_points)
eval_points = _to_array_maybe_ragged(
eval_points,
row_shape=(-1, dim_domain),
)
if aligned and (
eval_points.shape == (dim_domain,)
or (eval_points.ndim == 0 and dim_domain == 1)
):
eval_points = np.array([eval_points])
if aligned:
eval_points = eval_points.reshape(
(eval_points.shape[0], dim_domain),
)
else:
if eval_points.shape[0] != n_samples:
raise ValueError(
f"eval_points should be a list "
f"of length {n_samples} with the "
f"evaluation points for each sample.",
)
return eval_points
def _one_grid_to_points(
axes: GridPointsLike,
*,
dim_domain: int,
) -> Tuple[np.ndarray, Tuple[int, ...]]:
axes = _to_grid_points(axes)
if len(axes) != dim_domain:
raise ValueError(
f"Length of axes should be {dim_domain}",
)
cartesian, shape = _cartesian_product(axes, return_shape=True)
shape = shape[:-1]
return cartesian, shape
class EvaluateMethod(Protocol):
def __call__(
self,
__eval_points: np.ndarray,
extrapolation: Optional[ExtrapolationLike],
aligned: bool,
) -> np.ndarray:
pass
@overload
def _evaluate_grid(
axes: GridPointsLike,
*,
evaluate_method: EvaluateMethod,
n_samples: int,
dim_domain: int,
dim_codomain: int,
extrapolation: Optional[ExtrapolationLike] = None,
aligned: Literal[True] = True,
) -> np.ndarray:
pass
@overload
def _evaluate_grid(
axes: Iterable[GridPointsLike],
*,
evaluate_method: EvaluateMethod,
n_samples: int,
dim_domain: int,
dim_codomain: int,
extrapolation: Optional[ExtrapolationLike] = None,
aligned: Literal[False],
) -> np.ndarray:
pass
def _evaluate_grid(
axes: Union[GridPointsLike, Iterable[GridPointsLike]],
*,
evaluate_method: EvaluateMethod,
n_samples: int,
dim_domain: int,
dim_codomain: int,
extrapolation: Optional[ExtrapolationLike] = None,
aligned: bool = True,
) -> np.ndarray:
if aligned:
axes = cast(GridPointsLike, axes)
eval_points, shape = _one_grid_to_points(axes, dim_domain=dim_domain)
else:
axes_per_sample = cast(Iterable[GridPointsLike], axes)
axes_per_sample = list(axes_per_sample)
eval_points_tuple, shape_tuple = zip(
*[
_one_grid_to_points(a, dim_domain=dim_domain)
for a in axes_per_sample
],
)
if len(eval_points_tuple) != n_samples:
raise ValueError(
"Should be provided a list of axis per sample",
)
eval_points = _to_array_maybe_ragged(eval_points_tuple)
evaluated = evaluate_method(
eval_points,
extrapolation=extrapolation,
aligned=aligned,
)
if aligned:
res = evaluated.reshape(
[n_samples] + list(shape) + [dim_codomain],
)
else:
res = _to_array_maybe_ragged([
r.reshape(list(s) + [dim_codomain])
for r, s in zip(evaluated, shape_tuple)
])
return res
def nquad_vec(
func: Callable[[np.ndarray], np.ndarray],
ranges: Sequence[Tuple[float, float]],
) -> np.ndarray:
initial_depth = len(ranges) - 1
def integrate(*args: Any, depth: int) -> np.ndarray:
if depth == 0:
f = functools.partial(func, *args)
else:
f = functools.partial(integrate, *args, depth=depth - 1)
return scipy.integrate.quad_vec(f, *ranges[initial_depth - depth])[0]
return integrate(depth=initial_depth)
def _map_in_batches(
function: Callable[..., np.ndarray],
arguments: Tuple[Union[FData, np.ndarray], ...],
indexes: Tuple[np.ndarray, ...],
memory_per_batch: Optional[int] = None,
**kwargs: Any,
) -> np.ndarray:
if memory_per_batch is None:
memory_per_batch = 256 * 1024 * 1024
memory_per_element = sum(a.nbytes // len(a) for a in arguments)
n_elements_per_batch_allowed = memory_per_batch // memory_per_element
if n_elements_per_batch_allowed < 1:
raise ValueError("Too few memory allowed for the operation")
n_indexes = len(indexes[0])
assert all(n_indexes == len(i) for i in indexes)
batches: List[np.ndarray] = []
for pos in range(0, n_indexes, n_elements_per_batch_allowed):
batch_args = tuple(
a[i[pos:pos + n_elements_per_batch_allowed]]
for a, i in zip(arguments, indexes)
)
batches.append(function(*batch_args, **kwargs))
return np.concatenate(batches, axis=0)
def _pairwise_symmetric(
function: Callable[..., np.ndarray],
arg1: Union[FData, np.ndarray],
arg2: Optional[Union[FData, np.ndarray]] = None,
memory_per_batch: Optional[int] = None,
**kwargs: Any,
) -> np.ndarray:
dim1 = len(arg1)
if arg2 is None or arg2 is arg1:
indices = np.triu_indices(dim1)
matrix = np.empty((dim1, dim1))
triang_vec = _map_in_batches(
function,
(arg1, arg1),
indices,
memory_per_batch=memory_per_batch,
**kwargs,
)
matrix[indices] = triang_vec
matrix[(indices[1], indices[0])] = triang_vec
return matrix
dim2 = len(arg2)
indices = np.indices((dim1, dim2))
vec = _map_in_batches(
function,
(arg1, arg2),
(indices[0].ravel(), indices[1].ravel()),
memory_per_batch=memory_per_batch,
**kwargs,
)
return vec.reshape((dim1, dim2))
def _int_to_real(array: np.ndarray) -> np.ndarray:
return array + 0.0
def _check_array_key(array: np.ndarray, key: Any) -> Any:
key = check_array_indexer(array, key)
if isinstance(key, tuple):
non_ellipsis = [i for i in key if i is not Ellipsis]
if len(non_ellipsis) > 1:
raise KeyError(key)
key = non_ellipsis[0]
if isinstance(key, numbers.Integral):
key = int(key)
key = range(len(array))[key]
return slice(key, key + 1)
return key
def _check_estimator(estimator):
from sklearn.utils.estimator_checks import (
check_get_params_invariance,
check_set_params,
)
name = estimator.__name__
instance = estimator()
check_get_params_invariance(name, instance)
check_set_params(name, instance)
def _classifier_get_classes(y: ndarray) -> Tuple[ndarray, ndarray]:
check_classification_targets(y)
le = LabelEncoder()
y_ind = le.fit_transform(y)
classes = le.classes_
if classes.size < 2:
raise ValueError(
f'The number of classes has to be greater than'
f'one; got {classes.size} class',
)
return classes, y_ind
def _classifier_get_depth_methods(
classes: ndarray,
X: T,
y_ind: ndarray,
depth_methods: Sequence[Depth[T]],
) -> Sequence[Depth[T]]:
return [
clone(depth_method).fit(X[y_ind == cur_class])
for cur_class in range(classes.size)
for depth_method in depth_methods
]
def _classifier_fit_depth_methods(
X: T,
y: ndarray,
depth_methods: Sequence[Depth[T]],
) -> Tuple[ndarray, Sequence[Depth[T]]]:
classes, y_ind = _classifier_get_classes(y)
class_depth_methods_ = _classifier_get_depth_methods(
classes, X, y_ind, depth_methods,
)
return classes, class_depth_methods_
_DependenceMeasure = Callable[[np.ndarray, np.ndarray], np.ndarray]
def _compute_dependence(
X: np.ndarray,
y: np.ndarray,
*,
dependence_measure: _DependenceMeasure,
) -> np.ndarray:
from dcor import rowwise
X = np.moveaxis(X, 0, -2)
input_shape = X.shape[:-2]
X = X.reshape(-1, X.shape[-2], X.shape[-1])
if y.ndim == 1:
y = np.atleast_2d(y).T
Y = np.array([y] * len(X))
dependence_results = rowwise(dependence_measure, X, Y)
return dependence_results.reshape(input_shape)
| true
| true
|
f718a596d8438f0be366ac1bbc612312c7461493
| 671
|
py
|
Python
|
tests/test_windows.py
|
tombackstrom/mdct
|
f59e708f9a7f65ee672dbf44e6f164e79c82d83a
|
[
"MIT"
] | 40
|
2016-11-16T14:45:36.000Z
|
2021-12-02T20:56:07.000Z
|
tests/test_windows.py
|
tombackstrom/mdct
|
f59e708f9a7f65ee672dbf44e6f164e79c82d83a
|
[
"MIT"
] | 3
|
2017-06-17T11:48:30.000Z
|
2021-06-28T04:47:00.000Z
|
tests/test_windows.py
|
tombackstrom/mdct
|
f59e708f9a7f65ee672dbf44e6f164e79c82d83a
|
[
"MIT"
] | 9
|
2016-10-01T20:20:40.000Z
|
2021-12-09T08:56:31.000Z
|
import pytest
import numpy
import mdct.windows
def test_kbd():
    """Kaiser-Bessel derived window: complementarity and reference values."""
    M = 100
    w = mdct.windows.kaiser_derived(M, beta=4.)
    # The two overlapping halves are power-complementary:
    # w[n]**2 + w[n + M//2]**2 == 1 for every n.
    assert numpy.allclose(w[:M//2] ** 2 + w[-M//2:] ** 2, 1.)
    # Length M + 1 (odd) is rejected — presumably only even sizes are
    # valid for this window.
    with pytest.raises(ValueError):
        mdct.windows.kaiser_derived(M + 1, beta=4.)
    # Known reference values for the first half of small windows.
    assert numpy.allclose(
        mdct.windows.kaiser_derived(2, beta=numpy.pi/2)[:1],
        [numpy.sqrt(2)/2])
    assert numpy.allclose(
        mdct.windows.kaiser_derived(4, beta=numpy.pi/2)[:2],
        [0.518562710536, 0.855039598640])
    assert numpy.allclose(
        mdct.windows.kaiser_derived(6, beta=numpy.pi/2)[:3],
        [0.436168993154, 0.707106781187, 0.899864772847])
| 25.807692
| 61
| 0.630402
|
import pytest
import numpy
import mdct.windows
def test_kbd():
M = 100
w = mdct.windows.kaiser_derived(M, beta=4.)
assert numpy.allclose(w[:M//2] ** 2 + w[-M//2:] ** 2, 1.)
with pytest.raises(ValueError):
mdct.windows.kaiser_derived(M + 1, beta=4.)
assert numpy.allclose(
mdct.windows.kaiser_derived(2, beta=numpy.pi/2)[:1],
[numpy.sqrt(2)/2])
assert numpy.allclose(
mdct.windows.kaiser_derived(4, beta=numpy.pi/2)[:2],
[0.518562710536, 0.855039598640])
assert numpy.allclose(
mdct.windows.kaiser_derived(6, beta=numpy.pi/2)[:3],
[0.436168993154, 0.707106781187, 0.899864772847])
| true
| true
|
f718a61c448b93dcd1b999fa459d8e7cef048f93
| 22,011
|
py
|
Python
|
trainer.py
|
dpetrini/nova
|
00b7637901420f68c7d805c13ccd4c39d514efb1
|
[
"MIT"
] | 1
|
2020-10-19T23:49:00.000Z
|
2020-10-19T23:49:00.000Z
|
trainer.py
|
dpetrini/nova
|
00b7637901420f68c7d805c13ccd4c39d514efb1
|
[
"MIT"
] | null | null | null |
trainer.py
|
dpetrini/nova
|
00b7637901420f68c7d805c13ccd4c39d514efb1
|
[
"MIT"
] | null | null | null |
from matplotlib.pyplot import show
import torch
from torch.autograd import Variable
from torch.cuda.amp import GradScaler, autocast
import numpy as np
from sklearn.metrics import roc_auc_score
from callbacks.cb_handler import CallbackHandler
from callbacks.cb_base import BaseCB
from callbacks.cb_lr_patch_clf import LR_SchedCB_patch
from callbacks.cb_lr_full_clf import LR_SchedCB_full
from callbacks.cb_lr_2views_clf import LR_SchedCB_2views
from callbacks.cb_lr_w_cyc_cos import LR_SchedCB_W_Cyc_Cos
from callbacks.cb_lr_w_cos import LR_SchedCB_W_Cos
from callbacks.cb_auc import AUC_CB
# from parallel import DataParallelModel, DataParallelCriterion
from util.util import show_auc, calc_auc_desv
parallel = False
#APAGAR
import cv2
# Accuracy
def acc(y_hat, labels):
    """Default accuracy: number of samples whose argmax matches the label."""
    # Data-parallel runs deliver a list of per-device tensors; merge them.
    if len(y_hat) > 1 and parallel:
        y_hat = torch.cat(y_hat)
    predictions = torch.argmax(y_hat, dim=1)
    return (predictions == labels).float().sum()
class Trainer():
"""
Many possible configurations for Trainer
config = {
'num_epochs': NUM_EPOCHS,
'batch_size': MINI_BATCH,
'name': 'example',
'title': 'Cats & Dogs Classifier',
'save_last': True, # optional: Save last model (default=False)
'save_best': True, # optional: Save best models (ACC, {AUC}) (default=True)
'stable_metric: N # optional: extend epochs number to wait N epochs with no metric change (ex.AUC)
'save_checkpoints': N, # Save checkpoint each N epochs
'features': ['auc'], # optional: features like auc stats or some scheduler (if none default:optim)
'save_path': folder, # if want to save artifacts in other place (eg.cloud)
'show_plots': False, # if want to show plots
'make_plots': False, # if want to disable plots
'cv_k': (number), # interactio number if using Cross Validation
}
"""
    def __init__(self, model, train_dataloader, val_dataloader,
                 loss_criterion, optimizer, optimizer_args,
                 device, config):
        """Store training state and build the callback chain from config.

        Args:
            model: Torch module to train.
            train_dataloader: Loader of training batches. If falsy,
                construction stops early and the instance stays partially
                initialized.
            val_dataloader: Loader of validation batches.
            loss_criterion: Loss function.
            optimizer: Torch optimizer instance.
            optimizer_args: Arguments forwarded to the LR-scheduler
                callbacks via ``update_LR``.
            device: Torch device the tensors are moved to.
            config: Dict of options; see the class docstring for the keys.
        """
        self.model = model
        self.device = device
        self.loss_criterion = loss_criterion
        # parts of config are only retrieved in callbacks
        self.epochs = int(config['num_epochs']) if 'num_epochs' in config else 10
        self.mini_batch = int(config['batch_size']) if 'batch_size' in config else 1
        self.first_epoch = int(config['start_epoch']) if 'start_epoch' in config else 1
        self.stable_metric = int(config['stable_metric']) if 'stable_metric' in config else False
        self.name = config['name'] if 'name' in config else 'default'
        self.title = config['title'] if 'title' in config else 'Classifier'
        self.features = config['features'] if 'features' in config else []
        self.make_plots = config['make_plots'] if 'make_plots' in config else True
        # Without a training loader there is nothing more to configure.
        if train_dataloader:
            self.train_dataloader = train_dataloader
        else:
            return
        self.train_dataloader = train_dataloader  # NOTE(review): redundant reassignment
        self.val_dataloader = val_dataloader
        self.optimizer = optimizer
        self.optimizer_args = optimizer_args
        print(self.title)
        # Load Callbacks for this session: BaseCB always runs; feature
        # flags opt into AUC statistics and the LR schedulers.
        callbacks = [BaseCB(self.name, self.title, config)]
        for feat in self.features:
            if feat == 'auc':
                callbacks.append(AUC_CB(self.name, config))
            if feat == 'lr_step_full':
                callbacks.append(LR_SchedCB_full())
            if feat == 'lr_step_patch':
                callbacks.append(LR_SchedCB_patch())
            if feat == 'lr_step_2views':
                callbacks.append(LR_SchedCB_2views())
            if feat == 'lr_warmup_cos':
                callbacks.append(LR_SchedCB_W_Cos())
            if feat == 'lr_warmup_cyc_cos':
                callbacks.append(LR_SchedCB_W_Cyc_Cos())
            if feat == 'LR_SchedCB_W_Cos':
                callbacks.append(LR_SchedCB_W_Cos())
        self.cb = CallbackHandler(callbacks)
    def train_and_validate(self, **kwargs):
        """
        Main train and validate function that runs main loop (fit).

        Receives all parameters and feed callback system.
        Loop through epochs and executes pytorch forward, loss,
        backpropagation and optimization (grads calc).
        Returns the model trained.

        Keyword Args:
            accuracy: Metric callable ``(outputs, labels) -> Tensor``;
                defaults to :func:`acc`.
            input_dict: Keys to move to the device when batches are dicts.
        """
        calc_acc = kwargs.get('accuracy') if kwargs.get('accuracy') else acc
        input_dict = kwargs.get('input_dict') if kwargs.get('input_dict') else []
        # Callbacks may veto the whole run (e.g. bad configuration).
        if not self.cb.begin_train_val(self.epochs, self.model, self.train_dataloader,
                                       self.val_dataloader, self.mini_batch, self.optimizer):
            return
        self.cb.update_loss(self.loss_criterion, calc_acc)
        device = self.device
        for epoch in range(self.first_epoch, self.epochs+1):
            self.model.train()
            train_loss, train_acc = 0.0, 0.0
            val_loss, val_acc = 0.0, 0.0
            if not self.cb.begin_epoch(epoch): return  # noqa: E701
            # A scheduler callback may hand back a replacement optimizer.
            optim = self.cb.update_LR(epoch, self.model, self.optimizer, self.optimizer_args)
            if optim: self.optimizer = optim
            # Train loop
            for _, (inputs, labels) in enumerate(self.train_dataloader):
                # Dict batches (e.g. multi-view input) move listed keys only.
                if isinstance(inputs, dict):
                    for key in input_dict:
                        inputs[key] = inputs[key].to(device)
                else:
                    inputs = Variable(inputs.to(device))
                labels = Variable(labels.to(device))
                # inserting MIXUP handling: a callback may replace the
                # batch, the loss and the accuracy function.
                res = self.cb.begin_batch(inputs, labels)
                if res: inputs, labels, self.loss_criterion, calc_acc = res
                self.optimizer.zero_grad()  # clean existing gradients
                outputs = self.model(inputs)  # forward pass
                loss = self.loss_criterion(outputs, labels)  # compute loss
                if parallel:
                    loss = loss.mean()  # list in this case
                loss.backward()  # backprop the gradients
                self.optimizer.step()  # update parameters
                train_loss += loss.item() * labels.size(0)  # inputs.size(0) == mini_batch size
                train_acc += calc_acc(outputs, labels).item()
                self.cb.after_step(labels.size(0), labels, outputs)
            # validation - no gradient tracking needed
            with torch.no_grad():
                self.model.eval()
                self.cb.begin_val()
                # validation loop
                for _, (inputs, labels) in enumerate(self.val_dataloader):
                    if isinstance(inputs, dict):
                        for key in input_dict:
                            inputs[key] = inputs[key].to(device)
                    else:
                        inputs = Variable(inputs.to(device))
                    labels = Variable(labels.to(device))
                    outputs = self.model(inputs)  # forward pass
                    loss = self.loss_criterion(outputs, labels)  # compute loss
                    if parallel:
                        loss = loss.mean()
                    val_loss += loss.item() * labels.size(0)  # inputs.size(0) == mini_batch size
                    val_acc += calc_acc(outputs, labels).item()
                    self.cb.after_step_val(labels.size(0), labels, outputs)
            self.cb.after_epoch(self.model, train_acc, train_loss, val_acc, val_loss)
        self.cb.after_train_val()
        return self.model
def train_and_validate_amp(self, **kwargs):
"""
Mixed precision (automatic) version for train_and_validate.
Uses FP16 and FP32 in main loop with pytorch Automatic Mixed Precision.
In simple tests: use 75% of memory in 66% of time. Less memory and faster.
Sometimes it just don't work and get worse, like for resnest...
"""
assert torch.__version__ >= '1.6.0', "[Mixed precision] Please use PyTorch 1.6.0+"
print('Using AMP')
calc_acc = kwargs.get('accuracy') if kwargs.get('accuracy') else acc
input_dict = kwargs.get('input_dict') if kwargs.get('input_dict') else []
if not self.cb.begin_train_val(self.epochs, self.model, self.train_dataloader,
self.val_dataloader, self.mini_batch, self.optimizer):
return
# Creates a GradScaler once at the beginning of training.
scaler = GradScaler()
device = self.device
# for epoch in range(self.first_epoch, self.epochs+1):
epoch = self.first_epoch # suport for "wait N epochs after best metric"
last_epoch = self.epochs
while epoch <= last_epoch:
self.model.train()
train_loss, train_acc = 0.0, 0.0
val_loss, val_acc = 0.0, 0.0
if not self.cb.begin_epoch(epoch): return # noqa: E701
optim = self.cb.update_LR(epoch, self.model, self.optimizer, self.optimizer_args)
if optim: self.optimizer = optim
# Train loop
for _, (inputs, labels) in enumerate(self.train_dataloader):
if isinstance(inputs, dict):
for key in input_dict:
inputs[key] = inputs[key].to(device)
else:
inputs = Variable(inputs.to(device))
labels = Variable(labels.to(device))
self.optimizer.zero_grad() # clean existing gradients
# Runs the forward pass with autocasting.
with autocast():
outputs = self.model(inputs) # forward pass
loss = self.loss_criterion(outputs, labels) # compute loss
if parallel:
loss = loss.mean() # list in this case
scaler.scale(loss).backward() # backward() on scaled loss for scaled gradients.
scaler.step(self.optimizer) # update parameters
scaler.update() # Updates the scale for next iteration.
train_loss += loss.item() * labels.size(0) # == mini_batch size
train_acc += calc_acc(outputs, labels).item()
self.cb.after_step(labels.size(0), labels, outputs)
# validation - no gradient tracking needed
with torch.no_grad():
self.model.eval()
# validation loop
for _, (inputs, labels) in enumerate(self.val_dataloader):
if isinstance(inputs, dict):
for key in input_dict:
inputs[key] = inputs[key].to(device)
else:
inputs = Variable(inputs.to(device))
labels = Variable(labels.to(device))
outputs = self.model(inputs) # forward pass
loss = self.loss_criterion(outputs, labels) # compute loss
if parallel:
loss = loss.mean()
val_loss += loss.item() * labels.size(0) # == mini_batch size
val_acc += calc_acc(outputs, labels).item()
self.cb.after_step_val(labels.size(0), labels, outputs)
self.cb.after_epoch(self.model, train_acc, train_loss, val_acc, val_loss)
epoch += 1
# print('-', self.cb.best_metric_epoch[self.cb.metric_name[-1]], last_epoch)
# Is use stable metric - will stop training earlier, after
# stable_metric epochs without validation metric (to be selected) improve
# last_epoch = self.epochs if not self.stable_metric else max(self.epochs, self.cb.best_metric_epoch[self.cb.metric_name[-1]] + self.stable_metric)
# for metric in self.cb.metric_name:
# print(metric)
last_epoch = self.epochs if not self.stable_metric else min(self.epochs, self.cb.best_metric_epoch[self.cb.metric_name[-1]] + self.stable_metric)
self.cb.after_train_val()
values = [self.cb.best_metric, self.cb.best_metric_epoch, self.cb.elapsed_mins,
self.cb.metric_name, self.cb.loss_plot, self.cb.metric_plot,
self.cb.best_model_file]
return values
def run_test(self, test_dataloader, model_type, **kwargs):
""" Run test from test_dataloader according to model_type.
if model_type = 'normal' : use last saved model
if model_type = 'best' : use best model
Uses: loss function from Trainer
Input: test_dataloader
"""
calc_acc = kwargs.get('accuracy') if kwargs.get('accuracy') else acc
quiet = kwargs.get('quiet') if kwargs.get('quiet') else False
if model_type == 'normal':
model = self.cb.last_model
elif model_type == 'best':
model = self.cb.best_model
elif model_type == 'bootstrap':
model = self.model
test_acc, test_loss = 0., 0.
batch_val_counter = 0
device = self.device
with torch.no_grad():
model.eval()
# validation loop
for _, (inputs, labels) in enumerate(test_dataloader):
if isinstance(inputs, dict):
for key in ['CC', 'MLO']:
inputs[key] = inputs[key].to(device)
labels = Variable(labels.to(device))
else:
inputs = Variable(inputs.to(device))
labels = Variable(labels.to(device))
outputs = model(inputs) # forward pass
loss = self.loss_criterion(outputs, labels) # compute loss
if parallel:
loss = loss.mean()
test_loss += loss.item() * labels.size(0)
test_acc += calc_acc(outputs, labels).item()
batch_val_counter += labels.size(0)
# Find average test loss and test accuracy
avg_test_loss = test_loss/batch_val_counter
avg_test_acc = test_acc/batch_val_counter
if not quiet:
print(f'Model: {model_type} - Test accuracy : {avg_test_acc:.5f}' +
f' Test loss : {avg_test_loss:.5f}')
return avg_test_acc
def run_test_auc(self, test_dataloader, model_type, **kwargs):
""" Run test from test_dataloader, calculating AUC and ROC curve
According to model_type:
if model_type = 'normal' : use last saved model
if model_type = 'best' : use best model
If we are running test iunference only can pass model through kwargs.
Uses: loss function from Trainer
Input: test_dataloader
"""
calc_acc = kwargs.get('accuracy') if kwargs.get('accuracy') else acc
model = kwargs.get('model') if kwargs.get('model') else None
show_results = kwargs.get('show_results') if kwargs.get('show_results') else False
m_positive = kwargs.get('m') if kwargs.get('m') else False
n_negative = kwargs.get('n') if kwargs.get('n') else False
if model is None:
if model_type == 'normal':
model = self.cb.last_model
elif model_type == 'best':
model = self.cb.best_model
elif model_type == 'test':
model = self.model
elif model_type == 'bootstrap':
model = self.model
test_acc, test_loss = 0., 0.
batch_val_counter = 0
y_hat_auc, label_auc = [], []
device = self.device
with torch.no_grad():
model.eval()
# validation loop
for _, (inputs, labels) in enumerate(test_dataloader):
if isinstance(inputs, dict):
for key in ['CC', 'MLO']:
inputs[key] = inputs[key].to(device)
labels = Variable(labels.to(device))
else:
inputs = Variable(inputs.to(device))
labels = Variable(labels.to(device))
outputs = model(inputs) # forward pass
loss = self.loss_criterion(outputs, labels) # compute loss
test_loss += loss.item() * labels.size(0)
# calculate acc
test_acc += calc_acc(outputs, labels).item()
batch_val_counter += labels.size(0)
# Store auc for malignant
label_auc = np.append(label_auc, labels.cpu().detach().numpy())
y_hat_auc = np.append(y_hat_auc, torch.softmax(outputs, dim=1)[:, 1].cpu().detach().numpy())
# enter show result mode
if self.mini_batch == 1 and show_results:
print(f'{labels.item()} {torch.softmax(outputs, dim=1)[:, 1].item():.3f}')
# Find average test loss and test accuracy
avg_test_loss = test_loss/batch_val_counter
avg_test_acc = test_acc/batch_val_counter
print(f"Model: {model_type} - Test accuracy : {avg_test_acc:.3f}" +
f" Test loss : {avg_test_loss:.4f}", end='')
# calculate AUC TEST
auc_mal_val = roc_auc_score(label_auc.ravel(), y_hat_auc.ravel())
# print(f' AUC Malignant: {auc_mal_val:.4f}', end='')
if m_positive and n_negative:
auc_final = f'{auc_mal_val:.4f}±{calc_auc_desv(m_positive, n_negative, auc_mal_val):.4f}'
# print(f'±{calc_auc_desv(m_positive, n_negative, auc_mal_val):.4f}')
print(f' AUC Malignant: {auc_final}')
else:
auc_final = f'{auc_mal_val:.4f}'
print(f' AUC Malignant: {auc_final}')
# print()
if self.make_plots:
show_auc(label_auc, y_hat_auc, self.title, show_plt=False)
# return auc_mal_val
return auc_final
# Not fully tested yet (2021-05)
# it seems to be working - maybe integrate in single function as above
# and use kwargs to indicate that it is test-data- aug?
def run_test_data_aug_auc(self, test_dataloader, model_type, **kwargs):
""" Run test from test_dataloader, calculating AUC and ROC curve
--> Using test-data augmentation: rotation 0°, 90°, 180°, 270°
--> All rotated sample will be infered and AUC will consider all.
According to model_type:
if model_type = 'normal' : use last saved model
if model_type = 'best' : use best model
If we are running test iunference only can pass model through kwargs.
Uses: loss function from Trainer
Input: test_dataloader
"""
calc_acc = kwargs.get('accuracy') if kwargs.get('accuracy') else acc
model = kwargs.get('model') if kwargs.get('model') else None
if model is None:
if model_type == 'normal':
model = self.cb.last_model
elif model_type == 'best':
model = self.cb.best_model
elif model_type == 'test':
model = self.model
test_acc, test_loss = 0., 0.
batch_val_counter = 0
y_hat_auc, label_auc = [], []
device = self.device
with torch.no_grad():
model.eval()
# validation loop
for _, (inputs, labels) in enumerate(test_dataloader):
for rot in range(0,4):
# print(rot, inputs.shape)
inputs = torch.rot90(inputs, rot, [2, 3])
# inputs = Variable(inputs.to(device))
# labels = Variable(labels.to(device))
# print(counter, rot, inputs.shape)
inputs = Variable(inputs.to(device))
labels = Variable(labels.to(device))
# img = inputs.cpu().detach().numpy()
# img = img.transpose(0,2,3,1)
# print(img[0, :, :, 0:3].shape)
# cv2.imwrite('thrash/test-aug_'+str(rot)+'.png', img[0, :, :, 0:3]*65535)
outputs = model(inputs) # forward pass
loss = self.loss_criterion(outputs, labels) # compute loss
test_loss += loss.item() * labels.size(0)
# calculate acc
test_acc += calc_acc(outputs, labels).item()
batch_val_counter += labels.size(0)
# Store auc for malignant
label_auc = np.append(label_auc, labels.cpu().detach().numpy())
y_hat_auc = np.append(y_hat_auc, torch.softmax(outputs, dim=1)[:, 1].cpu().detach().numpy())
# enter show result mode
if self.mini_batch == 1:
print(f'{labels.item()} {torch.softmax(outputs, dim=1)[:, 1].item():.3f}')
print('batch_val_counter ', batch_val_counter)
# Find average test loss and test accuracy
avg_test_loss = test_loss/batch_val_counter
avg_test_acc = test_acc/batch_val_counter
print(f"Model: {model_type} - Test accuracy : {avg_test_acc:.3f}" +
f" Test loss : {avg_test_loss:.4f}", end='')
# calculate AUC TEST
auc_mal_val = roc_auc_score(label_auc.ravel(), y_hat_auc.ravel())
print(f' AUC Malignant: {auc_mal_val:.4f}')
if self.make_plots:
show_auc(label_auc, y_hat_auc, self.title, show_plt=False)
return auc_mal_val
| 42.005725
| 159
| 0.564445
|
from matplotlib.pyplot import show
import torch
from torch.autograd import Variable
from torch.cuda.amp import GradScaler, autocast
import numpy as np
from sklearn.metrics import roc_auc_score
from callbacks.cb_handler import CallbackHandler
from callbacks.cb_base import BaseCB
from callbacks.cb_lr_patch_clf import LR_SchedCB_patch
from callbacks.cb_lr_full_clf import LR_SchedCB_full
from callbacks.cb_lr_2views_clf import LR_SchedCB_2views
from callbacks.cb_lr_w_cyc_cos import LR_SchedCB_W_Cyc_Cos
from callbacks.cb_lr_w_cos import LR_SchedCB_W_Cos
from callbacks.cb_auc import AUC_CB
from util.util import show_auc, calc_auc_desv
parallel = False
import cv2
def acc(y_hat, labels):
if len(y_hat) > 1 and parallel:
y_hat = torch.cat(y_hat)
return (torch.argmax(y_hat, dim=1) == labels).float().sum()
class Trainer():
def __init__(self, model, train_dataloader, val_dataloader,
loss_criterion, optimizer, optimizer_args,
device, config):
self.model = model
self.device = device
self.loss_criterion = loss_criterion
self.epochs = int(config['num_epochs']) if 'num_epochs' in config else 10
self.mini_batch = int(config['batch_size']) if 'batch_size' in config else 1
self.first_epoch = int(config['start_epoch']) if 'start_epoch' in config else 1
self.stable_metric = int(config['stable_metric']) if 'stable_metric' in config else False
self.name = config['name'] if 'name' in config else 'default'
self.title = config['title'] if 'title' in config else 'Classifier'
self.features = config['features'] if 'features' in config else []
self.make_plots = config['make_plots'] if 'make_plots' in config else True
if train_dataloader:
self.train_dataloader = train_dataloader
else:
return
self.train_dataloader = train_dataloader
self.val_dataloader = val_dataloader
self.optimizer = optimizer
self.optimizer_args = optimizer_args
print(self.title)
callbacks = [BaseCB(self.name, self.title, config)]
for feat in self.features:
if feat == 'auc':
callbacks.append(AUC_CB(self.name, config))
if feat == 'lr_step_full':
callbacks.append(LR_SchedCB_full())
if feat == 'lr_step_patch':
callbacks.append(LR_SchedCB_patch())
if feat == 'lr_step_2views':
callbacks.append(LR_SchedCB_2views())
if feat == 'lr_warmup_cos':
callbacks.append(LR_SchedCB_W_Cos())
if feat == 'lr_warmup_cyc_cos':
callbacks.append(LR_SchedCB_W_Cyc_Cos())
if feat == 'LR_SchedCB_W_Cos':
callbacks.append(LR_SchedCB_W_Cos())
self.cb = CallbackHandler(callbacks)
def train_and_validate(self, **kwargs):
calc_acc = kwargs.get('accuracy') if kwargs.get('accuracy') else acc
input_dict = kwargs.get('input_dict') if kwargs.get('input_dict') else []
if not self.cb.begin_train_val(self.epochs, self.model, self.train_dataloader,
self.val_dataloader, self.mini_batch, self.optimizer):
return
self.cb.update_loss(self.loss_criterion, calc_acc)
device = self.device
for epoch in range(self.first_epoch, self.epochs+1):
self.model.train()
train_loss, train_acc = 0.0, 0.0
val_loss, val_acc = 0.0, 0.0
if not self.cb.begin_epoch(epoch): return
optim = self.cb.update_LR(epoch, self.model, self.optimizer, self.optimizer_args)
if optim: self.optimizer = optim
for _, (inputs, labels) in enumerate(self.train_dataloader):
if isinstance(inputs, dict):
for key in input_dict:
inputs[key] = inputs[key].to(device)
else:
inputs = Variable(inputs.to(device))
labels = Variable(labels.to(device))
res = self.cb.begin_batch(inputs, labels)
if res: inputs, labels, self.loss_criterion, calc_acc = res
self.optimizer.zero_grad()
outputs = self.model(inputs)
loss = self.loss_criterion(outputs, labels)
if parallel:
loss = loss.mean()
loss.backward()
self.optimizer.step()
train_loss += loss.item() * labels.size(0)
train_acc += calc_acc(outputs, labels).item()
self.cb.after_step(labels.size(0), labels, outputs)
with torch.no_grad():
self.model.eval()
self.cb.begin_val()
for _, (inputs, labels) in enumerate(self.val_dataloader):
if isinstance(inputs, dict):
for key in input_dict:
inputs[key] = inputs[key].to(device)
else:
inputs = Variable(inputs.to(device))
labels = Variable(labels.to(device))
outputs = self.model(inputs)
loss = self.loss_criterion(outputs, labels)
if parallel:
loss = loss.mean()
val_loss += loss.item() * labels.size(0)
val_acc += calc_acc(outputs, labels).item()
self.cb.after_step_val(labels.size(0), labels, outputs)
self.cb.after_epoch(self.model, train_acc, train_loss, val_acc, val_loss)
self.cb.after_train_val()
return self.model
def train_and_validate_amp(self, **kwargs):
assert torch.__version__ >= '1.6.0', "[Mixed precision] Please use PyTorch 1.6.0+"
print('Using AMP')
calc_acc = kwargs.get('accuracy') if kwargs.get('accuracy') else acc
input_dict = kwargs.get('input_dict') if kwargs.get('input_dict') else []
if not self.cb.begin_train_val(self.epochs, self.model, self.train_dataloader,
self.val_dataloader, self.mini_batch, self.optimizer):
return
scaler = GradScaler()
device = self.device
epoch = self.first_epoch
last_epoch = self.epochs
while epoch <= last_epoch:
self.model.train()
train_loss, train_acc = 0.0, 0.0
val_loss, val_acc = 0.0, 0.0
if not self.cb.begin_epoch(epoch): return
optim = self.cb.update_LR(epoch, self.model, self.optimizer, self.optimizer_args)
if optim: self.optimizer = optim
for _, (inputs, labels) in enumerate(self.train_dataloader):
if isinstance(inputs, dict):
for key in input_dict:
inputs[key] = inputs[key].to(device)
else:
inputs = Variable(inputs.to(device))
labels = Variable(labels.to(device))
self.optimizer.zero_grad()
with autocast():
outputs = self.model(inputs)
loss = self.loss_criterion(outputs, labels)
if parallel:
loss = loss.mean()
scaler.scale(loss).backward()
scaler.step(self.optimizer)
scaler.update()
train_loss += loss.item() * labels.size(0)
train_acc += calc_acc(outputs, labels).item()
self.cb.after_step(labels.size(0), labels, outputs)
with torch.no_grad():
self.model.eval()
for _, (inputs, labels) in enumerate(self.val_dataloader):
if isinstance(inputs, dict):
for key in input_dict:
inputs[key] = inputs[key].to(device)
else:
inputs = Variable(inputs.to(device))
labels = Variable(labels.to(device))
outputs = self.model(inputs)
loss = self.loss_criterion(outputs, labels)
if parallel:
loss = loss.mean()
val_loss += loss.item() * labels.size(0)
val_acc += calc_acc(outputs, labels).item()
self.cb.after_step_val(labels.size(0), labels, outputs)
self.cb.after_epoch(self.model, train_acc, train_loss, val_acc, val_loss)
epoch += 1
last_epoch = self.epochs if not self.stable_metric else min(self.epochs, self.cb.best_metric_epoch[self.cb.metric_name[-1]] + self.stable_metric)
self.cb.after_train_val()
values = [self.cb.best_metric, self.cb.best_metric_epoch, self.cb.elapsed_mins,
self.cb.metric_name, self.cb.loss_plot, self.cb.metric_plot,
self.cb.best_model_file]
return values
def run_test(self, test_dataloader, model_type, **kwargs):
calc_acc = kwargs.get('accuracy') if kwargs.get('accuracy') else acc
quiet = kwargs.get('quiet') if kwargs.get('quiet') else False
if model_type == 'normal':
model = self.cb.last_model
elif model_type == 'best':
model = self.cb.best_model
elif model_type == 'bootstrap':
model = self.model
test_acc, test_loss = 0., 0.
batch_val_counter = 0
device = self.device
with torch.no_grad():
model.eval()
for _, (inputs, labels) in enumerate(test_dataloader):
if isinstance(inputs, dict):
for key in ['CC', 'MLO']:
inputs[key] = inputs[key].to(device)
labels = Variable(labels.to(device))
else:
inputs = Variable(inputs.to(device))
labels = Variable(labels.to(device))
outputs = model(inputs)
loss = self.loss_criterion(outputs, labels)
if parallel:
loss = loss.mean()
test_loss += loss.item() * labels.size(0)
test_acc += calc_acc(outputs, labels).item()
batch_val_counter += labels.size(0)
avg_test_loss = test_loss/batch_val_counter
avg_test_acc = test_acc/batch_val_counter
if not quiet:
print(f'Model: {model_type} - Test accuracy : {avg_test_acc:.5f}' +
f' Test loss : {avg_test_loss:.5f}')
return avg_test_acc
def run_test_auc(self, test_dataloader, model_type, **kwargs):
calc_acc = kwargs.get('accuracy') if kwargs.get('accuracy') else acc
model = kwargs.get('model') if kwargs.get('model') else None
show_results = kwargs.get('show_results') if kwargs.get('show_results') else False
m_positive = kwargs.get('m') if kwargs.get('m') else False
n_negative = kwargs.get('n') if kwargs.get('n') else False
if model is None:
if model_type == 'normal':
model = self.cb.last_model
elif model_type == 'best':
model = self.cb.best_model
elif model_type == 'test':
model = self.model
elif model_type == 'bootstrap':
model = self.model
test_acc, test_loss = 0., 0.
batch_val_counter = 0
y_hat_auc, label_auc = [], []
device = self.device
with torch.no_grad():
model.eval()
for _, (inputs, labels) in enumerate(test_dataloader):
if isinstance(inputs, dict):
for key in ['CC', 'MLO']:
inputs[key] = inputs[key].to(device)
labels = Variable(labels.to(device))
else:
inputs = Variable(inputs.to(device))
labels = Variable(labels.to(device))
outputs = model(inputs)
loss = self.loss_criterion(outputs, labels)
test_loss += loss.item() * labels.size(0)
test_acc += calc_acc(outputs, labels).item()
batch_val_counter += labels.size(0)
label_auc = np.append(label_auc, labels.cpu().detach().numpy())
y_hat_auc = np.append(y_hat_auc, torch.softmax(outputs, dim=1)[:, 1].cpu().detach().numpy())
if self.mini_batch == 1 and show_results:
print(f'{labels.item()} {torch.softmax(outputs, dim=1)[:, 1].item():.3f}')
avg_test_loss = test_loss/batch_val_counter
avg_test_acc = test_acc/batch_val_counter
print(f"Model: {model_type} - Test accuracy : {avg_test_acc:.3f}" +
f" Test loss : {avg_test_loss:.4f}", end='')
auc_mal_val = roc_auc_score(label_auc.ravel(), y_hat_auc.ravel())
if m_positive and n_negative:
auc_final = f'{auc_mal_val:.4f}±{calc_auc_desv(m_positive, n_negative, auc_mal_val):.4f}'
print(f' AUC Malignant: {auc_final}')
else:
auc_final = f'{auc_mal_val:.4f}'
print(f' AUC Malignant: {auc_final}')
if self.make_plots:
show_auc(label_auc, y_hat_auc, self.title, show_plt=False)
return auc_final
def run_test_data_aug_auc(self, test_dataloader, model_type, **kwargs):
calc_acc = kwargs.get('accuracy') if kwargs.get('accuracy') else acc
model = kwargs.get('model') if kwargs.get('model') else None
if model is None:
if model_type == 'normal':
model = self.cb.last_model
elif model_type == 'best':
model = self.cb.best_model
elif model_type == 'test':
model = self.model
test_acc, test_loss = 0., 0.
batch_val_counter = 0
y_hat_auc, label_auc = [], []
device = self.device
with torch.no_grad():
model.eval()
for _, (inputs, labels) in enumerate(test_dataloader):
for rot in range(0,4):
inputs = torch.rot90(inputs, rot, [2, 3])
inputs = Variable(inputs.to(device))
labels = Variable(labels.to(device))
outputs = model(inputs)
loss = self.loss_criterion(outputs, labels)
test_loss += loss.item() * labels.size(0)
test_acc += calc_acc(outputs, labels).item()
batch_val_counter += labels.size(0)
label_auc = np.append(label_auc, labels.cpu().detach().numpy())
y_hat_auc = np.append(y_hat_auc, torch.softmax(outputs, dim=1)[:, 1].cpu().detach().numpy())
if self.mini_batch == 1:
print(f'{labels.item()} {torch.softmax(outputs, dim=1)[:, 1].item():.3f}')
print('batch_val_counter ', batch_val_counter)
avg_test_loss = test_loss/batch_val_counter
avg_test_acc = test_acc/batch_val_counter
print(f"Model: {model_type} - Test accuracy : {avg_test_acc:.3f}" +
f" Test loss : {avg_test_loss:.4f}", end='')
auc_mal_val = roc_auc_score(label_auc.ravel(), y_hat_auc.ravel())
print(f' AUC Malignant: {auc_mal_val:.4f}')
if self.make_plots:
show_auc(label_auc, y_hat_auc, self.title, show_plt=False)
return auc_mal_val
| true
| true
|
f718a6e4efe0bc6650e570e12bb690e1b246fd8d
| 315
|
py
|
Python
|
data.py
|
thIYan-EsWar/Machine-Learning-Breast-Cancer-Prediction
|
349e6be13476dcfb602ab1e6f812bc464a7affc3
|
[
"Apache-2.0"
] | null | null | null |
data.py
|
thIYan-EsWar/Machine-Learning-Breast-Cancer-Prediction
|
349e6be13476dcfb602ab1e6f812bc464a7affc3
|
[
"Apache-2.0"
] | null | null | null |
data.py
|
thIYan-EsWar/Machine-Learning-Breast-Cancer-Prediction
|
349e6be13476dcfb602ab1e6f812bc464a7affc3
|
[
"Apache-2.0"
] | null | null | null |
from random import shuffle, sample

# Shuffle the dataset and split it into train/test partitions.
# sample(contents, len(contents)) returns a shuffled copy of the lines.
with open('data.txt', 'r') as f:
    contents = f.readlines()
contents = sample(contents, len(contents))

TRAIN_SIZE = 601  # number of shuffled records that go to the training split

# writelines instead of a side-effecting list comprehension
with open('train_data.txt', 'w') as f:
    f.writelines(contents[:TRAIN_SIZE])
with open('test_data.txt', 'w') as f:
    f.writelines(contents[TRAIN_SIZE:])
| 39.375
| 50
| 0.698413
|
from random import shuffle, sample
with open('data.txt', 'r') as f:
contents = f.readlines()
contents = sample(contents, len(contents))
with open('train_data.txt', 'w') as f:
[f.write(content) for content in contents[: 601]]
with open('test_data.txt', 'w') as f:
[f.write(content) for content in contents[601:]]
| true
| true
|
f718a8aa9f9b0c450e9a61914792a726a1d423d4
| 13,080
|
py
|
Python
|
tests/templates/test_subroutines/test_qmc.py
|
QDaria/pennylane
|
5a28983fc7bd950cde8a4014e54261fef4b54293
|
[
"Apache-2.0"
] | null | null | null |
tests/templates/test_subroutines/test_qmc.py
|
QDaria/pennylane
|
5a28983fc7bd950cde8a4014e54261fef4b54293
|
[
"Apache-2.0"
] | null | null | null |
tests/templates/test_subroutines/test_qmc.py
|
QDaria/pennylane
|
5a28983fc7bd950cde8a4014e54261fef4b54293
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2018-2021 Xanadu Quantum Technologies Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
import pytest
from scipy.stats import norm
import pennylane as qml
from pennylane.templates.subroutines.qmc import (
QuantumMonteCarlo,
_make_V,
_make_Z,
func_to_unitary,
make_Q,
probs_to_unitary,
)
from pennylane.wires import Wires
class TestProbsToUnitary:
    """Unit tests for the probs_to_unitary function."""

    def test_invalid_distribution_sum_to_not_one(self):
        """A distribution whose entries do not sum to one must be rejected."""
        with pytest.raises(ValueError, match="A valid probability distribution of non-negative"):
            probs_to_unitary(np.ones(4))

    def test_invalid_distribution_negative(self):
        """A distribution containing a negative entry must be rejected."""
        with pytest.raises(ValueError, match="A valid probability distribution of non-negative"):
            probs_to_unitary([2, 0, 0, -1])

    ps = [
        [0.46085261032920616, 0.5391473896707938],
        [0.2111821738452515, 0.4235979103670337, 0.36521991578771484],
        [0.3167916924190049, 0.2651843704361695, 0.1871934980886578, 0.23083043905616774],
        [0.8123242419241959, 0.07990911578859018, 0.07983919018902215, 0.027927452098191852],
    ]

    @pytest.mark.parametrize("p", ps)
    def test_fixed_examples(self, p):
        """For fixed distributions: the first column of the returned matrix is
        sqrt(p), and the matrix is orthogonal (U @ U.T == U.T @ U == I)."""
        U = probs_to_unitary(p)
        identity = np.eye(len(U))
        assert np.allclose(U[:, 0], np.sqrt(p))
        assert np.allclose(U @ U.T, identity)
        assert np.allclose(U.T @ U, identity)
class TestFuncToUnitary:
    """Unit tests for the func_to_unitary function."""

    def test_not_bounded_func(self):
        """Functions that escape the [0, 1] interval must be rejected."""
        with pytest.raises(ValueError, match="func must be bounded within the interval"):
            func_to_unitary(np.sin, 8)

    def test_example(self):
        """For a fixed example, check the action on basis states and that the
        returned matrix is orthogonal (r @ r.T == r.T @ r == I)."""
        M = 8
        func = lambda i: np.sin(i) ** 2
        r = func_to_unitary(func, M)

        for i in range(M):
            # The control qubit is the last wire, so the rows with control=0
            # are every other row of the matrix.
            row = r[::2][i]
            amp_ctrl_0 = row[::2][i]   # amplitude of |i>|0>
            amp_ctrl_1 = row[1::2][i]  # amplitude of |i>|1>
            assert np.allclose(amp_ctrl_0, np.sqrt(1 - func(i)))
            assert np.allclose(amp_ctrl_1, np.sqrt(func(i)))

        identity = np.eye(2 * M)
        assert np.allclose(r @ r.T, identity)
        assert np.allclose(r.T @ r, identity)

    def test_example_with_pl(self):
        """Inside a PennyLane circuit, the |1> probability of the final control
        wire must encode the function value."""
        wires = 3
        M = 2**wires
        func = lambda i: np.sin(i) ** 2
        r = func_to_unitary(func, M)

        dev = qml.device("default.qubit", wires=(wires + 1))

        @qml.qnode(dev)
        def apply_r(input_state):
            qml.QubitStateVector(input_state, wires=range(wires))
            qml.QubitUnitary(r, wires=range(wires + 1))
            return qml.probs(wires)

        for i, basis_state in enumerate(np.eye(M)):
            assert np.allclose(apply_r(basis_state)[1], func(i))
def test_V():
    """_make_V must return -I with +1 at the odd diagonal positions."""
    dim = 4
    expected = np.diag([-1.0, 1.0, -1.0, 1.0])
    assert np.allclose(_make_V(dim), expected)
def test_Z():
    """_make_Z must return -I with the (0, 0) entry flipped to +1."""
    dim = 4
    expected = np.diag([1.0, -1.0, -1.0, -1.0])
    assert np.allclose(_make_Z(dim), expected)
def test_Q():
    """Test for the make_Q function using a fixed example"""
    # A: a fixed 2x2 unitary (stand-in for the distribution-loading unitary).
    A = np.array(
        [
            [0.85358423 - 0.32239299j, -0.12753659 + 0.38883306j],
            [0.39148136 - 0.11915985j, 0.34064316 - 0.84646648j],
        ]
    )
    # R: a fixed 4x4 unitary (stand-in for the function-encoding unitary).
    R = np.array(
        [
            [
                0.45885289 + 0.03972856j,
                0.2798685 - 0.05981098j,
                0.64514642 - 0.51555038j,
                0.11015177 - 0.10877695j,
            ],
            [
                0.19407005 - 0.35483005j,
                0.29756077 + 0.80153453j,
                -0.19147104 + 0.0507968j,
                0.15553799 - 0.20493631j,
            ],
            [
                0.35083011 - 0.20807392j,
                -0.27602911 - 0.13934692j,
                0.11874165 + 0.34532609j,
                -0.45945242 - 0.62734969j,
            ],
            [
                -0.11379919 - 0.66706921j,
                -0.21120956 - 0.2165113j,
                0.30133006 + 0.23367271j,
                0.54593491 + 0.08446372j,
            ],
        ]
    )
    # Expected value of make_Q(A, R), precomputed for this fixed (A, R) pair.
    Q_expected = np.array(
        [
            [
                -0.46513201 - 1.38777878e-17j,
                -0.13035515 - 2.23341802e-01j,
                -0.74047856 + 7.08652160e-02j,
                -0.0990036 - 3.91977176e-01j,
            ],
            [
                0.13035515 - 2.23341802e-01j,
                0.46494302 + 0.00000000e00j,
                0.05507901 - 1.19182067e-01j,
                -0.80370146 - 2.31904873e-01j,
            ],
            [
                -0.74047856 - 7.08652160e-02j,
                -0.05507901 - 1.19182067e-01j,
                0.62233412 - 2.77555756e-17j,
                -0.0310774 - 2.02894077e-01j,
            ],
            [
                0.0990036 - 3.91977176e-01j,
                -0.80370146 + 2.31904873e-01j,
                0.0310774 - 2.02894077e-01j,
                -0.30774091 + 2.77555756e-17j,
            ],
        ]
    )
    Q = make_Q(A, R)
    assert np.allclose(Q, Q_expected)
class TestQuantumMonteCarlo:
"""Tests for the QuantumMonteCarlo template"""
@staticmethod
def func(i):
return np.sin(i) ** 2
def test_non_flat(self):
"""Test if a ValueError is raised when a non-flat array is input"""
p = np.ones((4, 1)) / 4
with pytest.raises(ValueError, match="The probability distribution must be specified as a"):
QuantumMonteCarlo(p, self.func, range(3), range(3, 5))
def test_wrong_size_p(self):
"""Test if a ValueError is raised when a probability distribution is passed whose length
cannot be mapped to qubits"""
p = np.ones(5) / 5
with pytest.raises(ValueError, match="The probability distribution must have a length"):
QuantumMonteCarlo(p, self.func, range(3), range(3, 5))
def test_unexpected_target_wires_number(self):
"""Test if a ValueError is raised when the number of target wires is incompatible with the
expected number of target wires inferred from the length of the input probability
distribution"""
p = np.ones(4) / 4
with pytest.raises(
ValueError,
match="The probability distribution of dimension 4 requires" " 3 target wires",
):
QuantumMonteCarlo(p, self.func, range(4), range(4, 6))
def test_expected_circuit(self):
"""Test if the circuit applied when using the QMC template is the same as the expected
circuit for a fixed example"""
p = np.ones(4) / 4
target_wires, estimation_wires = Wires(range(3)), Wires(range(3, 5))
op = QuantumMonteCarlo(p, self.func, target_wires, estimation_wires)
tape = op.expand()
# Do expansion in two steps to avoid also decomposing the first QubitUnitary
queue_before_qpe = tape.operations[:2]
# 2-qubit decomposition has 10 operations, and after is a 3-qubit gate so start at 11
queue_after_qpe = tape.expand().operations[11:]
A = probs_to_unitary(p)
R = func_to_unitary(self.func, 4)
assert len(queue_before_qpe) == 2
assert queue_before_qpe[0].name == "QubitUnitary"
assert queue_before_qpe[1].name == "QubitUnitary"
assert np.allclose(queue_before_qpe[0].matrix, A)
assert np.allclose(queue_before_qpe[1].matrix, R)
assert queue_before_qpe[0].wires == target_wires[:-1]
assert queue_before_qpe[1].wires == target_wires
Q = make_Q(A, R)
with qml.tape.QuantumTape() as qpe_tape:
qml.QuantumPhaseEstimation(Q, target_wires, estimation_wires)
qpe_tape = qpe_tape.expand()
assert len(queue_after_qpe) == len(qpe_tape.operations)
assert all(o1.name == o2.name for o1, o2 in zip(queue_after_qpe, qpe_tape.operations))
assert all(
np.allclose(o1.matrix, o2.matrix)
for o1, o2 in zip(queue_after_qpe, qpe_tape.operations)
)
assert all(o1.wires == o2.wires for o1, o2 in zip(queue_after_qpe, qpe_tape.operations))
def test_expected_value(self):
    """Test that the QuantumMonteCarlo template can correctly estimate the expectation value
    following the example in the usage details"""
    m = 5
    M = 2**m

    # Discretize a standard normal distribution on [-pi, pi] over M points
    xmax = np.pi
    xs = np.linspace(-xmax, xmax, M)

    probs = np.array([norm().pdf(x) for x in xs])
    probs /= np.sum(probs)

    func = lambda i: np.cos(xs[i]) ** 2

    estimates = []

    # Increase the number of estimation wires n; the estimate should improve with n
    for n in range(4, 11):
        N = 2**n
        target_wires = range(m + 1)
        estimation_wires = range(m + 1, n + m + 1)

        dev = qml.device("default.qubit", wires=(n + m + 1))

        @qml.qnode(dev)
        def circuit():
            qml.QuantumMonteCarlo(
                probs, func, target_wires=target_wires, estimation_wires=estimation_wires
            )
            return qml.probs(estimation_wires)

        # Recover the estimated expectation value from the most likely phase
        phase_estimated = np.argmax(circuit()[: int(N / 2)]) / N
        mu_estimated = (1 - np.cos(np.pi * phase_estimated)) / 2
        estimates.append(mu_estimated)

    exact = 0.432332358381693654

    # Check that the error is monotonically decreasing
    for i in range(len(estimates) - 1):
        err1 = np.abs(estimates[i] - exact)
        err2 = np.abs(estimates[i + 1] - exact)
        assert err1 >= err2

    assert np.allclose(estimates[-1], exact, rtol=1e-3)
def test_expected_value_custom_wires(self):
    """Test that the QuantumMonteCarlo template can correctly estimate the expectation value
    following the example in the usage details when the wires have custom labels"""
    m = 5
    M = 2**m

    # Same normal-distribution setup as test_expected_value, fixed at n = 10
    xmax = np.pi
    xs = np.linspace(-xmax, xmax, M)

    probs = np.array([norm().pdf(x) for x in xs])
    probs /= np.sum(probs)

    func = lambda i: np.cos(xs[i]) ** 2

    n = 10
    N = 2**n

    # Mixed-type wire labels (ints, floats, strings) to exercise custom labelling
    target_wires = [0, "a", -1.1, -10, "bbb", 1000]
    estimation_wires = ["bob", -3, 42, "penny", "lane", 247, "straw", "berry", 5.5, 6.6]

    dev = qml.device("default.qubit", wires=target_wires + estimation_wires)

    @qml.qnode(dev)
    def circuit():
        qml.QuantumMonteCarlo(
            probs, func, target_wires=target_wires, estimation_wires=estimation_wires
        )
        return qml.probs(estimation_wires)

    phase_estimated = np.argmax(circuit()[: int(N / 2)]) / N
    mu_estimated = (1 - np.cos(np.pi * phase_estimated)) / 2

    exact = 0.432332358381693654
    assert np.allclose(mu_estimated, exact, rtol=1e-3)
def test_id(self):
    """Check that a user-supplied ``id`` is stored on the template instance."""
    grid = np.linspace(-np.pi, np.pi, 2**5)

    distribution = np.array([norm().pdf(point) for point in grid])
    distribution /= np.sum(distribution)

    def integrand(i):
        return np.cos(grid[i]) ** 2

    targets = [0, "a", -1.1, -10, "bbb", 1000]
    estimators = ["bob", -3, 42, "penny", "lane", 247, "straw", "berry", 5.5, 6.6]

    template = qml.QuantumMonteCarlo(
        distribution, integrand, target_wires=targets, estimation_wires=estimators, id="a"
    )

    assert template.id == "a"
| 34.603175
| 100
| 0.58815
|
import numpy as np
import pytest
from scipy.stats import norm
import pennylane as qml
from pennylane.templates.subroutines.qmc import (
QuantumMonteCarlo,
_make_V,
_make_Z,
func_to_unitary,
make_Q,
probs_to_unitary,
)
from pennylane.wires import Wires
class TestProbsToUnitary:
    """Tests for the ``probs_to_unitary`` helper."""

    def test_invalid_distribution_sum_to_not_one(self):
        """A distribution that does not sum to one must be rejected."""
        p = np.ones(4)
        with pytest.raises(ValueError, match="A valid probability distribution of non-negative"):
            probs_to_unitary(p)

    def test_invalid_distribution_negative(self):
        """A distribution containing negative entries must be rejected."""
        p = [2, 0, 0, -1]
        with pytest.raises(ValueError, match="A valid probability distribution of non-negative"):
            probs_to_unitary(p)

    # Fixed example distributions of lengths 2, 3, 4 and 4 used for parametrization below
    ps = [
        [0.46085261032920616, 0.5391473896707938],
        [0.2111821738452515, 0.4235979103670337, 0.36521991578771484],
        [0.3167916924190049, 0.2651843704361695, 0.1871934980886578, 0.23083043905616774],
        [0.8123242419241959, 0.07990911578859018, 0.07983919018902215, 0.027927452098191852],
    ]

    @pytest.mark.parametrize("p", ps)
    def test_fixed_examples(self, p):
        """The returned matrix must embed sqrt(p) in its first column and be orthogonal."""
        unitary = probs_to_unitary(p)
        assert np.allclose(np.sqrt(p), unitary[:, 0])
        assert np.allclose(unitary @ unitary.T, np.eye(len(unitary)))
        assert np.allclose(unitary.T @ unitary, np.eye(len(unitary)))
class TestFuncToUnitary:
    """Tests for the ``func_to_unitary`` helper."""

    def test_not_bounded_func(self):
        """A function not bounded within [0, 1] must be rejected."""
        func = lambda i: np.sin(i)

        with pytest.raises(ValueError, match="func must be bounded within the interval"):
            func_to_unitary(func, 8)

    def test_example(self):
        """The matrix must encode sqrt(1 - f(i)) and sqrt(f(i)) amplitudes and be orthogonal."""
        M = 8
        func = lambda i: np.sin(i) ** 2

        r = func_to_unitary(func, M)

        for i in range(M):
            # Input state |i>|0> maps to sqrt(1 - f(i))|i>|0> + sqrt(f(i))|i>|1>
            output_state = r[::2][i]
            output_0 = output_state[::2]
            output_1 = output_state[1::2]
            assert np.allclose(output_0[i], np.sqrt(1 - func(i)))
            assert np.allclose(output_1[i], np.sqrt(func(i)))

        assert np.allclose(r @ r.T, np.eye(2 * M))
        assert np.allclose(r.T @ r, np.eye(2 * M))

    def test_example_with_pl(self):
        """Applying the unitary in a PennyLane circuit must give probability f(i) on the ancilla."""
        wires = 3
        M = 2**wires
        func = lambda i: np.sin(i) ** 2

        r = func_to_unitary(func, M)

        dev = qml.device("default.qubit", wires=(wires + 1))

        @qml.qnode(dev)
        def apply_r(input_state):
            qml.QubitStateVector(input_state, wires=range(wires))
            qml.QubitUnitary(r, wires=range(wires + 1))
            return qml.probs(wires)

        for i, state in enumerate(np.eye(M)):
            # Probability of measuring |1> on the last wire equals func(i)
            p = apply_r(state)[1]
            assert np.allclose(p, func(i))
def test_V():
    """Check that ``_make_V`` returns -identity with +1 at the odd diagonal entries."""
    dim = 4

    expected = -np.eye(dim)
    for index in (1, 3):
        expected[index, index] = 1

    assert np.allclose(_make_V(dim), expected)
def test_Z():
    """Check that ``_make_Z`` returns the reflection about the first basis state."""
    dim = 4

    # diag(1, -1, -1, -1): +1 on |0><0|, -1 elsewhere on the diagonal
    expected = np.diag([1.0] + [-1.0] * (dim - 1))

    assert np.allclose(_make_Z(dim), expected)
def test_Q():
    """Check that ``make_Q`` reproduces a pre-computed Grover-type operator for fixed
    example unitaries A and R."""
    # Fixed 2x2 state-preparation unitary A
    A = np.array(
        [
            [0.85358423 - 0.32239299j, -0.12753659 + 0.38883306j],
            [0.39148136 - 0.11915985j, 0.34064316 - 0.84646648j],
        ]
    )
    # Fixed 4x4 function-encoding unitary R
    R = np.array(
        [
            [
                0.45885289 + 0.03972856j,
                0.2798685 - 0.05981098j,
                0.64514642 - 0.51555038j,
                0.11015177 - 0.10877695j,
            ],
            [
                0.19407005 - 0.35483005j,
                0.29756077 + 0.80153453j,
                -0.19147104 + 0.0507968j,
                0.15553799 - 0.20493631j,
            ],
            [
                0.35083011 - 0.20807392j,
                -0.27602911 - 0.13934692j,
                0.11874165 + 0.34532609j,
                -0.45945242 - 0.62734969j,
            ],
            [
                -0.11379919 - 0.66706921j,
                -0.21120956 - 0.2165113j,
                0.30133006 + 0.23367271j,
                0.54593491 + 0.08446372j,
            ],
        ]
    )
    # Expected Q = make_Q(A, R), computed ahead of time for this fixed example
    Q_expected = np.array(
        [
            [
                -0.46513201 - 1.38777878e-17j,
                -0.13035515 - 2.23341802e-01j,
                -0.74047856 + 7.08652160e-02j,
                -0.0990036 - 3.91977176e-01j,
            ],
            [
                0.13035515 - 2.23341802e-01j,
                0.46494302 + 0.00000000e00j,
                0.05507901 - 1.19182067e-01j,
                -0.80370146 - 2.31904873e-01j,
            ],
            [
                -0.74047856 - 7.08652160e-02j,
                -0.05507901 - 1.19182067e-01j,
                0.62233412 - 2.77555756e-17j,
                -0.0310774 - 2.02894077e-01j,
            ],
            [
                0.0990036 - 3.91977176e-01j,
                -0.80370146 + 2.31904873e-01j,
                0.0310774 - 2.02894077e-01j,
                -0.30774091 + 2.77555756e-17j,
            ],
        ]
    )
    Q = make_Q(A, R)

    assert np.allclose(Q, Q_expected)
class TestQuantumMonteCarlo:
    """Tests for the ``QuantumMonteCarlo`` template."""

    @staticmethod
    def func(i):
        # Shared bounded example function used by several tests below
        return np.sin(i) ** 2

    def test_non_flat(self):
        """A probability distribution that is not one-dimensional must be rejected."""
        p = np.ones((4, 1)) / 4
        with pytest.raises(ValueError, match="The probability distribution must be specified as a"):
            QuantumMonteCarlo(p, self.func, range(3), range(3, 5))

    def test_wrong_size_p(self):
        """A probability distribution whose length is not a power of two must be rejected."""
        p = np.ones(5) / 5
        with pytest.raises(ValueError, match="The probability distribution must have a length"):
            QuantumMonteCarlo(p, self.func, range(3), range(3, 5))

    def test_unexpected_target_wires_number(self):
        """Test if a ValueError is raised when the number of target wires does not match the
        expected number of target wires inferred from the length of the input probability
        distribution"""
        p = np.ones(4) / 4
        with pytest.raises(
            ValueError,
            match="The probability distribution of dimension 4 requires" " 3 target wires",
        ):
            QuantumMonteCarlo(p, self.func, range(4), range(4, 6))

    def test_expected_circuit(self):
        """Test if the circuit applied when using the QMC template is the same as the expected
        circuit for a fixed example"""
        p = np.ones(4) / 4
        target_wires, estimation_wires = Wires(range(3)), Wires(range(3, 5))

        op = QuantumMonteCarlo(p, self.func, target_wires, estimation_wires)
        tape = op.expand()

        # Expand in two steps so the first QubitUnitary gates are not decomposed
        queue_before_qpe = tape.operations[:2]
        # The 2-qubit decomposition has 10 operations, the 3-qubit gate starts at 11
        queue_after_qpe = tape.expand().operations[11:]

        A = probs_to_unitary(p)
        R = func_to_unitary(self.func, 4)

        assert len(queue_before_qpe) == 2
        assert queue_before_qpe[0].name == "QubitUnitary"
        assert queue_before_qpe[1].name == "QubitUnitary"
        assert np.allclose(queue_before_qpe[0].matrix, A)
        assert np.allclose(queue_before_qpe[1].matrix, R)
        assert queue_before_qpe[0].wires == target_wires[:-1]
        assert queue_before_qpe[1].wires == target_wires

        Q = make_Q(A, R)

        with qml.tape.QuantumTape() as qpe_tape:
            qml.QuantumPhaseEstimation(Q, target_wires, estimation_wires)

        qpe_tape = qpe_tape.expand()

        # The remainder of the QMC expansion must match a standalone QPE expansion
        assert len(queue_after_qpe) == len(qpe_tape.operations)
        assert all(o1.name == o2.name for o1, o2 in zip(queue_after_qpe, qpe_tape.operations))
        assert all(
            np.allclose(o1.matrix, o2.matrix)
            for o1, o2 in zip(queue_after_qpe, qpe_tape.operations)
        )
        assert all(o1.wires == o2.wires for o1, o2 in zip(queue_after_qpe, qpe_tape.operations))

    def test_expected_value(self):
        """Test that the QuantumMonteCarlo template can correctly estimate the expectation value
        following the example in the usage details"""
        m = 5
        M = 2**m

        # Discretized standard normal distribution on [-pi, pi]
        xmax = np.pi
        xs = np.linspace(-xmax, xmax, M)

        probs = np.array([norm().pdf(x) for x in xs])
        probs /= np.sum(probs)

        func = lambda i: np.cos(xs[i]) ** 2

        estimates = []

        for n in range(4, 11):
            N = 2**n
            target_wires = range(m + 1)
            estimation_wires = range(m + 1, n + m + 1)

            dev = qml.device("default.qubit", wires=(n + m + 1))

            @qml.qnode(dev)
            def circuit():
                qml.QuantumMonteCarlo(
                    probs, func, target_wires=target_wires, estimation_wires=estimation_wires
                )
                return qml.probs(estimation_wires)

            phase_estimated = np.argmax(circuit()[: int(N / 2)]) / N
            mu_estimated = (1 - np.cos(np.pi * phase_estimated)) / 2
            estimates.append(mu_estimated)

        exact = 0.432332358381693654

        # The estimation error should decrease monotonically with more estimation wires
        for i in range(len(estimates) - 1):
            err1 = np.abs(estimates[i] - exact)
            err2 = np.abs(estimates[i + 1] - exact)
            assert err1 >= err2

        assert np.allclose(estimates[-1], exact, rtol=1e-3)

    def test_expected_value_custom_wires(self):
        """Test that the QuantumMonteCarlo template can correctly estimate the expectation value
        following the example in the usage details when the wires have custom labels"""
        m = 5
        M = 2**m

        xmax = np.pi
        xs = np.linspace(-xmax, xmax, M)

        probs = np.array([norm().pdf(x) for x in xs])
        probs /= np.sum(probs)

        func = lambda i: np.cos(xs[i]) ** 2

        n = 10
        N = 2**n

        # Mixed-type wire labels to exercise custom labelling
        target_wires = [0, "a", -1.1, -10, "bbb", 1000]
        estimation_wires = ["bob", -3, 42, "penny", "lane", 247, "straw", "berry", 5.5, 6.6]

        dev = qml.device("default.qubit", wires=target_wires + estimation_wires)

        @qml.qnode(dev)
        def circuit():
            qml.QuantumMonteCarlo(
                probs, func, target_wires=target_wires, estimation_wires=estimation_wires
            )
            return qml.probs(estimation_wires)

        phase_estimated = np.argmax(circuit()[: int(N / 2)]) / N
        mu_estimated = (1 - np.cos(np.pi * phase_estimated)) / 2

        exact = 0.432332358381693654
        assert np.allclose(mu_estimated, exact, rtol=1e-3)

    def test_id(self):
        """Tests that the id attribute can be set."""
        xs = np.linspace(-np.pi, np.pi, 2**5)
        probs = np.array([norm().pdf(x) for x in xs])
        probs /= np.sum(probs)

        func = lambda i: np.cos(xs[i]) ** 2

        target_wires = [0, "a", -1.1, -10, "bbb", 1000]
        estimation_wires = ["bob", -3, 42, "penny", "lane", 247, "straw", "berry", 5.5, 6.6]

        template = qml.QuantumMonteCarlo(
            probs, func, target_wires=target_wires, estimation_wires=estimation_wires, id="a"
        )

        assert template.id == "a"
| true
| true
|
f718a9f198df99720e7763bd6b2653966accd0a9
| 28,691
|
py
|
Python
|
pages/tests/pages_tests.py
|
odyaka341/django-page-cms
|
eca92673f735f5ad158d5a81b72280705057bf52
|
[
"BSD-3-Clause"
] | null | null | null |
pages/tests/pages_tests.py
|
odyaka341/django-page-cms
|
eca92673f735f5ad158d5a81b72280705057bf52
|
[
"BSD-3-Clause"
] | null | null | null |
pages/tests/pages_tests.py
|
odyaka341/django-page-cms
|
eca92673f735f5ad158d5a81b72280705057bf52
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
"""Django page CMS test suite module"""
import django
from django.conf import settings
from django.test.client import Client
from django.template import Template, RequestContext, TemplateDoesNotExist
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import render_to_response
from pages.models import Page, Content, PageAlias
from pages.tests.testcase import TestCase
class PagesTestCase(TestCase):
"""Django page CMS test suite class"""
def test_01_add_page(self):
    """Smoke-test that the admin 'add page' view renders for a logged-in user."""
    client = Client()
    client.login(username= 'batiste', password='b')

    self.assertEqual(client.get('/admin/pages/page/add/').status_code, 200)
def test_02_create_page(self):
    """Test that a page can be created via the admin."""
    #setattr(settings, "SITE_ID", 2)
    c = Client()
    c.login(username= 'batiste', password='b')
    page_data = self.get_new_page_data()
    response = c.post('/admin/pages/page/add/', page_data)
    # A redirect back to the changelist signals a successful creation
    self.assertRedirects(response, '/admin/pages/page/')
    slug_content = Content.objects.get_content_slug_by_slug(
        page_data['slug']
    )
    assert(slug_content is not None)
    page = slug_content.page
    # Title and slug must round-trip through the admin form
    self.assertEqual(page.title(), page_data['title'])
    self.assertEqual(page.slug(), page_data['slug'])
    self.assertNotEqual(page.last_modification_date, None)
def test_03_slug_collision(self):
    """Test a slug collision."""
    setattr(settings, "PAGE_UNIQUE_SLUG_REQUIRED", True)
    c = Client()
    c.login(username= 'batiste', password='b')
    page_data = self.get_new_page_data()
    response = c.post('/admin/pages/page/add/', page_data)
    self.assertRedirects(response, '/admin/pages/page/')

    # With uniqueness disabled, re-posting the same slug re-renders the form (status 200)
    setattr(settings, "PAGE_UNIQUE_SLUG_REQUIRED", False)
    response = c.post('/admin/pages/page/add/', page_data)
    self.assertEqual(response.status_code, 200)

    # The same slug is allowed on a child page under a different parent
    page1 = Content.objects.get_content_slug_by_slug(page_data['slug']).page
    page_data['position'] = 'first-child'
    page_data['target'] = page1.id
    response = c.post('/admin/pages/page/add/', page_data)
    self.assertRedirects(response, '/admin/pages/page/')
    page2 = Content.objects.get_content_slug_by_slug(page_data['slug']).page
    self.assertNotEqual(page1.id, page2.id)
def test_04_details_view(self):
    """Test the details view"""
    c = Client()
    c.login(username= 'batiste', password='b')
    # With no published page, /pages/ may 404; tolerate a missing 404.html template
    try:
        response = c.get('/pages/')
    except TemplateDoesNotExist, e:
        if e.args != ('404.html',):
            raise
    page_data = self.get_new_page_data()
    page_data['status'] = Page.DRAFT
    response = c.post('/admin/pages/page/add/', page_data)
    # A DRAFT page must still not be visible on the frontend
    try:
        response = c.get('/pages/')
    except TemplateDoesNotExist, e:
        if e.args != ('404.html',):
            raise
    page_data = self.get_new_page_data()
    page_data['status'] = Page.PUBLISHED
    page_data['slug'] = 'test-page-2'
    page_data['template'] = 'pages/index.html'
    response = c.post('/admin/pages/page/add/', page_data)
    self.assertRedirects(response, '/admin/pages/page/')
    # The PUBLISHED page must be reachable at its slug
    response = c.get('/pages/test-page-2/')
    self.assertEqual(response.status_code, 200)
def test_05_edit_page(self):
    """Test that a page can edited via the admin"""
    c = Client()
    c.login(username= 'batiste', password='b')
    page_data = self.get_new_page_data()
    response = c.post('/admin/pages/page/add/', page_data)
    self.assertRedirects(response, '/admin/pages/page/')
    page = Page.objects.all()[0]
    response = c.get('/admin/pages/page/%d/' % page.id)
    self.assertEqual(response.status_code, 200)
    # Post updated title/body through the change form
    page_data['title'] = 'changed title'
    page_data['body'] = 'changed body'
    response = c.post('/admin/pages/page/%d/' % page.id, page_data)
    self.assertRedirects(response, '/admin/pages/page/')
    page = Page.objects.get(id=page.id)
    self.assertEqual(page.title(), 'changed title')
    # The body placeholder content is stored per-language in Content
    body = Content.objects.get_content(page, 'en-us', 'body')
    self.assertEqual(body, 'changed body')
def test_06_site_framework(self):
    """Test the site framework, and test if it's possible to
    disable it"""
    # this is necessary to make the test pass
    from pages import settings as pages_settings
    setattr(pages_settings, "SITE_ID", 2)
    setattr(pages_settings, "PAGE_USE_SITE_ID", True)
    c = Client()
    c.login(username= 'batiste', password='b')
    page_data = self.get_new_page_data()
    page_data["sites"] = [2]
    response = c.post('/admin/pages/page/add/', page_data)
    self.assertRedirects(response, '/admin/pages/page/')
    # The created page must be attached to site 2 only
    page = Content.objects.get_content_slug_by_slug(page_data['slug']).page
    self.assertEqual(page.sites.count(), 1)
    self.assertEqual(page.sites.all()[0].id, 2)
    page_data = self.get_new_page_data()
    page_data["sites"] = [3]
    response = c.post('/admin/pages/page/add/', page_data)
    self.assertRedirects(response, '/admin/pages/page/')
    # we cannot get a slug that doesn't exist
    content = Content.objects.get_content_slug_by_slug("this doesn't exist")
    self.assertEqual(content, None)
    # we cannot get the data posted on another site
    content = Content.objects.get_content_slug_by_slug(page_data['slug'])
    self.assertEqual(content, None)
    setattr(pages_settings, "SITE_ID", 3)
    page = Content.objects.get_content_slug_by_slug(page_data['slug']).page
    self.assertEqual(page.sites.count(), 1)
    self.assertEqual(page.sites.all()[0].id, 3)
    # with param
    self.assertEqual(Page.objects.on_site(2).count(), 1)
    self.assertEqual(Page.objects.on_site(3).count(), 1)
    # without param
    self.assertEqual(Page.objects.on_site().count(), 1)
    setattr(pages_settings, "SITE_ID", 2)
    self.assertEqual(Page.objects.on_site().count(), 1)
    # A page attached to both sites must be counted on each
    page_data = self.get_new_page_data()
    page_data["sites"] = [2, 3]
    response = c.post('/admin/pages/page/add/', page_data)
    self.assertRedirects(response, '/admin/pages/page/')
    self.assertEqual(Page.objects.on_site(3).count(), 2)
    self.assertEqual(Page.objects.on_site(2).count(), 2)
    self.assertEqual(Page.objects.on_site().count(), 2)
    setattr(pages_settings, "PAGE_USE_SITE_ID", False)
    # we should get everything
    self.assertEqual(Page.objects.on_site().count(), 3)
def test_07_languages(self):
    """Test post a page with different languages
    and test that the admin views works correctly."""
    c = Client()
    user = c.login(username= 'batiste', password='b')
    # test that the client language setting is used in add page admin
    c.cookies["django_language"] = 'de'
    response = c.get('/admin/pages/page/add/')
    self.assertContains(response, 'value="de" selected="selected"')
    c.cookies["django_language"] = 'fr-ch'
    response = c.get('/admin/pages/page/add/')
    self.assertContains(response, 'value="fr-ch" selected="selected"')
    page_data = self.get_new_page_data()
    page_data["title"] = 'english title'
    response = c.post('/admin/pages/page/add/', page_data)
    self.assertRedirects(response, '/admin/pages/page/')
    page = Page.objects.all()[0]
    self.assertEqual(page.get_languages(), ['en-us'])
    # this test only works in version superior of 1.0.2
    django_version = django.get_version().rsplit()[0].split('.')
    if len(django_version) > 2:
        major, middle, minor = [int(v) for v in django_version]
    else:
        major, middle = [int(v) for v in django_version]
    if major >= 1 and middle > 0:
        response = c.get('/admin/pages/page/%d/?language=de' % page.id)
        self.assertContains(response, 'value="de" selected="selected"')
    # add a french version of the same page
    page_data["language"] = 'fr-ch'
    page_data["title"] = 'french title'
    response = c.post('/admin/pages/page/%d/' % page.id, page_data)
    self.assertRedirects(response, '/admin/pages/page/')
    #setattr(settings, "PAGE_DEFAULT_LANGUAGE", 'en-us')
    # test that the frontend view use the good parameters
    # I cannot find a way of setting the accept-language HTTP
    # header so I used django_language cookie instead
    c = Client()
    c.cookies["django_language"] = 'en-us'
    response = c.get('/pages/')
    self.assertContains(response, 'english title')
    self.assertContains(response, 'lang="en-us"')
    self.assertNotContains(response, 'french title')
    c = Client()
    c.cookies["django_language"] = 'fr-ch'
    response = c.get('/pages/')
    self.assertContains(response, 'french title')
    self.assertContains(response, 'lang="fr-ch"')
    self.assertNotContains(response, 'english title')
    # this should be mapped to the fr-ch content
    c = Client()
    c.cookies["django_language"] = 'fr-fr'
    response = c.get('/pages/')
    self.assertContains(response, 'french title')
    self.assertContains(response, 'lang="fr-ch"')
def test_08_revision(self):
    """Test that a page can edited several times."""
    c = Client()
    c.login(username= 'batiste', password='b')
    page_data = self.get_new_page_data()
    response = c.post('/admin/pages/page/add/', page_data)
    page = Page.objects.all()[0]
    # Each edit creates a new content revision; the latest one wins
    page_data['body'] = 'changed body'
    response = c.post('/admin/pages/page/%d/' % page.id, page_data)
    self.assertEqual(Content.objects.get_content(page, 'en-us', 'body'), 'changed body')
    page_data['body'] = 'changed body 2'
    response = c.post('/admin/pages/page/%d/' % page.id, page_data)
    self.assertEqual(Content.objects.get_content(page, 'en-us', 'body'), 'changed body 2')
    response = c.get('/pages/')
    self.assertContains(response, 'changed body 2', 1)
    # Disabling revisions must still return the latest stored content
    setattr(settings, "PAGE_CONTENT_REVISION", False)
    self.assertEqual(Content.objects.get_content(page, 'en-us', 'body'), 'changed body 2')
def test_09_placeholder(self):
    """
    Test that the placeholder is correctly displayed in
    the admin
    """
    setattr(settings, "SITE_ID", 2)
    c = Client()
    c.login(username= 'batiste', password='b')
    page_data = self.get_new_page_data()
    # pages/nice.html presumably declares a 'right-column' placeholder — verified below
    page_data['template'] = 'pages/nice.html'
    response = c.post('/admin/pages/page/add/', page_data)
    page = Page.objects.all()[0]
    response = c.get('/admin/pages/page/%d/' % page.id)
    self.assertEqual(response.status_code, 200)
    self.assertContains(response, 'name="right-column"', 1)
def test_10_directory_slug(self):
    """
    Test diretory slugs
    """
    setattr(settings, "PAGE_UNIQUE_SLUG_REQUIRED", False)
    c = Client()
    c.login(username= 'batiste', password='b')
    page_data = self.get_new_page_data()
    page_data['title'] = 'parent title'
    page_data['slug'] = 'same-slug'
    response = c.post('/admin/pages/page/add/', page_data)
    # the redirect tell that the page has been create correctly
    self.assertRedirects(response, '/admin/pages/page/')
    response = c.get('/pages/same-slug/')
    self.assertEqual(response.status_code, 200)
    page = Page.objects.all()[0]
    response = c.post('/admin/pages/page/add/', page_data)
    # we cannot create 2 root page with the same slug
    # this assert test that the creation fails as wanted
    self.assertEqual(response.status_code, 200)
    page1 = Content.objects.get_content_slug_by_slug(page_data['slug']).page
    self.assertEqual(page1.id, page.id)
    # A child may reuse the parent's slug, addressed by its directory path
    page_data['title'] = 'children title'
    page_data['target'] = page1.id
    page_data['position'] = 'first-child'
    response = c.post('/admin/pages/page/add/', page_data)
    self.assertRedirects(response, '/admin/pages/page/')
    # finaly test that we can get every page according the path
    response = c.get('/pages/same-slug')
    self.assertContains(response, "parent title", 2)
    response = c.get('/pages/same-slug/same-slug')
    self.assertContains(response, "children title", 2)
def test_11_show_content_tag(self):
    """
    Test the {% show_content %} template tag
    """
    c = Client()
    c.login(username= 'batiste', password='b')
    page_data = self.get_new_page_data()
    response = c.post('/admin/pages/page/add/', page_data)
    page = Page.objects.all()[0]
    # Minimal stand-in for an HttpRequest: only the attributes the tag reads
    class request:
        REQUEST = {'language': 'en'}
        GET = {}
    context = RequestContext(request, {'page': page, 'lang':'en-us',
        'path':'/page-1/'})
    # The tag must render the page title both with and without an explicit language
    template = Template('{% load pages_tags %}'
                        '{% show_content page "title" "en-us" %}')
    self.assertEqual(template.render(context), page_data['title'])
    template = Template('{% load pages_tags %}'
                        '{% show_content page "title" %}')
    self.assertEqual(template.render(context), page_data['title'])
def test_12_get_content_tag(self):
    """
    Test the {% get_content %} template tag
    """
    c = Client()
    c.login(username= 'batiste', password='b')
    page_data = self.get_new_page_data()
    response = c.post('/admin/pages/page/add/', page_data)
    page = Page.objects.all()[0]
    # Minimal stand-in for an HttpRequest: only the attributes the tag reads
    class request:
        REQUEST = {'language': 'en'}
        GET = {}
    context = RequestContext(request, {'page': page})
    # The tag assigns the content into a context variable, with or without language
    template = Template('{% load pages_tags %}'
                        '{% get_content page "title" "en-us" as content %}'
                        '{{ content }}')
    self.assertEqual(template.render(context), page_data['title'])
    template = Template('{% load pages_tags %}'
                        '{% get_content page "title" as content %}'
                        '{{ content }}')
    self.assertEqual(template.render(context), page_data['title'])
def test_17_request_mockup(self):
    """A mocked request produced by get_request_mock must carry a session."""
    from pages.utils import get_request_mock

    mock_request = get_request_mock()
    self.assertEqual(hasattr(mock_request, 'session'), True)
def test_18_tree_admin_interface(self):
    """
    Test that moving/creating page in the tree is working properly
    using the admin interface
    """
    c = Client()
    c.login(username= 'batiste', password='b')
    page_data = self.get_new_page_data()
    page_data['slug'] = 'root'
    response = c.post('/admin/pages/page/add/', page_data)
    root_page = Content.objects.get_content_slug_by_slug('root').page
    self.assertTrue(root_page.is_first_root())
    # Create two children under root; the second one lands before the first
    page_data['position'] = 'first-child'
    page_data['target'] = root_page.id
    page_data['slug'] = 'child-1'
    response = c.post('/admin/pages/page/add/', page_data)
    child_1 = Content.objects.get_content_slug_by_slug('child-1').page
    self.assertFalse(child_1.is_first_root())
    page_data['slug'] = 'child-2'
    response = c.post('/admin/pages/page/add/', page_data)
    child_2 = Content.objects.get_content_slug_by_slug('child-2').page
    self.assertEqual(str(Page.objects.all()),
        "[<Page: root>, <Page: child-2>, <Page: child-1>]")
    # move page 1 in the first position
    response = c.post('/admin/pages/page/%d/move-page/' % child_1.id,
        {'position':'first-child', 'target':root_page.id})
    self.assertEqual(str(Page.objects.all()),
        "[<Page: root>, <Page: child-1>, <Page: child-2>]")
    # move page 2 in the first position
    response = c.post('/admin/pages/page/%d/move-page/' % child_2.id,
        {'position': 'left', 'target': child_1.id})
    self.assertEqual(str(Page.objects.all()),
        "[<Page: root>, <Page: child-2>, <Page: child-1>]")
    # try to create a sibling with the same slug, via left, right
    from pages import settings as pages_settings
    setattr(pages_settings, "PAGE_UNIQUE_SLUG_REQUIRED", False)
    page_data['target'] = child_2.id
    page_data['position'] = 'left'
    response = c.post('/admin/pages/page/add/', page_data)
    self.assertEqual(response.status_code, 200)
    # try to create a sibling with the same slug, via first-child
    page_data['target'] = root_page.id
    page_data['position'] = 'first-child'
    response = c.post('/admin/pages/page/add/', page_data)
    self.assertEqual(response.status_code, 200)
    # try to create a second page 2 in root
    del page_data['target']
    del page_data['position']
    setattr(pages_settings, "PAGE_UNIQUE_SLUG_REQUIRED", True)
    # cannot create because slug exists
    response = c.post('/admin/pages/page/add/', page_data)
    self.assertEqual(response.status_code, 200)
    # Now it should work beause the page is not a sibling
    setattr(pages_settings, "PAGE_UNIQUE_SLUG_REQUIRED", False)
    response = c.post('/admin/pages/page/add/', page_data)
    self.assertEqual(response.status_code, 302)
    self.assertEqual(Page.objects.count(), 4)
    # Should not work because we already have sibling at the same level
    response = c.post('/admin/pages/page/add/', page_data)
    self.assertEqual(response.status_code, 200)
    # try to change the page 2 slug into page 1
    page_data['slug'] = 'child-1'
    response = c.post('/admin/pages/page/%d/' % child_2.id, page_data)
    self.assertEqual(response.status_code, 200)
    setattr(pages_settings, "PAGE_UNIQUE_SLUG_REQUIRED", True)
    response = c.post('/admin/pages/page/%d/' % child_2.id, page_data)
    self.assertEqual(response.status_code, 200)
def test_19_tree(self):
    """
    Test that the navigation tree works properly with mptt
    """
    c = Client()
    c.login(username= 'batiste', password='b')
    page_data = self.get_new_page_data()
    page_data['slug'] = 'page1'
    response = c.post('/admin/pages/page/add/', page_data)
    page_data['slug'] = 'page2'
    response = c.post('/admin/pages/page/add/', page_data)
    page_data['slug'] = 'page3'
    response = c.post('/admin/pages/page/add/', page_data)
    self.assertEqual(str(Page.objects.navigation()),
        "[<Page: page1>, <Page: page2>, <Page: page3>]")
    p1 = Content.objects.get_content_slug_by_slug('page1').page
    p2 = Content.objects.get_content_slug_by_slug('page2').page
    p3 = Content.objects.get_content_slug_by_slug('page3').page
    # Reorder siblings with mptt move_to and check the navigation order follows
    p2.move_to(p1, 'left')
    p2.save()
    self.assertEqual(str(Page.objects.navigation()),
        "[<Page: page2>, <Page: page1>, <Page: page3>]")
    p3.move_to(p2, 'left')
    p3.save()
    self.assertEqual(str(Page.objects.navigation()),
        "[<Page: page3>, <Page: page2>, <Page: page1>]")
    # Re-fetch after the tree changed, then nest page2/page3 under page1
    p1 = Content.objects.get_content_slug_by_slug('page1').page
    p2 = Content.objects.get_content_slug_by_slug('page2').page
    p3 = Content.objects.get_content_slug_by_slug('page3').page
    p3.move_to(p1, 'first-child')
    p2.move_to(p1, 'first-child')
    # Navigation only lists root-level pages
    self.assertEqual(str(Page.objects.navigation()),
        "[<Page: page1>]")
    p3 = Content.objects.get_content_slug_by_slug('page3').page
    p3.move_to(p1, 'left')
    self.assertEqual(str(Page.objects.navigation()),
        "[<Page: page3>, <Page: page1>]")
def test_20_ajax_language(self):
    """Test that language is working properly"""
    c = Client()
    c.login(username= 'batiste', password='b')
    # Activate a language other than settings.LANGUAGE_CODE
    response = c.post('/i18n/setlang/', {'language':'fr-ch' })
    self.assertEqual(c.session.get('django_language', False), 'fr-ch')
    # Make sure we're in french
    response = c.get('/admin/pages/page/')
    self.assertEqual(response.status_code, 200)
    self.assertTrue('Auteur' in response.content)
    # Create some pages (taken from test_18_tree_admin_interface)
    page_data = self.get_new_page_data()
    page_data['slug'] = 'root'
    response = c.post('/admin/pages/page/add/', page_data)
    root_page = Content.objects.get_content_slug_by_slug('root').page
    page_data['position'] = 'first-child'
    page_data['target'] = root_page.id
    page_data['slug'] = 'child-1'
    response = c.post('/admin/pages/page/add/', page_data)
    child_1 = Content.objects.get_content_slug_by_slug('child-1').page
    page_data['slug'] = 'child-2'
    response = c.post('/admin/pages/page/add/', page_data)
    child_2 = Content.objects.get_content_slug_by_slug('child-2').page
    self.assertEqual(str(Page.objects.all()),
        "[<Page: root>, <Page: child-2>, <Page: child-1>]")
    """
    The relevant bit, fixed by rev 501: the response issued by a move
    command returns content localized in settings.LANGUAGE_CODE (i.e. 'en´)
    even though the original AJAX request passed in a the correct
    session ID localizing this client as fr-ch
    This is probably because the LocaleMiddleware gets instantiated
    with a couple request_mocks which have no real connection to the
    AJAX request *but* django.utils.translation caches the active
    language on a per thread basis.
    This means that the first "bogus" call to LocaleMiddleware.process_request
    will "kill" the localization data for the AJAX request.
    Rev. 501 fixes this by passing in the language code from the original request.
    """
    response = c.post('/admin/pages/page/%d/move-page/' % child_1.id,
        {'position':'first-child', 'target':root_page.id})
    # Make sure the content response we got was in french
    self.assertTrue('Auteur' in response.content)
def test_21_view_context(self):
    """
    Test that the default view can only return the context
    """
    c = Client()
    c.login(username= 'batiste', password='b')
    page_data = self.get_new_page_data()
    page_data['slug'] = 'page1'
    # create a page for the example otherwise you will get a Http404 error
    response = c.post('/admin/pages/page/add/', page_data)
    page1 = Content.objects.get_content_slug_by_slug('page1').page
    from pages.views import details
    from pages.utils import get_request_mock
    request = get_request_mock()
    # only_context=True makes details() return the context dict instead of a response
    context = details(request, only_context=True)
    self.assertEqual(context['current_page'], page1)
def test_24_page_valid_targets(self):
    """Test page valid_targets method"""
    c = Client()
    c.login(username= 'batiste', password='b')
    page_data = self.get_new_page_data()
    page_data['slug'] = 'root'
    response = c.post('/admin/pages/page/add/', page_data)
    root_page = Content.objects.get_content_slug_by_slug('root').page
    page_data['position'] = 'first-child'
    page_data['target'] = root_page.id
    page_data['slug'] = 'child-1'
    response = c.post('/admin/pages/page/add/', page_data)
    self.assertEqual(response.status_code, 302)
    c1 = Content.objects.get_content_slug_by_slug('child-1').page
    root_page = Content.objects.get_content_slug_by_slug('root').page
    # The root cannot be moved anywhere; the child can only move under root
    self.assertEqual(len(root_page.valid_targets()), 0)
    self.assertEqual(str(c1.valid_targets()),
        "[<Page: root>]")
def test_25_page_admin_view(self):
    """Test page admin view"""
    c = Client()
    c.login(username= 'batiste', password='b')
    page_data = self.get_new_page_data()
    page_data['slug'] = 'page-1'
    response = c.post('/admin/pages/page/add/', page_data)
    page = Content.objects.get_content_slug_by_slug('page-1').page
    self.assertEqual(page.status, 1)
    # Toggle the page status through the AJAX change-status view
    response = c.post('/admin/pages/page/%d/change-status/' %
        page.id, {'status':Page.DRAFT})
    page = Content.objects.get_content_slug_by_slug('page-1').page
    self.assertEqual(page.status, Page.DRAFT)
    # Inline content edition through the modify-content view
    url = '/admin/pages/page/%d/modify-content/title/en-us/' % page.id
    response = c.post(url, {'content': 'test content'})
    self.assertEqual(page.title(), 'test content')
    # TODO: realy test these methods
    url = '/admin/pages/page/%d/traduction/en-us/' % page.id
    response = c.get(url)
    self.assertEqual(response.status_code, 200)
    url = '/admin/pages/page/%d/sub-menu/' % page.id
    response = c.get(url)
    self.assertEqual(response.status_code, 200)
    url = '/admin/pages/page/%d/get-content/1/' % page.id
    response = c.get(url)
    self.assertEqual(response.status_code, 200)
def test_26_page_alias(self):
    """Test the page aliasing system: requesting a PageAlias URL must
    301-redirect to the canonical page URL."""
    c = Client()
    c.login(username= 'batiste', password='b')
    # create some pages
    page_data = self.get_new_page_data()
    page_data['title'] = 'home-page-title'
    page_data['slug'] = 'home-page'
    response = c.post('/admin/pages/page/add/', page_data)
    self.assertRedirects(response, '/admin/pages/page/')
    page_data['title'] = 'downloads-page-title'
    page_data['slug'] = 'downloads-page'
    response = c.post('/admin/pages/page/add/', page_data)
    self.assertRedirects(response, '/admin/pages/page/')
    # create aliases for the pages: one path-style, one querystring-style
    page = Page.objects.from_path('home-page', None)
    self.assertTrue(page)
    p = PageAlias(page=page, url='/index.php')
    p.save()
    page = Page.objects.from_path('downloads-page', None)
    self.assertTrue(page)
    p = PageAlias(page=page, url='index.php?page=downloads')
    p.save()
    # now check whether we can retrieve the pages.
    # is the homepage available from its alias (permanent redirect)
    response = c.get('/pages/index.php')
    self.assertRedirects(response, '/pages/home-page', 301)
    # for the download page, the slug is canonical and served directly
    response = c.get('/pages/downloads-page/')
    self.assertContains(response, "downloads-page-title", 2)
    # calling via its alias must cause redirect
    response = c.get('/pages/index.php?page=downloads')
    self.assertRedirects(response, '/pages/downloads-page', 301)
def test_27_page_redirect_to(self):
    """A page whose redirect_to points at another page must 301-redirect
    there when requested."""
    client = Client()
    client.login(username='batiste', password='b')
    # build a source page and the page it should forward to
    source_page = self.create_new_page(client)
    destination = self.create_new_page(client)
    source_page.redirect_to = destination
    source_page.save()
    # fetching the source must permanently redirect to the destination
    response = client.get(source_page.get_absolute_url())
    self.assertRedirects(response, destination.get_absolute_url(), 301)
def test_28_page_redirect_to_url(self):
    """Test page redirected to external url."""
    client = Client()
    client.login(username='batiste', password='b')
    page1 = self.create_new_page(client)
    url = 'http://code.google.com/p/django-page-cms/'
    page1.redirect_to_url = url
    page1.save()
    # now check whether we can retrieve the page.
    response = client.get(page1.get_absolute_url())
    # assertEqual shows both values on failure, unlike assertTrue(a == b)
    self.assertEqual(response.status_code, 301)
    self.assertEqual(response['Location'], url)
| 41.163558
| 94
| 0.611551
|
"""Django page CMS test suite module"""
import django
from django.conf import settings
from django.test.client import Client
from django.template import Template, RequestContext, TemplateDoesNotExist
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import render_to_response
from pages.models import Page, Content, PageAlias
from pages.tests.testcase import TestCase
class PagesTestCase(TestCase):
"""Django page CMS test suite class"""
def test_01_add_page(self):
"""Test that the add admin page could be displayed via the
admin"""
c = Client()
c.login(username= 'batiste', password='b')
response = c.get('/admin/pages/page/add/')
self.assertEqual(response.status_code, 200)
def test_02_create_page(self):
"""Test that a page can be created via the admin."""
c = Client()
c.login(username= 'batiste', password='b')
page_data = self.get_new_page_data()
response = c.post('/admin/pages/page/add/', page_data)
self.assertRedirects(response, '/admin/pages/page/')
slug_content = Content.objects.get_content_slug_by_slug(
page_data['slug']
)
assert(slug_content is not None)
page = slug_content.page
self.assertEqual(page.title(), page_data['title'])
self.assertEqual(page.slug(), page_data['slug'])
self.assertNotEqual(page.last_modification_date, None)
def test_03_slug_collision(self):
"""Test a slug collision."""
setattr(settings, "PAGE_UNIQUE_SLUG_REQUIRED", True)
c = Client()
c.login(username= 'batiste', password='b')
page_data = self.get_new_page_data()
response = c.post('/admin/pages/page/add/', page_data)
self.assertRedirects(response, '/admin/pages/page/')
setattr(settings, "PAGE_UNIQUE_SLUG_REQUIRED", False)
response = c.post('/admin/pages/page/add/', page_data)
self.assertEqual(response.status_code, 200)
page1 = Content.objects.get_content_slug_by_slug(page_data['slug']).page
page_data['position'] = 'first-child'
page_data['target'] = page1.id
response = c.post('/admin/pages/page/add/', page_data)
self.assertRedirects(response, '/admin/pages/page/')
page2 = Content.objects.get_content_slug_by_slug(page_data['slug']).page
self.assertNotEqual(page1.id, page2.id)
def test_04_details_view(self):
"""Test the details view"""
c = Client()
c.login(username= 'batiste', password='b')
try:
response = c.get('/pages/')
except TemplateDoesNotExist, e:
if e.args != ('404.html',):
raise
page_data = self.get_new_page_data()
page_data['status'] = Page.DRAFT
response = c.post('/admin/pages/page/add/', page_data)
try:
response = c.get('/pages/')
except TemplateDoesNotExist, e:
if e.args != ('404.html',):
raise
page_data = self.get_new_page_data()
page_data['status'] = Page.PUBLISHED
page_data['slug'] = 'test-page-2'
page_data['template'] = 'pages/index.html'
response = c.post('/admin/pages/page/add/', page_data)
self.assertRedirects(response, '/admin/pages/page/')
response = c.get('/pages/test-page-2/')
self.assertEqual(response.status_code, 200)
def test_05_edit_page(self):
"""Test that a page can edited via the admin"""
c = Client()
c.login(username= 'batiste', password='b')
page_data = self.get_new_page_data()
response = c.post('/admin/pages/page/add/', page_data)
self.assertRedirects(response, '/admin/pages/page/')
page = Page.objects.all()[0]
response = c.get('/admin/pages/page/%d/' % page.id)
self.assertEqual(response.status_code, 200)
page_data['title'] = 'changed title'
page_data['body'] = 'changed body'
response = c.post('/admin/pages/page/%d/' % page.id, page_data)
self.assertRedirects(response, '/admin/pages/page/')
page = Page.objects.get(id=page.id)
self.assertEqual(page.title(), 'changed title')
body = Content.objects.get_content(page, 'en-us', 'body')
self.assertEqual(body, 'changed body')
def test_06_site_framework(self):
"""Test the site framework, and test if it's possible to
disable it"""
# this is necessary to make the test pass
from pages import settings as pages_settings
setattr(pages_settings, "SITE_ID", 2)
setattr(pages_settings, "PAGE_USE_SITE_ID", True)
c = Client()
c.login(username= 'batiste', password='b')
page_data = self.get_new_page_data()
page_data["sites"] = [2]
response = c.post('/admin/pages/page/add/', page_data)
self.assertRedirects(response, '/admin/pages/page/')
page = Content.objects.get_content_slug_by_slug(page_data['slug']).page
self.assertEqual(page.sites.count(), 1)
self.assertEqual(page.sites.all()[0].id, 2)
page_data = self.get_new_page_data()
page_data["sites"] = [3]
response = c.post('/admin/pages/page/add/', page_data)
self.assertRedirects(response, '/admin/pages/page/')
# we cannot get a slug that doesn't exist
content = Content.objects.get_content_slug_by_slug("this doesn't exist")
self.assertEqual(content, None)
# we cannot get the data posted on another site
content = Content.objects.get_content_slug_by_slug(page_data['slug'])
self.assertEqual(content, None)
setattr(pages_settings, "SITE_ID", 3)
page = Content.objects.get_content_slug_by_slug(page_data['slug']).page
self.assertEqual(page.sites.count(), 1)
self.assertEqual(page.sites.all()[0].id, 3)
# with param
self.assertEqual(Page.objects.on_site(2).count(), 1)
self.assertEqual(Page.objects.on_site(3).count(), 1)
# without param
self.assertEqual(Page.objects.on_site().count(), 1)
setattr(pages_settings, "SITE_ID", 2)
self.assertEqual(Page.objects.on_site().count(), 1)
page_data = self.get_new_page_data()
page_data["sites"] = [2, 3]
response = c.post('/admin/pages/page/add/', page_data)
self.assertRedirects(response, '/admin/pages/page/')
self.assertEqual(Page.objects.on_site(3).count(), 2)
self.assertEqual(Page.objects.on_site(2).count(), 2)
self.assertEqual(Page.objects.on_site().count(), 2)
setattr(pages_settings, "PAGE_USE_SITE_ID", False)
# we should get everything
self.assertEqual(Page.objects.on_site().count(), 3)
def test_07_languages(self):
"""Test post a page with different languages
and test that the admin views works correctly."""
c = Client()
user = c.login(username= 'batiste', password='b')
# test that the client language setting is used in add page admin
c.cookies["django_language"] = 'de'
response = c.get('/admin/pages/page/add/')
self.assertContains(response, 'value="de" selected="selected"')
c.cookies["django_language"] = 'fr-ch'
response = c.get('/admin/pages/page/add/')
self.assertContains(response, 'value="fr-ch" selected="selected"')
page_data = self.get_new_page_data()
page_data["title"] = 'english title'
response = c.post('/admin/pages/page/add/', page_data)
self.assertRedirects(response, '/admin/pages/page/')
page = Page.objects.all()[0]
self.assertEqual(page.get_languages(), ['en-us'])
# this test only works in version superior of 1.0.2
django_version = django.get_version().rsplit()[0].split('.')
if len(django_version) > 2:
major, middle, minor = [int(v) for v in django_version]
else:
major, middle = [int(v) for v in django_version]
if major >= 1 and middle > 0:
response = c.get('/admin/pages/page/%d/?language=de' % page.id)
self.assertContains(response, 'value="de" selected="selected"')
# add a french version of the same page
page_data["language"] = 'fr-ch'
page_data["title"] = 'french title'
response = c.post('/admin/pages/page/%d/' % page.id, page_data)
self.assertRedirects(response, '/admin/pages/page/')
#setattr(settings, "PAGE_DEFAULT_LANGUAGE", 'en-us')
# test that the frontend view use the good parameters
# I cannot find a way of setting the accept-language HTTP
# header so I used django_language cookie instead
c = Client()
c.cookies["django_language"] = 'en-us'
response = c.get('/pages/')
self.assertContains(response, 'english title')
self.assertContains(response, 'lang="en-us"')
self.assertNotContains(response, 'french title')
c = Client()
c.cookies["django_language"] = 'fr-ch'
response = c.get('/pages/')
self.assertContains(response, 'french title')
self.assertContains(response, 'lang="fr-ch"')
self.assertNotContains(response, 'english title')
# this should be mapped to the fr-ch content
c = Client()
c.cookies["django_language"] = 'fr-fr'
response = c.get('/pages/')
self.assertContains(response, 'french title')
self.assertContains(response, 'lang="fr-ch"')
def test_08_revision(self):
"""Test that a page can edited several times."""
c = Client()
c.login(username= 'batiste', password='b')
page_data = self.get_new_page_data()
response = c.post('/admin/pages/page/add/', page_data)
page = Page.objects.all()[0]
page_data['body'] = 'changed body'
response = c.post('/admin/pages/page/%d/' % page.id, page_data)
self.assertEqual(Content.objects.get_content(page, 'en-us', 'body'), 'changed body')
page_data['body'] = 'changed body 2'
response = c.post('/admin/pages/page/%d/' % page.id, page_data)
self.assertEqual(Content.objects.get_content(page, 'en-us', 'body'), 'changed body 2')
response = c.get('/pages/')
self.assertContains(response, 'changed body 2', 1)
setattr(settings, "PAGE_CONTENT_REVISION", False)
self.assertEqual(Content.objects.get_content(page, 'en-us', 'body'), 'changed body 2')
def test_09_placeholder(self):
"""
Test that the placeholder is correctly displayed in
the admin
"""
setattr(settings, "SITE_ID", 2)
c = Client()
c.login(username= 'batiste', password='b')
page_data = self.get_new_page_data()
page_data['template'] = 'pages/nice.html'
response = c.post('/admin/pages/page/add/', page_data)
page = Page.objects.all()[0]
response = c.get('/admin/pages/page/%d/' % page.id)
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'name="right-column"', 1)
def test_10_directory_slug(self):
"""
Test diretory slugs
"""
setattr(settings, "PAGE_UNIQUE_SLUG_REQUIRED", False)
c = Client()
c.login(username= 'batiste', password='b')
page_data = self.get_new_page_data()
page_data['title'] = 'parent title'
page_data['slug'] = 'same-slug'
response = c.post('/admin/pages/page/add/', page_data)
# the redirect tell that the page has been create correctly
self.assertRedirects(response, '/admin/pages/page/')
response = c.get('/pages/same-slug/')
self.assertEqual(response.status_code, 200)
page = Page.objects.all()[0]
response = c.post('/admin/pages/page/add/', page_data)
# we cannot create 2 root page with the same slug
# this assert test that the creation fails as wanted
self.assertEqual(response.status_code, 200)
page1 = Content.objects.get_content_slug_by_slug(page_data['slug']).page
self.assertEqual(page1.id, page.id)
page_data['title'] = 'children title'
page_data['target'] = page1.id
page_data['position'] = 'first-child'
response = c.post('/admin/pages/page/add/', page_data)
self.assertRedirects(response, '/admin/pages/page/')
# finaly test that we can get every page according the path
response = c.get('/pages/same-slug')
self.assertContains(response, "parent title", 2)
response = c.get('/pages/same-slug/same-slug')
self.assertContains(response, "children title", 2)
def test_11_show_content_tag(self):
"""
Test the {% show_content %} template tag
"""
c = Client()
c.login(username= 'batiste', password='b')
page_data = self.get_new_page_data()
response = c.post('/admin/pages/page/add/', page_data)
page = Page.objects.all()[0]
class request:
REQUEST = {'language': 'en'}
GET = {}
context = RequestContext(request, {'page': page, 'lang':'en-us',
'path':'/page-1/'})
template = Template('{% load pages_tags %}'
'{% show_content page "title" "en-us" %}')
self.assertEqual(template.render(context), page_data['title'])
template = Template('{% load pages_tags %}'
'{% show_content page "title" %}')
self.assertEqual(template.render(context), page_data['title'])
def test_12_get_content_tag(self):
"""
Test the {% get_content %} template tag
"""
c = Client()
c.login(username= 'batiste', password='b')
page_data = self.get_new_page_data()
response = c.post('/admin/pages/page/add/', page_data)
page = Page.objects.all()[0]
class request:
REQUEST = {'language': 'en'}
GET = {}
context = RequestContext(request, {'page': page})
template = Template('{% load pages_tags %}'
'{% get_content page "title" "en-us" as content %}'
'{{ content }}')
self.assertEqual(template.render(context), page_data['title'])
template = Template('{% load pages_tags %}'
'{% get_content page "title" as content %}'
'{{ content }}')
self.assertEqual(template.render(context), page_data['title'])
def test_17_request_mockup(self):
from pages.utils import get_request_mock
request = get_request_mock()
self.assertEqual(hasattr(request, 'session'), True)
def test_18_tree_admin_interface(self):
"""
Test that moving/creating page in the tree is working properly
using the admin interface
"""
c = Client()
c.login(username= 'batiste', password='b')
page_data = self.get_new_page_data()
page_data['slug'] = 'root'
response = c.post('/admin/pages/page/add/', page_data)
root_page = Content.objects.get_content_slug_by_slug('root').page
self.assertTrue(root_page.is_first_root())
page_data['position'] = 'first-child'
page_data['target'] = root_page.id
page_data['slug'] = 'child-1'
response = c.post('/admin/pages/page/add/', page_data)
child_1 = Content.objects.get_content_slug_by_slug('child-1').page
self.assertFalse(child_1.is_first_root())
page_data['slug'] = 'child-2'
response = c.post('/admin/pages/page/add/', page_data)
child_2 = Content.objects.get_content_slug_by_slug('child-2').page
self.assertEqual(str(Page.objects.all()),
"[<Page: root>, <Page: child-2>, <Page: child-1>]")
# move page 1 in the first position
response = c.post('/admin/pages/page/%d/move-page/' % child_1.id,
{'position':'first-child', 'target':root_page.id})
self.assertEqual(str(Page.objects.all()),
"[<Page: root>, <Page: child-1>, <Page: child-2>]")
# move page 2 in the first position
response = c.post('/admin/pages/page/%d/move-page/' % child_2.id,
{'position': 'left', 'target': child_1.id})
self.assertEqual(str(Page.objects.all()),
"[<Page: root>, <Page: child-2>, <Page: child-1>]")
# try to create a sibling with the same slug, via left, right
from pages import settings as pages_settings
setattr(pages_settings, "PAGE_UNIQUE_SLUG_REQUIRED", False)
page_data['target'] = child_2.id
page_data['position'] = 'left'
response = c.post('/admin/pages/page/add/', page_data)
self.assertEqual(response.status_code, 200)
# try to create a sibling with the same slug, via first-child
page_data['target'] = root_page.id
page_data['position'] = 'first-child'
response = c.post('/admin/pages/page/add/', page_data)
self.assertEqual(response.status_code, 200)
# try to create a second page 2 in root
del page_data['target']
del page_data['position']
setattr(pages_settings, "PAGE_UNIQUE_SLUG_REQUIRED", True)
# cannot create because slug exists
response = c.post('/admin/pages/page/add/', page_data)
self.assertEqual(response.status_code, 200)
# Now it should work beause the page is not a sibling
setattr(pages_settings, "PAGE_UNIQUE_SLUG_REQUIRED", False)
response = c.post('/admin/pages/page/add/', page_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Page.objects.count(), 4)
# Should not work because we already have sibling at the same level
response = c.post('/admin/pages/page/add/', page_data)
self.assertEqual(response.status_code, 200)
# try to change the page 2 slug into page 1
page_data['slug'] = 'child-1'
response = c.post('/admin/pages/page/%d/' % child_2.id, page_data)
self.assertEqual(response.status_code, 200)
setattr(pages_settings, "PAGE_UNIQUE_SLUG_REQUIRED", True)
response = c.post('/admin/pages/page/%d/' % child_2.id, page_data)
self.assertEqual(response.status_code, 200)
def test_19_tree(self):
"""
Test that the navigation tree works properly with mptt
"""
c = Client()
c.login(username= 'batiste', password='b')
page_data = self.get_new_page_data()
page_data['slug'] = 'page1'
response = c.post('/admin/pages/page/add/', page_data)
page_data['slug'] = 'page2'
response = c.post('/admin/pages/page/add/', page_data)
page_data['slug'] = 'page3'
response = c.post('/admin/pages/page/add/', page_data)
self.assertEqual(str(Page.objects.navigation()),
"[<Page: page1>, <Page: page2>, <Page: page3>]")
p1 = Content.objects.get_content_slug_by_slug('page1').page
p2 = Content.objects.get_content_slug_by_slug('page2').page
p3 = Content.objects.get_content_slug_by_slug('page3').page
p2.move_to(p1, 'left')
p2.save()
self.assertEqual(str(Page.objects.navigation()),
"[<Page: page2>, <Page: page1>, <Page: page3>]")
p3.move_to(p2, 'left')
p3.save()
self.assertEqual(str(Page.objects.navigation()),
"[<Page: page3>, <Page: page2>, <Page: page1>]")
p1 = Content.objects.get_content_slug_by_slug('page1').page
p2 = Content.objects.get_content_slug_by_slug('page2').page
p3 = Content.objects.get_content_slug_by_slug('page3').page
p3.move_to(p1, 'first-child')
p2.move_to(p1, 'first-child')
self.assertEqual(str(Page.objects.navigation()),
"[<Page: page1>]")
p3 = Content.objects.get_content_slug_by_slug('page3').page
p3.move_to(p1, 'left')
self.assertEqual(str(Page.objects.navigation()),
"[<Page: page3>, <Page: page1>]")
def test_20_ajax_language(self):
"""Test that language is working properly"""
c = Client()
c.login(username= 'batiste', password='b')
# Activate a language other than settings.LANGUAGE_CODE
response = c.post('/i18n/setlang/', {'language':'fr-ch' })
self.assertEqual(c.session.get('django_language', False), 'fr-ch')
# Make sure we're in french
response = c.get('/admin/pages/page/')
self.assertEqual(response.status_code, 200)
self.assertTrue('Auteur' in response.content)
page_data = self.get_new_page_data()
page_data['slug'] = 'root'
response = c.post('/admin/pages/page/add/', page_data)
root_page = Content.objects.get_content_slug_by_slug('root').page
page_data['position'] = 'first-child'
page_data['target'] = root_page.id
page_data['slug'] = 'child-1'
response = c.post('/admin/pages/page/add/', page_data)
child_1 = Content.objects.get_content_slug_by_slug('child-1').page
page_data['slug'] = 'child-2'
response = c.post('/admin/pages/page/add/', page_data)
child_2 = Content.objects.get_content_slug_by_slug('child-2').page
self.assertEqual(str(Page.objects.all()),
"[<Page: root>, <Page: child-2>, <Page: child-1>]")
"""
The relevant bit, fixed by rev 501: the response issued by a move
command returns content localized in settings.LANGUAGE_CODE (i.e. 'en´)
even though the original AJAX request passed in a the correct
session ID localizing this client as fr-ch
This is probably because the LocaleMiddleware gets instantiated
with a couple request_mocks which have no real connection to the
AJAX request *but* django.utils.translation caches the active
language on a per thread basis.
This means that the first "bogus" call to LocaleMiddleware.process_request
will "kill" the localization data for the AJAX request.
Rev. 501 fixes this by passing in the language code from the original request.
"""
response = c.post('/admin/pages/page/%d/move-page/' % child_1.id,
{'position':'first-child', 'target':root_page.id})
# Make sure the content response we got was in french
self.assertTrue('Auteur' in response.content)
def test_21_view_context(self):
"""
Test that the default view can only return the context
"""
c = Client()
c.login(username= 'batiste', password='b')
page_data = self.get_new_page_data()
page_data['slug'] = 'page1'
# create a page for the example otherwise you will get a Http404 error
response = c.post('/admin/pages/page/add/', page_data)
page1 = Content.objects.get_content_slug_by_slug('page1').page
from pages.views import details
from pages.utils import get_request_mock
request = get_request_mock()
context = details(request, only_context=True)
self.assertEqual(context['current_page'], page1)
def test_24_page_valid_targets(self):
"""Test page valid_targets method"""
c = Client()
c.login(username= 'batiste', password='b')
page_data = self.get_new_page_data()
page_data['slug'] = 'root'
response = c.post('/admin/pages/page/add/', page_data)
root_page = Content.objects.get_content_slug_by_slug('root').page
page_data['position'] = 'first-child'
page_data['target'] = root_page.id
page_data['slug'] = 'child-1'
response = c.post('/admin/pages/page/add/', page_data)
self.assertEqual(response.status_code, 302)
c1 = Content.objects.get_content_slug_by_slug('child-1').page
root_page = Content.objects.get_content_slug_by_slug('root').page
self.assertEqual(len(root_page.valid_targets()), 0)
self.assertEqual(str(c1.valid_targets()),
"[<Page: root>]")
def test_25_page_admin_view(self):
"""Test page admin view"""
c = Client()
c.login(username= 'batiste', password='b')
page_data = self.get_new_page_data()
page_data['slug'] = 'page-1'
response = c.post('/admin/pages/page/add/', page_data)
page = Content.objects.get_content_slug_by_slug('page-1').page
self.assertEqual(page.status, 1)
response = c.post('/admin/pages/page/%d/change-status/' %
page.id, {'status':Page.DRAFT})
page = Content.objects.get_content_slug_by_slug('page-1').page
self.assertEqual(page.status, Page.DRAFT)
url = '/admin/pages/page/%d/modify-content/title/en-us/' % page.id
response = c.post(url, {'content': 'test content'})
self.assertEqual(page.title(), 'test content')
# TODO: realy test these methods
url = '/admin/pages/page/%d/traduction/en-us/' % page.id
response = c.get(url)
self.assertEqual(response.status_code, 200)
url = '/admin/pages/page/%d/sub-menu/' % page.id
response = c.get(url)
self.assertEqual(response.status_code, 200)
url = '/admin/pages/page/%d/get-content/1/' % page.id
response = c.get(url)
self.assertEqual(response.status_code, 200)
def test_26_page_alias(self):
"""Test page aliasing system"""
c = Client()
c.login(username= 'batiste', password='b')
# create some pages
page_data = self.get_new_page_data()
page_data['title'] = 'home-page-title'
page_data['slug'] = 'home-page'
response = c.post('/admin/pages/page/add/', page_data)
self.assertRedirects(response, '/admin/pages/page/')
page_data['title'] = 'downloads-page-title'
page_data['slug'] = 'downloads-page'
response = c.post('/admin/pages/page/add/', page_data)
self.assertRedirects(response, '/admin/pages/page/')
# create aliases for the pages
page = Page.objects.from_path('home-page', None)
self.assertTrue(page)
p = PageAlias(page=page, url='/index.php')
p.save()
page = Page.objects.from_path('downloads-page', None)
self.assertTrue(page)
p = PageAlias(page=page, url='index.php?page=downloads')
p.save()
# now check whether we can retrieve the pages.
# is the homepage available from is alias
response = c.get('/pages/index.php')
self.assertRedirects(response, '/pages/home-page', 301)
# for the download page, the slug is canonical
response = c.get('/pages/downloads-page/')
self.assertContains(response, "downloads-page-title", 2)
# calling via its alias must cause redirect
response = c.get('/pages/index.php?page=downloads')
self.assertRedirects(response, '/pages/downloads-page', 301)
def test_27_page_redirect_to(self):
    """A page whose redirect_to points at another page must 301-redirect
    there when requested."""
    client = Client()
    client.login(username='batiste', password='b')
    # build a source page and the page it should forward to
    source_page = self.create_new_page(client)
    destination = self.create_new_page(client)
    source_page.redirect_to = destination
    source_page.save()
    # fetching the source must permanently redirect to the destination
    response = client.get(source_page.get_absolute_url())
    self.assertRedirects(response, destination.get_absolute_url(), 301)
def test_28_page_redirect_to_url(self):
    """Test page redirected to external url."""
    client = Client()
    client.login(username='batiste', password='b')
    page1 = self.create_new_page(client)
    url = 'http://code.google.com/p/django-page-cms/'
    page1.redirect_to_url = url
    page1.save()
    # now check whether we can retrieve the page.
    response = client.get(page1.get_absolute_url())
    # assertEqual shows both values on failure, unlike assertTrue(a == b)
    self.assertEqual(response.status_code, 301)
    self.assertEqual(response['Location'], url)
| false
| true
|
f718a9f8275917290bfff9a6aebebcef23211c45
| 1,313
|
py
|
Python
|
systems/chordpy/demo.py
|
DistributedComponents/verdi-chord
|
762fe660c648d7f2a009d2beaa5cf3b8ea4ac593
|
[
"BSD-2-Clause"
] | 12
|
2016-11-22T19:33:39.000Z
|
2020-04-14T13:00:27.000Z
|
systems/chordpy/demo.py
|
DistributedComponents/verdi-chord
|
762fe660c648d7f2a009d2beaa5cf3b8ea4ac593
|
[
"BSD-2-Clause"
] | 17
|
2016-11-22T07:01:08.000Z
|
2018-11-23T19:59:50.000Z
|
systems/chordpy/demo.py
|
DistributedComponents/verdi-chord
|
762fe660c648d7f2a009d2beaa5cf3b8ea4ac593
|
[
"BSD-2-Clause"
] | 1
|
2017-07-31T23:10:31.000Z
|
2017-07-31T23:10:31.000Z
|
import logging
import multiprocessing
import sys
import time
from data import Pointer, SUCC_LIST_LEN
from node import Node
def launch_node(ip, pred, succ_list):
    """Start a Chord node at *ip* in a background daemon process.

    Returns the (node, process) pair so the caller can inspect node
    state and manage the worker's lifetime.
    """
    chord_node = Node(ip=ip, pred=pred, succ_list=succ_list)
    worker = multiprocessing.Process(target=chord_node.start)
    worker.daemon = True
    worker.start()
    return chord_node, worker
def launch_ring_of(n):
    """Spawn a ring of *n* nodes on loopback IPs 127.0.0.1 .. 127.0.0.n.

    Each node's successor list holds the next SUCC_LIST_LEN pointers
    around the sorted ring (wrapping past the end); its predecessor is
    the previous pointer — ptrs[-1] for the first node closes the ring.
    Returns parallel lists of Node objects and their processes.
    """
    ptrs = sorted(Pointer(ip="127.0.0.{}".format(i)) for i in range(1, n + 1))
    nodes, procs = [], []
    for idx, ptr in enumerate(ptrs):
        successors = ptrs[idx + 1:idx + 1 + SUCC_LIST_LEN]
        missing = SUCC_LIST_LEN - len(successors)
        if missing > 0:
            # wrap around to the start of the ring
            successors += ptrs[:missing]
        node, proc = launch_node(ptr.ip, ptrs[idx - 1], successors)
        nodes.append(node)
        procs.append(proc)
    return nodes, procs
def kill_demo():
    """Launch a 40-node ring, kill three nodes, then join a fresh node.

    Demonstrates ring healing after failures: nodes 3, 5 and 12 are
    terminated and a new node at 127.0.0.100 joins via a surviving peer.
    Blocks until the first node's process exits.
    """
    logging.debug("running kill_demo()")
    nodes, procs = launch_ring_of(40)
    # give the ring time to stabilize before introducing failures
    time.sleep(2)
    for kill_idx in [3, 5, 12]:
        # logging.warn is a deprecated alias; warning() works on Py2.7 and Py3
        logging.warning("killing node {}".format(nodes[kill_idx].state.ptr.id))
        procs[kill_idx].terminate()
    known = nodes[0].state.ptr
    new_node = Node("127.0.0.100")
    time.sleep(0.5)
    # single-argument print() parses identically on Python 2 and 3,
    # unlike the original Python-2-only `print a, b` statement
    print("adding new node: {}".format(new_node.state.ptr))
    new_node.start(known)
    procs[0].join()
if __name__ == "__main__":
    # log everything to stdout so demo output and chord logs interleave
    logging.basicConfig(level=logging.DEBUG, stream=sys.stdout)
    kill_demo()
| 26.795918
| 78
| 0.642803
|
import logging
import multiprocessing
import sys
import time
from data import Pointer, SUCC_LIST_LEN
from node import Node
def launch_node(ip, pred, succ_list):
    """Start a Chord node at *ip* in a background daemon process.

    Returns the (node, process) pair so the caller can inspect node
    state and manage the worker's lifetime.
    """
    chord_node = Node(ip=ip, pred=pred, succ_list=succ_list)
    worker = multiprocessing.Process(target=chord_node.start)
    worker.daemon = True
    worker.start()
    return chord_node, worker
def launch_ring_of(n):
    """Spawn a ring of *n* nodes on loopback IPs 127.0.0.1 .. 127.0.0.n.

    Each node's successor list holds the next SUCC_LIST_LEN pointers
    around the sorted ring (wrapping past the end); its predecessor is
    the previous pointer — ptrs[-1] for the first node closes the ring.
    Returns parallel lists of Node objects and their processes.
    """
    ptrs = sorted(Pointer(ip="127.0.0.{}".format(i)) for i in range(1, n + 1))
    nodes, procs = [], []
    for idx, ptr in enumerate(ptrs):
        successors = ptrs[idx + 1:idx + 1 + SUCC_LIST_LEN]
        missing = SUCC_LIST_LEN - len(successors)
        if missing > 0:
            # wrap around to the start of the ring
            successors += ptrs[:missing]
        node, proc = launch_node(ptr.ip, ptrs[idx - 1], successors)
        nodes.append(node)
        procs.append(proc)
    return nodes, procs
def kill_demo():
    """Launch a 40-node ring, kill three nodes, then join a fresh node.

    Demonstrates ring healing after failures: nodes 3, 5 and 12 are
    terminated and a new node at 127.0.0.100 joins via a surviving peer.
    Blocks until the first node's process exits.
    """
    logging.debug("running kill_demo()")
    nodes, procs = launch_ring_of(40)
    # give the ring time to stabilize before introducing failures
    time.sleep(2)
    for kill_idx in [3, 5, 12]:
        # logging.warn is a deprecated alias; warning() works on Py2.7 and Py3
        logging.warning("killing node {}".format(nodes[kill_idx].state.ptr.id))
        procs[kill_idx].terminate()
    known = nodes[0].state.ptr
    new_node = Node("127.0.0.100")
    time.sleep(0.5)
    # single-argument print() parses identically on Python 2 and 3,
    # unlike the original Python-2-only `print a, b` statement
    print("adding new node: {}".format(new_node.state.ptr))
    new_node.start(known)
    procs[0].join()
if __name__ == "__main__":
    # log everything to stdout so demo output and chord logs interleave
    logging.basicConfig(level=logging.DEBUG, stream=sys.stdout)
    kill_demo()
| false
| true
|
f718aa985e504d915baf7c952316a24a64b1820b
| 5,825
|
py
|
Python
|
tests/test_nnet.py
|
kgizdov/hep_ml
|
114ac9e896c3a601761092760a7b315f448d59c6
|
[
"Apache-2.0"
] | null | null | null |
tests/test_nnet.py
|
kgizdov/hep_ml
|
114ac9e896c3a601761092760a7b315f448d59c6
|
[
"Apache-2.0"
] | null | null | null |
tests/test_nnet.py
|
kgizdov/hep_ml
|
114ac9e896c3a601761092760a7b315f448d59c6
|
[
"Apache-2.0"
] | 1
|
2020-11-11T08:39:52.000Z
|
2020-11-11T08:39:52.000Z
|
from __future__ import division, print_function
import numpy
from sklearn.linear_model.logistic import LogisticRegression
from sklearn.metrics import roc_auc_score, mean_squared_error, log_loss
from sklearn.base import clone
from sklearn.datasets import make_blobs
from hep_ml import nnet
from hep_ml.commonutils import generate_sample
from hep_ml.nnet import MLPRegressor
from hep_ml.preprocessing import BinTransformer, IronTransformer
__author__ = 'Alex Rogozhnikov'
nn_types = [
nnet.SimpleNeuralNetwork,
nnet.MLPClassifier,
nnet.SoftmaxNeuralNetwork,
nnet.RBFNeuralNetwork,
nnet.PairwiseNeuralNetwork,
nnet.PairwiseSoftplusNeuralNetwork,
]
# TODO test pipelines, bagging and boosting
def check_single_classification_network(neural_network, n_samples=200, n_features=7, distance=0.8, retry_attempts=3):
    """Fit *neural_network* on a synthetic two-class sample and require ROC AUC > 0.8.

    Training is stochastic, so up to *retry_attempts* fits are tried, each on
    a fresh clone of the estimator with a different random_state; raises
    RuntimeError if no attempt reaches the quality threshold.
    """
    X, y = generate_sample(n_samples=n_samples, n_features=n_features, distance=distance)
    # each combination is tried retry_attempts times before raising
    for retry_attempt in range(retry_attempts):
        # clone() resets the estimator to its initial (untrained) state
        neural_network = clone(neural_network)
        neural_network.set_params(random_state=42 + retry_attempt)
        print(neural_network)
        neural_network.fit(X, y)
        quality = roc_auc_score(y, neural_network.predict_proba(X)[:, 1])
        # checking that computations don't fail (result intentionally unused)
        computed_loss = neural_network.compute_loss(X, y, sample_weight=y * 0 + 1)
        if quality > 0.8:
            break
        else:
            print('attempt {} : {}'.format(retry_attempt, quality))
            if retry_attempt == retry_attempts - 1:
                raise RuntimeError('quality of model is too low: {} {}'.format(quality, neural_network))
def test_classification_nnets():
    """
    checking combinations of losses, nn_types, trainers, most of them are used once during tests.

    Nose-style generator test: yields (check_function, estimator) pairs so
    each combination runs as a separate case. Random shifts vary which loss
    is paired with which trainer while still covering every list cyclically.
    """
    # enough attempts to cycle through the longest of the three lists
    attempts = max(len(nnet.losses), len(nnet.trainers), len(nn_types))
    losses_shift = numpy.random.randint(10)
    trainers_shift = numpy.random.randint(10)
    for combination in range(attempts):
        loss = list(nnet.losses.keys())[(combination + losses_shift) % len(nnet.losses)]
        trainer = list(nnet.trainers.keys())[(combination + trainers_shift) % len(nnet.trainers)]
        nn_type = nn_types[combination % len(nn_types)]
        neural_network = nn_type(layers=[5], loss=loss, trainer=trainer, epochs=200)
        yield check_single_classification_network, neural_network
def test_regression_nnets():
    """MLPRegressor must beat a mean-prediction baseline and be able to
    fit a constant target, for both supported regression losses."""
    from sklearn.datasets import make_regression
    X, y = make_regression(n_samples=300, n_features=20, n_informative=10, bias=5)
    print(y[:20])
    # baseline: MSE of always predicting the mean of y
    original_mse = mean_squared_error(y, y * 0 + y.mean())
    for loss in ['mse_loss', 'smooth_huber_loss']:
        reg = MLPRegressor(layers=(5,), loss=loss)
        reg.fit(X, y)
        p = reg.predict(X)
        print(numpy.sort(abs(p))[-10:])
        mse = mean_squared_error(y, p)
        # the network should explain most of the variance
        assert mse < original_mse * 0.3
    # fitting a constant: overwrite the target in place with 100 everywhere
    y[:] = 100.
    for loss in ['mse_loss', 'smooth_huber_loss']:
        reg = MLPRegressor(layers=(1,), loss=loss, epochs=300)
        reg.fit(X, y)
        print(mean_squared_error(y, reg.predict(X)))
        assert mean_squared_error(y, reg.predict(X)) < 5., "doesn't fit constant"
def compare_nnets_quality(n_samples=200, n_features=7, distance=0.8):
    """Prints ROC AUC for every nn_type/trainer combination plus a logistic-regression baseline.

    Contains no assertions -- this is a manual benchmarking helper, not a test.
    """
    X, y = generate_sample(n_samples=n_samples, n_features=n_features, distance=distance)
    # checking all possible combinations
    for loss in ['log_loss']:  # nnet.losses:
        for NNType in nn_types:
            for trainer in nnet.trainers:
                nn = NNType(layers=[5], loss=loss, trainer=trainer, epochs=100, random_state=42)
                nn.fit(X, y)
                print(roc_auc_score(y, nn.predict_proba(X)[:, 1]), nn)
    lr = LogisticRegression().fit(X, y)
    print(roc_auc_score(y, lr.predict_proba(X)[:, 1]), lr)
def test_network_with_scaler(n_samples=200, n_features=15, distance=0.5):
    """Training should succeed with either preprocessing transformer plugged in as scaler."""
    X, y = generate_sample(n_samples=n_samples, n_features=n_features, distance=distance)
    preprocessors = [BinTransformer(max_bins=16), IronTransformer()]
    for preprocessor in preprocessors:
        network = nnet.SimpleNeuralNetwork(scaler=preprocessor, epochs=300)
        network.fit(X, y)
        probabilities = network.predict_proba(X)
        auc = roc_auc_score(y, probabilities[:, 1])
        assert auc > 0.8, 'quality is too low for model: {}'.format(network)
def test_adaptive_methods(n_samples=200, n_features=15, distance=0.5):
    """Stochastic trainers should still learn a good model with batch size 1."""
    X, y = generate_sample(n_samples=n_samples, n_features=n_features, distance=distance)
    for trainer in ['sgd', 'adadelta']:
        clf = nnet.SimpleNeuralNetwork(trainer=trainer, trainer_parameters={'batch': 1})
        clf.fit(X, y)
        assert roc_auc_score(y, clf.predict_proba(X)[:, 1]) > 0.8, 'quality is too low for model: {}'.format(clf)
def test_reproducibility(n_samples=200, n_features=15, distance=0.5):
    """Fixing random_state must make training fully deterministic for every trainer."""
    X, y = generate_sample(n_samples=n_samples, n_features=n_features, distance=distance)
    for trainer in nnet.trainers.keys():
        predictions = []
        for _ in range(2):
            model = nnet.MLPClassifier(trainer=trainer, random_state=42).fit(X, y)
            predictions.append(model.predict_proba(X))
        assert numpy.allclose(predictions[0], predictions[1])
def test_multiclassification(n_samples=200, n_features=10):
    """MLPMultiClassifier: its loss agrees with sklearn's log_loss and decreases with more epochs."""
    for n_classes in [2, 3, 4]:
        X, y = make_blobs(n_samples=n_samples, centers=n_classes, n_features=n_features)
        losses = []
        for n_epochs in [1, 10, 100]:
            clf = nnet.MLPMultiClassifier(epochs=n_epochs).fit(X, y)
            loss1 = log_loss(y, clf.predict_proba(X))
            loss2 = clf.compute_loss(X, y)
            # the model's own loss computation must match sklearn's
            assert numpy.allclose(loss1, loss2), 'computed losses are different'
            losses.append(loss1)
        # longer training should reduce the loss
        assert losses[0] > losses[-1], 'loss is not decreasing'
| 41.607143
| 117
| 0.687725
|
from __future__ import division, print_function
import numpy
from sklearn.linear_model.logistic import LogisticRegression
from sklearn.metrics import roc_auc_score, mean_squared_error, log_loss
from sklearn.base import clone
from sklearn.datasets import make_blobs
from hep_ml import nnet
from hep_ml.commonutils import generate_sample
from hep_ml.nnet import MLPRegressor
from hep_ml.preprocessing import BinTransformer, IronTransformer
__author__ = 'Alex Rogozhnikov'
nn_types = [
nnet.SimpleNeuralNetwork,
nnet.MLPClassifier,
nnet.SoftmaxNeuralNetwork,
nnet.RBFNeuralNetwork,
nnet.PairwiseNeuralNetwork,
nnet.PairwiseSoftplusNeuralNetwork,
]
def check_single_classification_network(neural_network, n_samples=200, n_features=7, distance=0.8, retry_attempts=3):
X, y = generate_sample(n_samples=n_samples, n_features=n_features, distance=distance)
for retry_attempt in range(retry_attempts):
neural_network = clone(neural_network)
neural_network.set_params(random_state=42 + retry_attempt)
print(neural_network)
neural_network.fit(X, y)
quality = roc_auc_score(y, neural_network.predict_proba(X)[:, 1])
computed_loss = neural_network.compute_loss(X, y, sample_weight=y * 0 + 1)
if quality > 0.8:
break
else:
print('attempt {} : {}'.format(retry_attempt, quality))
if retry_attempt == retry_attempts - 1:
raise RuntimeError('quality of model is too low: {} {}'.format(quality, neural_network))
def test_classification_nnets():
attempts = max(len(nnet.losses), len(nnet.trainers), len(nn_types))
losses_shift = numpy.random.randint(10)
trainers_shift = numpy.random.randint(10)
for combination in range(attempts):
loss = list(nnet.losses.keys())[(combination + losses_shift) % len(nnet.losses)]
trainer = list(nnet.trainers.keys())[(combination + trainers_shift) % len(nnet.trainers)]
nn_type = nn_types[combination % len(nn_types)]
neural_network = nn_type(layers=[5], loss=loss, trainer=trainer, epochs=200)
yield check_single_classification_network, neural_network
def test_regression_nnets():
from sklearn.datasets import make_regression
X, y = make_regression(n_samples=300, n_features=20, n_informative=10, bias=5)
print(y[:20])
original_mse = mean_squared_error(y, y * 0 + y.mean())
for loss in ['mse_loss', 'smooth_huber_loss']:
reg = MLPRegressor(layers=(5,), loss=loss)
reg.fit(X, y)
p = reg.predict(X)
print(numpy.sort(abs(p))[-10:])
mse = mean_squared_error(y, p)
assert mse < original_mse * 0.3
# fitting a constant
y[:] = 100.
for loss in ['mse_loss', 'smooth_huber_loss']:
reg = MLPRegressor(layers=(1,), loss=loss, epochs=300)
reg.fit(X, y)
print(mean_squared_error(y, reg.predict(X)))
assert mean_squared_error(y, reg.predict(X)) < 5., "doesn't fit constant"
def compare_nnets_quality(n_samples=200, n_features=7, distance=0.8):
X, y = generate_sample(n_samples=n_samples, n_features=n_features, distance=distance)
for loss in ['log_loss']:
for NNType in nn_types:
for trainer in nnet.trainers:
nn = NNType(layers=[5], loss=loss, trainer=trainer, epochs=100, random_state=42)
nn.fit(X, y)
print(roc_auc_score(y, nn.predict_proba(X)[:, 1]), nn)
lr = LogisticRegression().fit(X, y)
print(roc_auc_score(y, lr.predict_proba(X)[:, 1]), lr)
def test_network_with_scaler(n_samples=200, n_features=15, distance=0.5):
X, y = generate_sample(n_samples=n_samples, n_features=n_features, distance=distance)
for scaler in [BinTransformer(max_bins=16), IronTransformer()]:
clf = nnet.SimpleNeuralNetwork(scaler=scaler, epochs=300)
clf.fit(X, y)
p = clf.predict_proba(X)
assert roc_auc_score(y, p[:, 1]) > 0.8, 'quality is too low for model: {}'.format(clf)
def test_adaptive_methods(n_samples=200, n_features=15, distance=0.5):
X, y = generate_sample(n_samples=n_samples, n_features=n_features, distance=distance)
for trainer in ['sgd', 'adadelta']:
clf = nnet.SimpleNeuralNetwork(trainer=trainer, trainer_parameters={'batch': 1})
clf.fit(X, y)
assert roc_auc_score(y, clf.predict_proba(X)[:, 1]) > 0.8, 'quality is too low for model: {}'.format(clf)
def test_reproducibility(n_samples=200, n_features=15, distance=0.5):
X, y = generate_sample(n_samples=n_samples, n_features=n_features, distance=distance)
for trainer in nnet.trainers.keys():
clf1 = nnet.MLPClassifier(trainer=trainer, random_state=42).fit(X, y)
clf2 = nnet.MLPClassifier(trainer=trainer, random_state=42).fit(X, y)
assert numpy.allclose(clf1.predict_proba(X), clf2.predict_proba(X))
def test_multiclassification(n_samples=200, n_features=10):
for n_classes in [2, 3, 4]:
X, y = make_blobs(n_samples=n_samples, centers=n_classes, n_features=n_features)
losses = []
for n_epochs in [1, 10, 100]:
clf = nnet.MLPMultiClassifier(epochs=n_epochs).fit(X, y)
loss1 = log_loss(y, clf.predict_proba(X))
loss2 = clf.compute_loss(X, y)
assert numpy.allclose(loss1, loss2), 'computed losses are different'
losses.append(loss1)
assert losses[0] > losses[-1], 'loss is not decreasing'
| true
| true
|
f718ab3528673e47153b5a348e4c10a0b0f010a8
| 780
|
py
|
Python
|
setup.py
|
metabolize/harrison
|
0d0f26fda1947785ee7a00a8a7bf5b6a95e06372
|
[
"BSD-2-Clause"
] | 4
|
2019-10-02T03:23:04.000Z
|
2021-01-26T04:25:06.000Z
|
setup.py
|
metabolize/harrison
|
0d0f26fda1947785ee7a00a8a7bf5b6a95e06372
|
[
"BSD-2-Clause"
] | 31
|
2019-08-29T17:13:06.000Z
|
2021-06-25T15:25:18.000Z
|
setup.py
|
metabolize/harrison
|
0d0f26fda1947785ee7a00a8a7bf5b6a95e06372
|
[
"BSD-2-Clause"
] | 1
|
2017-10-24T23:24:48.000Z
|
2017-10-24T23:24:48.000Z
|
from setuptools import setup

# Read the package version without importing the package itself
# (importing could fail before its dependencies are installed).
version_info = {}
with open("harrison/package_version.py") as version_file:
    exec(version_file.read(), version_info)

# Fix: the original bare open(...).read() calls leaked file handles;
# context managers close them deterministically.
with open("README.md") as readme_file:
    long_description = readme_file.read()

setup(
    name="harrison",
    version=version_info["__version__"],
    author="Body Labs, Metabolize",
    author_email="github@paulmelnikow.com",
    description="Time a block of code",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/metabolize/harrison",
    license="MIT",  # NOTE(review): the classifier below says BSD -- confirm which license applies
    packages=["harrison", "harrison/util"],
    classifiers=[
        "Development Status :: 5 - Production/Stable",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: BSD License",
        "Operating System :: OS Independent",
        "Programming Language :: Python :: 3",
    ],
)
| 31.2
| 62
| 0.661538
|
from setuptools import setup
version_info = {}
exec(open("harrison/package_version.py").read(), version_info)
setup(
name="harrison",
version=version_info["__version__"],
author="Body Labs, Metabolize",
author_email="github@paulmelnikow.com",
description="Time a block of code",
long_description=open("README.md").read(),
long_description_content_type="text/markdown",
url="https://github.com/metabolize/harrison",
license="MIT",
packages=["harrison", "harrison/util"],
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3",
],
)
| true
| true
|
f718abf37912a57ece457c7724017ea32c14fa31
| 8,330
|
py
|
Python
|
tests/symbol_dependent_transients_test.py
|
Walon1998/dace
|
95ddfd3e9a5c654f0f0d66d026e0b64ec0f028a0
|
[
"BSD-3-Clause"
] | 1
|
2022-03-11T13:36:34.000Z
|
2022-03-11T13:36:34.000Z
|
tests/symbol_dependent_transients_test.py
|
Walon1998/dace
|
95ddfd3e9a5c654f0f0d66d026e0b64ec0f028a0
|
[
"BSD-3-Clause"
] | null | null | null |
tests/symbol_dependent_transients_test.py
|
Walon1998/dace
|
95ddfd3e9a5c654f0f0d66d026e0b64ec0f028a0
|
[
"BSD-3-Clause"
] | null | null | null |
# Copyright 2019-2021 ETH Zurich and the DaCe authors. All rights reserved.
import dace
import numpy as np
import pytest
from dace.libraries import standard
from dace.transformation import interstate
def _make_sdfg(name, storage=dace.dtypes.StorageType.CPU_Heap, isview=False):
    """Builds an SDFG whose transient `tmp1` has a symbol-dependent shape.

    The graph loops i = 0..N-1 and, per iteration, reduces the slice
    A[2:N-2, 2:N-2, i:N] into B[i].  `tmp1` is either a transient copy of the
    slice or (``isview=True``) a view into A; its last dimension (N - i)
    depends on the loop symbol i, which is what the tests exercise.
    """
    N = dace.symbol('N', dtype=dace.int32, integer=True, positive=True)
    i = dace.symbol('i', dtype=dace.int32, integer=True)
    sdfg = dace.SDFG(name)
    _, A = sdfg.add_array('A', [N, N, N], dtype=dace.float64)
    _, B = sdfg.add_array('B', [N], dtype=dace.float64)
    if isview:
        _, tmp1 = sdfg.add_view('tmp1', [N - 4, N - 4, N - i], dtype=dace.float64, storage=storage, strides=A.strides)
    else:
        _, tmp1 = sdfg.add_transient('tmp1', [N - 4, N - 4, N - i], dtype=dace.float64, storage=storage)
    _, tmp2 = sdfg.add_transient('tmp2', [1], dtype=dace.float64, storage=storage)
    # Explicit loop skeleton: begin -> guard -> body1 -> body2 -> body3 -> guard ... -> end
    begin_state = sdfg.add_state("begin", is_start_state=True)
    guard_state = sdfg.add_state("guard")
    body1_state = sdfg.add_state("body1")
    body2_state = sdfg.add_state("body2")
    body3_state = sdfg.add_state("body3")
    end_state = sdfg.add_state("end")
    sdfg.add_edge(begin_state, guard_state, dace.InterstateEdge(assignments=dict(i='0')))
    sdfg.add_edge(guard_state, body1_state, dace.InterstateEdge(condition=f'i<{N}'))
    sdfg.add_edge(guard_state, end_state, dace.InterstateEdge(condition=f'i>={N}'))
    sdfg.add_edge(body1_state, body2_state, dace.InterstateEdge())
    sdfg.add_edge(body2_state, body3_state, dace.InterstateEdge())
    sdfg.add_edge(body3_state, guard_state, dace.InterstateEdge(assignments=dict(i='i+1')))
    # body1: copy the symbol-dependent slice of A into tmp1 (copy variant only)
    if not isview:
        read_a = body1_state.add_read('A')
        write_tmp1 = body1_state.add_write('tmp1')
        body1_state.add_nedge(read_a, write_tmp1, dace.Memlet(f'A[2:{N}-2, 2:{N}-2, i:{N}]'))
    # body2: reduce tmp1 (or the view into A) into the scalar tmp2
    if isview:
        read_a = body2_state.add_read('A')
        read_tmp1 = body2_state.add_access('tmp1')
        body2_state.add_nedge(read_a, read_tmp1, dace.Memlet(f'A[2:{N}-2, 2:{N}-2, i:{N}]'))
    else:
        read_tmp1 = body2_state.add_read('tmp1')
    rednode = standard.Reduce(wcr='lambda a, b : a + b', identity=0)
    # pick a reduction implementation that matches the storage target
    if storage == dace.dtypes.StorageType.GPU_Global:
        rednode.implementation = 'CUDA (device)'
    elif storage == dace.dtypes.StorageType.FPGA_Global:
        rednode.implementation = 'FPGAPartialReduction'
    body2_state.add_node(rednode)
    write_tmp2 = body2_state.add_write('tmp2')
    body2_state.add_nedge(read_tmp1, rednode, dace.Memlet.from_array('tmp1', tmp1))
    body2_state.add_nedge(rednode, write_tmp2, dace.Memlet('tmp2[0]'))
    # body3: store the reduced value into B[i]
    read_tmp2 = body3_state.add_read('tmp2')
    write_b = body3_state.add_write('B')
    body3_state.add_nedge(read_tmp2, write_b, dace.Memlet('B[i]'))
    return sdfg
def test_symbol_dependent_heap_array():
    """Runs the heap-storage variant of the SDFG and checks it against plain numpy."""
    A = np.random.randn(10, 10, 10)
    B = np.ndarray(10, dtype=np.float64)
    sdfg = _make_sdfg("symbol_dependent_heap_array")
    # Compile manually to avoid simplification
    compiled = sdfg.compile()
    compiled(A=A, B=B, N=10)
    del compiled
    # reference result computed directly with numpy slicing
    B_ref = np.empty(10, dtype=np.float64)
    for i in range(10):
        B_ref[i] = np.sum(A[2:-2, 2:-2, i:])
    assert np.allclose(B, B_ref)
def test_symbol_dependent_register_array():
    """Register-storage variant; result is compared against a numpy reference."""
    A = np.random.randn(10, 10, 10)
    B = np.ndarray(10, dtype=np.float64)
    sdfg = _make_sdfg("symbol_dependent_register_array", storage=dace.dtypes.StorageType.Register)
    # Compile manually to avoid simplification
    sdfg_exec = sdfg.compile()
    sdfg_exec(A=A, B=B, N=10)
    del sdfg_exec
    B_ref = np.ndarray(10, dtype=np.float64)
    for i in range(10):
        tmp = A[2:-2, 2:-2, i:]
        B_ref[i] = np.sum(tmp)
    assert (np.allclose(B, B_ref))
def test_symbol_dependent_threadlocal_array():
    """CPU thread-local storage variant; result is compared against a numpy reference."""
    A = np.random.randn(10, 10, 10)
    B = np.ndarray(10, dtype=np.float64)
    sdfg = _make_sdfg("symbol_dependent_threadlocal_array", storage=dace.dtypes.StorageType.CPU_ThreadLocal)
    # Compile manually to avoid simplification
    sdfg_exec = sdfg.compile()
    sdfg_exec(A=A, B=B, N=10)
    del sdfg_exec
    B_ref = np.ndarray(10, dtype=np.float64)
    for i in range(10):
        tmp = A[2:-2, 2:-2, i:]
        B_ref[i] = np.sum(tmp)
    assert (np.allclose(B, B_ref))
def test_symbol_dependent_cpu_view():
    """View variant (tmp1 is a view into A, not a copy) on CPU storage."""
    A = np.random.randn(10, 10, 10)
    B = np.ndarray(10, dtype=np.float64)
    sdfg = _make_sdfg("symbol_dependent_cpu_view", isview=True)
    # Compile manually to avoid simplification
    sdfg_exec = sdfg.compile()
    sdfg_exec(A=A, B=B, N=10)
    del sdfg_exec
    B_ref = np.ndarray(10, dtype=np.float64)
    for i in range(10):
        tmp = A[2:-2, 2:-2, i:]
        B_ref[i] = np.sum(tmp)
    assert (np.allclose(B, B_ref))
@pytest.mark.gpu
def test_symbol_dependent_gpu_global_array():
    """GPU-global storage variant; requires a CUDA-capable device (gpu marker)."""
    A = np.random.randn(10, 10, 10)
    B = np.ndarray(10, dtype=np.float64)
    sdfg = _make_sdfg("symbol_dependent_gpu_global_array", storage=dace.dtypes.StorageType.GPU_Global)
    # Compile manually to avoid simplification
    sdfg_exec = sdfg.compile()
    sdfg_exec(A=A, B=B, N=10)
    del sdfg_exec
    B_ref = np.ndarray(10, dtype=np.float64)
    for i in range(10):
        tmp = A[2:-2, 2:-2, i:]
        B_ref[i] = np.sum(tmp)
    assert (np.allclose(B, B_ref))
@pytest.mark.gpu
def test_symbol_dependent_pinned_array():
    """CPU-pinned (page-locked) storage variant; requires a CUDA-capable device."""
    A = np.random.randn(10, 10, 10)
    B = np.ndarray(10, dtype=np.float64)
    sdfg = _make_sdfg("symbol_dependent_pinned_array", storage=dace.dtypes.StorageType.CPU_Pinned)
    # Compile manually to avoid simplification
    sdfg_exec = sdfg.compile()
    sdfg_exec(A=A, B=B, N=10)
    del sdfg_exec
    B_ref = np.ndarray(10, dtype=np.float64)
    for i in range(10):
        tmp = A[2:-2, 2:-2, i:]
        B_ref[i] = np.sum(tmp)
    assert (np.allclose(B, B_ref))
@pytest.mark.skip  # @pytest.mark.gpu
def test_symbol_dependent_gpu_view():
    """Skipped: symbol-dependent view in GPU storage (see NOTE below)."""
    # NOTE: This test cannot produce the correct result since the input
    # data of the reduction are not contiguous and cub:reduce doesn't support
    # such data.
    A = np.random.randn(10, 10, 10)
    B = np.ndarray(10, dtype=np.float64)
    sdfg = _make_sdfg("symbol_dependent_gpu_view", storage=dace.dtypes.StorageType.GPU_Global, isview=True)
    # Compile manually to avoid simplification
    sdfg_exec = sdfg.compile()
    sdfg_exec(A=A, B=B, N=10)
    del sdfg_exec
    B_ref = np.ndarray(10, dtype=np.float64)
    for i in range(10):
        tmp = A[2:-2, 2:-2, i:]
        B_ref[i] = np.sum(tmp)
    assert (np.allclose(B, B_ref))
@pytest.mark.skip
def test_symbol_dependent_fpga_global_array():
    """Skipped: FPGA-global storage variant."""
    A = np.random.randn(10, 10, 10)
    B = np.ndarray(10, dtype=np.float64)
    sdfg = _make_sdfg("symbol_dependent_fpga_global_array", storage=dace.dtypes.StorageType.FPGA_Global)
    # Compile manually to avoid simplification
    sdfg_exec = sdfg.compile()
    sdfg_exec(A=A, B=B, N=10)
    del sdfg_exec
    B_ref = np.ndarray(10, dtype=np.float64)
    for i in range(10):
        tmp = A[2:-2, 2:-2, i:]
        B_ref[i] = np.sum(tmp)
    assert (np.allclose(B, B_ref))
def test_symbol_dependent_array_in_map():
    """A map whose body slices an array by the map parameter; equivalent to prefix sums."""
    @dace.program
    def symbol_dependent_array_in_map(A: dace.float32[10]):
        out = np.ndarray(10, dtype=np.float32)
        for i in dace.map[0:10]:
            tmp = A[0:i + 1]
            out[i] = np.sum(tmp)
        return out
    # Compile manually to avoid simplification
    sdfg = symbol_dependent_array_in_map.to_sdfg(simplify=False)
    sdfg.apply_transformations_repeated(interstate.StateFusion)
    sdfg.apply_transformations_repeated(interstate.InlineSDFG)
    # NOTE: Temporary fix for issue with symbols/free_symbols
    if 'i' in sdfg.free_symbols:
        sdfg.remove_symbol('i')
    func = sdfg.compile()
    A = np.random.randn(10).astype(np.float32)
    val = func(A=A)
    ref = np.cumsum(A)
    assert (np.allclose(val, ref))
if __name__ == '__main__':
    # Runs CPU tests plus the gpu-marked ones; the gpu-view/FPGA variants stay disabled.
    test_symbol_dependent_heap_array()
    test_symbol_dependent_register_array()
    test_symbol_dependent_threadlocal_array()
    test_symbol_dependent_cpu_view()
    test_symbol_dependent_gpu_global_array()
    test_symbol_dependent_pinned_array()
    # test_symbol_dependent_gpu_view()
    # test_symbol_dependent_fpga_global_array()
    test_symbol_dependent_array_in_map()
| 37.022222
| 118
| 0.677431
|
import dace
import numpy as np
import pytest
from dace.libraries import standard
from dace.transformation import interstate
def _make_sdfg(name, storage=dace.dtypes.StorageType.CPU_Heap, isview=False):
N = dace.symbol('N', dtype=dace.int32, integer=True, positive=True)
i = dace.symbol('i', dtype=dace.int32, integer=True)
sdfg = dace.SDFG(name)
_, A = sdfg.add_array('A', [N, N, N], dtype=dace.float64)
_, B = sdfg.add_array('B', [N], dtype=dace.float64)
if isview:
_, tmp1 = sdfg.add_view('tmp1', [N - 4, N - 4, N - i], dtype=dace.float64, storage=storage, strides=A.strides)
else:
_, tmp1 = sdfg.add_transient('tmp1', [N - 4, N - 4, N - i], dtype=dace.float64, storage=storage)
_, tmp2 = sdfg.add_transient('tmp2', [1], dtype=dace.float64, storage=storage)
begin_state = sdfg.add_state("begin", is_start_state=True)
guard_state = sdfg.add_state("guard")
body1_state = sdfg.add_state("body1")
body2_state = sdfg.add_state("body2")
body3_state = sdfg.add_state("body3")
end_state = sdfg.add_state("end")
sdfg.add_edge(begin_state, guard_state, dace.InterstateEdge(assignments=dict(i='0')))
sdfg.add_edge(guard_state, body1_state, dace.InterstateEdge(condition=f'i<{N}'))
sdfg.add_edge(guard_state, end_state, dace.InterstateEdge(condition=f'i>={N}'))
sdfg.add_edge(body1_state, body2_state, dace.InterstateEdge())
sdfg.add_edge(body2_state, body3_state, dace.InterstateEdge())
sdfg.add_edge(body3_state, guard_state, dace.InterstateEdge(assignments=dict(i='i+1')))
if not isview:
read_a = body1_state.add_read('A')
write_tmp1 = body1_state.add_write('tmp1')
body1_state.add_nedge(read_a, write_tmp1, dace.Memlet(f'A[2:{N}-2, 2:{N}-2, i:{N}]'))
if isview:
read_a = body2_state.add_read('A')
read_tmp1 = body2_state.add_access('tmp1')
body2_state.add_nedge(read_a, read_tmp1, dace.Memlet(f'A[2:{N}-2, 2:{N}-2, i:{N}]'))
else:
read_tmp1 = body2_state.add_read('tmp1')
rednode = standard.Reduce(wcr='lambda a, b : a + b', identity=0)
if storage == dace.dtypes.StorageType.GPU_Global:
rednode.implementation = 'CUDA (device)'
elif storage == dace.dtypes.StorageType.FPGA_Global:
rednode.implementation = 'FPGAPartialReduction'
body2_state.add_node(rednode)
write_tmp2 = body2_state.add_write('tmp2')
body2_state.add_nedge(read_tmp1, rednode, dace.Memlet.from_array('tmp1', tmp1))
body2_state.add_nedge(rednode, write_tmp2, dace.Memlet('tmp2[0]'))
read_tmp2 = body3_state.add_read('tmp2')
write_b = body3_state.add_write('B')
body3_state.add_nedge(read_tmp2, write_b, dace.Memlet('B[i]'))
return sdfg
def test_symbol_dependent_heap_array():
A = np.random.randn(10, 10, 10)
B = np.ndarray(10, dtype=np.float64)
sdfg = _make_sdfg("symbol_dependent_heap_array")
sdfg_exec = sdfg.compile()
sdfg_exec(A=A, B=B, N=10)
del sdfg_exec
B_ref = np.ndarray(10, dtype=np.float64)
for i in range(10):
tmp = A[2:-2, 2:-2, i:]
B_ref[i] = np.sum(tmp)
assert (np.allclose(B, B_ref))
def test_symbol_dependent_register_array():
A = np.random.randn(10, 10, 10)
B = np.ndarray(10, dtype=np.float64)
sdfg = _make_sdfg("symbol_dependent_register_array", storage=dace.dtypes.StorageType.Register)
sdfg_exec = sdfg.compile()
sdfg_exec(A=A, B=B, N=10)
del sdfg_exec
B_ref = np.ndarray(10, dtype=np.float64)
for i in range(10):
tmp = A[2:-2, 2:-2, i:]
B_ref[i] = np.sum(tmp)
assert (np.allclose(B, B_ref))
def test_symbol_dependent_threadlocal_array():
A = np.random.randn(10, 10, 10)
B = np.ndarray(10, dtype=np.float64)
sdfg = _make_sdfg("symbol_dependent_threadlocal_array", storage=dace.dtypes.StorageType.CPU_ThreadLocal)
sdfg_exec = sdfg.compile()
sdfg_exec(A=A, B=B, N=10)
del sdfg_exec
B_ref = np.ndarray(10, dtype=np.float64)
for i in range(10):
tmp = A[2:-2, 2:-2, i:]
B_ref[i] = np.sum(tmp)
assert (np.allclose(B, B_ref))
def test_symbol_dependent_cpu_view():
A = np.random.randn(10, 10, 10)
B = np.ndarray(10, dtype=np.float64)
sdfg = _make_sdfg("symbol_dependent_cpu_view", isview=True)
sdfg_exec = sdfg.compile()
sdfg_exec(A=A, B=B, N=10)
del sdfg_exec
B_ref = np.ndarray(10, dtype=np.float64)
for i in range(10):
tmp = A[2:-2, 2:-2, i:]
B_ref[i] = np.sum(tmp)
assert (np.allclose(B, B_ref))
@pytest.mark.gpu
def test_symbol_dependent_gpu_global_array():
A = np.random.randn(10, 10, 10)
B = np.ndarray(10, dtype=np.float64)
sdfg = _make_sdfg("symbol_dependent_gpu_global_array", storage=dace.dtypes.StorageType.GPU_Global)
sdfg_exec = sdfg.compile()
sdfg_exec(A=A, B=B, N=10)
del sdfg_exec
B_ref = np.ndarray(10, dtype=np.float64)
for i in range(10):
tmp = A[2:-2, 2:-2, i:]
B_ref[i] = np.sum(tmp)
assert (np.allclose(B, B_ref))
@pytest.mark.gpu
def test_symbol_dependent_pinned_array():
A = np.random.randn(10, 10, 10)
B = np.ndarray(10, dtype=np.float64)
sdfg = _make_sdfg("symbol_dependent_pinned_array", storage=dace.dtypes.StorageType.CPU_Pinned)
sdfg_exec = sdfg.compile()
sdfg_exec(A=A, B=B, N=10)
del sdfg_exec
B_ref = np.ndarray(10, dtype=np.float64)
for i in range(10):
tmp = A[2:-2, 2:-2, i:]
B_ref[i] = np.sum(tmp)
assert (np.allclose(B, B_ref))
@pytest.mark.skip
def test_symbol_dependent_gpu_view():
# such data.
A = np.random.randn(10, 10, 10)
B = np.ndarray(10, dtype=np.float64)
sdfg = _make_sdfg("symbol_dependent_gpu_view", storage=dace.dtypes.StorageType.GPU_Global, isview=True)
# Compile manually to avoid simplification
sdfg_exec = sdfg.compile()
sdfg_exec(A=A, B=B, N=10)
del sdfg_exec
B_ref = np.ndarray(10, dtype=np.float64)
for i in range(10):
tmp = A[2:-2, 2:-2, i:]
B_ref[i] = np.sum(tmp)
assert (np.allclose(B, B_ref))
@pytest.mark.skip
def test_symbol_dependent_fpga_global_array():
A = np.random.randn(10, 10, 10)
B = np.ndarray(10, dtype=np.float64)
sdfg = _make_sdfg("symbol_dependent_fpga_global_array", storage=dace.dtypes.StorageType.FPGA_Global)
# Compile manually to avoid simplification
sdfg_exec = sdfg.compile()
sdfg_exec(A=A, B=B, N=10)
del sdfg_exec
B_ref = np.ndarray(10, dtype=np.float64)
for i in range(10):
tmp = A[2:-2, 2:-2, i:]
B_ref[i] = np.sum(tmp)
assert (np.allclose(B, B_ref))
def test_symbol_dependent_array_in_map():
@dace.program
def symbol_dependent_array_in_map(A: dace.float32[10]):
out = np.ndarray(10, dtype=np.float32)
for i in dace.map[0:10]:
tmp = A[0:i + 1]
out[i] = np.sum(tmp)
return out
# Compile manually to avoid simplification
sdfg = symbol_dependent_array_in_map.to_sdfg(simplify=False)
sdfg.apply_transformations_repeated(interstate.StateFusion)
sdfg.apply_transformations_repeated(interstate.InlineSDFG)
# NOTE: Temporary fix for issue with symbols/free_symbols
if 'i' in sdfg.free_symbols:
sdfg.remove_symbol('i')
func = sdfg.compile()
A = np.random.randn(10).astype(np.float32)
val = func(A=A)
ref = np.cumsum(A)
assert (np.allclose(val, ref))
if __name__ == '__main__':
test_symbol_dependent_heap_array()
test_symbol_dependent_register_array()
test_symbol_dependent_threadlocal_array()
test_symbol_dependent_cpu_view()
test_symbol_dependent_gpu_global_array()
test_symbol_dependent_pinned_array()
# test_symbol_dependent_gpu_view()
# test_symbol_dependent_fpga_global_array()
test_symbol_dependent_array_in_map()
| true
| true
|
f718acc04225ae3f6eed972d1bf2068588023593
| 4,524
|
py
|
Python
|
factory_powerstations/hooks.py
|
alexdali/test_factory_powerstations
|
d07faf7a254e7d7a11b17565d2f9236863846c79
|
[
"MIT"
] | null | null | null |
factory_powerstations/hooks.py
|
alexdali/test_factory_powerstations
|
d07faf7a254e7d7a11b17565d2f9236863846c79
|
[
"MIT"
] | null | null | null |
factory_powerstations/hooks.py
|
alexdali/test_factory_powerstations
|
d07faf7a254e7d7a11b17565d2f9236863846c79
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from . import __version__ as app_version
# Frappe app metadata: these module-level names are read by the framework's
# hooks mechanism at install/run time.
app_name = "factory_powerstations"
app_title = "Factory Powerstations"
app_publisher = "Alex Tas"
app_description = "Factory Management System"
app_icon = "octicon octicon-file-directory"
app_color = "grey"
app_email = "alextas@example.com"
app_license = "MIT"
# Includes in <head>
# ------------------
# include js, css files in header of desk.html
# app_include_css = "/assets/factory_powerstations/css/factory_powerstations.css"
# app_include_js = "/assets/factory_powerstations/js/factory_powerstations.js"
# include js, css files in header of web template
# web_include_css = "/assets/factory_powerstations/css/factory_powerstations.css"
# web_include_js = "/assets/factory_powerstations/js/factory_powerstations.js"
# include custom scss in every website theme (without file extension ".scss")
# website_theme_scss = "factory_powerstations/public/scss/website"
# include js, css files in header of web form
# webform_include_js = {"doctype": "public/js/doctype.js"}
# webform_include_css = {"doctype": "public/css/doctype.css"}
# include js in page
# page_js = {"page" : "public/js/file.js"}
# include js in doctype views
# doctype_js = {"doctype" : "public/js/doctype.js"}
# doctype_list_js = {"doctype" : "public/js/doctype_list.js"}
# doctype_tree_js = {"doctype" : "public/js/doctype_tree.js"}
# doctype_calendar_js = {"doctype" : "public/js/doctype_calendar.js"}
# Home Pages
# ----------
# application home page (will override Website Settings)
# home_page = "login"
# website user home page (by Role)
# role_home_page = {
# "Role": "home_page"
# }
# Generators
# ----------
# automatically create page for each record of this doctype
# website_generators = ["Web Page"]
# Jinja
# ----------
# add methods and filters to jinja environment
# jinja = {
# "methods": "factory_powerstations.utils.jinja_methods",
# "filters": "factory_powerstations.utils.jinja_filters"
# }
# Installation
# ------------
# before_install = "factory_powerstations.install.before_install"
# after_install = "factory_powerstations.install.after_install"
# Desk Notifications
# ------------------
# See frappe.core.notifications.get_notification_config
# notification_config = "factory_powerstations.notifications.get_notification_config"
# Permissions
# -----------
# Permissions evaluated in scripted ways
# permission_query_conditions = {
# "Event": "frappe.desk.doctype.event.event.get_permission_query_conditions",
# }
#
# has_permission = {
# "Event": "frappe.desk.doctype.event.event.has_permission",
# }
# DocType Class
# ---------------
# Override standard doctype classes
# override_doctype_class = {
# "ToDo": "custom_app.overrides.CustomToDo"
# }
# Document Events
# ---------------
# Hook on document methods and events
# doc_events = {
# "*": {
# "on_update": "method",
# "on_cancel": "method",
# "on_trash": "method"
# }
# }
# Scheduled Tasks
# ---------------
# scheduler_events = {
# "all": [
# "factory_powerstations.tasks.all"
# ],
# "daily": [
# "factory_powerstations.tasks.daily"
# ],
# "hourly": [
# "factory_powerstations.tasks.hourly"
# ],
# "weekly": [
# "factory_powerstations.tasks.weekly"
# ]
# "monthly": [
# "factory_powerstations.tasks.monthly"
# ]
# }
# Testing
# -------
# before_tests = "factory_powerstations.install.before_tests"
# Overriding Methods
# ------------------------------
#
# override_whitelisted_methods = {
# "frappe.desk.doctype.event.event.get_events": "factory_powerstations.event.get_events"
# }
#
# each overriding function accepts a `data` argument;
# generated from the base implementation of the doctype dashboard,
# along with any modifications made in other Frappe apps
# override_doctype_dashboards = {
# "Task": "factory_powerstations.task.get_dashboard_data"
# }
# exempt linked doctypes from being automatically cancelled
#
# auto_cancel_exempted_doctypes = ["Auto Repeat"]
# User Data Protection
# --------------------
# NOTE(review): these entries still contain template placeholders from
# `bench new-app` ("{doctype_1}", "{filter_by}", ...). They look unfilled --
# confirm whether user-data redaction is actually meant to be configured here.
user_data_fields = [
    {
        "doctype": "{doctype_1}",
        "filter_by": "{filter_by}",
        "redact_fields": ["{field_1}", "{field_2}"],
        "partial": 1,
    },
    {
        "doctype": "{doctype_2}",
        "filter_by": "{filter_by}",
        "partial": 1,
    },
    {
        "doctype": "{doctype_3}",
        "strict": False,
    },
    {
        "doctype": "{doctype_4}"
    }
]
# Authentication and authorization
# --------------------------------
# auth_hooks = [
# "factory_powerstations.auth.validate"
# ]
fixtures = ["Order_PS"]
| 23.936508
| 89
| 0.682803
|
from __future__ import unicode_literals
from . import __version__ as app_version
app_name = "factory_powerstations"
app_title = "Factory Powerstations"
app_publisher = "Alex Tas"
app_description = "Factory Management System"
app_icon = "octicon octicon-file-directory"
app_color = "grey"
app_email = "alextas@example.com"
app_license = "MIT"
user_data_fields = [
{
"doctype": "{doctype_1}",
"filter_by": "{filter_by}",
"redact_fields": ["{field_1}", "{field_2}"],
"partial": 1,
},
{
"doctype": "{doctype_2}",
"filter_by": "{filter_by}",
"partial": 1,
},
{
"doctype": "{doctype_3}",
"strict": False,
},
{
"doctype": "{doctype_4}"
}
]
fixtures = ["Order_PS"]
| true
| true
|
f718ad5022211872d6795c97dd3d1de6282cac0b
| 10,500
|
py
|
Python
|
pypy/translator/goal/richards.py
|
camillobruni/pygirl
|
ddbd442d53061d6ff4af831c1eab153bcc771b5a
|
[
"MIT"
] | 12
|
2016-01-06T07:10:28.000Z
|
2021-05-13T23:02:02.000Z
|
pypy/translator/goal/richards.py
|
woodrow/pyoac
|
b5dc59e6a38e7912db47f26fb23ffa4764a3c0e7
|
[
"MIT"
] | null | null | null |
pypy/translator/goal/richards.py
|
woodrow/pyoac
|
b5dc59e6a38e7912db47f26fb23ffa4764a3c0e7
|
[
"MIT"
] | 2
|
2016-07-29T07:09:50.000Z
|
2016-10-16T08:50:26.000Z
|
# based on a Java version:
# Based on original version written in BCPL by Dr Martin Richards
# in 1981 at Cambridge University Computer Laboratory, England
# and a C++ version derived from a Smalltalk version written by
# L Peter Deutsch.
# Java version: Copyright (C) 1995 Sun Microsystems, Inc.
# Translation from C++, Mario Wolczko
# Outer loop added by Alex Jacoby
# Task IDs
# Identifiers of the six benchmark tasks; used as indices into the task table.
I_IDLE = 1
I_WORK = 2
I_HANDLERA = 3
I_HANDLERB = 4
I_DEVA = 5
I_DEVB = 6
# Packet types
K_DEV = 1000
K_WORK = 1001
# Packet
BUFSIZE = 4
BUFSIZE_RANGE = range(BUFSIZE)


class Packet(object):
    """A unit of work passed between tasks; packets form singly linked lists via `link`."""

    def __init__(self, l, i, k):
        self.link = l
        self.ident = i
        self.kind = k
        self.datum = 0
        self.data = [0] * BUFSIZE

    def append_to(self, lst):
        """Detach self and append it at the tail of list `lst`; return the new head."""
        self.link = None
        if lst is None:
            return self
        tail = lst
        while tail.link is not None:
            tail = tail.link
        tail.link = self
        return lst
# Task Records
class TaskRec(object):
    """Base class for the per-task private state records below."""
    pass
class DeviceTaskRec(TaskRec):
    """Device task state: the packet not yet delivered."""
    def __init__(self):
        self.pending = None
class IdleTaskRec(TaskRec):
    """Idle task state: a control word and a countdown."""
    def __init__(self):
        self.control = 1
        self.count = 10000
class HandlerTaskRec(TaskRec):
    """Handler task state: queues of incoming work and device packets."""
    def __init__(self):
        self.work_in = None
        self.device_in = None
    def workInAdd(self,p):
        # append packet p at the tail of the work queue
        self.work_in = p.append_to(self.work_in)
        return self.work_in
    def deviceInAdd(self,p):
        # append packet p at the tail of the device queue
        self.device_in = p.append_to(self.device_in)
        return self.device_in
class WorkerTaskRec(TaskRec):
    """Worker task state: which handler to send to next, plus a counter."""
    def __init__(self):
        self.destination = I_HANDLERA
        self.count = 0
# Task
class TaskState(object):
    """Tracks the three scheduling flags of a task and offers fluent state mutators."""

    def __init__(self):
        self.packet_pending = True
        self.task_waiting = False
        self.task_holding = False

    def _set(self, pending, waiting, holding):
        # Assign all three flags at once; returns self for chaining.
        self.packet_pending = pending
        self.task_waiting = waiting
        self.task_holding = holding
        return self

    def packetPending(self):
        return self._set(True, False, False)

    def waiting(self):
        return self._set(False, True, False)

    def running(self):
        return self._set(False, False, False)

    def waitingWithPacket(self):
        return self._set(True, True, False)

    def isPacketPending(self):
        return self.packet_pending

    def isTaskWaiting(self):
        return self.task_waiting

    def isTaskHolding(self):
        return self.task_holding

    def isTaskHoldingOrWaiting(self):
        if self.task_holding:
            return self.task_holding
        return not self.packet_pending and self.task_waiting

    def isWaitingWithPacket(self):
        return (self.packet_pending and self.task_waiting
                and not self.task_holding)
# Global trace switch and column counter for trace output.
tracing = False
layout = 0

def trace(a):
    # Print token `a`, starting a new line every 50 tokens (Python 2 print).
    global layout
    layout -= 1
    if layout <= 0:
        print
        layout = 50
    print a,
# Size of the id-indexed task table; the benchmark only uses ids 1..6.
TASKTABSIZE = 10


class TaskWorkArea(object):
    """Mutable global scheduler state shared by every task."""

    def __init__(self):
        self.taskTab = [None for _ in range(TASKTABSIZE)]  # task id -> Task
        self.taskList = None   # head of the linked list of all tasks
        self.holdCount = 0     # number of hold() operations performed
        self.qpktCount = 0     # number of packets queued via qpkt()


taskWorkArea = TaskWorkArea()
class Task(TaskState):
    """Base class for every task; registers itself in the global
    taskWorkArea (list and id table) on construction."""

    def __init__(self,i,p,w,initialState,r):
        self.link = taskWorkArea.taskList  # push onto the global task list
        self.ident = i        # task id, index into taskWorkArea.taskTab
        self.priority = p
        self.input = w        # queue of pending packets (linked via .link)
        self.packet_pending = initialState.isPacketPending()
        self.task_waiting = initialState.isTaskWaiting()
        self.task_holding = initialState.isTaskHolding()
        self.handle = r       # task-specific record (a TaskRec subclass)
        taskWorkArea.taskList = self
        taskWorkArea.taskTab[i] = self

    def fn(self,pkt,r):
        # Task body; implemented by subclasses. Must return the next task
        # for the scheduler to run.
        raise NotImplementedError

    def addPacket(self,p,old):
        # Queue packet p on this task. Return self if this task now
        # outranks `old`, otherwise keep running `old`.
        if self.input is None:
            self.input = p
            self.packet_pending = True
            if self.priority > old.priority:
                return self
        else:
            p.append_to(self.input)
        return old

    def runTask(self):
        # Dequeue one packet if waiting-with-packet, then execute fn().
        if self.isWaitingWithPacket():
            msg = self.input
            self.input = msg.link
            if self.input is None:
                self.running()
            else:
                self.packetPending()
        else:
            msg = None
        return self.fn(msg,self.handle)

    def waitTask(self):
        # Mark this task as waiting; the scheduler skips it until a
        # packet arrives.
        self.task_waiting = True
        return self

    def hold(self):
        # Suspend this task; return the next task in the list.
        taskWorkArea.holdCount += 1
        self.task_holding = True
        return self.link

    def release(self,i):
        # Release task i from holding; switch to it if it outranks us.
        t = self.findtcb(i)
        t.task_holding = False
        if t.priority > self.priority:
            return t
        else:
            return self

    def qpkt(self,pkt):
        # Send pkt to its destination task; return the task to run next.
        t = self.findtcb(pkt.ident)
        taskWorkArea.qpktCount += 1
        pkt.link = None
        pkt.ident = self.ident  # stamp the packet with the sender's id
        return t.addPacket(pkt,self)

    def findtcb(self,id):
        # Look up a task control block by id (registered in __init__).
        t = taskWorkArea.taskTab[id]
        if t is None:
            raise Exception("Bad task id %d" % id)
        return t
# DeviceTask
class DeviceTask(Task):
    """Simulates a device: buffers one packet, then re-queues it."""

    def __init__(self,i,p,w,s,r):
        Task.__init__(self,i,p,w,s,r)

    def fn(self,pkt,r):
        d = r
        assert isinstance(d, DeviceTaskRec)
        if pkt is None:
            # No new packet: forward the buffered one, or wait if none.
            pkt = d.pending
            if pkt is None:
                return self.waitTask()
            else:
                d.pending = None
                return self.qpkt(pkt)
        else:
            # Buffer the incoming packet and suspend until released.
            d.pending = pkt
            if tracing: trace(pkt.datum)
            return self.hold()
class HandlerTask(Task):
    """Copies bytes from work packets into device packets, one per run."""

    def __init__(self,i,p,w,s,r):
        Task.__init__(self,i,p,w,s,r)

    def fn(self,pkt,r):
        h = r
        assert isinstance(h, HandlerTaskRec)
        if pkt is not None:
            # File the incoming packet on the matching queue.
            if pkt.kind == K_WORK:
                h.workInAdd(pkt)
            else:
                h.deviceInAdd(pkt)
        work = h.work_in
        if work is None:
            return self.waitTask()
        count = work.datum  # progress index into work.data
        if count >= BUFSIZE:
            # Work packet fully consumed: send it back to its sender.
            h.work_in = work.link
            return self.qpkt(work)
        dev = h.device_in
        if dev is None:
            return self.waitTask()
        # Copy one byte of work data into the device packet and advance.
        h.device_in = dev.link
        dev.datum = work.data[count]
        work.datum = count + 1
        return self.qpkt(dev)
# IdleTask
class IdleTask(Task):
    """Lowest-priority task; releases the device tasks pseudo-randomly
    using a simple linear-feedback shift register on `control`."""

    def __init__(self,i,p,w,s,r):
        # Priority and input queue are forced to 0/None regardless of args.
        Task.__init__(self,i,0,None,s,r)

    def fn(self,pkt,r):
        i = r
        assert isinstance(i, IdleTaskRec)
        i.count -= 1
        if i.count == 0:
            return self.hold()
        elif i.control & 1 == 0:
            i.control /= 2  # NOTE: integer halving under Python 2 ("/" floors)
            return self.release(I_DEVA)
        else:
            i.control = i.control/2 ^ 0xd008  # LFSR step with taps 0xd008
            return self.release(I_DEVB)
# WorkTask
A = ord('A')
class WorkTask(Task):
    """Generates work packets filled with letters A..Z, alternating
    between the two handler tasks."""

    def __init__(self,i,p,w,s,r):
        Task.__init__(self,i,p,w,s,r)

    def fn(self,pkt,r):
        w = r
        assert isinstance(w, WorkerTaskRec)
        if pkt is None:
            return self.waitTask()
        # Alternate the destination handler on every packet.
        if w.destination == I_HANDLERA:
            dest = I_HANDLERB
        else:
            dest = I_HANDLERA
        w.destination = dest
        pkt.ident = dest
        pkt.datum = 0
        for i in BUFSIZE_RANGE: # xrange(BUFSIZE)
            # Fill the buffer with consecutive letters, wrapping after 'Z'.
            w.count += 1
            if w.count > 26:
                w.count = 1
            pkt.data[i] = A + w.count - 1
        return self.qpkt(pkt)
import time
def schedule():
    # Run tasks from the global list until every task is held or waiting
    # (uses Python 2 print statements when tracing is enabled).
    t = taskWorkArea.taskList
    while t is not None:
        pkt = None
        if tracing:
            print "tcb =",t.ident
        if t.isTaskHoldingOrWaiting():
            t = t.link  # skip suspended tasks
        else:
            if tracing: trace(chr(ord("0")+t.ident))
            t = t.runTask()
class Richards(object):
    """Driver for the Richards OS-scheduler benchmark."""

    def run(self, iterations):
        # Run the benchmark `iterations` times; return True only if the
        # final hold/packet counters match the known-good values each time.
        for i in xrange(iterations):  # xrange: Python 2
            # Reset the global counters and rebuild the task set afresh.
            taskWorkArea.holdCount = 0
            taskWorkArea.qpktCount = 0

            IdleTask(I_IDLE, 1, 10000, TaskState().running(), IdleTaskRec())

            wkq = Packet(None, 0, K_WORK)
            wkq = Packet(wkq , 0, K_WORK)
            WorkTask(I_WORK, 1000, wkq, TaskState().waitingWithPacket(), WorkerTaskRec())

            wkq = Packet(None, I_DEVA, K_DEV)
            wkq = Packet(wkq , I_DEVA, K_DEV)
            wkq = Packet(wkq , I_DEVA, K_DEV)
            HandlerTask(I_HANDLERA, 2000, wkq, TaskState().waitingWithPacket(), HandlerTaskRec())

            wkq = Packet(None, I_DEVB, K_DEV)
            wkq = Packet(wkq , I_DEVB, K_DEV)
            wkq = Packet(wkq , I_DEVB, K_DEV)
            HandlerTask(I_HANDLERB, 3000, wkq, TaskState().waitingWithPacket(), HandlerTaskRec())

            wkq = None;
            DeviceTask(I_DEVA, 4000, wkq, TaskState().waiting(), DeviceTaskRec());
            DeviceTask(I_DEVB, 5000, wkq, TaskState().waiting(), DeviceTaskRec());

            schedule()

            # Expected counters for a correct run of the benchmark.
            if taskWorkArea.holdCount == 9297 and taskWorkArea.qpktCount == 23246:
                pass
            else:
                return False
        return True
def entry_point(iterations):
    """Run the benchmark and return (passed, start_time, end_time)."""
    benchmark = Richards()
    started = time.time()
    ok = benchmark.run(iterations)
    finished = time.time()
    return ok, started, finished
def main(entry_point = entry_point, iterations = 10):
    # Run the benchmark and report timings (Python 2 print statements).
    # Returns 42 on success, -1 if the counters did not match.
    print "Richards benchmark (Python) starting... [%r]" % entry_point
    result, startTime, endTime = entry_point(iterations)
    if not result:
        print "Incorrect results!"
        return -1
    print "finished."
    total_s = endTime - startTime
    print "Total time for %d iterations: %.2f secs" %(iterations,total_s)
    print "Average time per iteration: %.2f ms" %(total_s*1000/iterations)
    return 42
# Optionally enable the PyPy JIT on every function and method defined
# above; "-nojit" on the command line disables this.
try:
    import sys
    if '-nojit' in sys.argv:
        sys.argv.remove('-nojit')
        raise ImportError
    import pypyjit
except ImportError:
    pass
else:
    import types
    for item in globals().values():
        if isinstance(item, types.FunctionType):
            pypyjit.enable(item.func_code)  # func_code: Python 2 attribute
        elif isinstance(item, type):
            for it in item.__dict__.values():
                if isinstance(it, types.FunctionType):
                    pypyjit.enable(it.func_code)

if __name__ == '__main__':
    import sys
    if len(sys.argv) >= 2:
        main(iterations = int(sys.argv[1]))
    else:
        main()
| 23.809524
| 97
| 0.567524
|
I_IDLE = 1
I_WORK = 2
I_HANDLERA = 3
I_HANDLERB = 4
I_DEVA = 5
I_DEVB = 6
K_DEV = 1000
K_WORK = 1001
BUFSIZE = 4
BUFSIZE_RANGE = range(BUFSIZE)
class Packet(object):
def __init__(self,l,i,k):
self.link = l
self.ident = i
self.kind = k
self.datum = 0
self.data = [0] * BUFSIZE
def append_to(self,lst):
self.link = None
if lst is None:
return self
else:
p = lst
next = p.link
while next is not None:
p = next
next = p.link
p.link = self
return lst
class TaskRec(object):
pass
class DeviceTaskRec(TaskRec):
def __init__(self):
self.pending = None
class IdleTaskRec(TaskRec):
def __init__(self):
self.control = 1
self.count = 10000
class HandlerTaskRec(TaskRec):
def __init__(self):
self.work_in = None
self.device_in = None
def workInAdd(self,p):
self.work_in = p.append_to(self.work_in)
return self.work_in
def deviceInAdd(self,p):
self.device_in = p.append_to(self.device_in)
return self.device_in
class WorkerTaskRec(TaskRec):
def __init__(self):
self.destination = I_HANDLERA
self.count = 0
class TaskState(object):
def __init__(self):
self.packet_pending = True
self.task_waiting = False
self.task_holding = False
def packetPending(self):
self.packet_pending = True
self.task_waiting = False
self.task_holding = False
return self
def waiting(self):
self.packet_pending = False
self.task_waiting = True
self.task_holding = False
return self
def running(self):
self.packet_pending = False
self.task_waiting = False
self.task_holding = False
return self
def waitingWithPacket(self):
self.packet_pending = True
self.task_waiting = True
self.task_holding = False
return self
def isPacketPending(self):
return self.packet_pending
def isTaskWaiting(self):
return self.task_waiting
def isTaskHolding(self):
return self.task_holding
def isTaskHoldingOrWaiting(self):
return self.task_holding or (not self.packet_pending and self.task_waiting)
def isWaitingWithPacket(self):
return self.packet_pending and self.task_waiting and not self.task_holding
tracing = False
layout = 0
def trace(a):
global layout
layout -= 1
if layout <= 0:
print
layout = 50
print a,
TASKTABSIZE = 10
class TaskWorkArea(object):
def __init__(self):
self.taskTab = [None] * TASKTABSIZE
self.taskList = None
self.holdCount = 0
self.qpktCount = 0
taskWorkArea = TaskWorkArea()
class Task(TaskState):
def __init__(self,i,p,w,initialState,r):
self.link = taskWorkArea.taskList
self.ident = i
self.priority = p
self.input = w
self.packet_pending = initialState.isPacketPending()
self.task_waiting = initialState.isTaskWaiting()
self.task_holding = initialState.isTaskHolding()
self.handle = r
taskWorkArea.taskList = self
taskWorkArea.taskTab[i] = self
def fn(self,pkt,r):
raise NotImplementedError
def addPacket(self,p,old):
if self.input is None:
self.input = p
self.packet_pending = True
if self.priority > old.priority:
return self
else:
p.append_to(self.input)
return old
def runTask(self):
if self.isWaitingWithPacket():
msg = self.input
self.input = msg.link
if self.input is None:
self.running()
else:
self.packetPending()
else:
msg = None
return self.fn(msg,self.handle)
def waitTask(self):
self.task_waiting = True
return self
def hold(self):
taskWorkArea.holdCount += 1
self.task_holding = True
return self.link
def release(self,i):
t = self.findtcb(i)
t.task_holding = False
if t.priority > self.priority:
return t
else:
return self
def qpkt(self,pkt):
t = self.findtcb(pkt.ident)
taskWorkArea.qpktCount += 1
pkt.link = None
pkt.ident = self.ident
return t.addPacket(pkt,self)
def findtcb(self,id):
t = taskWorkArea.taskTab[id]
if t is None:
raise Exception("Bad task id %d" % id)
return t
class DeviceTask(Task):
def __init__(self,i,p,w,s,r):
Task.__init__(self,i,p,w,s,r)
def fn(self,pkt,r):
d = r
assert isinstance(d, DeviceTaskRec)
if pkt is None:
pkt = d.pending
if pkt is None:
return self.waitTask()
else:
d.pending = None
return self.qpkt(pkt)
else:
d.pending = pkt
if tracing: trace(pkt.datum)
return self.hold()
class HandlerTask(Task):
def __init__(self,i,p,w,s,r):
Task.__init__(self,i,p,w,s,r)
def fn(self,pkt,r):
h = r
assert isinstance(h, HandlerTaskRec)
if pkt is not None:
if pkt.kind == K_WORK:
h.workInAdd(pkt)
else:
h.deviceInAdd(pkt)
work = h.work_in
if work is None:
return self.waitTask()
count = work.datum
if count >= BUFSIZE:
h.work_in = work.link
return self.qpkt(work)
dev = h.device_in
if dev is None:
return self.waitTask()
h.device_in = dev.link
dev.datum = work.data[count]
work.datum = count + 1
return self.qpkt(dev)
class IdleTask(Task):
def __init__(self,i,p,w,s,r):
Task.__init__(self,i,0,None,s,r)
def fn(self,pkt,r):
i = r
assert isinstance(i, IdleTaskRec)
i.count -= 1
if i.count == 0:
return self.hold()
elif i.control & 1 == 0:
i.control /= 2
return self.release(I_DEVA)
else:
i.control = i.control/2 ^ 0xd008
return self.release(I_DEVB)
A = ord('A')
class WorkTask(Task):
def __init__(self,i,p,w,s,r):
Task.__init__(self,i,p,w,s,r)
def fn(self,pkt,r):
w = r
assert isinstance(w, WorkerTaskRec)
if pkt is None:
return self.waitTask()
if w.destination == I_HANDLERA:
dest = I_HANDLERB
else:
dest = I_HANDLERA
w.destination = dest
pkt.ident = dest
pkt.datum = 0
for i in BUFSIZE_RANGE:
w.count += 1
if w.count > 26:
w.count = 1
pkt.data[i] = A + w.count - 1
return self.qpkt(pkt)
import time
def schedule():
t = taskWorkArea.taskList
while t is not None:
pkt = None
if tracing:
print "tcb =",t.ident
if t.isTaskHoldingOrWaiting():
t = t.link
else:
if tracing: trace(chr(ord("0")+t.ident))
t = t.runTask()
class Richards(object):
def run(self, iterations):
for i in xrange(iterations):
taskWorkArea.holdCount = 0
taskWorkArea.qpktCount = 0
IdleTask(I_IDLE, 1, 10000, TaskState().running(), IdleTaskRec())
wkq = Packet(None, 0, K_WORK)
wkq = Packet(wkq , 0, K_WORK)
WorkTask(I_WORK, 1000, wkq, TaskState().waitingWithPacket(), WorkerTaskRec())
wkq = Packet(None, I_DEVA, K_DEV)
wkq = Packet(wkq , I_DEVA, K_DEV)
wkq = Packet(wkq , I_DEVA, K_DEV)
HandlerTask(I_HANDLERA, 2000, wkq, TaskState().waitingWithPacket(), HandlerTaskRec())
wkq = Packet(None, I_DEVB, K_DEV)
wkq = Packet(wkq , I_DEVB, K_DEV)
wkq = Packet(wkq , I_DEVB, K_DEV)
HandlerTask(I_HANDLERB, 3000, wkq, TaskState().waitingWithPacket(), HandlerTaskRec())
wkq = None;
DeviceTask(I_DEVA, 4000, wkq, TaskState().waiting(), DeviceTaskRec());
DeviceTask(I_DEVB, 5000, wkq, TaskState().waiting(), DeviceTaskRec());
schedule()
if taskWorkArea.holdCount == 9297 and taskWorkArea.qpktCount == 23246:
pass
else:
return False
return True
def entry_point(iterations):
r = Richards()
startTime = time.time()
result = r.run(iterations)
endTime = time.time()
return result, startTime, endTime
def main(entry_point = entry_point, iterations = 10):
print "Richards benchmark (Python) starting... [%r]" % entry_point
result, startTime, endTime = entry_point(iterations)
if not result:
print "Incorrect results!"
return -1
print "finished."
total_s = endTime - startTime
print "Total time for %d iterations: %.2f secs" %(iterations,total_s)
print "Average time per iteration: %.2f ms" %(total_s*1000/iterations)
return 42
try:
import sys
if '-nojit' in sys.argv:
sys.argv.remove('-nojit')
raise ImportError
import pypyjit
except ImportError:
pass
else:
import types
for item in globals().values():
if isinstance(item, types.FunctionType):
pypyjit.enable(item.func_code)
elif isinstance(item, type):
for it in item.__dict__.values():
if isinstance(it, types.FunctionType):
pypyjit.enable(it.func_code)
if __name__ == '__main__':
import sys
if len(sys.argv) >= 2:
main(iterations = int(sys.argv[1]))
else:
main()
| false
| true
|
f718adc9b2a9c127df3f8b525ea928e4dd8d5e7d
| 1,240
|
py
|
Python
|
intro-python/parsing-json/nested_data.py
|
myounker/dnav3-code
|
fbb1e4d1d3cc642cc7089f8a9de35298c33f1ee0
|
[
"MIT"
] | null | null | null |
intro-python/parsing-json/nested_data.py
|
myounker/dnav3-code
|
fbb1e4d1d3cc642cc7089f8a9de35298c33f1ee0
|
[
"MIT"
] | null | null | null |
intro-python/parsing-json/nested_data.py
|
myounker/dnav3-code
|
fbb1e4d1d3cc642cc7089f8a9de35298c33f1ee0
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
"""Working with nested data hands-on exercise / coding challenge."""
""" code by myounker 1 Sep 2018 """


import json
import os

# Get the absolute path for the directory where this file is located "here"
here = os.path.abspath(os.path.dirname(__file__))

# Open the interfaces file and read its raw text into json_text
with open(os.path.join(here, "interfaces.json")) as file:
    json_text = file.read()

# Parse the JSON string into native Python data structures
json_data = json.loads(json_text)

# Loop through the interfaces in the JSON data and print out each
# interface's name, ip, and netmask.
print("\n")
for interface in json_data["ietf-interfaces:interfaces"]["interface"]:
    print(interface["name"] + ': ' +
          interface["ietf-ip:ipv4"]["address"][0]["ip"] + ' ' +
          interface["ietf-ip:ipv4"]["address"][0]["netmask"]
          )
print("\n")

'''
solution on git used .format. I need to learn that!!!
https://github.com/CiscoDevNet/dnav3-code/blob/solutions/intro-python/parsing-json/nested_data.py
print("{name}: {ip} {netmask}".format(
    name=interface["name"],
    ip=interface["ietf-ip:ipv4"]["address"][0]["ip"],
    netmask=interface["ietf-ip:ipv4"]["address"][0]["netmask"],
'''
| 31
| 97
| 0.678226
|
import json
import os
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, "interfaces.json")) as file:
json_text = file.read()
json_data = json.loads(json_text)
print("\n")
for interface in json_data["ietf-interfaces:interfaces"]["interface"]:
print(interface["name"] + ': ' +
interface["ietf-ip:ipv4"]["address"][0]["ip"] + ' ' +
interface["ietf-ip:ipv4"]["address"][0]["netmask"]
)
print("\n")
| true
| true
|
f718ae668683de5a9982de18699f332fc0998603
| 333
|
py
|
Python
|
betterLogger/filter.py
|
GreenJon902/BetterLogger
|
7333cc83d0bb9350781e5506e15fc0476e7d2791
|
[
"MIT"
] | null | null | null |
betterLogger/filter.py
|
GreenJon902/BetterLogger
|
7333cc83d0bb9350781e5506e15fc0476e7d2791
|
[
"MIT"
] | null | null | null |
betterLogger/filter.py
|
GreenJon902/BetterLogger
|
7333cc83d0bb9350781e5506e15fc0476e7d2791
|
[
"MIT"
] | null | null | null |
from logging import Filter as _Filter
from betterLogger import config
class Filter(_Filter):
    """Logging filter honouring the configured whitelist and blacklist.

    A record passes when its logger name is not blacklisted and, if the
    whitelist is enabled, starts with one of the whitelisted prefixes.
    """

    def filter(self, record):
        name = record.name
        if any(name.startswith(prefix) for prefix in config.log_blacklist):
            return False
        if not config.log_whitelist_on:
            return True
        return any(name.startswith(prefix) for prefix in config.log_whitelist)
| 33.3
| 120
| 0.738739
|
from logging import Filter as _Filter
from betterLogger import config
class Filter(_Filter):
def filter(self, record):
return (not config.log_whitelist_on or any(record.name.startswith(name) for name in config.log_whitelist)) and \
not any(record.name.startswith(name) for name in config.log_blacklist)
| true
| true
|
f718ae8d0d6ecee7a425e414aeeb37eb1faffce6
| 5,323
|
py
|
Python
|
flexagon.py
|
dlp/flexagon
|
c8ac58e125c6f405d9942245bdaa71fab658def6
|
[
"MIT"
] | null | null | null |
flexagon.py
|
dlp/flexagon
|
c8ac58e125c6f405d9942245bdaa71fab658def6
|
[
"MIT"
] | null | null | null |
flexagon.py
|
dlp/flexagon
|
c8ac58e125c6f405d9942245bdaa71fab658def6
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
###############################################################################
# Simple PIL-based flexagon generator
#
# For the time being, it creates only 2D trihexaflexagons.
#
# Daniel Prokesch <daniel.prokesch@gmail.com>
###############################################################################
"""USAGE: {} image1 image2 image3 output"""
from PIL import Image, ImageOps, ImageDraw
from math import sqrt, sin, cos, pi
import sys
# The flexagon is composed of small equilateral triangles.
# The height equals side * sqrt(3)/2
sqrt3_2 = sqrt(3.0)/2.0
def crop_size(img):
    """Crop the image centrally to the width:height ratio of a flexagon face
    (height = width * sqrt(3)/2).

    Keeps the full width (cropping height) or the full height (cropping
    width), whichever fits. NOTE(review): under Python 2 the "/ 2" keeps the
    crop box integral; under Python 3 it would yield floats.
    """
    width, height = img.size
    # could use fit, with the assumption that the height will be sufficient
    # return ImageOps.fit(img, (width, int(width * sqrt3_2)))
    # better make a case distinction
    if height > (width * sqrt3_2):
        new_height = int(width * sqrt3_2)
        diff_2 = (height - new_height) / 2
        # left, upper, right, lower
        box = (0, diff_2, width, diff_2 + new_height)
    else:
        new_width = int(height / sqrt3_2)
        diff_2 = (width - new_width) / 2
        # left, upper, right, lower
        box = (diff_2, 0, diff_2 + new_width, height)
    return img.crop(box)
def rot_trans(center, angle, new_center):
    """Build a 6-element affine matrix for PIL's Image.transform.

    Describes a rotation of `angle` degrees around `center` followed by a
    translation to `new_center`. PIL maps each *output* pixel back to an
    input position, so this is the inverse (axis) transform of the object
    transform.
    """
    theta = angle * pi / 180.0
    c = cos(theta)
    s = sin(theta)
    cx, cy = center
    nx, ny = new_center
    return [
        c,  s, cx - nx * c - ny * s,
        -s, c, cy + nx * s - ny * c,
    ]
def xform(mat, pt):
    """Apply the 6-element affine transform `mat` to a single point `pt`.

    `mat` is laid out as returned by rot_trans: (a, b, c, d, e, f) mapping
    (x, y) -> (a*x + b*y + c, d*x + e*y + f).
    """
    a, b, c, d, e, f = mat
    x, y = pt
    return (a * x + b * y + c, d * x + e * y + f)
def xform_arr(mat, arr):
    """Apply the transform `mat` to every point in `arr`; return a new list."""
    transformed = []
    for point in arr:
        transformed.append(xform(mat, point))
    return transformed
###############################################################################
if __name__ == "__main__":
    if len(sys.argv) != 5:
        print >>sys.stderr, __doc__.format(sys.argv[0])  # Python 2 print
        exit(1)
    try:
        images = [crop_size(Image.open(sys.argv[i])) for i in range(1,4)]
    except IOError as e:
        print >>sys.stderr, e
        exit(1)
    # Scale all three images down to the smallest common size.
    common_size = min(img.size for img in images)
    for img in images: img.thumbnail(common_size)
    # s: half the side length, h: half the height of the output strip.
    s, h = map(lambda x: x/2, common_size)
    flexagon = Image.new("RGB", map(int,(2*h, 5.5*s)), color=(255,255,255))
    def paste_and_mask(img, angle, trans, poly_base):
        """Get a patch from img and paste it to the flexagon."""
        mat = rot_trans((s, h), angle, trans)
        patch = img.transform(flexagon.size, Image.AFFINE, mat)
        mask = Image.new("1", flexagon.size)
        # The mask polygon is only translated. As rot_trans describes an axis
        # transformation, we specify the translation as center.
        ImageDraw.Draw(mask).polygon(
            xform_arr(rot_trans(trans, 0, (0,0)), poly_base), fill=1)
        flexagon.paste(patch, (0,0), mask)
    # Mask polygons, expressed relative to the patch translation point.
    mask_poly_dbl = [(0,-s), (0,0), (h,0.5*s), (h,-0.5*s)] # double
    mask_poly_sl = [(0,0), (h,0.5*s), (h,-0.5*s)] # single left
    mask_poly_slb = [(0,0), (0,-s), (h,-0.5*s)] # single left bottom
    # each patch has three parameters:
    # rotation in degrees, the translation w.r.t. the centre point, and the
    # mask (which is designed as to be translated with the same coordinates)
    t1_AB = 90, (h, 1.5*s), mask_poly_dbl
    t1_CF = -30, (0, 3.0*s), mask_poly_dbl
    t1_DE = -150, (h, 4.5*s), mask_poly_dbl
    t2_AB = 90, (0, 2.0*s), mask_poly_dbl
    t2_CF = -30, (h, 3.5*s), mask_poly_dbl
    t2_D = -150, (h, 0.5*s), mask_poly_sl
    t2_E = -150, (0, 5.0*s), mask_poly_slb
    t3_A = 150, (0, 1.0*s), mask_poly_sl
    t3_BC = 30, (h, 2.5*s), mask_poly_dbl
    t3_EF = -90, (0, 4.0*s), mask_poly_dbl
    t3_D = 150, (h, 5.5*s), mask_poly_slb
    # Paste the patches of each source image onto the strip.
    for t in [t1_AB, t1_CF, t1_DE]:
        paste_and_mask(images[0], *t)
    for t in [t2_AB, t2_CF, t2_D, t2_E]:
        paste_and_mask(images[1], *t)
    for t in [t3_A, t3_BC, t3_EF, t3_D]:
        paste_and_mask(images[2], *t)
    # draw lines
    dw = ImageDraw.Draw(flexagon)
    # vertical lines
    dw.line([(0, s), (0, 5*s)], fill=0)
    dw.line([(h, 0.5*s), (h, 5.5*s)], fill=0)
    dw.line([(2*h-1, 0), (2*h-1, 5*s)], fill=0) # -1 offset to stay on canvas
    # from left bottom to right top (last one is cropped)
    for i in range(6):
        dw.line([(0, (i+1)*s), (2*h, i*s)], fill=0)
    # from left top to right bottom (first one starts in half)
    dw.line([(h, 0.5*s), (2*h, s)], fill=0)
    for i in range(1,5):
        dw.line([(0, i*s), (2*h, (i+1)*s)], fill=0)
    flexagon.save(sys.argv[4])
| 34.79085
| 79
| 0.57956
| true
| true
|
|
f718af111d82c6f2a7d6c664a42657d4c6707646
| 5,845
|
py
|
Python
|
rnacentral_pipeline/rnacentral/genome_mapping/blat.py
|
RNAcentral/rnacentral-import-pipeline
|
238e573440c72581a051b16c15f56fcd25bece74
|
[
"Apache-2.0"
] | 1
|
2018-08-09T14:41:16.000Z
|
2018-08-09T14:41:16.000Z
|
rnacentral_pipeline/rnacentral/genome_mapping/blat.py
|
RNAcentral/rnacentral-import-pipeline
|
238e573440c72581a051b16c15f56fcd25bece74
|
[
"Apache-2.0"
] | 60
|
2015-02-04T16:43:53.000Z
|
2022-01-27T10:28:43.000Z
|
rnacentral_pipeline/rnacentral/genome_mapping/blat.py
|
RNAcentral/rnacentral-import-pipeline
|
238e573440c72581a051b16c15f56fcd25bece74
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Copyright [2009-2018] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import csv
import json
import operator as op
import itertools as it
import logging
import attr
from attr.validators import instance_of as is_a
from rnacentral_pipeline import utils
from rnacentral_pipeline.databases.data.regions import Exon
from rnacentral_pipeline.databases.data.regions import Strand
from rnacentral_pipeline.databases.data.regions import SequenceRegion
from rnacentral_pipeline.databases.data.regions import CoordinateSystem
LOGGER = logging.getLogger(__name__)
FIELDS = [
"matches", # Number of bases that match that aren't repeats
"misMatches", # Number of bases that don't match
"repMatches", # Number of bases that match but are part of repeats
"nCount", # Number of "N" bases
"qNumInsert", # Number of inserts in query
"qBaseInsert", # Number of bases inserted in query
"tNumInsert", # Number of inserts in target
"tBaseInsert", # Number of bases inserted in target
"strand", # "+" or "-" for query strand. For translated alignments, second "+"or "-" is for target genomic strand.
"qName", # Query sequence name
"qSize", # Query sequence size.
"qStart", # Alignment start position in query
"qEnd", # Alignment end position in query
"tName", # Target sequence name
"tSize", # Target sequence size
"tStart", # Alignment start position in target
"tEnd", # Alignment end position in target
"blockCount", # Number of blocks in the alignment (a block contains no gaps)
"blockSizes", # Comma-separated list of sizes of each block. If the query is a protein and the target the genome, blockSizes are in amino acids. See below for more information on protein query PSLs.
"qStarts", # Comma-separated list of starting positions of each block in query
"tStarts", # Comma-separated list of starting positions of each block in target
]
@attr.s(frozen=True)
class BlatHit(object):
    """One BLAT alignment of an RNAcentral sequence (UPI) to a genome."""

    upi = attr.ib(validator=is_a(str), converter=str)  # query sequence id
    sequence_length = attr.ib(validator=is_a(int))     # qSize column
    matches = attr.ib(validator=is_a(int))             # matched bases
    target_insertions = attr.ib(validator=is_a(int))   # tBaseInsert column
    region = attr.ib(validator=is_a(SequenceRegion))   # genomic location

    @classmethod
    def build(cls, assembly_id, raw):
        # Build a hit from one parsed PSL row (dict keyed by FIELDS);
        # each (tStart, blockSize) pair becomes an Exon.
        parts = zip(raw["tStarts"], raw["blockSizes"])
        exons = [Exon(s, s + l) for (s, l) in parts]
        return cls(
            upi=raw["qName"],
            sequence_length=raw["qSize"],
            matches=raw["matches"],
            target_insertions=raw["tBaseInsert"],
            region=SequenceRegion(
                assembly_id=assembly_id,
                chromosome=raw["tName"],
                strand=raw["strand"],
                exons=exons,
                # PSL coordinates are zero-based.
                coordinate_system=CoordinateSystem.zero_based(),
            ),
        )

    @property
    def name(self):
        # Region name keyed by this hit's UPI.
        return self.region.name(upi=self.upi)

    @property
    def match_fraction(self):
        # Fraction of the query sequence that aligned (0.0 - 1.0).
        return float(self.matches) / float(self.sequence_length)

    def writeable(self):
        # Rows for CSV export, delegated to SequenceRegion.writeable.
        return self.region.writeable(self.upi, is_upi=True)
def select_possible(hit):
    """Decide whether a BLAT hit is good enough to keep.

    Rejects short matches (< 100 matched bases) with many target
    insertions, accepts exact full-length matches, and accepts
    near-perfect matches (> 95% but < 100%) on sequences longer
    than 15 nt.
    """
    if hit.matches < 100 and hit.target_insertions > 25:
        return False
    if hit.matches == hit.sequence_length:
        return True
    return (
        hit.sequence_length > 15
        and 0.95 < hit.match_fraction < 1
    )
def select_best(hits):
    """Pick the best hit(s) from an iterable.

    If any hit is a perfect match (match_fraction == 1.0), return only
    the perfect ones; otherwise return every hit unchanged.
    """
    hits = list(hits)
    top = max(hits, key=lambda h: h.match_fraction)
    if top.match_fraction == 1.0:
        return [h for h in hits if h.match_fraction == top.match_fraction]
    return hits
def parse_psl(assembly_id, handle):
    """Parse a tab-separated PSL file from `handle`, yielding BlatHits.

    Comma-separated list columns are split into lists of ints; all other
    numeric columns are converted with int().
    """
    to_split = ["blockSizes", "qStarts", "tStarts"]
    for row in csv.reader(handle, delimiter="\t"):
        result = dict(zip(FIELDS, row))
        for key in to_split:
            result[key] = [int(v) for v in result[key].split(",") if v]
        # All three list columns must have one entry per alignment block.
        lens = {len(result[k]) for k in to_split}
        assert len(lens) == 1
        for key, value in result.items():
            if key not in to_split and "Name" not in key and key != "strand":
                result[key] = int(value)
        yield BlatHit.build(assembly_id, result)
def select_hits(hits, sort=False):
    """Group hits by UPI and yield only the best hit(s) for each.

    `hits` must already be ordered by UPI (groupby requirement) unless
    sort=True. UPIs with no acceptable hit are logged and skipped.
    """
    key = op.attrgetter("upi")
    if sort:
        hits = sorted(hits, key=key)
    for upi, subhits in it.groupby(hits, key=key):
        selected = list(filter(select_possible, subhits))
        if not selected:
            LOGGER.warn("No possible matches for %s", upi)
            continue
        best = select_best(selected)
        if not best:
            raise ValueError("Failed to select a best hit for %s" % upi)
        for hit in best:
            yield hit
def write_importable(handle, output):
    """Write pickled hits from `handle` as importable CSV rows to `output`."""
    hits = utils.unpickle_stream(handle)
    writeable = map(op.methodcaller("writeable"), hits)
    writeable = it.chain.from_iterable(writeable)
    csv.writer(output).writerows(writeable)


def as_pickle(assembly_id, hits, output):
    """Parse a PSL stream and pickle the resulting BlatHits to `output`."""
    parsed = parse_psl(assembly_id, hits)
    utils.pickle_stream(parsed, output)


def select_pickle(handle, output, sort=False):
    """Unpickle hits, keep only the best per UPI, and re-pickle them."""
    hits = utils.unpickle_stream(handle)
    selected = select_hits(hits, sort=sort)
    utils.pickle_stream(selected, output)
| 34.181287
| 203
| 0.670488
|
import csv
import json
import operator as op
import itertools as it
import logging
import attr
from attr.validators import instance_of as is_a
from rnacentral_pipeline import utils
from rnacentral_pipeline.databases.data.regions import Exon
from rnacentral_pipeline.databases.data.regions import Strand
from rnacentral_pipeline.databases.data.regions import SequenceRegion
from rnacentral_pipeline.databases.data.regions import CoordinateSystem
LOGGER = logging.getLogger(__name__)
FIELDS = [
"matches",
"misMatches", # Number of bases that don't match
"repMatches",
"nCount",
"qNumInsert",
"qBaseInsert",
"tNumInsert",
"tBaseInsert",
"strand",
"qName",
"qSize",
"qStart",
"qEnd",
"tName",
"tSize",
"tStart",
"tEnd",
"blockCount",
"blockSizes",
"qStarts",
"tStarts",
]
@attr.s(frozen=True)
class BlatHit(object):
upi = attr.ib(validator=is_a(str), converter=str)
sequence_length = attr.ib(validator=is_a(int))
matches = attr.ib(validator=is_a(int))
target_insertions = attr.ib(validator=is_a(int))
region = attr.ib(validator=is_a(SequenceRegion))
@classmethod
def build(cls, assembly_id, raw):
parts = zip(raw["tStarts"], raw["blockSizes"])
exons = [Exon(s, s + l) for (s, l) in parts]
return cls(
upi=raw["qName"],
sequence_length=raw["qSize"],
matches=raw["matches"],
target_insertions=raw["tBaseInsert"],
region=SequenceRegion(
assembly_id=assembly_id,
chromosome=raw["tName"],
strand=raw["strand"],
exons=exons,
coordinate_system=CoordinateSystem.zero_based(),
),
)
@property
def name(self):
return self.region.name(upi=self.upi)
@property
def match_fraction(self):
return float(self.matches) / float(self.sequence_length)
def writeable(self):
return self.region.writeable(self.upi, is_upi=True)
def select_possible(hit):
if hit.matches < 100 and hit.target_insertions > 25:
return False
if hit.matches == hit.sequence_length:
return True
if (
hit.sequence_length > 15
and hit.match_fraction > 0.95
and hit.match_fraction < 1
):
return True
return False
def select_best(hits):
hits = list(hits)
best = max(hits, key=op.attrgetter("match_fraction"))
if best.match_fraction == 1.0:
return [h for h in hits if h.match_fraction == best.match_fraction]
return hits
def parse_psl(assembly_id, handle):
to_split = ["blockSizes", "qStarts", "tStarts"]
for row in csv.reader(handle, delimiter="\t"):
result = dict(zip(FIELDS, row))
for key in to_split:
result[key] = [int(v) for v in result[key].split(",") if v]
lens = {len(result[k]) for k in to_split}
assert len(lens) == 1
for key, value in result.items():
if key not in to_split and "Name" not in key and key != "strand":
result[key] = int(value)
yield BlatHit.build(assembly_id, result)
def select_hits(hits, sort=False):
key = op.attrgetter("upi")
if sort:
hits = sorted(hits, key=key)
for upi, subhits in it.groupby(hits, key=key):
selected = list(filter(select_possible, subhits))
if not selected:
LOGGER.warn("No possible matches for %s", upi)
continue
best = select_best(selected)
if not best:
raise ValueError("Failed to select a best hit for %s" % upi)
for hit in best:
yield hit
def write_importable(handle, output):
hits = utils.unpickle_stream(handle)
writeable = map(op.methodcaller("writeable"), hits)
writeable = it.chain.from_iterable(writeable)
csv.writer(output).writerows(writeable)
def as_pickle(assembly_id, hits, output):
parsed = parse_psl(assembly_id, hits)
utils.pickle_stream(parsed, output)
def select_pickle(handle, output, sort=False):
hits = utils.unpickle_stream(handle)
selected = select_hits(hits, sort=sort)
utils.pickle_stream(selected, output)
| true
| true
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.